From dfc94207fec2d84314b1a5410cface22e8b369bd Mon Sep 17 00:00:00 2001 From: GitLab Bot Date: Thu, 20 Apr 2023 11:43:17 +0000 Subject: Add latest changes from gitlab-org/gitlab@15-11-stable-ee --- spec/channels/awareness_channel_spec.rb | 81 -- spec/commands/sidekiq_cluster/cli_spec.rb | 4 +- .../admin/applications_controller_spec.rb | 8 +- spec/controllers/admin/clusters_controller_spec.rb | 10 +- spec/controllers/application_controller_spec.rb | 4 +- spec/controllers/concerns/kas_cookie_spec.rb | 69 +- .../concerns/product_analytics_tracking_spec.rb | 14 - .../dashboard/projects_controller_spec.rb | 30 +- .../explore/projects_controller_spec.rb | 20 + .../controllers/groups/clusters_controller_spec.rb | 8 +- spec/controllers/groups/runners_controller_spec.rb | 134 +++- .../settings/applications_controller_spec.rb | 8 +- spec/controllers/groups_controller_spec.rb | 23 - spec/controllers/help_controller_spec.rb | 9 +- spec/controllers/invites_controller_spec.rb | 20 + .../oauth/applications_controller_spec.rb | 8 +- spec/controllers/projects/blame_controller_spec.rb | 40 +- spec/controllers/projects/blob_controller_spec.rb | 2 - .../projects/clusters_controller_spec.rb | 8 +- .../controllers/projects/commit_controller_spec.rb | 47 +- .../projects/commits_controller_spec.rb | 6 + .../projects/compare_controller_spec.rb | 27 +- .../designs/raw_images_controller_spec.rb | 6 +- .../projects/imports_controller_spec.rb | 2 +- .../controllers/projects/issues_controller_spec.rb | 2 +- spec/controllers/projects/jobs_controller_spec.rb | 48 +- .../projects/merge_requests_controller_spec.rb | 40 + .../projects/pipelines_controller_spec.rb | 175 ---- .../projects/project_members_controller_spec.rb | 8 - .../projects/service_desk_controller_spec.rb | 4 +- .../projects/work_items_controller_spec.rb | 156 ++++ spec/controllers/projects_controller_spec.rb | 4 +- spec/db/schema_spec.rb | 50 +- ...ation_for_namespace_creation_experiment_spec.rb | 49 -- 
...ity_reports_mr_widget_prompt_experiment_spec.rb | 9 - spec/factories/abuse/trust_score.rb | 10 + spec/factories/abuse_reports.rb | 4 + spec/factories/ci/pipelines.rb | 12 +- spec/factories/ci/reports/security/findings.rb | 1 + spec/factories/ci/reports/security/reports.rb | 13 + spec/factories/ci/runner_machine_builds.rb | 4 +- spec/factories/ci/runner_machines.rb | 13 - spec/factories/ci/runner_managers.rb | 13 + spec/factories/ci/runners.rb | 4 +- .../ci_access/group_authorizations.rb | 18 + .../ci_access/project_authorizations.rb | 18 + .../user_access/group_authorizations.rb | 10 + .../user_access/project_authorizations.rb | 10 + .../clusters/agents/group_authorizations.rb | 18 - .../clusters/agents/project_authorizations.rb | 18 - spec/factories/clusters/applications/helm.rb | 115 --- spec/factories/clusters/clusters.rb | 12 - .../background_migration/schema_inconsistencies.rb | 11 + spec/factories/group_members.rb | 6 + spec/factories/integrations.rb | 3 + spec/factories/issues.rb | 15 + spec/factories/member_roles.rb | 11 - spec/factories/ml/candidates.rb | 12 +- spec/factories/notes.rb | 4 + spec/factories/packages/debian/file_metadatum.rb | 70 +- spec/factories/packages/npm/metadata_cache.rb | 10 + spec/factories/packages/package_files.rb | 1 + spec/factories/packages/packages.rb | 6 +- spec/factories/project_members.rb | 6 + spec/factories/projects.rb | 7 + spec/factories/projects/data_transfers.rb | 4 + .../resource_events/issue_assignment_events.rb | 9 + .../merge_request_assignment_events.rb | 9 + spec/factories/search_index.rb | 10 + .../service_desk/custom_email_credential.rb | 11 + spec/factories/users.rb | 4 + spec/factories/work_items/resource_link_events.rb | 10 + spec/features/abuse_report_spec.rb | 2 +- spec/features/admin/admin_abuse_reports_spec.rb | 45 ++ spec/features/admin/admin_groups_spec.rb | 4 +- spec/features/admin/admin_health_check_spec.rb | 2 +- spec/features/admin/admin_hook_logs_spec.rb | 2 +- 
spec/features/admin/admin_hooks_spec.rb | 4 +- spec/features/admin/admin_mode/logout_spec.rb | 2 +- spec/features/admin/admin_mode/workers_spec.rb | 2 +- spec/features/admin/admin_mode_spec.rb | 2 +- spec/features/admin/admin_projects_spec.rb | 19 +- spec/features/admin/admin_runners_spec.rb | 51 +- spec/features/admin/admin_settings_spec.rb | 2 +- .../admin/admin_users_impersonation_tokens_spec.rb | 2 +- spec/features/admin/users/user_spec.rb | 2 +- spec/features/admin/users/users_spec.rb | 36 +- spec/features/admin_variables_spec.rb | 2 +- spec/features/boards/board_filters_spec.rb | 2 +- spec/features/canonical_link_spec.rb | 2 +- spec/features/clusters/cluster_detail_page_spec.rb | 2 +- .../clusters/cluster_health_dashboard_spec.rb | 2 +- spec/features/clusters/create_agent_spec.rb | 2 +- .../commits/user_uses_quick_actions_spec.rb | 2 +- spec/features/commits_spec.rb | 22 +- spec/features/dashboard/activity_spec.rb | 2 +- ...ard_with_external_authorization_service_spec.rb | 2 +- spec/features/dashboard/groups_list_spec.rb | 2 +- spec/features/dashboard/issues_filter_spec.rb | 4 +- spec/features/dashboard/issues_spec.rb | 2 +- spec/features/dashboard/merge_requests_spec.rb | 4 +- spec/features/dashboard/milestones_spec.rb | 2 +- spec/features/dashboard/projects_spec.rb | 4 +- spec/features/dashboard/snippets_spec.rb | 5 +- spec/features/dashboard/todos/todos_spec.rb | 2 +- spec/features/emails/issues_spec.rb | 110 +++ .../explore/user_explores_projects_spec.rb | 12 + .../frequently_visited_projects_and_groups_spec.rb | 2 +- spec/features/global_search_spec.rb | 58 +- spec/features/group_variables_spec.rb | 2 +- spec/features/groups/container_registry_spec.rb | 4 + spec/features/groups/group_runners_spec.rb | 38 +- spec/features/groups/group_settings_spec.rb | 83 +- spec/features/groups/issues_spec.rb | 18 +- .../features/groups/members/filter_members_spec.rb | 2 +- spec/features/groups/members/leave_group_spec.rb | 2 +- 
spec/features/groups/members/list_members_spec.rb | 2 +- spec/features/groups/members/manage_groups_spec.rb | 4 +- .../features/groups/members/manage_members_spec.rb | 4 +- ...master_adds_member_with_expiration_date_spec.rb | 4 +- .../features/groups/members/search_members_spec.rb | 2 +- spec/features/groups/members/sort_members_spec.rb | 2 +- spec/features/groups/new_group_page_spec.rb | 2 +- .../settings/packages_and_registries_spec.rb | 3 +- spec/features/groups/show_spec.rb | 2 +- spec/features/groups_spec.rb | 14 + spec/features/ide_spec.rb | 2 +- .../incidents/incident_timeline_events_spec.rb | 4 +- .../incidents/user_uses_quick_actions_spec.rb | 2 +- spec/features/invites_spec.rb | 4 +- .../internal_references_spec.rb | 2 +- ..._issue_for_discussions_in_merge_request_spec.rb | 11 +- spec/features/issues/form_spec.rb | 15 +- spec/features/issues/incident_issue_spec.rb | 4 +- spec/features/issues/issue_sidebar_spec.rb | 8 +- spec/features/issues/move_spec.rb | 4 +- spec/features/issues/rss_spec.rb | 10 +- spec/features/issues/service_desk_spec.rb | 4 +- .../features/issues/user_comments_on_issue_spec.rb | 2 +- .../user_creates_branch_and_merge_request_spec.rb | 4 +- spec/features/issues/user_creates_issue_spec.rb | 20 +- spec/features/issues/user_edits_issue_spec.rb | 6 +- .../issues/user_uses_quick_actions_spec.rb | 2 +- spec/features/jira_connect/branches_spec.rb | 2 +- spec/features/markdown/metrics_spec.rb | 2 +- .../merge_request/maintainer_edits_fork_spec.rb | 2 +- .../user_accepts_merge_request_spec.rb | 20 +- .../user_edits_assignees_sidebar_spec.rb | 2 +- .../merge_request/user_edits_merge_request_spec.rb | 2 + .../user_merges_only_if_pipeline_succeeds_spec.rb | 160 +++- .../user_merges_when_pipeline_succeeds_spec.rb | 108 +++ .../merge_request/user_resolves_conflicts_spec.rb | 2 +- .../user_reverts_merge_request_spec.rb | 4 +- spec/features/merge_request/user_sees_diff_spec.rb | 2 +- ...ton_depending_on_unresolved_discussions_spec.rb | 8 +- 
.../user_sees_merge_request_pipelines_spec.rb | 46 ++ .../merge_request/user_sees_merge_widget_spec.rb | 6 +- .../merge_request/user_uses_quick_actions_spec.rb | 2 +- .../user_views_open_merge_request_spec.rb | 18 +- .../user_sorts_merge_requests_spec.rb | 2 +- .../milestones/user_deletes_milestone_spec.rb | 2 + spec/features/nav/pinned_nav_items_spec.rb | 179 +++++ spec/features/nav/top_nav_responsive_spec.rb | 6 +- spec/features/nav/top_nav_spec.rb | 2 +- .../populate_new_pipeline_vars_with_params_spec.rb | 2 +- spec/features/profiles/chat_names_spec.rb | 16 +- .../profiles/list_users_comment_template_spec.rb | 21 + .../profiles/list_users_saved_replies_spec.rb | 21 - .../profiles/personal_access_tokens_spec.rb | 2 +- .../profiles/user_creates_comment_template_spec.rb | 29 + .../profiles/user_creates_saved_reply_spec.rb | 29 - .../profiles/user_deletes_comment_template_spec.rb | 28 + .../profiles/user_deletes_saved_reply_spec.rb | 27 - spec/features/profiles/user_edit_profile_spec.rb | 14 +- .../profiles/user_updates_comment_template_spec.rb | 30 + .../profiles/user_updates_saved_reply_spec.rb | 28 - .../profiles/user_uses_comment_template_spec.rb | 29 + .../profiles/user_uses_saved_reply_spec.rb | 29 - spec/features/project_group_variables_spec.rb | 2 +- spec/features/project_variables_spec.rb | 2 +- spec/features/projects/blobs/blame_spec.rb | 6 +- spec/features/projects/blobs/edit_spec.rb | 4 +- .../user_views_pipeline_editor_button_spec.rb | 2 +- .../projects/branches/user_creates_branch_spec.rb | 6 +- spec/features/projects/ci/editor_spec.rb | 2 +- spec/features/projects/ci/lint_spec.rb | 2 +- spec/features/projects/clusters/gcp_spec.rb | 2 +- spec/features/projects/clusters/user_spec.rb | 2 +- .../commit/comments/user_adds_comment_spec.rb | 2 +- .../commit/comments/user_deletes_comments_spec.rb | 2 +- .../commit/comments/user_edits_comments_spec.rb | 2 +- spec/features/projects/commit/diff_notes_spec.rb | 13 +- .../commit/user_comments_on_commit_spec.rb | 2 
+- spec/features/projects/compare_spec.rb | 19 +- spec/features/projects/container_registry_spec.rb | 4 + .../projects/environments/environment_spec.rb | 30 + .../projects/files/dockerfile_dropdown_spec.rb | 2 +- .../features/projects/files/editing_a_file_spec.rb | 50 +- .../projects/files/gitignore_dropdown_spec.rb | 2 +- .../projects/files/gitlab_ci_yml_dropdown_spec.rb | 2 +- ...to_create_license_file_in_empty_project_spec.rb | 2 +- .../projects/files/user_creates_files_spec.rb | 4 +- .../projects/files/user_edits_files_spec.rb | 4 +- spec/features/projects/fork_spec.rb | 28 +- .../projects/import_export/export_file_spec.rb | 61 +- .../import_export/test_project_export.tar.gz | Bin 3176 -> 4799 bytes .../projects/integrations/apple_app_store_spec.rb | 6 +- .../projects/integrations/google_play_spec.rb | 7 +- .../issues/viewing_relocated_issues_spec.rb | 2 +- spec/features/projects/jobs_spec.rb | 4 +- .../members/anonymous_user_sees_members_spec.rb | 2 +- .../projects/members/group_members_spec.rb | 2 +- .../members/groups_with_access_list_spec.rb | 4 +- .../projects/members/manage_groups_spec.rb | 4 +- .../projects/members/manage_members_spec.rb | 4 +- ...master_adds_member_with_expiration_date_spec.rb | 4 +- .../projects/members/member_leaves_project_spec.rb | 2 +- spec/features/projects/members/sorting_spec.rb | 2 +- spec/features/projects/members/tabs_spec.rb | 2 +- spec/features/projects/navbar_spec.rb | 15 +- spec/features/projects/network_graph_spec.rb | 9 + spec/features/projects/new_project_spec.rb | 4 +- spec/features/projects/pipeline_schedules_spec.rb | 474 ++++++----- spec/features/projects/pipelines/pipeline_spec.rb | 11 - spec/features/projects/pipelines/pipelines_spec.rb | 19 +- .../projects/releases/user_creates_release_spec.rb | 10 +- .../settings/branch_rules_settings_spec.rb | 11 + .../registry_settings_cleanup_tags_spec.rb | 4 +- .../projects/settings/registry_settings_spec.rb | 4 +- .../projects/settings/repository_settings_spec.rb | 19 + 
.../projects/settings/service_desk_setting_spec.rb | 10 +- .../settings/user_manages_project_members_spec.rb | 2 +- .../projects/settings/webhooks_settings_spec.rb | 2 +- .../projects/snippets/create_snippet_spec.rb | 2 +- .../projects/snippets/user_updates_snippet_spec.rb | 2 +- .../projects/tree/create_directory_spec.rb | 2 +- spec/features/projects/tree/create_file_spec.rb | 2 +- spec/features/projects/tree/tree_show_spec.rb | 2 +- spec/features/projects/tree/upload_file_spec.rb | 2 +- .../projects/user_sees_user_popover_spec.rb | 2 +- .../projects/user_views_empty_project_spec.rb | 2 +- .../features/projects/work_items/work_item_spec.rb | 62 +- spec/features/protected_branches_spec.rb | 9 + .../features/search/user_searches_for_code_spec.rb | 55 +- .../search/user_uses_header_search_field_spec.rb | 2 +- .../snippets/notes_on_personal_snippets_spec.rb | 36 +- spec/features/snippets/show_spec.rb | 4 +- .../features/snippets/user_creates_snippet_spec.rb | 4 +- spec/features/snippets/user_edits_snippet_spec.rb | 2 +- spec/features/tags/developer_views_tags_spec.rb | 1 - .../user_can_display_performance_bar_spec.rb | 2 +- spec/features/user_sees_revert_modal_spec.rb | 4 +- spec/features/user_sorts_things_spec.rb | 2 +- spec/features/users/login_spec.rb | 69 +- spec/features/users/show_spec.rb | 40 + spec/features/users/signup_spec.rb | 10 +- spec/features/webauthn_spec.rb | 2 +- spec/finders/abuse_reports_finder_spec.rb | 18 + spec/finders/access_requests_finder_spec.rb | 9 - .../achievements/achievements_finder_spec.rb | 26 + .../finders/alert_management/alerts_finder_spec.rb | 17 +- .../ci/pipelines_for_merge_request_finder_spec.rb | 68 +- .../clusters/agent_authorizations_finder_spec.rb | 140 ---- .../agents/authorizations/ci_access/finder_spec.rb | 140 ++++ spec/finders/context_commits_finder_spec.rb | 21 +- .../group_data_transfer_finder_spec.rb | 84 ++ .../data_transfer/mocked_transfer_finder_spec.rb | 22 + .../project_data_transfer_finder_spec.rb | 80 ++ 
spec/finders/deployments_finder_spec.rb | 30 +- spec/finders/fork_targets_finder_spec.rb | 25 +- spec/finders/group_descendants_finder_spec.rb | 22 +- spec/finders/group_members_finder_spec.rb | 89 ++- .../accepting_group_transfers_finder_spec.rb | 14 +- .../accepting_project_creations_finder_spec.rb | 119 +++ .../groups/accepting_project_shares_finder_spec.rb | 122 +++ .../accepting_project_transfers_finder_spec.rb | 42 +- spec/finders/members_finder_spec.rb | 8 - spec/finders/merge_requests_finder_spec.rb | 34 +- spec/finders/notes_finder_spec.rb | 57 +- spec/finders/packages/npm/package_finder_spec.rb | 8 + spec/finders/projects_finder_spec.rb | 48 +- spec/finders/snippets_finder_spec.rb | 21 +- spec/finders/users_finder_spec.rb | 8 +- .../fixtures/api/schemas/entities/diff_viewer.json | 6 + .../api/schemas/internal/pages/lookup_path.json | 67 +- .../valid_reply_with_references_in_comma.eml | 42 + .../import_export/corrupted_project_export.tar.gz | Bin 4603 -> 5288 bytes .../lightweight_project_export.tar.gz | Bin 3758 -> 4950 bytes .../sample_metric.yml | 1 - .../sample_metric_with_ee.yml | 1 - .../sample_metric_with_name_suggestions.yml | 1 - .../lib/gitlab/import_export/complex/project.json | 35 +- .../tree/project/protected_environments.ndjson | 2 +- .../gitlab/import_export/designs/tree/project.json | 15 + .../designs/tree/project/issues.ndjson | 2 + .../designs/tree/project/project_members.ndjson | 2 + spec/fixtures/markdown.md.erb | 26 + spec/fixtures/pages_with_custom_root.zip | Bin 0 -> 631 bytes spec/fixtures/pages_with_custom_root.zip.meta | Bin 0 -> 175 bytes spec/fixtures/pages_with_custom_root.zip.meta0 | Bin 0 -> 197 bytes spec/fixtures/scripts/test_report.json | 2 +- .../feature-branch/gl-sast-report.json | 22 +- .../feature-branch/gl-secret-detection-report.json | 36 +- .../master/gl-sast-missing-scanner.json | 52 +- .../master/gl-sast-report-bandit.json | 13 +- .../master/gl-sast-report-gosec.json | 13 +- .../master/gl-sast-report-minimal.json 
| 18 +- .../master/gl-sast-report-semgrep-for-bandit.json | 13 +- .../master/gl-sast-report-semgrep-for-gosec.json | 13 +- ...-sast-report-semgrep-for-multiple-findings.json | 13 +- .../security_reports/master/gl-sast-report.json | 22 +- .../master/gl-secret-detection-report.json | 35 +- spec/fixtures/service_account.json | 2 +- spec/fixtures/structure.sql | 69 ++ spec/frontend/__helpers__/assert_props.js | 24 + spec/frontend/__helpers__/wait_for_text.js | 2 +- spec/frontend/access_tokens/index_spec.js | 2 +- .../add_context_commits_modal_spec.js.snap | 11 +- .../components/add_context_commits_modal_spec.js | 33 +- .../components/abuse_report_actions_spec.js | 166 ++++ .../components/abuse_report_details_spec.js | 53 ++ .../components/abuse_report_row_spec.js | 55 +- .../abuse_reports_filtered_search_bar_spec.js | 33 +- spec/frontend/admin/abuse_reports/mock_data.js | 20 +- spec/frontend/admin/abuse_reports/utils_spec.js | 22 +- .../admin/users/components/actions/actions_spec.js | 8 +- .../actions/delete_with_contributions_spec.js | 12 +- .../admin/users/components/user_actions_spec.js | 12 +- spec/frontend/admin/users/new_spec.js | 7 +- .../__snapshots__/alerts_form_spec.js.snap | 1 + .../components/alert_mapping_builder_spec.js | 2 +- .../components/alerts_settings_form_spec.js | 2 +- .../components/alerts_settings_wrapper_spec.js | 2 +- .../analytics/cycle_analytics/base_spec.js | 1 + .../analytics/cycle_analytics/filter_bar_spec.js | 2 +- .../analytics/cycle_analytics/mock_data.js | 2 +- .../analytics/cycle_analytics/utils_spec.js | 9 +- .../cycle_analytics/value_stream_filters_spec.js | 7 +- .../components/projects_dropdown_filter_spec.js | 26 +- spec/frontend/api/projects_api_spec.js | 22 +- spec/frontend/api/user_api_spec.js | 23 +- spec/frontend/artifacts/components/app_spec.js | 109 --- .../artifacts/components/artifact_row_spec.js | 105 --- .../components/artifacts_bulk_delete_spec.js | 96 --- .../components/artifacts_table_row_details_spec.js | 137 ---- 
.../artifacts/components/feedback_banner_spec.js | 59 -- .../components/job_artifacts_table_spec.js | 473 ----------- .../artifacts/components/job_checkbox_spec.js | 71 -- .../artifacts/graphql/cache_update_spec.js | 67 -- .../keep_latest_artifact_checkbox_spec.js | 14 +- .../password/components/password_input_spec.js | 49 ++ .../components/manage_two_factor_form_spec.js | 25 +- .../webauthn/components/registration_spec.js | 2 +- .../batch_comments/components/review_bar_spec.js | 4 +- spec/frontend/behaviors/gl_emoji_spec.js | 14 +- spec/frontend/behaviors/quick_submit_spec.js | 18 +- spec/frontend/behaviors/requires_input_spec.js | 5 +- .../behaviors/shortcuts/shortcuts_issuable_spec.js | 7 +- .../blob/components/blob_edit_header_spec.js | 20 +- .../components/blob_header_default_actions_spec.js | 5 +- spec/frontend/blob/components/mock_data.js | 2 + spec/frontend/blob/file_template_selector_spec.js | 2 +- spec/frontend/blob/sketch/index_spec.js | 5 +- spec/frontend/boards/board_card_inner_spec.js | 4 +- spec/frontend/boards/board_list_spec.js | 120 ++- spec/frontend/boards/components/board_app_spec.js | 38 +- spec/frontend/boards/components/board_card_spec.js | 45 +- .../boards/components/board_column_spec.js | 2 +- .../components/board_content_sidebar_spec.js | 89 ++- .../boards/components/board_content_spec.js | 6 + .../components/board_filtered_search_spec.js | 2 +- spec/frontend/boards/components/board_form_spec.js | 2 +- .../boards/components/board_list_header_spec.js | 4 +- .../boards/components/boards_selector_spec.js | 6 +- .../components/sidebar/board_sidebar_title_spec.js | 70 +- spec/frontend/boards/mock_data.js | 109 ++- spec/frontend/boards/stores/actions_spec.js | 2 +- .../components/delete_branch_modal_spec.js | 94 ++- .../components/delete_merged_branches_spec.js | 16 +- spec/frontend/captcha/captcha_modal_spec.js | 6 +- .../frontend/captcha/init_recaptcha_script_spec.js | 2 +- spec/frontend/ci/artifacts/components/app_spec.js | 109 +++ 
.../ci/artifacts/components/artifact_row_spec.js | 105 +++ .../components/artifacts_bulk_delete_spec.js | 48 ++ .../components/artifacts_table_row_details_spec.js | 137 ++++ .../artifacts/components/feedback_banner_spec.js | 59 ++ .../components/job_artifacts_table_spec.js | 651 +++++++++++++++ .../ci/artifacts/components/job_checkbox_spec.js | 71 ++ .../ci/artifacts/graphql/cache_update_spec.js | 67 ++ .../ci_variable_list/ci_variable_list_spec.js | 10 +- .../components/ci_admin_variables_spec.js | 22 +- .../components/ci_environments_dropdown_spec.js | 149 +++- .../components/ci_group_variables_spec.js | 22 +- .../components/ci_project_variables_spec.js | 27 +- .../components/ci_variable_modal_spec.js | 49 +- .../components/ci_variable_settings_spec.js | 47 +- .../components/ci_variable_shared_spec.js | 425 ++++++---- .../components/ci_variable_table_spec.js | 4 +- spec/frontend/ci/ci_variable_list/mocks.js | 5 + .../components/commit/commit_form_spec.js | 4 +- .../cards/pipeline_config_reference_card_spec.js | 2 +- .../components/editor/ci_editor_header_spec.js | 4 +- .../components/file-nav/branch_switcher_spec.js | 2 +- .../components/file-tree/container_spec.js | 6 +- .../header/pipeline_editor_mini_graph_spec.js | 2 +- .../components/header/pipeline_status_spec.js | 2 +- .../artifacts_and_cache_item_spec.js | 127 +++ .../accordion_items/job_setup_item_spec.js | 3 +- .../accordion_items/rules_item_spec.js | 70 ++ .../job_assistant_drawer_spec.js | 147 +++- .../components/pipeline_editor_tabs_spec.js | 4 +- .../components/popovers/file_tree_popover_spec.js | 4 +- .../popovers/validate_pipeline_popover_spec.js | 2 +- .../popovers/walkthrough_popover_spec.js | 2 +- .../components/ui/editor_tab_spec.js | 4 +- .../ci/pipeline_editor/graphql/resolvers_spec.js | 2 +- spec/frontend/ci/pipeline_editor/mock_data.js | 81 +- .../ci/pipeline_editor/pipeline_editor_app_spec.js | 13 +- .../components/pipeline_new_form_spec.js | 14 +- 
.../pipeline_new/components/refs_dropdown_spec.js | 6 +- .../delete_pipeline_schedule_modal_spec.js | 4 +- .../components/take_ownership_modal_spec.js | 4 +- .../admin_new_runner_app_spec.js | 30 +- .../admin_register_runner_app_spec.js | 4 +- .../admin_runner_show_app_spec.js | 6 +- .../runner/admin_runners/admin_runners_app_spec.js | 31 +- .../components/cells/runner_status_cell_spec.js | 8 +- .../components/cells/runner_summary_cell_spec.js | 49 +- .../registration/__snapshots__/utils_spec.js.snap | 9 +- .../registration/registration_dropdown_spec.js | 50 +- .../registration/registration_instructions_spec.js | 73 +- .../registration_token_reset_dropdown_item_spec.js | 6 +- .../registration/registration_token_spec.js | 20 +- .../runner/components/registration/utils_spec.js | 34 +- .../runner/components/runner_create_form_spec.js | 31 +- .../runner/components/runner_delete_button_spec.js | 12 +- .../components/runner_filtered_search_bar_spec.js | 14 +- .../components/runner_list_empty_state_spec.js | 3 +- .../ci/runner/components/runner_list_spec.js | 2 +- .../runner/components/runner_pause_button_spec.js | 2 +- .../runner_platforms_radio_group_spec.js | 2 +- .../components/runner_platforms_radio_spec.js | 4 +- .../ci/runner/components/runner_projects_spec.js | 2 +- .../ci/runner/components/runner_type_badge_spec.js | 3 +- .../ci/runner/components/runner_type_tabs_spec.js | 3 +- .../ci/runner/components/stat/runner_count_spec.js | 14 +- .../group_new_runner_app_spec.js | 132 +++ .../group_register_runner_app_spec.js | 120 +++ .../group_runner_show_app_spec.js | 4 +- .../runner/group_runners/group_runners_app_spec.js | 72 +- spec/frontend/ci/runner/mock_data.js | 130 +-- .../frontend/ci/runner/runner_search_utils_spec.js | 5 +- spec/frontend/ci/runner/sentry_utils_spec.js | 2 +- .../agents/components/create_token_modal_spec.js | 2 +- spec/frontend/clusters/clusters_bundle_spec.js | 5 +- .../clusters_list/components/agent_table_spec.js | 204 ++--- 
.../components/install_agent_modal_spec.js | 2 +- .../frontend/clusters_list/components/mock_data.js | 108 ++- .../__snapshots__/list_item_spec.js.snap | 140 ++++ .../comment_templates/components/form_spec.js | 145 ++++ .../comment_templates/components/list_item_spec.js | 154 ++++ .../comment_templates/components/list_spec.js | 46 ++ .../frontend/comment_templates/pages/index_spec.js | 45 ++ .../commit/commit_box_pipeline_mini_graph_spec.js | 2 +- .../components/commit_box_pipeline_status_spec.js | 2 +- .../__snapshots__/toolbar_link_button_spec.js.snap | 33 - .../bubble_menus/code_block_bubble_menu_spec.js | 6 +- .../bubble_menus/formatting_bubble_menu_spec.js | 2 +- .../bubble_menus/link_bubble_menu_spec.js | 63 +- .../bubble_menus/media_bubble_menu_spec.js | 8 +- .../components/content_editor_alert_spec.js | 2 +- .../components/formatting_toolbar_spec.js | 2 +- .../components/suggestions_dropdown_spec.js | 22 +- .../components/toolbar_attachment_button_spec.js | 57 ++ .../components/toolbar_image_button_spec.js | 96 --- .../components/toolbar_link_button_spec.js | 223 ------ .../components/toolbar_more_dropdown_spec.js | 2 +- .../components/wrappers/code_block_spec.js | 4 +- .../components/wrappers/details_spec.js | 2 +- .../wrappers/footnote_definition_spec.js | 2 +- .../components/wrappers/label_spec.js | 32 - .../components/wrappers/reference_label_spec.js | 32 + .../components/wrappers/reference_spec.js | 46 ++ .../components/wrappers/table_cell_base_spec.js | 6 +- .../components/wrappers/table_cell_body_spec.js | 2 +- .../components/wrappers/table_cell_header_spec.js | 2 +- .../components/wrappers/table_of_contents_spec.js | 2 +- .../extensions/paste_markdown_spec.js | 52 +- .../content_editor/markdown_snapshot_spec.js | 6 +- .../content_editor/services/content_editor_spec.js | 4 +- .../services/create_content_editor_spec.js | 2 +- .../services/gl_api_markdown_deserializer_spec.js | 2 +- .../services/markdown_serializer_spec.js | 89 ++- 
.../track_input_rules_and_shortcuts_spec.js | 4 +- .../__snapshots__/contributors_spec.js.snap | 2 + .../components/custom_metrics_form_fields_spec.js | 2 +- spec/frontend/deploy_keys/components/key_spec.js | 33 +- .../design_notes/design_discussion_spec.js | 2 +- .../components/design_notes/design_note_spec.js | 2 +- .../design_notes/design_reply_form_spec.js | 67 +- .../components/design_overlay_spec.js | 48 +- .../components/design_scaler_spec.js | 2 +- .../components/design_todo_button_spec.js | 4 +- .../__snapshots__/design_navigation_spec.js.snap | 2 +- .../components/toolbar/design_navigation_spec.js | 64 +- .../upload/design_version_dropdown_spec.js | 45 +- .../components/upload/mock_data/all_versions.js | 20 - .../design_management/mock_data/all_versions.js | 8 +- .../design_management/mock_data/apollo_mock.js | 197 ++--- .../pages/design/__snapshots__/index_spec.js.snap | 4 +- .../design_management/pages/design/index_spec.js | 2 +- .../frontend/design_management/pages/index_spec.js | 41 +- spec/frontend/diffs/components/app_spec.js | 54 +- spec/frontend/diffs/components/commit_item_spec.js | 22 + .../components/diff_code_quality_item_spec.js | 66 ++ .../diffs/components/diff_code_quality_spec.js | 40 +- .../diffs/components/diff_file_header_spec.js | 8 +- spec/frontend/diffs/components/diff_file_spec.js | 4 +- .../diffs/components/diff_line_note_form_spec.js | 17 - spec/frontend/diffs/components/diff_view_spec.js | 19 +- .../diffs/components/hidden_files_warning_spec.js | 4 +- .../__snapshots__/findings_drawer_spec.js.snap | 126 +++ .../components/shared/findings_drawer_spec.js | 19 + spec/frontend/diffs/create_diffs_store.js | 2 + spec/frontend/diffs/mock_data/diff_code_quality.js | 5 + spec/frontend/diffs/mock_data/findings_drawer.js | 21 + spec/frontend/diffs/store/actions_spec.js | 331 +++++++- spec/frontend/diffs/utils/merge_request_spec.js | 56 +- spec/frontend/drawio/drawio_editor_spec.js | 8 +- spec/frontend/dropzone_input_spec.js | 4 +- 
.../source_editor_toolbar_button_spec.js | 31 +- .../components/source_editor_toolbar_spec.js | 11 +- .../source_editor_markdown_livepreview_ext_spec.js | 102 ++- spec/frontend/editor/utils_spec.js | 20 +- .../emoji/awards_app/store/actions_spec.js | 4 +- spec/frontend/environment.js | 11 +- .../environments/deploy_board_component_spec.js | 4 +- .../environments/environment_actions_spec.js | 114 +-- .../components/deployment_actions_spec.js | 119 ++- .../environments/environment_details/page_spec.js | 35 +- .../environments/environment_folder_spec.js | 2 +- .../frontend/environments/environment_stop_spec.js | 2 +- .../frontend/environments/environments_app_spec.js | 2 +- spec/frontend/environments/graphql/mock_data.js | 10 +- .../environments/graphql/resolvers_spec.js | 57 ++ ...loyment_data_transformation_helper_spec.js.snap | 34 + .../environments/kubernetes_overview_spec.js | 56 +- spec/frontend/environments/kubernetes_pods_spec.js | 114 +++ spec/frontend/environments/mock_data.js | 3 + .../environments/new_environment_item_spec.js | 27 +- .../stop_stale_environments_modal_spec.js | 4 +- .../components/error_tracking_list_spec.js | 25 +- .../configure_feature_flags_modal_spec.js | 8 +- .../feature_flags/components/feature_flags_spec.js | 4 +- .../frontend/filtered_search/dropdown_user_spec.js | 7 +- .../filtered_search/dropdown_utils_spec.js | 7 +- spec/frontend/fixtures/api_projects.rb | 15 +- spec/frontend/fixtures/comment_templates.rb | 74 ++ spec/frontend/fixtures/issues.rb | 9 - spec/frontend/fixtures/job_artifacts.rb | 2 +- spec/frontend/fixtures/jobs.rb | 64 +- spec/frontend/fixtures/milestones.rb | 43 + spec/frontend/fixtures/pipelines.rb | 25 + spec/frontend/fixtures/projects.rb | 2 +- spec/frontend/fixtures/runner.rb | 28 +- spec/frontend/fixtures/saved_replies.rb | 74 -- spec/frontend/fixtures/startup_css.rb | 15 - .../fixtures/static/oauth_remember_me.html | 2 +- .../fixtures/static/search_autocomplete.html | 15 - spec/frontend/fixtures/timelogs.rb | 
53 ++ .../frontend/frequent_items/components/app_spec.js | 1 - .../components/frequent_items_list_spec.js | 2 +- spec/frontend/frequent_items/store/actions_spec.js | 1 - spec/frontend/gfm_auto_complete_spec.js | 5 +- spec/frontend/groups/components/app_spec.js | 6 +- .../archived_projects_empty_state_spec.js | 4 +- .../shared_projects_empty_state_spec.js | 4 +- .../subgroups_and_projects_empty_state_spec.js | 1 + spec/frontend/groups/components/groups_spec.js | 2 +- .../groups/components/overview_tabs_spec.js | 1 + .../groups/components/transfer_group_form_spec.js | 2 +- .../components/group_settings_readme_spec.js | 112 +++ spec/frontend/groups/settings/mock_data.js | 6 + spec/frontend/header_search/components/app_spec.js | 72 +- spec/frontend/header_search/init_spec.js | 18 - .../helpers/init_simple_app_helper_spec.js | 6 +- spec/frontend/ide/components/activity_bar_spec.js | 55 +- .../ide/components/cannot_push_code_alert_spec.js | 2 +- .../ide/components/commit_sidebar/form_spec.js | 21 +- .../ide/components/commit_sidebar/list_spec.js | 2 +- spec/frontend/ide/components/ide_review_spec.js | 2 +- spec/frontend/ide/components/ide_spec.js | 4 +- spec/frontend/ide/components/ide_tree_spec.js | 76 +- .../ide/components/new_dropdown/index_spec.js | 56 +- .../ide/components/repo_commit_section_spec.js | 17 - spec/frontend/ide/components/repo_editor_spec.js | 14 +- .../components/shared/commit_message_field_spec.js | 2 +- spec/frontend/ide/init_gitlab_web_ide_spec.js | 3 + spec/frontend/ide/lib/languages/codeowners_spec.js | 85 ++ spec/frontend/ide/stores/actions_spec.js | 4 +- .../ide/stores/modules/commit/actions_spec.js | 66 -- .../details/components/import_details_app_spec.js | 23 + .../components/import_details_table_spec.js | 33 + spec/frontend/import/details/mock_data.js | 31 + .../components/import_status_spec.js | 72 +- .../components/import_actions_cell_spec.js | 4 +- .../import_groups/components/import_table_spec.js | 2 +- 
.../import_groups/graphql/client_factory_spec.js | 2 +- .../import_groups/services/status_poller_spec.js | 2 +- .../incidents/components/incidents_list_spec.js | 2 +- .../components/incidents_settings_service_spec.js | 2 +- .../edit/components/integration_form_spec.js | 2 +- .../edit/components/jira_issues_fields_spec.js | 5 +- .../components/invite_group_notification_spec.js | 14 +- .../components/invite_groups_modal_spec.js | 26 + .../components/invite_members_modal_spec.js | 278 +++---- .../invite_members/mock_data/member_modal.js | 31 + .../invite_members/utils/member_utils_spec.js | 30 +- .../utils/trigger_successful_invite_alert_spec.js | 4 +- spec/frontend/issuable/issuable_form_spec.js | 69 +- .../issuable/popover/components/mr_popover_spec.js | 4 +- .../components/add_issuable_form_spec.js | 2 +- spec/frontend/issues/issue_spec.js | 8 +- .../issues/list/components/issues_list_app_spec.js | 4 +- .../jira_issues_import_status_app_spec.js | 2 +- .../issues/new/components/type_select_spec.js | 141 ++++ spec/frontend/issues/show/components/app_spec.js | 19 +- .../issues/show/components/description_spec.js | 82 +- .../frontend/issues/show/components/edited_spec.js | 73 +- .../show/components/fields/description_spec.js | 7 +- .../issues/show/components/header_actions_spec.js | 2 +- .../incidents/create_timeline_events_form_spec.js | 1 - .../components/incidents/incident_tabs_spec.js | 2 +- .../incidents/timeline_events_form_spec.js | 14 +- .../issues/show/components/locked_warning_spec.js | 4 +- spec/frontend/issues/show/mock_data/mock_data.js | 21 +- .../components/source_branch_dropdown_spec.js | 4 +- .../sign_in_gitlab_multiversion/index_spec.js | 10 +- .../components/jira_import_form_spec.js | 4 +- .../components/job/manual_variables_form_spec.js | 4 +- .../jobs/components/job/sidebar_header_spec.js | 2 +- spec/frontend/jobs/components/job/sidebar_spec.js | 4 +- .../components/table/cells/actions_cell_spec.js | 2 +- 
.../jobs/components/table/job_table_app_spec.js | 37 + spec/frontend/jobs/mock_data.js | 12 + .../labels/components/delete_label_modal_spec.js | 87 +- .../apollo/indexed_db_persistent_storage_spec.js | 90 +++ .../cache_with_persist_directive_and_field.json | 151 ---- spec/frontend/lib/apollo/persist_link_spec.js | 4 +- spec/frontend/lib/utils/chart_utils_spec.js | 55 +- spec/frontend/lib/utils/color_utils_spec.js | 2 +- .../lib/utils/datetime/time_spent_utility_spec.js | 25 + spec/frontend/lib/utils/error_message_spec.js | 101 +-- .../lib/utils/intersection_observer_spec.js | 2 +- spec/frontend/lib/utils/poll_spec.js | 2 +- spec/frontend/lib/utils/secret_detection_spec.js | 68 ++ spec/frontend/lib/utils/web_ide_navigator_spec.js | 38 + .../components/table/expiration_datepicker_spec.js | 2 +- .../members/components/table/role_dropdown_spec.js | 86 +- spec/frontend/members/utils_spec.js | 2 +- .../components/delete_button_spec.js | 68 ++ .../__snapshots__/ml_candidates_show_spec.js.snap | 113 +-- .../candidates/show/ml_candidates_show_spec.js | 25 +- .../show/components/experiment_header_spec.js | 55 ++ .../experiments/show/ml_experiments_show_spec.js | 18 +- .../routes/experiments/show/mock_data.js | 2 + .../components/variables/dropdown_field_spec.js | 2 +- .../monitoring/pages/dashboard_page_spec.js | 3 +- spec/frontend/monitoring/store/actions_spec.js | 2 +- .../frontend/nav/components/new_nav_toggle_spec.js | 52 +- spec/frontend/new_branch_spec.js | 2 +- .../notebook/cells/output/dataframe_spec.js | 59 ++ .../notebook/cells/output/dataframe_util_spec.js | 113 +++ spec/frontend/notebook/cells/output/index_spec.js | 18 +- spec/frontend/notebook/mock_data.js | 44 + .../frontend/notes/components/comment_form_spec.js | 34 + .../notes/components/discussion_filter_spec.js | 122 ++- .../note_actions/timeline_event_button_spec.js | 2 +- .../notes/components/note_awards_list_spec.js | 236 +++--- spec/frontend/notes/components/note_body_spec.js | 8 - 
spec/frontend/notes/components/note_form_spec.js | 224 ++---- .../notes/components/noteable_note_spec.js | 11 + spec/frontend/notes/components/notes_app_spec.js | 2 +- spec/frontend/notes/deprecated_notes_spec.js | 11 +- spec/frontend/notes/stores/actions_spec.js | 6 +- .../components/custom_notifications_modal_spec.js | 2 +- .../components/oauth_secret_spec.js | 116 +++ spec/frontend/oauth_remember_me_spec.js | 9 +- spec/frontend/observability/index_spec.js | 2 +- .../components/metrics_settings_spec.js | 2 +- .../components/details_page/details_header_spec.js | 44 +- .../components/details_page/tags_list_spec.js | 289 ++++++- .../components/list_page/registry_header_spec.js | 6 +- .../container_registry/explorer/mock_data.js | 1 - .../explorer/pages/details_spec.js | 119 +-- .../dependency_proxy/app_spec.js | 33 - .../components/manifest_list_spec.js | 19 +- .../harbor_registry/pages/list_spec.js | 4 +- .../list/components/infrastructure_title_spec.js | 2 +- .../components/delete_modal_spec.js | 73 +- .../details/package_versions_list_spec.js | 165 +++- .../components/list/package_list_row_spec.js | 2 +- .../components/list/packages_list_spec.js | 49 +- .../components/list/packages_search_spec.js | 2 +- .../package_registry/mock_data.js | 69 +- .../package_registry/pages/details_spec.js | 101 ++- .../package_registry/pages/list_spec.js | 73 +- .../group/components/package_settings_spec.js | 4 +- .../packages_forwarding_settings_spec.js | 21 +- .../settings/components/cleanup_image_tags_spec.js | 2 +- .../container_expiration_policy_form_spec.js | 2 +- .../components/container_expiration_policy_spec.js | 2 +- .../packages_cleanup_policy_form_spec.js | 2 +- .../components/registry_settings_app_spec.js | 2 +- .../__snapshots__/registry_breadcrumb_spec.js.snap | 29 +- .../shared/components/persisted_search_spec.js | 2 +- .../shared/components/registry_list_spec.js | 2 +- .../admin/abuse_reports/abuse_reports_spec.js | 6 +- .../account_and_limits_spec.js | 7 +- 
.../metrics_and_profiling/usage_statistics_spec.js | 6 +- .../jobs/components/cancel_jobs_modal_spec.js | 66 ++ .../admin/jobs/components/cancel_jobs_spec.js | 57 ++ .../components/table/admin_job_table_app_spec.js | 100 +++ .../components/table/graphql/cache_config_spec.js | 106 +++ .../index/components/cancel_jobs_modal_spec.js | 66 -- .../jobs/index/components/cancel_jobs_spec.js | 57 -- .../projects/components/namespace_select_spec.js | 4 +- .../pages/dashboard/todos/index/todos_spec.js | 5 +- .../pages/groups/new/components/app_spec.js | 5 +- .../bitbucket_server_status_table_spec.js | 2 +- .../components/bulk_imports_history_app_spec.js | 2 +- .../history/components/import_history_app_spec.js | 3 +- .../forks/new/components/project_namespace_spec.js | 2 +- .../permissions/components/settings_panel_spec.js | 2 +- .../sessions/new/preserve_url_fragment_spec.js | 5 +- .../sessions/new/signin_tabs_memoizer_spec.js | 6 +- .../shared/wikis/components/wiki_form_spec.js | 9 +- .../components/performance_bar_app_spec.js | 55 +- spec/frontend/performance_bar/index_spec.js | 10 +- .../services/performance_bar_service_spec.js | 2 +- .../stores/performance_bar_store_spec.js | 8 + .../pipeline_wizard/components/commit_spec.js | 8 +- .../pipeline_wizard/components/step_nav_spec.js | 6 +- .../pipeline_wizard/components/step_spec.js | 2 +- .../components/widgets/list_spec.js | 4 +- .../components/widgets/text_spec.js | 2 +- .../pipeline_wizard/components/wrapper_spec.js | 8 +- spec/frontend/pipelines/components/dag/dag_spec.js | 12 +- .../pipelines/components/jobs/jobs_app_spec.js | 2 +- .../pipeline_mini_graph/pipeline_stage_spec.js | 2 +- .../empty_state/pipelines_ci_templates_spec.js | 104 +-- .../graph/graph_component_wrapper_spec.js | 171 ++-- .../pipelines/graph/graph_view_selector_spec.js | 15 + .../pipelines/graph/linked_pipeline_spec.js | 168 ++-- .../frontend/pipelines/pipeline_operations_spec.js | 77 ++ spec/frontend/pipelines/pipelines_actions_spec.js | 168 ---- 
.../pipelines/pipelines_manual_actions_spec.js | 216 +++++ spec/frontend/pipelines/pipelines_spec.js | 14 +- .../test_reports/stores/mutations_spec.js | 2 +- .../account/components/update_username_spec.js | 2 +- .../profile/components/overview_tab_spec.js | 53 +- .../profile/components/profile_tabs_spec.js | 57 +- .../commit/components/branches_dropdown_spec.js | 6 +- .../components/commit_options_dropdown_spec.js | 2 +- .../projects/commit/components/form_modal_spec.js | 74 +- .../frontend/projects/commit/store/actions_spec.js | 2 +- .../commits/components/author_select_spec.js | 51 +- .../projects/commits/store/actions_spec.js | 2 +- .../components/revision_dropdown_legacy_spec.js | 2 +- .../compare/components/revision_dropdown_spec.js | 4 +- spec/frontend/projects/new/components/app_spec.js | 3 +- .../new/components/new_project_url_select_spec.js | 2 +- .../ci_cd_analytics_area_chart_spec.js.snap | 1 + .../components/transfer_project_form_spec.js | 6 +- .../components/topics_token_selector_spec.js | 2 +- .../components/service_desk_root_spec.js | 4 +- .../prometheus_metrics/custom_metrics_spec.js | 6 +- .../prometheus_metrics/prometheus_metrics_spec.js | 7 +- .../protected_branch_edit_spec.js | 2 +- spec/frontend/read_more_spec.js | 2 +- spec/frontend/ref/components/ref_selector_spec.js | 22 + spec/frontend/ref/stores/actions_spec.js | 7 + spec/frontend/ref/stores/mutations_spec.js | 10 + .../releases/components/app_edit_new_spec.js | 21 + .../frontend/releases/components/app_index_spec.js | 4 +- .../releases/components/releases_sort_spec.js | 3 +- .../releases/components/tag_create_spec.js | 107 +++ .../releases/components/tag_field_new_spec.js | 229 ++---- .../releases/components/tag_search_spec.js | 144 ++++ .../releases/stores/modules/detail/getters_spec.js | 27 +- .../components/blob_button_group_spec.js | 34 +- .../repository/components/breadcrumbs_spec.js | 107 ++- .../components/delete_blob_modal_spec.js | 8 +- .../repository/components/fork_info_spec.js 
| 220 +++-- .../components/fork_sync_conflicts_modal_spec.js | 6 +- .../repository/components/last_commit_spec.js | 24 +- .../components/new_directory_modal_spec.js | 2 +- .../repository/components/preview/index_spec.js | 2 +- .../repository/components/table/row_spec.js | 287 ++++--- .../repository/components/tree_content_spec.js | 2 +- spec/frontend/repository/mock_data.js | 4 + .../__snapshots__/list_item_spec.js.snap | 67 -- .../frontend/saved_replies/components/form_spec.js | 144 ---- .../saved_replies/components/list_item_spec.js | 50 -- .../frontend/saved_replies/components/list_spec.js | 46 -- spec/frontend/saved_replies/pages/index_spec.js | 45 -- .../frontend/__fixtures__/locale/de/converted.json | 21 + .../frontend/__fixtures__/locale/de/gitlab.po | 13 + spec/frontend/scripts/frontend/po_to_json_spec.js | 244 ++++++ .../search/highlight_blob_search_result_spec.js | 6 +- spec/frontend/search/mock_data.js | 300 +++---- .../frontend/search/sidebar/components/app_spec.js | 2 +- .../sidebar/components/checkbox_filter_spec.js | 52 +- .../components/confidentiality_filter_spec.js | 35 +- .../sidebar/components/language_filter_spec.js | 53 +- .../sidebar/components/scope_navigation_spec.js | 18 + .../components/scope_new_navigation_spec.js | 83 ++ .../sidebar/components/status_filter_spec.js | 35 +- spec/frontend/search/store/getters_spec.js | 8 + spec/frontend/search/store/utils_spec.js | 15 + .../topbar/components/searchable_dropdown_spec.js | 2 +- spec/frontend/search_autocomplete_spec.js | 292 ------- spec/frontend/search_autocomplete_utils_spec.js | 114 --- .../security_configuration/components/app_spec.js | 28 +- .../components/feature_card_spec.js | 101 ++- .../components/training_provider_list_spec.js | 2 +- .../frontend/sentry/sentry_browser_wrapper_spec.js | 2 +- spec/frontend/shortcuts_spec.js | 17 +- .../assignees/sidebar_participant_spec.js | 3 +- .../assignees/user_name_with_status_spec.js | 4 +- .../components/date/sidebar_date_widget_spec.js | 27 
+- .../dropdown_contents_create_view_spec.js | 85 +- .../dropdown_contents_create_view_spec.js | 30 +- .../dropdown_contents_labels_view_spec.js | 4 +- .../labels_select_widget/dropdown_header_spec.js | 4 +- .../labels_select_root_spec.js | 2 +- .../labels/labels_select_widget/mock_data.js | 11 + .../components/move/issuable_move_dropdown_spec.js | 4 +- .../components/move/move_issue_button_spec.js | 4 - .../components/move/move_issues_button_spec.js | 2 +- .../reviewers/uncollapsed_reviewer_list_spec.js | 149 +++- .../components/severity/sidebar_severity_spec.js | 154 ---- .../severity/sidebar_severity_widget_spec.js | 160 ++++ .../components/sidebar_dropdown_widget_spec.js | 26 +- .../components/subscriptions/subscriptions_spec.js | 61 +- .../components/time_tracking/time_tracker_spec.js | 2 +- .../components/toggle/toggle_sidebar_spec.js | 2 +- spec/frontend/sidebar/mock_data.js | 11 + .../snippet_visibility_edit_spec.js.snap | 2 +- spec/frontend/snippets/components/edit_spec.js | 2 +- .../snippets/components/snippet_blob_view_spec.js | 220 +++-- .../snippets/components/snippet_header_spec.js | 2 +- .../components/context_switcher_spec.js | 52 +- .../super_sidebar/components/create_menu_spec.js | 24 + .../components/frequent_items_list_spec.js | 35 + .../global_search_autocomplete_items_spec.js | 196 +---- .../components/global_search_default_items_spec.js | 61 +- .../components/global_search_scoped_items_spec.js | 107 +-- .../global_search/components/global_search_spec.js | 324 ++------ .../components/global_search/mock_data.js | 202 +++-- .../components/global_search/store/actions_spec.js | 4 +- .../components/global_search/store/getters_spec.js | 7 +- .../global_search/store/mutations_spec.js | 2 +- .../components/global_search/utils_spec.js | 60 ++ .../super_sidebar/components/groups_list_spec.js | 9 +- .../super_sidebar/components/help_center_spec.js | 103 ++- .../super_sidebar/components/items_list_spec.js | 45 +- .../components/merge_request_menu_spec.js 
| 19 +- .../super_sidebar/components/nav_item_spec.js | 38 +- .../components/pinned_section_spec.js | 75 ++ .../super_sidebar/components/projects_list_spec.js | 9 +- .../components/search_results_spec.js | 16 +- .../super_sidebar/components/sidebar_menu_spec.js | 151 ++++ .../super_sidebar/components/super_sidebar_spec.js | 154 +++- .../components/super_sidebar_toggle_spec.js | 106 +++ .../super_sidebar/components/user_bar_spec.js | 142 +++- .../super_sidebar/components/user_menu_spec.js | 189 ++++- .../components/user_name_group_spec.js | 22 +- spec/frontend/super_sidebar/mock_data.js | 28 + .../super_sidebar_collapsed_state_manager_spec.js | 67 +- .../surveys/merge_request_performance/app_spec.js | 28 +- .../tags/components/delete_tag_modal_spec.js | 2 +- .../components/states_table_actions_spec.js | 2 +- spec/frontend/test_setup.js | 13 + .../components/timelog_source_cell_spec.js | 136 ++++ .../time_tracking/components/timelogs_app_spec.js | 238 ++++++ .../components/timelogs_table_spec.js | 223 ++++++ spec/frontend/toggles/index_spec.js | 2 +- .../tracking/tracking_initialization_spec.js | 21 +- spec/frontend/tracking/tracking_spec.js | 91 +-- .../components/project_storage_detail_spec.js | 49 +- .../storage/components/storage_type_icon_spec.js | 1 - .../storage/components/usage_graph_spec.js | 24 +- spec/frontend/usage_quotas/storage/mock_data.js | 26 +- .../user_lists/components/edit_user_list_spec.js | 2 +- .../user_lists/components/user_lists_spec.js | 2 +- spec/frontend/user_popovers_spec.js | 8 +- .../components/mr_widget_memory_usage_spec.js | 245 +++--- .../components/mr_widget_rebase_spec.js | 343 ++++---- .../components/states/mr_widget_merging_spec.js | 12 +- .../states/mr_widget_ready_to_merge_spec.js | 7 +- .../mr_widget_unresolved_discussions_spec.js | 36 +- .../__snapshots__/dynamic_content_spec.js.snap | 82 +- .../components/widget/dynamic_content_spec.js | 2 + .../components/widget/widget_spec.js | 22 +- 
.../extentions/accessibility/index_spec.js | 2 +- .../extentions/code_quality/index_spec.js | 2 +- .../extentions/terraform/index_spec.js | 2 +- .../mr_widget_options_spec.js | 39 +- .../vue_shared/alert_details/alert_details_spec.js | 10 +- .../vue_shared/alert_details/alert_metrics_spec.js | 63 -- .../sidebar/alert_sidebar_assignees_spec.js | 157 ++-- .../components/color_picker/color_picker_spec.js | 2 +- .../color_select_root_spec.js | 2 +- .../dropdown_contents_color_view_spec.js | 2 +- .../dropdown_contents_spec.js | 19 +- .../color_select_dropdown/dropdown_value_spec.js | 11 +- .../components/diff_viewer/utils_spec.js | 33 + .../components/diff_viewer/viewers/renamed_spec.js | 250 +++--- .../components/dropdown/dropdown_widget_spec.js | 2 +- .../dropdown_keyboard_navigation_spec.js | 21 +- .../components/entity_select/entity_select_spec.js | 2 +- .../entity_select/project_select_spec.js | 13 + .../components/file_finder/index_spec.js | 2 +- .../vue_shared/components/file_finder/item_spec.js | 10 +- .../filtered_search_bar_root_spec.js | 2 +- .../filtered_search_bar/tokens/base_token_spec.js | 2 + .../tokens/crm_organization_token_spec.js | 2 +- .../filtered_search_bar/tokens/emoji_token_spec.js | 6 +- .../filtered_search_bar/tokens/label_token_spec.js | 2 +- .../tokens/milestone_token_spec.js | 4 +- .../filtered_search_bar/tokens/user_token_spec.js | 6 +- .../vue_shared/components/gl_countdown_spec.js | 4 +- .../components/listbox_input/listbox_input_spec.js | 4 +- .../markdown/comment_templates_dropdown_spec.js | 76 ++ .../markdown/editor_mode_dropdown_spec.js | 4 +- .../components/markdown/markdown_editor_spec.js | 50 +- .../markdown/saved_replies_dropdown_spec.js | 62 -- .../components/markdown/suggestions_spec.js | 54 +- .../markdown_drawer/markdown_drawer_spec.js | 8 +- .../components/notes/system_note_spec.js | 2 +- .../projects_list/projects_list_item_spec.js | 169 ++++ .../components/projects_list/projects_list_spec.js | 34 + 
.../__snapshots__/code_instruction_spec.js.snap | 32 +- .../__snapshots__/history_item_spec.js.snap | 2 +- .../components/registry/code_instruction_spec.js | 4 +- .../__snapshots__/skeleton_loader_spec.js.snap | 4 +- .../instructions/runner_cli_instructions_spec.js | 6 +- .../runner_instructions_modal_spec.js | 4 +- .../vue_shared/components/slot_switch_spec.js | 5 +- .../source_viewer/components/chunk_line_spec.js | 6 - .../source_viewer/components/chunk_spec.js | 1 - .../source_viewer/source_viewer_deprecated_spec.js | 2 +- .../vue_shared/components/split_button_spec.js | 5 +- .../components/tooltip_on_truncate_spec.js | 6 +- .../components/user_callout_dismisser_spec.js | 14 +- .../user_deletion_obstacles_list_spec.js | 2 +- .../components/user_popover/user_popover_spec.js | 11 +- .../components/vuex_module_provider_spec.js | 11 +- .../vue_shared/components/web_ide_link_spec.js | 4 +- .../vue_shared/directives/track_event_spec.js | 61 +- .../issuable/issuable_blocked_icon_spec.js | 12 +- .../issuable/list/components/issuable_item_spec.js | 2 +- .../list/components/issuable_list_root_spec.js | 4 +- .../frontend/vue_shared/issuable/list/mock_data.js | 6 + .../issuable/show/components/issuable_body_spec.js | 143 ++-- .../show/components/issuable_edit_form_spec.js | 4 +- .../show/components/issuable_header_spec.js | 23 +- .../components/manage_via_mr_spec.js | 36 +- .../webhooks/components/push_events_spec.js | 2 +- .../webhooks/components/test_dropdown_spec.js | 13 +- spec/frontend/whats_new/utils/notification_spec.js | 5 +- .../work_items/components/item_title_spec.js | 2 +- .../components/notes/work_item_add_note_spec.js | 45 +- .../notes/work_item_comment_form_spec.js | 100 ++- .../components/notes/work_item_discussion_spec.js | 6 +- .../notes/work_item_note_actions_spec.js | 84 +- .../components/notes/work_item_note_spec.js | 117 ++- .../work_items/components/widget_wrapper_spec.js | 2 +- .../components/work_item_actions_spec.js | 149 +++- 
.../components/work_item_assignees_spec.js | 2 +- .../components/work_item_description_spec.js | 7 +- .../work_items/components/work_item_detail_spec.js | 10 +- .../work_item_link_child_metadata_spec.js | 19 +- .../work_item_links/work_item_link_child_spec.js | 123 ++- .../work_item_links/work_item_links_menu_spec.js | 2 +- .../work_item_links/work_item_links_spec.js | 2 +- .../work_items/components/work_item_notes_spec.js | 14 +- spec/frontend/work_items/mock_data.js | 89 ++- .../work_items/pages/work_item_root_spec.js | 2 +- spec/frontend/work_items/router_spec.js | 2 +- .../content_editor_integration_spec.js | 2 +- .../ide/user_opens_ide_spec.js | 4 +- .../snippets/snippets_notes_spec.js | 7 +- spec/graphql/graphql_triggers_spec.rb | 52 +- spec/graphql/mutations/achievements/delete_spec.rb | 56 ++ spec/graphql/mutations/achievements/update_spec.rb | 57 ++ .../concerns/mutations/finds_by_gid_spec.rb | 26 - .../container_repositories/destroy_spec.rb | 2 +- .../container_repositories/destroy_tags_spec.rb | 4 +- spec/graphql/mutations/work_items/update_spec.rb | 21 + .../achievements/achievements_resolver_spec.rb | 18 +- spec/graphql/resolvers/blobs_resolver_spec.rb | 8 + .../graphql/resolvers/ci/all_jobs_resolver_spec.rb | 2 +- spec/graphql/resolvers/ci/jobs_resolver_spec.rb | 2 +- .../resolvers/ci/runner_projects_resolver_spec.rb | 26 +- .../resolvers/ci/runner_status_resolver_spec.rb | 20 +- .../resolvers/ci/variables_resolver_spec.rb | 2 +- .../group_data_transfer_resolver_spec.rb | 65 ++ .../project_data_transfer_resolver_spec.rb | 68 ++ .../resolvers/data_transfer_resolver_spec.rb | 31 - .../resolvers/group_labels_resolver_spec.rb | 61 ++ spec/graphql/resolvers/labels_resolver_spec.rb | 60 ++ .../resolvers/paginated_tree_resolver_spec.rb | 12 +- spec/graphql/resolvers/timelog_resolver_spec.rb | 22 +- .../achievements/user_achievement_type_spec.rb | 2 +- .../graphql/types/ci/catalog/resource_type_spec.rb | 18 + .../types/ci/config/include_type_enum_spec.rb | 2 
+- spec/graphql/types/ci/job_trace_type_spec.rb | 27 + spec/graphql/types/ci/job_type_spec.rb | 3 +- spec/graphql/types/ci/runner_machine_type_spec.rb | 18 - spec/graphql/types/ci/runner_manager_type_spec.rb | 18 + spec/graphql/types/ci/runner_type_spec.rb | 4 +- spec/graphql/types/ci/variable_sort_enum_spec.rb | 2 +- .../clusters/agent_activity_event_type_spec.rb | 2 +- .../types/clusters/agent_token_type_spec.rb | 2 +- spec/graphql/types/clusters/agent_type_spec.rb | 2 +- .../project_data_transfer_type_spec.rb | 38 + spec/graphql/types/issue_type_spec.rb | 5 +- spec/graphql/types/merge_request_type_spec.rb | 4 +- .../types/permission_types/work_item_spec.rb | 3 +- spec/graphql/types/project_type_spec.rb | 5 +- spec/graphql/types/timelog_type_spec.rb | 2 +- spec/graphql/types/work_item_type_spec.rb | 1 + .../available_export_fields_enum_spec.rb | 1 + .../types/work_items/widget_interface_spec.rb | 14 +- .../work_items/widgets/award_emoji_type_spec.rb | 12 + .../widgets/current_user_todos_input_type_spec.rb | 9 + .../widgets/current_user_todos_type_spec.rb | 11 + .../widgets/hierarchy_update_input_type_spec.rb | 8 +- spec/haml_lint/linter/no_plain_nodes_spec.rb | 34 +- spec/helpers/abuse_reports_helper_spec.rb | 13 + spec/helpers/access_tokens_helper_spec.rb | 2 +- spec/helpers/application_helper_spec.rb | 33 +- spec/helpers/avatars_helper_spec.rb | 142 ++-- spec/helpers/blob_helper_spec.rb | 26 +- spec/helpers/ci/catalog/resources_helper_spec.rb | 12 +- spec/helpers/ci/pipelines_helper_spec.rb | 30 +- spec/helpers/ci/variables_helper_spec.rb | 2 +- spec/helpers/emoji_helper_spec.rb | 22 +- spec/helpers/feature_flags_helper_spec.rb | 14 +- spec/helpers/groups_helper_spec.rb | 1 + spec/helpers/ide_helper_spec.rb | 3 + spec/helpers/integrations_helper_spec.rb | 3 +- spec/helpers/issuables_helper_spec.rb | 35 +- spec/helpers/issues_helper_spec.rb | 16 +- spec/helpers/merge_requests_helper_spec.rb | 26 +- spec/helpers/namespaces_helper_spec.rb | 33 +- 
spec/helpers/notify_helper_spec.rb | 17 +- spec/helpers/packages_helper_spec.rb | 69 ++ spec/helpers/page_layout_helper_spec.rb | 14 +- .../helpers/projects/ml/experiments_helper_spec.rb | 18 +- spec/helpers/projects/pipeline_helper_spec.rb | 2 +- spec/helpers/projects_helper_spec.rb | 10 +- .../routing/pseudonymization_helper_spec.rb | 228 +++--- spec/helpers/search_helper_spec.rb | 40 + spec/helpers/sidebars_helper_spec.rb | 186 ++++- spec/helpers/storage_helper_spec.rb | 28 +- spec/helpers/todos_helper_spec.rb | 46 +- spec/helpers/tree_helper_spec.rb | 1 + spec/helpers/users/group_callouts_helper_spec.rb | 10 +- spec/helpers/users_helper_spec.rb | 53 +- spec/helpers/visibility_level_helper_spec.rb | 25 +- .../check_forced_decomposition_spec.rb | 2 +- .../doorkeeper_openid_connect_patch_spec.rb | 74 ++ spec/initializers/load_balancing_spec.rb | 2 +- spec/initializers/net_http_patch_spec.rb | 6 + spec/initializers/net_http_response_patch_spec.rb | 10 +- spec/lib/api/ci/helpers/runner_spec.rb | 74 +- .../entities/clusters/agent_authorization_spec.rb | 36 - .../agents/authorizations/ci_access_spec.rb | 36 + spec/lib/api/entities/ml/mlflow/run_info_spec.rb | 4 +- spec/lib/api/entities/ml/mlflow/run_spec.rb | 2 +- spec/lib/api/github/entities_spec.rb | 2 +- spec/lib/api/helpers/members_helpers_spec.rb | 18 - .../jira_connect/serializers/branch_entity_spec.rb | 49 +- .../jira_connect/serializers/build_entity_spec.rb | 2 +- .../serializers/feature_flag_entity_spec.rb | 10 +- .../lib/atlassian/jira_issue_key_extractor_spec.rb | 8 + .../jira_issue_key_extractors/branch_spec.rb | 57 ++ spec/lib/backup/database_spec.rb | 41 +- spec/lib/backup/gitaly_backup_spec.rb | 11 +- spec/lib/backup/manager_spec.rb | 52 +- spec/lib/backup/repositories_spec.rb | 10 +- .../lib/banzai/filter/external_link_filter_spec.rb | 32 +- .../issuable_reference_expansion_filter_spec.rb | 2 +- spec/lib/banzai/filter/kroki_filter_spec.rb | 20 +- .../banzai/filter/markdown_engines/base_spec.rb | 17 + 
.../filter/markdown_engines/common_mark_spec.rb | 17 + spec/lib/banzai/filter/markdown_filter_spec.rb | 21 +- spec/lib/banzai/filter/math_filter_spec.rb | 1 + .../references/design_reference_filter_spec.rb | 10 +- .../banzai/filter/repository_link_filter_spec.rb | 13 +- .../banzai/filter/syntax_highlight_filter_spec.rb | 2 +- .../pipeline/plain_markdown_pipeline_spec.rb | 4 +- .../banzai/reference_parser/commit_parser_spec.rb | 53 +- spec/lib/banzai/reference_redactor_spec.rb | 17 +- spec/lib/bulk_imports/clients/graphql_spec.rb | 31 - spec/lib/bulk_imports/clients/http_spec.rb | 4 +- .../pipelines/project_entities_pipeline_spec.rb | 18 +- .../group_attributes_transformer_spec.rb | 130 ++- .../projects/pipelines/project_pipeline_spec.rb | 2 +- .../projects/pipelines/references_pipeline_spec.rb | 10 + .../project_attributes_transformer_spec.rb | 92 ++- .../lib/feature_groups/gitlab_team_members_spec.rb | 65 -- spec/lib/feature_spec.rb | 27 - .../cycle_analytics/request_params_spec.rb | 16 +- spec/lib/gitlab/app_logger_spec.rb | 25 +- spec/lib/gitlab/auth/o_auth/auth_hash_spec.rb | 81 +- .../lib/gitlab/auth/u2f_webauthn_converter_spec.rb | 29 - spec/lib/gitlab/auth_spec.rb | 118 ++- ...n_mode_scope_for_personal_access_tokens_spec.rb | 9 +- .../backfill_ci_queuing_tables_spec.rb | 245 ------ .../backfill_group_features_spec.rb | 2 +- .../backfill_integrations_type_new_spec.rb | 67 -- ...fill_member_namespace_for_group_members_spec.rb | 2 +- ...ckfill_namespace_id_for_namespace_route_spec.rb | 2 +- ...ckfill_namespace_traversal_ids_children_spec.rb | 21 - .../backfill_namespace_traversal_ids_roots_spec.rb | 21 - .../backfill_partitioned_table_spec.rb | 140 ++++ .../backfill_prepared_at_merge_requests_spec.rb | 22 +- .../backfill_snippet_repositories_spec.rb | 2 +- .../backfill_upvotes_count_on_issues_spec.rb | 46 -- .../backfill_user_namespace_spec.rb | 39 - .../cleanup_orphaned_lfs_objects_projects_spec.rb | 85 -- .../delete_orphaned_deployments_spec.rb | 54 -- 
..._policies_linked_to_no_container_images_spec.rb | 142 ---- .../drop_invalid_security_findings_spec.rb | 57 -- .../drop_invalid_vulnerabilities_spec.rb | 126 --- .../encrypt_ci_trigger_token_spec.rb | 4 +- .../encrypt_integration_properties_spec.rb | 63 -- .../encrypt_static_object_token_spec.rb | 64 -- ...ract_project_topics_into_separate_table_spec.rb | 46 -- .../fix_first_mentioned_in_commit_at_spec.rb | 166 ---- .../fix_merge_request_diff_commit_users_spec.rb | 25 - ...occurrences_with_hashes_as_raw_metadata_spec.rb | 2 +- .../merge_topics_with_same_name_spec.rb | 148 ---- ...te_evidences_for_vulnerability_findings_spec.rb | 27 +- ...igrate_links_for_vulnerability_findings_spec.rb | 64 +- ...migrate_merge_request_diff_commit_users_spec.rb | 413 ---------- ...l_namespace_project_maintainer_to_owner_spec.rb | 2 +- ...ct_taggings_context_from_tags_to_topics_spec.rb | 30 - .../migrate_u2f_webauthn_spec.rb | 67 -- ...ner_registry_enabled_to_project_feature_spec.rb | 98 --- .../nullify_orphan_runner_id_on_ci_builds_spec.rb | 2 +- .../populate_namespace_statistics_spec.rb | 71 -- ...ulate_topics_non_private_projects_count_spec.rb | 50 -- ...ulate_topics_total_projects_count_cache_spec.rb | 35 - .../populate_vulnerability_reads_spec.rb | 93 --- ...culate_vulnerabilities_occurrences_uuid_spec.rb | 530 ------------- .../remove_all_trace_expiration_dates_spec.rb | 54 -- ...move_duplicate_vulnerabilities_findings_spec.rb | 171 ---- ..._project_group_link_with_missing_groups_spec.rb | 124 +++ .../remove_vulnerability_finding_links_spec.rb | 2 +- ...migrate_merge_request_diff_commit_users_spec.rb | 50 -- .../update_timelogs_null_spent_at_spec.rb | 40 - .../update_timelogs_project_id_spec.rb | 52 -- ...ere_two_factor_auth_required_from_group_spec.rb | 84 -- spec/lib/gitlab/bullet/exclusions_spec.rb | 15 +- spec/lib/gitlab/cache/client_spec.rb | 3 - spec/lib/gitlab/cache/metadata_spec.rb | 13 - spec/lib/gitlab/cache/metrics_spec.rb | 5 - 
spec/lib/gitlab/ci/ansi2json/signed_state_spec.rb | 67 ++ spec/lib/gitlab/ci/ansi2json/state_spec.rb | 83 ++ spec/lib/gitlab/ci/ansi2json_spec.rb | 18 +- spec/lib/gitlab/ci/build/cache_spec.rb | 65 +- spec/lib/gitlab/ci/build/context/build_spec.rb | 23 +- spec/lib/gitlab/ci/build/context/global_spec.rb | 11 +- spec/lib/gitlab/ci/components/header_spec.rb | 50 -- .../lib/gitlab/ci/components/instance_path_spec.rb | 31 + spec/lib/gitlab/ci/config/entry/job_spec.rb | 33 + spec/lib/gitlab/ci/config/entry/publish_spec.rb | 40 + .../ci/config/external/file/artifact_spec.rb | 41 +- .../gitlab/ci/config/external/file/base_spec.rb | 131 ++- .../ci/config/external/file/component_spec.rb | 33 +- .../gitlab/ci/config/external/file/local_spec.rb | 28 + .../gitlab/ci/config/external/file/project_spec.rb | 33 + .../gitlab/ci/config/external/file/remote_spec.rb | 39 +- .../ci/config/external/file/template_spec.rb | 33 + .../gitlab/ci/config/external/interpolator_spec.rb | 312 ++++++++ .../ci/config/external/mapper/matcher_spec.rb | 66 +- .../external/mapper/variables_expander_spec.rb | 2 +- .../gitlab/ci/config/external/processor_spec.rb | 2 +- spec/lib/gitlab/ci/config/header/spec_spec.rb | 12 + spec/lib/gitlab/ci/config/yaml/result_spec.rb | 31 +- spec/lib/gitlab/ci/config/yaml_spec.rb | 130 ++- spec/lib/gitlab/ci/input/arguments/default_spec.rb | 8 + spec/lib/gitlab/ci/input/arguments/options_spec.rb | 4 +- .../lib/gitlab/ci/input/arguments/required_spec.rb | 4 + spec/lib/gitlab/ci/jwt_v2_spec.rb | 20 +- spec/lib/gitlab/ci/parsers/security/sast_spec.rb | 4 +- .../ci/parsers/security/secret_detection_spec.rb | 2 +- .../gitlab/ci/pipeline/seed/build/cache_spec.rb | 53 +- spec/lib/gitlab/ci/pipeline/seed/build_spec.rb | 28 +- spec/lib/gitlab/ci/status/composite_spec.rb | 18 +- .../processable/waiting_for_resource_spec.rb | 21 +- spec/lib/gitlab/ci/trace/chunked_io_spec.rb | 10 - .../gitlab/ci/variables/builder/pipeline_spec.rb | 91 ++- spec/lib/gitlab/ci/variables/builder_spec.rb 
| 165 +++- spec/lib/gitlab/ci/variables/collection_spec.rb | 2 +- spec/lib/gitlab/ci/yaml_processor_spec.rb | 8 +- .../gitlab/config/loader/multi_doc_yaml_spec.rb | 24 +- .../content_security_policy/config_loader_spec.rb | 47 -- .../async_indexes/migration_helpers_spec.rb | 86 ++ .../background_migration/batched_job_spec.rb | 36 +- .../background_migration/batched_migration_spec.rb | 13 +- .../health_status/indicators/patroni_apdex_spec.rb | 148 ++++ .../background_migration/health_status_spec.rb | 7 +- .../gitlab/database/consistency_checker_spec.rb | 2 +- .../load_balancing/action_cable_callbacks_spec.rb | 11 +- .../sidekiq_client_middleware_spec.rb | 12 +- .../sidekiq_server_middleware_spec.rb | 52 +- spec/lib/gitlab/database/load_balancing_spec.rb | 6 +- .../gitlab/database/lock_writes_manager_spec.rb | 24 +- .../lib/gitlab/database/loose_foreign_keys_spec.rb | 49 +- .../automatic_lock_writes_on_tables_spec.rb | 12 +- .../migration_helpers/convert_to_bigint_spec.rb | 4 +- .../loose_foreign_key_helpers_spec.rb | 16 +- .../restrict_gitlab_schema_spec.rb | 4 +- .../wraparound_vacuum_helpers_spec.rb | 99 +++ spec/lib/gitlab/database/migration_helpers_spec.rb | 185 +---- .../database/migrations/pg_backend_pid_spec.rb | 44 + spec/lib/gitlab/database/migrations/runner_spec.rb | 2 +- .../convert_table_to_first_list_partition_spec.rb | 156 +++- .../backfill_partitioned_table_spec.rb | 7 +- .../table_management_helpers_spec.rb | 26 +- spec/lib/gitlab/database/partitioning_spec.rb | 44 +- .../gitlab/database/postgres_foreign_key_spec.rb | 36 +- .../gitlab_schemas_validate_connection_spec.rb | 14 +- .../prevent_cross_database_modification_spec.rb | 14 +- .../adapters/column_database_adapter_spec.rb | 66 ++ .../adapters/column_structure_sql_adapter_spec.rb | 69 ++ .../database/schema_validation/database_spec.rb | 137 ++-- .../schema_validation/inconsistency_spec.rb | 70 ++ .../database/schema_validation/runner_spec.rb | 2 +- 
.../schema_validation/schema_inconsistency_spec.rb | 17 + .../schema_objects/column_spec.rb | 25 + .../schema_validation/schema_objects/index_spec.rb | 1 + .../schema_validation/schema_objects/table_spec.rb | 40 + .../schema_objects/trigger_spec.rb | 1 + .../schema_validation/structure_sql_spec.rb | 100 +-- .../schema_validation/track_inconsistency_spec.rb | 82 ++ .../validators/base_validator_spec.rb | 5 + .../validators/different_definition_tables_spec.rb | 7 + .../validators/extra_table_columns_spec.rb | 7 + .../validators/extra_tables_spec.rb | 7 + .../validators/missing_table_columns_spec.rb | 7 + .../validators/missing_tables_spec.rb | 9 + spec/lib/gitlab/database/tables_locker_spec.rb | 20 +- spec/lib/gitlab/database/tables_truncate_spec.rb | 16 +- .../database/transaction_timeout_settings_spec.rb | 2 +- spec/lib/gitlab/database_spec.rb | 44 +- spec/lib/gitlab/diff/highlight_cache_spec.rb | 16 +- spec/lib/gitlab/diff/highlight_spec.rb | 15 +- .../email/hook/silent_mode_interceptor_spec.rb | 74 ++ spec/lib/gitlab/email/incoming_email_spec.rb | 34 + spec/lib/gitlab/email/receiver_spec.rb | 13 +- spec/lib/gitlab/email/reply_parser_spec.rb | 77 +- spec/lib/gitlab/email/service_desk_email_spec.rb | 53 ++ spec/lib/gitlab/emoji_spec.rb | 17 - spec/lib/gitlab/error_tracking_spec.rb | 43 +- spec/lib/gitlab/favicon_spec.rb | 12 +- spec/lib/gitlab/git/blame_mode_spec.rb | 84 ++ spec/lib/gitlab/git/blame_pagination_spec.rb | 175 ++++ spec/lib/gitlab/git/repository_spec.rb | 101 --- spec/lib/gitlab/git_ref_validator_spec.rb | 5 + .../gitlab/github_import/bulk_importing_spec.rb | 232 ++++-- .../importer/attachments/issues_importer_spec.rb | 2 + .../attachments/merge_requests_importer_spec.rb | 2 + .../importer/attachments/releases_importer_spec.rb | 2 + .../github_import/importer/labels_importer_spec.rb | 4 +- .../importer/milestones_importer_spec.rb | 6 +- .../pull_requests/review_requests_importer_spec.rb | 2 + .../pull_requests_reviews_importer_spec.rb | 1 + 
.../importer/releases_importer_spec.rb | 6 +- .../representation/collaborator_spec.rb | 11 + .../representation/issue_event_spec.rb | 6 +- .../github_import/representation/issue_spec.rb | 3 +- .../representation/lfs_object_spec.rb | 3 +- .../github_import/representation/note_text_spec.rb | 110 ++- .../representation/pull_request_review_spec.rb | 2 +- .../representation/pull_request_spec.rb | 3 +- .../pull_requests/review_requests_spec.rb | 23 + spec/lib/gitlab/github_import/user_finder_spec.rb | 35 + spec/lib/gitlab/gl_repository/repo_type_spec.rb | 2 +- .../graphql/authorize/authorize_resource_spec.rb | 10 +- .../graphql/deprecations/deprecation_spec.rb | 2 +- spec/lib/gitlab/graphql/known_operations_spec.rb | 1 - .../loaders/lazy_relation_loader/registry_spec.rb | 24 + .../lazy_relation_loader/relation_proxy_spec.rb | 29 + .../graphql/loaders/lazy_relation_loader_spec.rb | 123 +++ .../action_cable_with_load_balancing_spec.rb | 18 + .../gitlab/graphql/tracers/metrics_tracer_spec.rb | 1 - .../gitlab/graphql/tracers/timer_tracer_spec.rb | 2 - spec/lib/gitlab/harbor/client_spec.rb | 12 +- spec/lib/gitlab/http_connection_adapter_spec.rb | 14 - spec/lib/gitlab/import/metrics_spec.rb | 28 +- spec/lib/gitlab/import_export/all_models.yml | 40 +- .../gitlab/import_export/attributes_finder_spec.rb | 3 +- spec/lib/gitlab/import_export/fork_spec.rb | 59 -- .../group/relation_tree_restorer_spec.rb | 29 +- .../import_export/group/tree_restorer_spec.rb | 4 +- .../import_export_equivalence_spec.rb | 67 -- .../import_export/json/legacy_reader/file_spec.rb | 32 - .../import_export/json/legacy_reader/hash_spec.rb | 35 - .../json/legacy_reader/shared_example.rb | 102 --- .../import_export/json/legacy_writer_spec.rb | 102 --- .../import_export/json/ndjson_reader_spec.rb | 40 +- .../json/streaming_serializer_spec.rb | 2 +- .../import_export/model_configuration_spec.rb | 2 +- .../project/exported_relations_merger_spec.rb | 4 +- .../project/relation_tree_restorer_spec.rb | 51 +- 
.../import_export/project/tree_restorer_spec.rb | 194 +++-- .../import_export/project/tree_saver_spec.rb | 30 +- .../gitlab/import_export/safe_model_attributes.yml | 18 + spec/lib/gitlab/incoming_email_spec.rb | 34 - spec/lib/gitlab/jwt_authenticatable_spec.rb | 8 +- spec/lib/gitlab/kas/user_access_spec.rb | 2 +- spec/lib/gitlab/kubernetes/helm/api_spec.rb | 269 ------- spec/lib/gitlab/kubernetes/helm/pod_spec.rb | 89 --- .../gitlab/kubernetes/helm/v2/base_command_spec.rb | 50 -- .../gitlab/kubernetes/helm/v2/certificate_spec.rb | 28 - .../kubernetes/helm/v2/delete_command_spec.rb | 38 - .../gitlab/kubernetes/helm/v2/init_command_spec.rb | 35 - .../kubernetes/helm/v2/install_command_spec.rb | 183 ----- .../kubernetes/helm/v2/patch_command_spec.rb | 87 -- .../kubernetes/helm/v2/reset_command_spec.rb | 32 - .../gitlab/kubernetes/helm/v3/base_command_spec.rb | 44 - .../kubernetes/helm/v3/delete_command_spec.rb | 35 - .../kubernetes/helm/v3/install_command_spec.rb | 168 ---- .../kubernetes/helm/v3/patch_command_spec.rb | 81 -- .../legacy_github_import/user_formatter_spec.rb | 23 +- .../metrics/subscribers/action_cable_spec.rb | 35 +- .../metrics/subscribers/active_record_spec.rb | 14 +- .../metrics/subscribers/external_http_spec.rb | 47 +- .../metrics/subscribers/load_balancing_spec.rb | 2 +- spec/lib/gitlab/middleware/go_spec.rb | 2 +- spec/lib/gitlab/middleware/multipart_spec.rb | 4 +- spec/lib/gitlab/octokit/middleware_spec.rb | 31 +- spec/lib/gitlab/redis/multi_store_spec.rb | 32 +- spec/lib/gitlab/reference_extractor_spec.rb | 4 +- spec/lib/gitlab/regex_spec.rb | 68 +- .../assignment_event_recorder_spec.rb | 91 +++ spec/lib/gitlab/service_desk_email_spec.rb | 53 -- spec/lib/gitlab/service_desk_spec.rb | 8 +- .../sidekiq_logging/structured_logger_spec.rb | 4 +- .../duplicate_jobs/duplicate_job_spec.rb | 4 +- spec/lib/gitlab/slug/environment_spec.rb | 59 +- spec/lib/gitlab/subscription_portal_spec.rb | 1 + .../finders/global_template_finder_spec.rb | 10 +- 
.../destinations/database_events_snowplow_spec.rb | 113 +++ spec/lib/gitlab/tracking_spec.rb | 123 ++- spec/lib/gitlab/untrusted_regexp_spec.rb | 35 +- spec/lib/gitlab/url_blocker_spec.rb | 73 +- .../gitlab/url_blockers/ip_allowlist_entry_spec.rb | 24 +- spec/lib/gitlab/usage/metric_definition_spec.rb | 2 - spec/lib/gitlab/usage/metric_spec.rb | 1 - .../metrics/instrumentations/database_mode_spec.rb | 9 + ..._email_encrypted_secrets_enabled_metric_spec.rb | 2 +- .../index_inconsistencies_metric_spec.rb | 2 +- ..._email_encrypted_secrets_enabled_metric_spec.rb | 2 +- .../metrics/names_suggestions/generator_spec.rb | 2 +- spec/lib/gitlab/usage/service_ping_report_spec.rb | 29 +- .../usage_data_counters/hll_redis_counter_spec.rb | 2 +- .../issue_activity_unique_counter_spec.rb | 4 +- spec/lib/gitlab/usage_data_spec.rb | 26 +- spec/lib/gitlab/utils/error_message_spec.rb | 17 +- spec/lib/gitlab/utils/measuring_spec.rb | 2 +- spec/lib/gitlab/utils/strong_memoize_spec.rb | 67 +- spec/lib/json_web_token/hmac_token_spec.rb | 4 +- spec/lib/product_analytics/settings_spec.rb | 81 ++ .../admin/menus/abuse_reports_menu_spec.rb | 42 + .../admin/menus/admin_overview_menu_spec.rb | 12 + .../admin/menus/admin_settings_menu_spec.rb | 12 + .../sidebars/admin/menus/analytics_menu_spec.rb | 12 + .../sidebars/admin/menus/applications_menu_spec.rb | 12 + spec/lib/sidebars/admin/menus/ci_cd_menu_spec.rb | 12 + .../sidebars/admin/menus/deploy_keys_menu_spec.rb | 12 + spec/lib/sidebars/admin/menus/labels_menu_spec.rb | 12 + .../lib/sidebars/admin/menus/messages_menu_spec.rb | 12 + .../sidebars/admin/menus/monitoring_menu_spec.rb | 12 + .../sidebars/admin/menus/system_hooks_menu_spec.rb | 12 + spec/lib/sidebars/admin/panel_spec.rb | 15 + .../sidebars/concerns/super_sidebar_panel_spec.rb | 6 +- .../super_sidebar_menus/analyze_menu_spec.rb | 28 + .../groups/super_sidebar_menus/build_menu_spec.rb | 21 + .../groups/super_sidebar_menus/manage_menu_spec.rb | 25 + 
.../super_sidebar_menus/monitor_menu_spec.rb | 22 + .../super_sidebar_menus/operations_menu_spec.rb | 24 + .../groups/super_sidebar_menus/plan_menu_spec.rb | 26 + .../groups/super_sidebar_menus/secure_menu_spec.rb | 25 + .../sidebars/groups/super_sidebar_panel_spec.rb | 11 +- spec/lib/sidebars/menu_spec.rb | 51 +- .../projects/menus/deployments_menu_spec.rb | 2 +- .../sidebars/projects/menus/issues_menu_spec.rb | 11 +- .../projects/menus/merge_requests_menu_spec.rb | 13 +- .../menus/packages_registries_menu_spec.rb | 23 +- .../projects/menus/repository_menu_spec.rb | 45 +- .../sidebars/projects/menus/snippets_menu_spec.rb | 3 +- .../super_sidebar_menus/analyze_menu_spec.rb | 30 + .../super_sidebar_menus/build_menu_spec.rb | 29 + .../projects/super_sidebar_menus/code_menu_spec.rb | 28 + .../super_sidebar_menus/manage_menu_spec.rb | 25 + .../super_sidebar_menus/monitor_menu_spec.rb | 26 + .../super_sidebar_menus/operations_menu_spec.rb | 18 +- .../projects/super_sidebar_menus/plan_menu_spec.rb | 14 +- .../super_sidebar_menus/secure_menu_spec.rb | 29 + .../sidebars/projects/super_sidebar_panel_spec.rb | 11 +- spec/lib/sidebars/search/panel_spec.rb | 29 + spec/lib/sidebars/static_menu_spec.rb | 8 +- .../menus/comment_templates_menu_spec.rb | 65 ++ .../user_settings/menus/saved_replies_menu_spec.rb | 65 -- spec/lib/uploaded_file_spec.rb | 42 +- spec/mailers/emails/profile_spec.rb | 29 +- spec/mailers/emails/service_desk_spec.rb | 91 ++- spec/mailers/notify_spec.rb | 157 +++- ...10831203408_upsert_base_work_item_types_spec.rb | 69 -- ...columns_and_triggers_for_ci_build_needs_spec.rb | 21 - ..._and_triggers_for_ci_build_trace_chunks_spec.rb | 21 - ...orary_columns_and_triggers_for_taggings_spec.rb | 23 - ...igint_conversion_for_ci_builds_metadata_spec.rb | 23 - ...57_finalize_ci_builds_bigint_conversion_spec.rb | 18 - ...ype_for_existing_approval_project_rules_spec.rb | 48 -- ...10_cleanup_orphan_project_access_tokens_spec.rb | 47 -- 
...cleanup_bigint_conversion_for_ci_builds_spec.rb | 23 - ...culate_vulnerabilities_occurrences_uuid_spec.rb | 47 -- ..._drop_int4_columns_for_ci_job_artifacts_spec.rb | 23 - ...op_int4_column_for_ci_sources_pipelines_spec.rb | 21 - ...10922082019_drop_int4_column_for_events_spec.rb | 21 - ...rop_int4_column_for_push_event_payloads_spec.rb | 21 - ...ulate_topics_total_projects_count_cache_spec.rb | 29 - ...migrate_merge_request_diff_commit_users_spec.rb | 48 -- ...ove_duplicate_vulnerabilities_findings3_spec.rb | 166 ---- ...rge_request_diff_commit_users_migration_spec.rb | 63 -- ...4_consume_remaining_user_namespace_jobs_spec.rb | 21 - ...ll_constraint_to_security_findings_uuid_spec.rb | 23 - ...schedule_drop_invalid_security_findings_spec.rb | 72 -- ...1_change_namespace_type_default_to_user_spec.rb | 5 - ..._and_duplicate_vulnerabilities_findings_spec.rb | 190 ----- ...814_migrate_remaining_u2f_registrations_spec.rb | 43 - ...tic_objects_external_storage_auth_token_spec.rb | 78 -- ...11126204445_add_task_to_work_item_types_spec.rb | 54 -- ...kfill_sequence_column_for_sprints_table_spec.rb | 42 - ...x_to_projects_on_marked_for_deletion_at_spec.rb | 18 - ...culate_vulnerabilities_occurrences_uuid_spec.rb | 45 -- ...te_uuid_on_vulnerabilities_occurrences4_spec.rb | 148 ---- ...11210140629_encrypt_static_object_token_spec.rb | 50 -- ...fill_incident_issue_escalation_statuses_spec.rb | 19 - ...lculate_finding_signatures_as_completed_spec.rb | 64 -- ...t_or_update_vulnerability_reads_trigger_spec.rb | 151 ---- ..._add_update_vulnerability_reads_trigger_spec.rb | 128 --- ...te_vulnerability_reads_location_trigger_spec.rb | 136 ---- ...s_issues_on_vulnerability_reads_trigger_spec.rb | 134 ---- ...0107064845_populate_vulnerability_reads_spec.rb | 106 --- ...40_drop_position_from_security_findings_spec.rb | 21 - .../20220124130028_dedup_runner_projects_spec.rb | 66 -- ...28155251_remove_dangling_running_builds_spec.rb | 53 -- 
...roval_rules_code_owners_rule_type_index_spec.rb | 33 - ...2105733_delete_service_template_records_spec.rb | 42 - ...e_statistics_with_dependency_proxy_size_spec.rb | 64 -- ...04194347_encrypt_integration_properties_spec.rb | 40 - ...l_namespace_project_maintainer_to_owner_spec.rb | 20 - ...ns_trigger_type_new_on_insert_null_safe_spec.rb | 37 - ...20220213103859_remove_integrations_type_spec.rb | 31 - ...create_not_null_constraint_releases_tag_spec.rb | 23 - .../20220222192525_remove_null_releases_spec.rb | 22 - ...28_schedule_merge_topics_with_same_name_spec.rb | 36 - ...5223212_add_security_training_providers_spec.rb | 25 - ...0_remove_duplicate_project_tag_releases_spec.rb | 47 -- ...eftover_external_pull_request_deletions_spec.rb | 43 - ...e_dependency_list_usage_data_from_redis_spec.rb | 24 - ...grate_shimo_confluence_service_category_spec.rb | 5 +- ...move_leftover_ci_job_artifact_deletions_spec.rb | 2 +- ...utomatic_iterations_cadences_start_date_spec.rb | 12 +- ..._success_index_to_authentication_events_spec.rb | 2 +- ...misassociated_vulnerability_occurrences_spec.rb | 2 +- ...misassociated_vulnerability_occurrences_spec.rb | 2 +- ...2902_finalise_project_namespace_members_spec.rb | 8 +- ...urce_licence_for_recent_public_projects_spec.rb | 4 +- ...ove_deactivated_user_highest_role_stats_spec.rb | 2 +- ...date_start_date_for_iterations_cadences_spec.rb | 30 +- ...y_to_sbom_vulnerable_component_versions_spec.rb | 2 +- ...y_to_sbom_vulnerable_component_versions_spec.rb | 2 +- ...1144258_remove_orphan_group_token_users_spec.rb | 16 +- ...ate_ci_pipeline_artifacts_locked_status_spec.rb | 4 +- ...ize_group_member_namespace_id_migration_spec.rb | 8 +- ...ective_and_keyresult_to_work_item_types_spec.rb | 35 +- ..._renaming_background_migration_finished_spec.rb | 6 +- ...0_finalize_backfill_user_details_fields_spec.rb | 4 + ...stics_storage_size_without_uploads_size_spec.rb | 2 +- ...k_item_type_backfill_migration_finished_spec.rb | 4 + 
...22_schedule_backfill_releases_author_id_spec.rb | 30 +- ...roject_statistics_upload_size_migration_spec.rb | 2 +- ...ed_on_ci_namespace_monthly_usages_table_spec.rb | 2 +- ...ize_backfill_environment_tier_migration_spec.rb | 6 +- ...20230202131928_encrypt_ci_trigger_token_spec.rb | 27 - ...0208125736_schedule_migration_for_links_spec.rb | 8 +- ..._project_group_link_with_missing_groups_spec.rb | 32 + ...nullify_creator_id_of_orphaned_projects_spec.rb | 6 +- ...233_migrate_evidences_from_raw_metadata_spec.rb | 8 +- ...2350_add_notifications_work_item_widget_spec.rb | 21 +- ...ueue_backfill_project_wiki_repositories_spec.rb | 26 - ...42631_backfill_ml_candidates_package_id_spec.rb | 61 ++ ...14144640_reschedule_migration_for_links_spec.rb | 31 + ...add_current_user_todos_work_item_widget_spec.rb | 8 + ..._created_at_desc_index_to_package_files_spec.rb | 20 + ...63947_backfill_ml_candidates_project_id_spec.rb | 50 ++ ...0823_backfill_ml_candidates_internal_id_spec.rb | 64 ++ ..._namespace_records_from_vsa_aggregation_spec.rb | 41 + ...ign_key_to_packages_npm_metadata_caches_spec.rb | 24 + ...101138_add_award_emoji_work_item_widget_spec.rb | 8 + ...l_product_analytics_data_collector_host_spec.rb | 47 ++ ...28100534_truncate_error_tracking_tables_spec.rb | 56 ++ ...00222_drop_software_licenses_temp_index_spec.rb | 20 + ...0330103104_reschedule_migrate_evidences_spec.rb | 31 + ...al_index_on_vulnerability_report_types2_spec.rb | 49 ++ ...ueue_backfill_project_wiki_repositories_spec.rb | 26 + ...igint_conversion_for_sent_notifications_spec.rb | 21 + ...14119_finalize_encrypt_ci_trigger_token_spec.rb | 96 +++ spec/migrations/add_open_source_plan_spec.rb | 86 -- .../backfill_all_project_namespaces_spec.rb | 37 - .../backfill_cycle_analytics_aggregations_spec.rb | 36 - spec/migrations/backfill_group_features_spec.rb | 31 - ...l_member_namespace_id_for_group_members_spec.rb | 29 - ...kfill_namespace_id_for_namespace_routes_spec.rb | 29 - 
.../backfill_project_namespaces_for_group_spec.rb | 43 - spec/migrations/backfill_user_namespace_spec.rb | 29 - .../bulk_insert_cluster_enabled_grants_spec.rb | 2 +- ...ll_integrations_enable_ssl_verification_spec.rb | 2 +- ...ansitions_with_same_from_state_to_state_spec.rb | 2 +- ...e_migrate_shared_vulnerability_scanners_spec.rb | 58 +- .../disable_job_token_scope_when_unused_spec.rb | 10 - spec/migrations/drop_packages_events_table_spec.rb | 24 + ...backfill_is_finished_for_gitlab_dot_com_spec.rb | 35 + ...backfill_is_finished_for_gitlab_dot_com_spec.rb | 35 + ...backfill_is_finished_for_gitlab_dot_com_spec.rb | 35 + ...backfill_is_finished_for_gitlab_dot_com_spec.rb | 35 + ...backfill_is_finished_for_gitlab_dot_com_spec.rb | 35 + ...backfill_is_finished_for_gitlab_dot_com_spec.rb | 35 + ...backfill_is_finished_for_gitlab_dot_com_spec.rb | 35 + ...backfill_is_finished_for_gitlab_dot_com_spec.rb | 35 + ...backfill_is_finished_for_gitlab_dot_com_spec.rb | 35 + ...backfill_is_finished_for_gitlab_dot_com_spec.rb | 2 - .../ensure_unique_debian_packages_spec.rb | 56 ++ ...int_backfill_is_finished_for_gl_dot_com_spec.rb | 35 + .../finalize_invalid_member_cleanup_spec.rb | 4 + ...inalize_issues_iid_scoping_to_namespace_spec.rb | 72 ++ ...inalize_issues_namespace_id_backfilling_spec.rb | 8 +- .../finalize_orphaned_routes_cleanup_spec.rb | 8 +- .../finalize_project_namespaces_backfill_spec.rb | 8 +- ...inalize_routes_backfilling_for_projects_spec.rb | 8 +- ...ize_traversal_ids_background_migrations_spec.rb | 60 -- .../insert_daily_invites_trial_plan_limits_spec.rb | 51 ++ ...udit_event_streaming_verification_token_spec.rb | 22 - ...n_mode_scope_for_personal_access_tokens_spec.rb | 18 - ...i_builds_on_name_and_id_parser_features_spec.rb | 28 - ...on_name_and_id_parser_with_new_features_spec.rb | 28 - .../remove_invalid_deploy_access_level_spec.rb | 48 -- ...ot_null_contraint_on_title_from_sprints_spec.rb | 29 - .../remove_packages_events_package_id_fk_spec.rb | 23 + 
..._provider_and_identities_non_root_group_spec.rb | 53 ++ ...d_status_from_pending_alert_escalations_spec.rb | 37 - ..._token_and_scim_identity_non_root_group_spec.rb | 58 ++ ...n_mode_scope_for_personal_access_tokens_spec.rb | 19 + ...erun_remove_invalid_deploy_access_level_spec.rb | 86 ++ ...tatus_on_merge_requests_corrected_regex_spec.rb | 2 +- .../schedule_fix_incorrect_max_seats_used2_spec.rb | 34 - .../schedule_fix_incorrect_max_seats_used_spec.rb | 26 - .../schedule_fixing_security_scan_statuses_spec.rb | 4 +- ...igrate_shared_vulnerability_identifiers_spec.rb | 32 + .../schedule_purging_stale_security_scans_spec.rb | 2 +- ...ability_finding_signatures_for_findings_spec.rb | 90 --- .../schedule_update_timelogs_null_spent_at_spec.rb | 44 - ...ing_send_user_confirmation_email_column_spec.rb | 2 +- ...setting_from_soft_email_confirmation_ff_spec.rb | 62 ++ ...ce_merge_request_diff_commit_migrations_spec.rb | 70 -- .../start_backfill_ci_queuing_tables_spec.rb | 49 -- ...ns_note_id_to_bigint_for_gitlab_dot_com_spec.rb | 66 ++ ...ns_note_id_to_bigint_for_gitlab_dot_com_spec.rb | 66 ++ ...ns_note_id_to_bigint_for_gitlab_dot_com_spec.rb | 66 ++ ..._note_id_to_bigint_for_gitlab_dot_com_2_spec.rb | 84 ++ ...request_user_mentions_note_id_to_bigint_spec.rb | 66 ++ ...es_note_id_to_bigint_for_gitlab_dot_com_spec.rb | 66 ++ .../swap_sent_notifications_id_columns_spec.rb | 71 ++ ...ns_note_id_to_bigint_for_gitlab_dot_com_spec.rb | 66 ++ ...ns_note_id_to_bigint_for_gitlab_dot_com_spec.rb | 66 ++ ...ns_note_id_to_bigint_for_gitlab_dot_com_spec.rb | 66 ++ ...nt_used_for_ci_namespace_monthly_usages_spec.rb | 2 +- ...ount_used_for_ci_project_monthly_usages_spec.rb | 2 +- ...egistry_exp_pol_worker_capacity_default_spec.rb | 41 - ...te_application_settings_protected_paths_spec.rb | 47 -- ...efault_scan_method_of_dast_site_profile_spec.rb | 32 - .../update_invalid_member_states_spec.rb | 30 - spec/models/abuse/trust_score_spec.rb | 57 ++ spec/models/abuse_report_spec.rb | 35 + 
spec/models/active_session_spec.rb | 46 +- .../models/analytics/cycle_analytics/stage_spec.rb | 40 +- spec/models/application_setting_spec.rb | 28 + spec/models/awareness_session_spec.rb | 163 ---- spec/models/blob_viewer/package_json_spec.rb | 13 +- spec/models/bulk_imports/entity_spec.rb | 29 +- spec/models/ci/bridge_spec.rb | 28 +- spec/models/ci/build_dependencies_spec.rb | 11 +- spec/models/ci/build_metadata_spec.rb | 11 +- spec/models/ci/build_need_spec.rb | 2 +- spec/models/ci/build_report_result_spec.rb | 13 + spec/models/ci/build_runner_session_spec.rb | 2 +- spec/models/ci/build_spec.rb | 342 ++++++-- spec/models/ci/build_trace_chunk_spec.rb | 3 +- spec/models/ci/build_trace_metadata_spec.rb | 2 +- spec/models/ci/build_trace_spec.rb | 24 +- spec/models/ci/catalog/listing_spec.rb | 1 + spec/models/ci/catalog/resource_spec.rb | 42 + spec/models/ci/group_spec.rb | 12 +- spec/models/ci/group_variable_spec.rb | 2 +- spec/models/ci/job_token/allowlist_spec.rb | 24 +- spec/models/ci/job_token/scope_spec.rb | 10 +- spec/models/ci/pipeline_spec.rb | 262 +++--- spec/models/ci/processable_spec.rb | 20 +- spec/models/ci/ref_spec.rb | 7 +- spec/models/ci/resource_group_spec.rb | 19 + spec/models/ci/runner_machine_build_spec.rb | 100 --- spec/models/ci/runner_machine_spec.rb | 291 ------- spec/models/ci/runner_manager_build_spec.rb | 100 +++ spec/models/ci/runner_manager_spec.rb | 291 +++++++ spec/models/ci/runner_spec.rb | 8 +- spec/models/ci/runner_version_spec.rb | 2 +- spec/models/ci/secure_file_spec.rb | 23 +- spec/models/ci/sources/pipeline_spec.rb | 2 +- spec/models/ci/stage_spec.rb | 24 +- spec/models/ci/variable_spec.rb | 2 +- spec/models/clusters/agent_spec.rb | 8 +- .../ci_access/group_authorization_spec.rb | 16 + .../ci_access/implicit_authorization_spec.rb | 14 + .../ci_access/project_authorization_spec.rb | 16 + .../user_access/group_authorization_spec.rb | 16 + .../user_access/project_authorization_spec.rb | 16 + 
.../clusters/agents/group_authorization_spec.rb | 16 - .../clusters/agents/implicit_authorization_spec.rb | 14 - .../clusters/agents/project_authorization_spec.rb | 16 - spec/models/clusters/applications/helm_spec.rb | 116 --- spec/models/clusters/applications/ingress_spec.rb | 180 ----- spec/models/clusters/applications/jupyter_spec.rb | 130 --- spec/models/clusters/applications/knative_spec.rb | 235 ------ spec/models/clusters/applications/runner_spec.rb | 127 --- spec/models/clusters/cluster_spec.rb | 236 +----- .../clusters/integrations/prometheus_spec.rb | 4 +- spec/models/compare_spec.rb | 34 +- spec/models/concerns/awareness_spec.rb | 39 - spec/models/concerns/ci/maskable_spec.rb | 2 +- .../concerns/ci/partitionable/switch_spec.rb | 9 +- spec/models/concerns/ci/partitionable_spec.rb | 2 +- .../concerns/ci/track_environment_usage_spec.rb | 20 +- .../agents/authorization_config_scopes_spec.rb | 21 - .../authorizations/ci_access/config_scopes_spec.rb | 21 + .../concerns/database_event_tracking_spec.rb | 41 +- spec/models/concerns/deployment_platform_spec.rb | 45 +- spec/models/concerns/expirable_spec.rb | 66 +- spec/models/concerns/has_user_type_spec.rb | 14 +- spec/models/concerns/issuable_spec.rb | 6 +- spec/models/concerns/token_authenticatable_spec.rb | 5 +- spec/models/container_repository_spec.rb | 15 + .../design_management/git_repository_spec.rb | 58 ++ spec/models/design_management/repository_spec.rb | 57 +- spec/models/group_group_link_spec.rb | 128 +-- spec/models/group_label_spec.rb | 35 + spec/models/group_spec.rb | 42 +- spec/models/hooks/web_hook_spec.rb | 2 +- spec/models/import_failure_spec.rb | 8 +- spec/models/integration_spec.rb | 2 +- spec/models/integrations/every_integration_spec.rb | 4 +- spec/models/integrations/ewm_spec.rb | 12 +- spec/models/integrations/field_spec.rb | 16 +- spec/models/integrations/google_play_spec.rb | 28 +- spec/models/integrations/harbor_spec.rb | 2 +- spec/models/integrations/jira_spec.rb | 82 +- 
spec/models/integrations/redmine_spec.rb | 4 +- spec/models/integrations/youtrack_spec.rb | 6 +- spec/models/issue_spec.rb | 70 +- spec/models/member_spec.rb | 8 + spec/models/members/project_member_spec.rb | 18 - spec/models/merge_request_spec.rb | 10 +- spec/models/ml/candidate_spec.rb | 100 ++- spec/models/ml/experiment_spec.rb | 26 +- spec/models/namespace_setting_spec.rb | 2 + spec/models/namespace_spec.rb | 24 - spec/models/note_spec.rb | 42 +- spec/models/onboarding/completion_spec.rb | 30 +- spec/models/onboarding/progress_spec.rb | 6 +- spec/models/packages/debian/file_metadatum_spec.rb | 45 +- spec/models/packages/event_spec.rb | 51 ++ spec/models/packages/npm/metadata_cache_spec.rb | 31 + spec/models/packages/npm/metadatum_spec.rb | 14 +- spec/models/packages/package_file_spec.rb | 11 + spec/models/packages/package_spec.rb | 57 +- spec/models/pages/lookup_path_spec.rb | 29 +- spec/models/pages_deployment_spec.rb | 60 ++ spec/models/plan_limits_spec.rb | 1 + spec/models/preloaders/labels_preloader_spec.rb | 14 +- .../runner_machine_policy_preloader_spec.rb | 38 - .../runner_manager_policy_preloader_spec.rb | 38 + ...s_max_access_level_by_project_preloader_spec.rb | 61 ++ ..._max_access_level_in_projects_preloader_spec.rb | 51 -- spec/models/project_label_spec.rb | 35 + spec/models/project_setting_spec.rb | 30 + spec/models/project_spec.rb | 57 +- spec/models/project_wiki_spec.rb | 27 + spec/models/projects/data_transfer_spec.rb | 26 + spec/models/protected_branch_spec.rb | 28 + spec/models/repository_spec.rb | 9 + .../resource_events/issue_assignment_event_spec.rb | 17 + .../merge_request_assignment_event_spec.rb | 17 + spec/models/resource_milestone_event_spec.rb | 18 + spec/models/resource_state_event_spec.rb | 17 + .../service_desk/custom_email_credential_spec.rb | 67 ++ spec/models/service_desk_setting_spec.rb | 52 +- spec/models/terraform/state_spec.rb | 2 +- spec/models/terraform/state_version_spec.rb | 4 +- spec/models/u2f_registration_spec.rb | 66 
-- spec/models/user_preference_spec.rb | 7 + spec/models/user_spec.rb | 28 +- spec/models/work_item_spec.rb | 189 +++++ spec/models/work_items/resource_link_event_spec.rb | 16 + spec/models/work_items/widget_definition_spec.rb | 4 +- spec/models/work_items/widgets/award_emoji_spec.rb | 30 + .../achievements/user_achievement_policy_spec.rb | 78 ++ spec/policies/ci/build_policy_spec.rb | 30 +- spec/policies/ci/pipeline_policy_spec.rb | 18 +- spec/policies/ci/pipeline_schedule_policy_spec.rb | 12 +- spec/policies/ci/runner_machine_policy_spec.rb | 176 ---- spec/policies/ci/runner_manager_policy_spec.rb | 176 ++++ spec/policies/environment_policy_spec.rb | 3 +- spec/policies/global_policy_spec.rb | 51 +- spec/policies/group_policy_spec.rb | 169 +++- spec/policies/issue_policy_spec.rb | 4 +- spec/policies/project_policy_spec.rb | 200 ++++- .../issue_email_participant_presenter_spec.rb | 43 +- .../presenters/ml/candidates_csv_presenter_spec.rb | 84 ++ .../packages/npm/package_presenter_spec.rb | 161 +--- .../project_clusterable_presenter_spec.rb | 10 +- spec/requests/abuse_reports_controller_spec.rb | 1 + .../admin/background_migrations_controller_spec.rb | 11 + spec/requests/admin/projects_controller_spec.rb | 28 + spec/requests/admin/users_controller_spec.rb | 42 + spec/requests/api/admin/ci/variables_spec.rb | 14 +- spec/requests/api/admin/instance_clusters_spec.rb | 2 +- spec/requests/api/admin/sidekiq_spec.rb | 4 +- spec/requests/api/appearance_spec.rb | 18 +- spec/requests/api/applications_spec.rb | 4 +- spec/requests/api/broadcast_messages_spec.rb | 87 +- spec/requests/api/bulk_imports_spec.rb | 87 +- spec/requests/api/ci/jobs_spec.rb | 14 +- spec/requests/api/ci/pipelines_spec.rb | 83 +- spec/requests/api/ci/runner/jobs_put_spec.rb | 17 +- .../api/ci/runner/jobs_request_post_spec.rb | 59 +- .../api/ci/runner/runners_verify_post_spec.rb | 95 +-- spec/requests/api/ci/runners_spec.rb | 223 +++--- spec/requests/api/ci/variables_spec.rb | 2 +- 
spec/requests/api/clusters/agent_tokens_spec.rb | 2 +- spec/requests/api/clusters/agents_spec.rb | 2 +- spec/requests/api/commit_statuses_spec.rb | 4 +- spec/requests/api/debian_group_packages_spec.rb | 29 + spec/requests/api/debian_project_packages_spec.rb | 46 +- spec/requests/api/deploy_keys_spec.rb | 134 +++- spec/requests/api/deploy_tokens_spec.rb | 45 +- spec/requests/api/draft_notes_spec.rb | 43 + .../api/error_tracking/project_settings_spec.rb | 175 ++-- spec/requests/api/freeze_periods_spec.rb | 212 +++-- .../api/graphql/ci/config_variables_spec.rb | 4 +- .../api/graphql/ci/group_variables_spec.rb | 2 +- .../api/graphql/ci/instance_variables_spec.rb | 2 +- spec/requests/api/graphql/ci/jobs_spec.rb | 106 ++- .../api/graphql/ci/manual_variables_spec.rb | 2 +- .../api/graphql/ci/project_variables_spec.rb | 2 +- spec/requests/api/graphql/ci/runner_spec.rb | 247 ++++-- .../api/graphql/group/data_transfer_spec.rb | 115 +++ .../api/graphql/group/labels_query_spec.rb | 19 - spec/requests/api/graphql/jobs_query_spec.rb | 8 +- .../graphql/mutations/achievements/delete_spec.rb | 79 ++ .../graphql/mutations/achievements/update_spec.rb | 90 +++ .../api/graphql/mutations/ci/job/play_spec.rb | 2 +- .../api/graphql/mutations/ci/runner/create_spec.rb | 275 ++++++- .../agent_tokens/agent_tokens/create_spec.rb | 2 +- .../mutations/clusters/agents/create_spec.rb | 2 +- .../mutations/clusters/agents/delete_spec.rb | 2 +- .../mutations/container_repository/destroy_spec.rb | 4 +- .../container_repository/destroy_tags_spec.rb | 8 +- .../mutations/merge_requests/set_assignees_spec.rb | 2 +- .../graphql/mutations/projects/sync_fork_spec.rb | 22 + .../graphql/mutations/work_items/convert_spec.rb | 79 ++ .../graphql/mutations/work_items/create_spec.rb | 99 ++- .../graphql/mutations/work_items/export_spec.rb | 4 +- .../graphql/mutations/work_items/update_spec.rb | 406 +++++++++- .../project/alert_management/alert/notes_spec.rb | 2 +- .../project/branches_tipping_at_commit_spec.rb | 67 
++ .../api/graphql/project/cluster_agents_spec.rb | 2 +- .../api/graphql/project/data_transfer_spec.rb | 112 +++ .../api/graphql/project/fork_details_spec.rb | 43 +- .../api/graphql/project/merge_request_spec.rb | 27 + .../api/graphql/project/merge_requests_spec.rb | 24 +- .../graphql/project/tags_tipping_at_commit_spec.rb | 67 ++ .../api/graphql/project/work_items_spec.rb | 45 ++ spec/requests/api/graphql/project_query_spec.rb | 61 ++ spec/requests/api/graphql/work_item_spec.rb | 132 ++- spec/requests/api/group_clusters_spec.rb | 2 +- spec/requests/api/group_variables_spec.rb | 2 +- spec/requests/api/groups_spec.rb | 367 +++++---- spec/requests/api/import_github_spec.rb | 74 +- spec/requests/api/integrations_spec.rb | 2 +- spec/requests/api/internal/kubernetes_spec.rb | 16 +- spec/requests/api/internal/pages_spec.rb | 12 +- .../requests/api/issues/get_project_issues_spec.rb | 22 +- spec/requests/api/issues/issues_spec.rb | 17 +- .../api/issues/post_projects_issues_spec.rb | 100 +-- .../api/issues/put_projects_issues_spec.rb | 7 +- spec/requests/api/keys_spec.rb | 47 +- spec/requests/api/lint_spec.rb | 280 +++---- spec/requests/api/maven_packages_spec.rb | 52 ++ spec/requests/api/merge_requests_spec.rb | 136 +++- .../api/metrics/dashboard/annotations_spec.rb | 2 +- spec/requests/api/ml/mlflow_spec.rb | 20 +- spec/requests/api/namespaces_spec.rb | 74 +- spec/requests/api/notes_spec.rb | 10 +- spec/requests/api/npm_project_packages_spec.rb | 115 ++- spec/requests/api/pages/pages_spec.rb | 18 +- spec/requests/api/pages_domains_spec.rb | 4 + .../self_information_spec.rb | 4 +- spec/requests/api/personal_access_tokens_spec.rb | 10 +- spec/requests/api/project_attributes.yml | 2 + spec/requests/api/project_clusters_spec.rb | 2 +- spec/requests/api/project_export_spec.rb | 123 +-- spec/requests/api/project_import_spec.rb | 86 +- spec/requests/api/project_snapshots_spec.rb | 13 +- spec/requests/api/project_snippets_spec.rb | 136 ++-- spec/requests/api/projects_spec.rb | 
883 ++++++++++++--------- spec/requests/api/protected_branches_spec.rb | 100 ++- spec/requests/api/releases_spec.rb | 21 +- spec/requests/api/search_spec.rb | 2 +- spec/requests/api/settings_spec.rb | 5 +- spec/requests/api/sidekiq_metrics_spec.rb | 17 +- spec/requests/api/snippets_spec.rb | 20 +- spec/requests/api/statistics_spec.rb | 8 +- spec/requests/api/tags_spec.rb | 2 +- spec/requests/api/terraform/state_spec.rb | 83 +- spec/requests/api/terraform/state_version_spec.rb | 2 +- spec/requests/api/topics_spec.rb | 95 ++- .../api/usage_data_non_sql_metrics_spec.rb | 10 +- spec/requests/api/usage_data_queries_spec.rb | 12 +- spec/requests/api/users_spec.rb | 609 ++++++++------ spec/requests/api/v3/github_spec.rb | 27 +- spec/requests/git_http_spec.rb | 5 + .../groups/usage_quotas_controller_spec.rb | 2 +- spec/requests/import/github_controller_spec.rb | 40 + spec/requests/jwks_controller_spec.rb | 9 + spec/requests/openid_connect_spec.rb | 4 +- .../profiles/comment_templates_controller_spec.rb | 35 + .../profiles/saved_replies_controller_spec.rb | 35 - .../projects/cluster_agents_controller_spec.rb | 2 +- .../google_cloud/configuration_controller_spec.rb | 2 +- .../google_cloud/databases_controller_spec.rb | 2 +- .../google_cloud/deployments_controller_spec.rb | 2 +- .../google_cloud/gcp_regions_controller_spec.rb | 2 +- .../google_cloud/revoke_oauth_controller_spec.rb | 2 +- .../service_accounts_controller_spec.rb | 2 +- .../projects/ml/candidates_controller_spec.rb | 53 +- .../projects/ml/experiments_controller_spec.rb | 230 ++++-- spec/requests/projects/usage_quotas_spec.rb | 2 +- spec/requests/projects/wikis_controller_spec.rb | 1 - spec/requests/projects/work_items_spec.rb | 176 +++- spec/requests/registrations_controller_spec.rb | 25 + spec/requests/search_controller_spec.rb | 10 +- spec/requests/sessions_spec.rb | 46 ++ .../time_tracking/timelogs_controller_spec.rb | 46 ++ spec/requests/users/pins_spec.rb | 67 ++ spec/routing/project_routing_spec.rb | 8 +- 
.../migration/add_limit_to_text_columns_spec.rb | 24 - .../cop/rspec/invalid_feature_category_spec.rb | 14 +- .../rspec/misspelled_aggregate_failures_spec.rb | 136 ++++ .../cop/rspec/shared_groups_metadata_spec.rb | 70 ++ spec/rubocop/cop/search/namespaced_class_spec.rb | 100 +++ .../worker_data_consistency_spec.rb | 123 ++- .../create_pipeline_failure_incident_spec.rb | 120 --- spec/scripts/failed_tests_spec.rb | 6 +- spec/scripts/generate_rspec_pipeline_spec.rb | 69 +- .../pipeline/create_test_failure_issues_spec.rb | 163 ++-- spec/scripts/review_apps/automated_cleanup_spec.rb | 87 +- spec/serializers/admin/abuse_report_entity_spec.rb | 66 +- .../admin/abuse_report_serializer_spec.rb | 6 +- spec/serializers/build_details_entity_spec.rb | 8 +- .../ci/downloadable_artifact_entity_spec.rb | 3 +- spec/serializers/ci/job_entity_spec.rb | 6 +- spec/serializers/ci/pipeline_entity_spec.rb | 8 +- .../deploy_keys/basic_deploy_key_entity_spec.rb | 1 + .../deploy_keys/deploy_key_entity_spec.rb | 1 + spec/serializers/diff_file_entity_spec.rb | 4 +- spec/serializers/diff_viewer_entity_spec.rb | 47 +- .../discussion_diff_file_entity_spec.rb | 3 +- spec/serializers/environment_entity_spec.rb | 12 +- spec/serializers/environment_serializer_spec.rb | 5 +- spec/serializers/group_child_entity_spec.rb | 9 +- spec/serializers/group_deploy_key_entity_spec.rb | 1 + spec/serializers/import/bulk_import_entity_spec.rb | 2 +- spec/serializers/issue_board_entity_spec.rb | 12 +- spec/serializers/issue_entity_spec.rb | 11 +- .../serializers/issue_sidebar_basic_entity_spec.rb | 5 +- .../merge_request_metrics_helper_spec.rb | 12 +- ...merge_request_poll_cached_widget_entity_spec.rb | 42 +- .../merge_request_poll_widget_entity_spec.rb | 8 +- spec/serializers/pipeline_details_entity_spec.rb | 6 +- spec/serializers/pipeline_serializer_spec.rb | 48 +- spec/services/achievements/award_service_spec.rb | 9 +- spec/services/achievements/destroy_service_spec.rb | 39 + 
spec/services/achievements/update_service_spec.rb | 48 ++ spec/services/boards/issues/list_service_spec.rb | 10 +- spec/services/bulk_imports/create_service_spec.rb | 321 +++++++- .../bulk_update_integration_service_spec.rb | 8 +- spec/services/ci/archive_trace_service_spec.rb | 30 - .../ci/catalog/add_resource_service_spec.rb | 55 -- spec/services/ci/change_variable_service_spec.rb | 2 +- spec/services/ci/change_variables_service_spec.rb | 2 +- .../ci/create_pipeline_service/variables_spec.rb | 2 +- spec/services/ci/create_pipeline_service_spec.rb | 166 +++- spec/services/ci/delete_objects_service_spec.rb | 2 +- .../ci/generate_kubeconfig_service_spec.rb | 14 +- .../ci/job_artifacts/create_service_spec.rb | 508 ++++++++---- .../ci/list_config_variables_service_spec.rb | 2 +- .../status_collection_spec.rb | 51 +- .../atomic_processing_service_spec.rb | 18 +- .../test_cases/dag_test_on_failure_no_needs.yml | 31 + .../stage_test_on_failure_no_prev_stage.yml | 29 + spec/services/ci/register_job_service_spec.rb | 50 +- .../ci/runners/create_runner_service_spec.rb | 160 +++- .../runners/stale_machines_cleanup_service_spec.rb | 45 -- .../runners/stale_managers_cleanup_service_spec.rb | 45 ++ .../ci/update_instance_variables_service_spec.rb | 2 +- .../clusters/agent_tokens/create_service_spec.rb | 2 +- .../clusters/agent_tokens/revoke_service_spec.rb | 2 +- .../agent_tokens/track_usage_service_spec.rb | 2 +- .../ci_access/filter_service_spec.rb | 100 +++ .../ci_access/refresh_service_spec.rb | 154 ++++ .../user_access/refresh_service_spec.rb | 181 +++++ .../agents/authorize_proxy_user_service_spec.rb | 2 +- .../agents/create_activity_event_service_spec.rb | 2 +- .../clusters/agents/create_service_spec.rb | 2 +- .../agents/delete_expired_events_service_spec.rb | 2 +- .../clusters/agents/delete_service_spec.rb | 2 +- .../agents/filter_authorizations_service_spec.rb | 100 --- .../agents/refresh_authorization_service_spec.rb | 154 ---- 
.../build_kubernetes_namespace_service_spec.rb | 2 +- spec/services/clusters/build_service_spec.rb | 2 +- .../cleanup/project_namespace_service_spec.rb | 2 +- .../cleanup/service_account_service_spec.rb | 2 +- spec/services/clusters/create_service_spec.rb | 4 +- spec/services/clusters/destroy_service_spec.rb | 2 +- .../clusters/integrations/create_service_spec.rb | 2 +- .../prometheus_health_check_service_spec.rb | 2 +- .../create_or_update_namespace_service_spec.rb | 2 +- ...reate_or_update_service_account_service_spec.rb | 2 +- .../fetch_kubernetes_token_service_spec.rb | 2 +- spec/services/clusters/kubernetes_spec.rb | 2 +- ..._management_project_permissions_service_spec.rb | 2 +- spec/services/clusters/update_service_spec.rb | 2 +- .../database/consistency_check_service_spec.rb | 2 +- .../database/consistency_fix_service_spec.rb | 2 +- spec/services/git/wiki_push_service/change_spec.rb | 10 +- spec/services/issuable/callbacks/milestone_spec.rb | 101 +++ spec/services/issues/after_create_service_spec.rb | 7 - spec/services/issues/build_service_spec.rb | 36 +- spec/services/issues/close_service_spec.rb | 72 +- spec/services/issues/create_service_spec.rb | 117 ++- spec/services/issues/reopen_service_spec.rb | 9 +- spec/services/issues/update_service_spec.rb | 97 ++- .../members/groups/creator_service_spec.rb | 2 + .../members/projects/creator_service_spec.rb | 2 + .../merge_requests/after_create_service_spec.rb | 16 - .../services/merge_requests/create_service_spec.rb | 24 +- .../services/merge_requests/update_service_spec.rb | 21 + .../dashboard/pod_dashboard_service_spec.rb | 2 +- .../metrics/global_metrics_update_service_spec.rb | 14 + .../candidate_repository_spec.rb | 12 +- spec/services/notes/create_service_spec.rb | 1 - spec/services/notes/quick_actions_service_spec.rb | 4 +- spec/services/notification_service_spec.rb | 34 +- .../services/packages/create_event_service_spec.rb | 46 -- .../debian/find_or_create_package_service_spec.rb | 21 +- 
.../debian/process_changes_service_spec.rb | 34 +- .../debian/process_package_file_service_spec.rb | 40 +- .../packages/npm/create_package_service_spec.rb | 92 ++- .../packages/npm/deprecate_package_service_spec.rb | 115 +++ .../packages/npm/generate_metadata_service_spec.rb | 173 ++++ .../all_merge_requests_count_service_spec.rb | 15 +- spec/services/projects/blame_service_spec.rb | 131 --- .../gitlab/cleanup_tags_service_spec.rb | 34 +- .../third_party/cleanup_tags_service_spec.rb | 172 ++-- spec/services/projects/create_service_spec.rb | 19 +- spec/services/projects/destroy_service_spec.rb | 17 +- spec/services/projects/fork_service_spec.rb | 34 +- .../projects/group_links/create_service_spec.rb | 13 +- .../projects/group_links/destroy_service_spec.rb | 9 +- .../projects/group_links/update_service_spec.rb | 9 +- .../hashed_storage/migration_service_spec.rb | 16 +- .../projects/lfs_pointers/lfs_link_service_spec.rb | 14 +- .../open_merge_requests_count_service_spec.rb | 5 +- .../prometheus/alerts/notify_service_spec.rb | 24 +- .../protect_default_branch_service_spec.rb | 2 + spec/services/projects/transfer_service_spec.rb | 9 +- spec/services/projects/unlink_fork_service_spec.rb | 6 +- .../services/projects/update_pages_service_spec.rb | 66 +- spec/services/projects/update_service_spec.rb | 59 +- .../protected_branches/cache_service_spec.rb | 1 + spec/services/releases/create_service_spec.rb | 20 + ...nthetic_milestone_notes_builder_service_spec.rb | 6 +- .../dependency_scanning_create_service_spec.rb | 2 +- spec/services/snippets/destroy_service_spec.rb | 2 +- spec/services/spam/spam_verdict_service_spec.rb | 10 +- spec/services/system_note_service_spec.rb | 4 +- .../system_notes/issuables_service_spec.rb | 24 +- .../services/tasks_to_be_done/base_service_spec.rb | 4 +- .../terraform/remote_state_handler_spec.rb | 1 + spec/services/users/approve_service_spec.rb | 18 + .../users/update_canonical_email_service_spec.rb | 26 +- 
spec/services/work_items/create_service_spec.rb | 335 ++++---- .../services/work_items/export_csv_service_spec.rb | 23 +- .../work_items/parent_links/base_service_spec.rb | 31 + .../work_items/parent_links/create_service_spec.rb | 60 +- .../parent_links/destroy_service_spec.rb | 36 +- .../parent_links/reorder_service_spec.rb | 176 ++++ .../work_items/prepare_import_csv_service_spec.rb | 52 ++ spec/services/work_items/update_service_spec.rb | 27 + .../assignees_service/update_service_spec.rb | 22 +- .../award_emoji_service/update_service_spec.rb | 96 +++ .../update_service_spec.rb | 106 +++ .../description_service/update_service_spec.rb | 21 +- .../hierarchy_service/update_service_spec.rb | 98 ++- .../widgets/labels_service/update_service_spec.rb | 48 ++ .../milestone_service/create_service_spec.rb | 28 - .../milestone_service/update_service_spec.rb | 58 -- .../update_service_spec.rb | 22 +- spec/spec_helper.rb | 68 +- .../banzai/filter_timeout_shared_examples.rb | 37 - .../banzai/reference_filter_shared_examples.rb | 88 -- spec/support/capybara.rb | 2 +- spec/support/chunked_io/chunked_io_helpers.rb | 13 - .../project_import_rate_limiter_shared_examples.rb | 22 - .../cycle_analytics_helpers/test_generation.rb | 160 ---- spec/support/finder_collection_allowlist.yml | 3 +- .../metrics_instrumentation_shared_examples.rb | 44 - spec/support/google_api/cloud_platform_helpers.rb | 166 ---- spec/support/graphql/arguments.rb | 71 -- spec/support/graphql/fake_query_type.rb | 22 - spec/support/graphql/fake_tracer.rb | 15 - spec/support/graphql/field_inspection.rb | 35 - spec/support/graphql/field_selection.rb | 69 -- spec/support/graphql/resolver_factories.rb | 40 - .../action_cable/mock_action_cable.rb | 100 --- .../action_cable/mock_gitlab_schema.rb | 41 - spec/support/graphql/subscriptions/notes/helper.rb | 94 --- spec/support/graphql/var.rb | 59 -- spec/support/helpers/api_internal_base_helpers.rb | 14 +- spec/support/helpers/board_helpers.rb | 16 +- 
spec/support/helpers/chunked_io_helpers.rb | 13 + spec/support/helpers/ci/source_pipeline_helpers.rb | 12 +- spec/support/helpers/content_editor_helpers.rb | 6 +- spec/support/helpers/cycle_analytics_helpers.rb | 15 +- .../cycle_analytics_helpers/test_generation.rb | 166 ++++ .../helpers/database/multiple_databases_helpers.rb | 22 + spec/support/helpers/email_helpers.rb | 21 + .../helpers/every_sidekiq_worker_test_helper.rb | 9 + spec/support/helpers/fake_webauthn_device.rb | 2 +- spec/support/helpers/feature_flag_helpers.rb | 24 +- .../helpers/features/access_token_helpers.rb | 23 +- .../helpers/features/admin_users_helpers.rb | 28 +- spec/support/helpers/features/blob_spec_helpers.rb | 18 +- spec/support/helpers/features/branches_helpers.rb | 33 +- .../helpers/features/canonical_link_helpers.rb | 22 +- .../features/invite_members_modal_helper.rb | 154 ---- .../features/invite_members_modal_helpers.rb | 148 ++++ spec/support/helpers/features/iteration_helpers.rb | 9 +- spec/support/helpers/features/list_rows_helpers.rb | 28 - spec/support/helpers/features/members_helpers.rb | 114 ++- .../helpers/features/merge_request_helpers.rb | 32 +- spec/support/helpers/features/notes_helpers.rb | 76 +- spec/support/helpers/features/releases_helpers.rb | 107 +-- .../helpers/features/responsive_table_helpers.rb | 22 +- spec/support/helpers/features/runners_helpers.rb | 92 +-- spec/support/helpers/features/snippet_helpers.rb | 89 --- .../helpers/features/snippet_spec_helpers.rb | 83 ++ spec/support/helpers/features/sorting_helpers.rb | 36 +- .../helpers/features/source_editor_spec_helpers.rb | 26 +- .../helpers/features/top_nav_spec_helpers.rb | 46 +- .../support/helpers/features/two_factor_helpers.rb | 138 ++-- .../helpers/features/web_ide_spec_helpers.rb | 167 ++-- spec/support/helpers/gitaly_setup.rb | 69 -- .../helpers/google_api/cloud_platform_helpers.rb | 168 ++++ spec/support/helpers/graphql/arguments.rb | 71 ++ spec/support/helpers/graphql/fake_query_type.rb | 23 + 
spec/support/helpers/graphql/fake_tracer.rb | 15 + spec/support/helpers/graphql/field_inspection.rb | 35 + spec/support/helpers/graphql/field_selection.rb | 69 ++ spec/support/helpers/graphql/resolver_factories.rb | 40 + .../action_cable/mock_action_cable.rb | 100 +++ .../action_cable/mock_gitlab_schema.rb | 41 + .../helpers/graphql/subscriptions/notes/helper.rb | 94 +++ spec/support/helpers/graphql/var.rb | 59 ++ spec/support/helpers/graphql_helpers.rb | 36 +- spec/support/helpers/http_io_helpers.rb | 49 ++ spec/support/helpers/keyset_pagination_helpers.rb | 20 + spec/support/helpers/login_helpers.rb | 2 +- spec/support/helpers/migrations_helpers.rb | 2 +- .../helpers/migrations_helpers/cluster_helpers.rb | 71 ++ .../migrations_helpers/namespaces_helper.rb | 15 + .../migrations_helpers/schema_version_finder.rb | 35 + .../vulnerabilities_findings_helper.rb | 118 +++ .../ci/partitioning_testing/cascade_check.rb | 34 + .../partitioning_testing/partition_identifiers.rb | 13 + .../models/ci/partitioning_testing/rspec_hooks.rb | 23 + .../ci/partitioning_testing/schema_helpers.rb | 91 +++ .../merge_request_without_merge_request_diff.rb | 7 + spec/support/helpers/navbar_structure_helper.rb | 10 +- .../helpers/project_template_test_helper.rb | 2 +- spec/support/helpers/prometheus/metric_builders.rb | 29 + spec/support/helpers/redis_helpers.rb | 9 + spec/support/helpers/search_helpers.rb | 8 +- spec/support/helpers/snowplow_helpers.rb | 10 +- spec/support/helpers/stub_gitlab_calls.rb | 6 +- spec/support/helpers/stub_object_storage.rb | 110 ++- spec/support/helpers/test_env.rb | 2 +- spec/support/helpers/test_reports_helper.rb | 103 +++ spec/support/helpers/trace_helpers.rb | 29 + spec/support/helpers/workhorse_helpers.rb | 58 +- spec/support/http_io/http_io_helpers.rb | 51 -- spec/support/import_export/common_util.rb | 25 +- spec/support/matchers/have_plain_text_content.rb | 16 + spec/support/matchers/markdown_matchers.rb | 2 +- 
spec/support/migrations_helpers/cluster_helpers.rb | 71 -- .../migrations_helpers/namespaces_helper.rb | 14 - .../migrations_helpers/schema_version_finder.rb | 34 - .../vulnerabilities_findings_helper.rb | 118 --- .../ci/partitioning_testing/cascade_check.rb | 34 - .../partitioning_testing/partition_identifiers.rb | 13 - .../models/ci/partitioning_testing/rspec_hooks.rb | 19 - .../ci/partitioning_testing/schema_helpers.rb | 91 --- .../merge_request_without_merge_request_diff.rb | 7 - .../additional_metrics_shared_examples.rb | 159 ---- spec/support/prometheus/metric_builders.rb | 29 - .../access_control_ce_shared_examples.rb | 32 - spec/support/redis/redis_helpers.rb | 9 - .../redis/redis_new_instance_shared_examples.rb | 111 --- spec/support/redis/redis_shared_examples.rb | 459 ----------- spec/support/rspec_order_todo.yml | 22 - .../services/clusters/create_service_shared.rb | 64 -- .../services/deploy_token_shared_examples.rb | 86 -- .../services/import_csv_service_shared_examples.rb | 38 - ...le_description_quick_actions_shared_examples.rb | 62 -- .../issuable_import_csv_service_shared_examples.rb | 107 --- .../issuable_update_service_shared_examples.rb | 99 --- .../move_and_clone_services_shared_examples.rb | 22 - ...igrate_to_ghost_user_service_shared_examples.rb | 89 --- .../services/service_response_shared_examples.rb | 25 - .../integrations/integrations_shared_context.rb | 2 + .../finders/issues_finder_shared_contexts.rb | 78 +- .../merge_requests_finder_shared_contexts.rb | 42 +- .../finders/work_items_finder_shared_contexts.rb | 78 +- .../issuable/merge_request_shared_context.rb | 2 +- .../shared_contexts/navbar_structure_context.rb | 2 +- .../clusters/create_service_shared_context.rb | 19 + .../delete_tags_service_shared_context.rb | 8 +- .../cycle_analytics/flow_metrics_examples.rb | 36 + .../cycle_analytics/request_params_examples.rb | 10 +- .../filters/filter_timeout_shared_examples.rb | 37 + .../filters/reference_filter_shared_examples.rb | 88 ++ 
.../bulk_imports/visibility_level_examples.rb | 37 - .../project_import_rate_limiter_shared_examples.rb | 22 + .../controllers/unique_hll_events_examples.rb | 3 + .../features/2fa_shared_examples.rb | 6 +- .../features/abuse_report_shared_examples.rb | 6 +- .../features/access_tokens_shared_examples.rb | 2 +- .../features/confidential_notes_shared_examples.rb | 2 +- .../features/content_editor_shared_examples.rb | 293 ++++++- .../features/dashboard/sidebar_shared_examples.rb | 11 +- .../features/deploy_token_shared_examples.rb | 8 +- .../editable_merge_request_shared_examples.rb | 2 +- .../features/explore/sidebar_shared_examples.rb | 28 + .../issuable_invite_members_shared_examples.rb | 2 +- .../manage_applications_shared_examples.rb | 2 +- ...aster_manages_access_requests_shared_example.rb | 2 +- .../features/packages_shared_examples.rb | 39 + .../features/reportable_note_shared_examples.rb | 4 +- .../features/rss_shared_examples.rb | 13 + .../features/runners_shared_examples.rb | 45 +- .../user_views_wiki_sidebar_shared_examples.rb | 14 + .../features/work_items_shared_examples.rb | 141 +++- .../finders/issues_finder_shared_examples.rb | 27 +- .../data_transfer_resolver_shared_examples.rb | 23 + .../gitlab_style_deprecations_shared_examples.rb | 6 +- .../gitlab/cycle_analytics/deployment_metrics.rb | 9 +- .../database/schema_objects_shared_examples.rb | 6 + .../database/table_validators_shared_examples.rb | 84 ++ .../sidekiq_middleware/strategy_shared_examples.rb | 6 +- .../issuable_activity_shared_examples.rb | 4 +- .../admin/menus/admin_menus_shared_examples.rb | 74 ++ .../mailers/notify_shared_examples.rb | 14 + .../metrics_instrumentation_shared_examples.rb | 44 + .../add_work_item_widget_shared_examples.rb | 33 + .../auto_disabling_hooks_shared_examples.rb | 16 +- .../cascading_namespace_setting_shared_examples.rb | 28 +- .../slack_mattermost_notifier_shared_examples.rb | 40 +- .../models/concerns/timebox_shared_examples.rb | 9 +- 
.../concerns/unstoppable_hooks_shared_examples.rb | 8 +- .../issue_tracker_service_shared_examples.rb | 10 +- .../models/member_shared_examples.rb | 24 + .../models/members_notifications_shared_example.rb | 2 +- .../models/resource_event_shared_examples.rb | 40 +- .../additional_metrics_shared_examples.rb | 161 ++++ .../access_control_ce_shared_examples.rb | 32 + .../issuable/close_quick_action_shared_examples.rb | 2 +- ...ote_to_incident_quick_action_shared_examples.rb | 6 +- .../redis/redis_new_instance_shared_examples.rb | 111 +++ .../shared_examples/redis/redis_shared_examples.rb | 463 +++++++++++ .../access_tokens_controller_shared_examples.rb | 2 +- .../requests/admin_mode_shared_examples.rb | 118 ++- .../api/custom_attributes_shared_examples.rb | 34 +- .../requests/api/hooks_shared_examples.rb | 86 +- .../api/issuable_update_shared_examples.rb | 9 + .../requests/api/notes_shared_examples.rb | 74 +- .../requests/api/npm_packages_shared_examples.rb | 43 +- .../api/npm_packages_tags_shared_examples.rb | 21 + .../requests/api/packages_shared_examples.rb | 24 +- .../pipelines/visibility_table_shared_examples.rb | 4 +- .../repository_storage_moves_shared_examples.rb | 18 +- .../requests/api/snippets_shared_examples.rb | 25 +- .../diff_file_entity_shared_examples.rb | 81 +- .../services/base_helm_service_shared_examples.rb | 22 - .../clusters/create_service_shared_examples.rb | 28 + .../services/deploy_token_shared_examples.rb | 88 ++ .../services/import_csv_service_shared_examples.rb | 38 + ...le_description_quick_actions_shared_examples.rb | 62 ++ .../issuable_import_csv_service_shared_examples.rb | 107 +++ .../issuable_update_service_shared_examples.rb | 137 ++++ .../issuable/update_service_shared_examples.rb | 29 - .../move_and_clone_services_shared_examples.rb | 22 + ...igrate_to_ghost_user_service_shared_examples.rb | 89 +++ .../generate_distribution_shared_examples.rb | 2 +- .../create_service_shared_examples.rb | 3 +- 
.../services/service_response_shared_examples.rb | 21 + .../widgets/milestone_service_shared_examples.rb | 42 - .../export_and_import_shared_examples.rb | 39 + ...nd_migration_execution_worker_shared_example.rb | 14 +- ..._background_migration_worker_shared_examples.rb | 206 ++--- spec/support/stub_member_access_level.rb | 46 ++ spec/support/test_reports/test_reports_helper.rb | 103 --- spec/support/tmpdir.rb | 2 + spec/support/trace/trace_helpers.rb | 29 - .../helpers/migrations_helpers_spec.rb | 38 +- spec/support_specs/matchers/event_store_spec.rb | 2 +- .../support_specs/stub_member_access_level_spec.rb | 69 ++ spec/tasks/dev_rake_spec.rb | 4 +- spec/tasks/gettext_rake_spec.rb | 90 +-- .../gitlab/background_migrations_rake_spec.rb | 45 +- spec/tasks/gitlab/backup_rake_spec.rb | 6 +- .../db/decomposition/connection_status_spec.rb | 2 +- .../rollback/bump_ci_sequences_rake_spec.rb | 4 +- spec/tasks/gitlab/db/lock_writes_rake_spec.rb | 2 +- .../gitlab/db/truncate_legacy_tables_rake_spec.rb | 25 +- spec/tasks/gitlab/db/validate_config_rake_spec.rb | 2 +- spec/tasks/gitlab/db_rake_spec.rb | 64 +- spec/tasks/gitlab/gitaly_rake_spec.rb | 22 +- ...ct_statistics_build_artifacts_size_rake_spec.rb | 41 +- spec/tasks/gitlab/setup_rake_spec.rb | 4 + spec/tasks/gitlab/storage_rake_spec.rb | 2 +- spec/tooling/danger/feature_flag_spec.rb | 22 + spec/tooling/danger/multiversion_spec.rb | 79 ++ .../specs/feature_category_suggestion_spec.rb | 99 +++ .../specs/match_with_array_suggestion_spec.rb | 99 +++ .../specs/project_factory_suggestion_spec.rb | 104 +++ spec/tooling/danger/specs_spec.rb | 271 +------ spec/tooling/danger/stable_branch_spec.rb | 22 + spec/tooling/docs/deprecation_handling_spec.rb | 2 +- spec/tooling/graphql/docs/renderer_spec.rb | 8 +- spec/tooling/lib/tooling/find_changes_spec.rb | 281 +++++++ spec/tooling/lib/tooling/find_tests_spec.rb | 159 ++++ spec/tooling/lib/tooling/gettext_extractor_spec.rb | 254 ++++++ .../lib/tooling/helpers/file_handler_spec.rb | 127 
+++ spec/tooling/lib/tooling/kubernetes_client_spec.rb | 376 ++++----- .../mappings/graphql_base_type_mappings_spec.rb | 251 ++++++ .../mappings/js_to_system_specs_mappings_spec.rb | 95 ++- .../mappings/partial_to_views_mappings_spec.rb | 280 +++++++ .../tooling/mappings/view_to_js_mappings_spec.rb | 89 ++- .../mappings/view_to_system_specs_mappings_spec.rb | 127 +++ spec/tooling/lib/tooling/predictive_tests_spec.rb | 134 ++++ spec/tooling/quality/test_level_spec.rb | 11 +- spec/uploaders/attachment_uploader_spec.rb | 10 +- spec/uploaders/avatar_uploader_spec.rb | 10 +- .../ci/pipeline_artifact_uploader_spec.rb | 6 +- .../dependency_proxy/file_uploader_spec.rb | 9 +- .../design_v432x230_uploader_spec.rb | 14 +- spec/uploaders/external_diff_uploader_spec.rb | 8 +- spec/uploaders/file_uploader_spec.rb | 14 +- spec/uploaders/job_artifact_uploader_spec.rb | 8 +- spec/uploaders/lfs_object_uploader_spec.rb | 8 +- .../object_storage/cdn/google_cdn_spec.rb | 13 +- .../packages/composer/cache_uploader_spec.rb | 8 +- .../debian/component_file_uploader_spec.rb | 12 +- .../distribution_release_file_uploader_spec.rb | 12 +- .../packages/package_file_uploader_spec.rb | 8 +- .../packages/rpm/repository_file_uploader_spec.rb | 8 +- spec/uploaders/pages/deployment_uploader_spec.rb | 6 +- spec/uploaders/personal_file_uploader_spec.rb | 10 +- .../application_settings/_ci_cd.html.haml_spec.rb | 5 +- .../_repository_check.html.haml_spec.rb | 13 +- spec/views/admin/projects/_form.html.haml_spec.rb | 41 + spec/views/ci/status/_badge.html.haml_spec.rb | 10 +- spec/views/ci/status/_icon.html.haml_spec.rb | 10 +- spec/views/devise/sessions/new.html.haml_spec.rb | 94 +-- .../devise/shared/_signup_box.html.haml_spec.rb | 10 +- spec/views/groups/edit.html.haml_spec.rb | 6 +- spec/views/groups/packages/index.html.haml_spec.rb | 39 + .../groups/settings/_general.html.haml_spec.rb | 21 + spec/views/groups/show.html.haml_spec.rb | 38 + spec/views/layouts/_head.html.haml_spec.rb | 2 +- 
spec/views/layouts/_search.html.haml_spec.rb | 77 -- spec/views/layouts/application.html.haml_spec.rb | 4 - .../layouts/nav/sidebar/_admin.html.haml_spec.rb | 11 +- .../layouts/nav/sidebar/_project.html.haml_spec.rb | 16 +- .../autodevops_disabled_email.text.erb_spec.rb | 14 +- .../notify/new_achievement_email.html.haml_spec.rb | 26 + .../notify/pipeline_failed_email.text.erb_spec.rb | 14 +- spec/views/profiles/keys/_key.html.haml_spec.rb | 18 +- .../profiles/preferences/show.html.haml_spec.rb | 4 +- spec/views/projects/_home_panel.html.haml_spec.rb | 24 - .../projects/commit/_commit_box.html.haml_spec.rb | 3 +- spec/views/projects/commit/show.html.haml_spec.rb | 13 - spec/views/projects/edit.html.haml_spec.rb | 8 +- .../projects/merge_requests/edit.html.haml_spec.rb | 4 +- .../projects/packages/index.html.haml_spec.rb | 39 + .../settings/merge_requests/show.html.haml_spec.rb | 8 +- spec/views/projects/tags/index.html.haml_spec.rb | 4 +- spec/views/search/_results.html.haml_spec.rb | 6 - spec/views/search/show.html.haml_spec.rb | 6 + .../shared/milestones/_issuables.html.haml_spec.rb | 9 +- .../runners/_runner_details.html.haml_spec.rb | 13 +- .../user_refresh_over_user_range_worker_spec.rb | 4 +- .../ci_database_worker_spec.rb | 6 +- spec/workers/build_hooks_worker_spec.rb | 4 +- spec/workers/build_queue_worker_spec.rb | 4 +- .../bulk_imports/export_request_worker_spec.rb | 2 +- .../track_artifact_report_worker_spec.rb | 3 +- .../stale_machines_cleanup_cron_worker_spec.rb | 18 +- .../agents/delete_expired_events_worker_spec.rb | 2 +- .../activate_integration_worker_spec.rb | 2 +- .../deactivate_integration_worker_spec.rb | 2 +- .../cleanup/project_namespace_worker_spec.rb | 3 +- .../cleanup/service_account_worker_spec.rb | 2 +- spec/workers/concerns/cluster_agent_queue_spec.rb | 4 +- spec/workers/concerns/cronjob_queue_spec.rb | 2 +- .../gitlab/github_import/object_importer_spec.rb | 6 +- spec/workers/concerns/worker_context_spec.rb | 18 +- 
.../ci_database_worker_spec.rb | 3 +- ...espace_mirrors_consistency_check_worker_spec.rb | 2 +- ...roject_mirrors_consistency_check_worker_spec.rb | 2 +- spec/workers/deployments/hooks_worker_spec.rb | 4 +- .../design_management/new_version_worker_spec.rb | 8 +- spec/workers/email_receiver_worker_spec.rb | 4 +- spec/workers/every_sidekiq_worker_spec.rb | 10 +- .../github_gists_import/import_gist_worker_spec.rb | 66 +- .../attachments/import_issue_worker_spec.rb | 15 +- .../import_merge_request_worker_spec.rb | 15 +- .../attachments/import_note_worker_spec.rb | 1 + .../attachments/import_release_worker_spec.rb | 1 + .../import_release_attachments_worker_spec.rb | 6 +- .../close_incident_worker_spec.rb | 2 +- spec/workers/integrations/irker_worker_spec.rb | 9 +- spec/workers/issuable_export_csv_worker_spec.rb | 4 + .../jira_connect/sync_branch_worker_spec.rb | 4 +- .../jira_connect/sync_builds_worker_spec.rb | 4 +- .../jira_connect/sync_deployments_worker_spec.rb | 4 +- .../jira_connect/sync_feature_flags_worker_spec.rb | 4 +- .../jira_connect/sync_merge_request_worker_spec.rb | 33 +- .../jira_connect/sync_project_worker_spec.rb | 72 +- .../loose_foreign_keys/cleanup_worker_spec.rb | 2 +- .../delete_source_branch_worker_spec.rb | 14 +- .../update_head_pipeline_worker_spec.rb | 38 +- .../metrics/global_metrics_update_worker_spec.rb | 30 + ...ssociate_ml_candidate_to_package_worker_spec.rb | 105 +++ .../namespaces/process_sync_events_worker_spec.rb | 2 +- .../namespaces/root_statistics_worker_spec.rb | 124 ++- .../namespaces/schedule_aggregation_worker_spec.rb | 100 ++- spec/workers/object_pool/destroy_worker_spec.rb | 10 +- .../cleanup_dangling_package_files_worker_spec.rb | 85 ++ .../debian/process_package_file_worker_spec.rb | 1 + .../packages/npm/deprecate_package_worker_spec.rb | 35 + spec/workers/pipeline_hooks_worker_spec.rb | 4 +- spec/workers/pipeline_metrics_worker_spec.rb | 20 +- spec/workers/process_commit_worker_spec.rb | 12 +- 
.../inactive_projects_deletion_cron_worker_spec.rb | 26 +- ...e_projects_deletion_notification_worker_spec.rb | 9 +- .../projects/process_sync_events_worker_spec.rb | 2 +- spec/workers/rebase_worker_spec.rb | 12 +- .../remote_mirror_notification_worker_spec.rb | 6 +- spec/workers/remove_expired_members_worker_spec.rb | 15 +- ...remove_unaccepted_member_invites_worker_spec.rb | 56 +- .../remove_unreferenced_lfs_objects_worker_spec.rb | 14 +- .../repository_update_remote_mirror_worker_spec.rb | 18 +- spec/workers/run_pipeline_schedule_worker_spec.rb | 8 +- ..._head_pipeline_for_merge_request_worker_spec.rb | 12 +- spec/workers/update_highest_role_worker_spec.rb | 2 +- .../users/deactivate_dormant_users_worker_spec.rb | 5 +- ...records_to_ghost_user_in_batches_worker_spec.rb | 2 +- spec/workers/web_hook_worker_spec.rb | 4 +- .../import_work_items_csv_worker_spec.rb | 44 + 2420 files changed, 56977 insertions(+), 37111 deletions(-) delete mode 100644 spec/channels/awareness_channel_spec.rb create mode 100644 spec/controllers/projects/work_items_controller_spec.rb delete mode 100644 spec/experiments/require_verification_for_namespace_creation_experiment_spec.rb delete mode 100644 spec/experiments/security_reports_mr_widget_prompt_experiment_spec.rb create mode 100644 spec/factories/abuse/trust_score.rb delete mode 100644 spec/factories/ci/runner_machines.rb create mode 100644 spec/factories/ci/runner_managers.rb create mode 100644 spec/factories/clusters/agents/authorizations/ci_access/group_authorizations.rb create mode 100644 spec/factories/clusters/agents/authorizations/ci_access/project_authorizations.rb create mode 100644 spec/factories/clusters/agents/authorizations/user_access/group_authorizations.rb create mode 100644 spec/factories/clusters/agents/authorizations/user_access/project_authorizations.rb delete mode 100644 spec/factories/clusters/agents/group_authorizations.rb delete mode 100644 spec/factories/clusters/agents/project_authorizations.rb delete mode 
100644 spec/factories/clusters/applications/helm.rb create mode 100644 spec/factories/gitlab/database/background_migration/schema_inconsistencies.rb delete mode 100644 spec/factories/member_roles.rb create mode 100644 spec/factories/packages/npm/metadata_cache.rb create mode 100644 spec/factories/resource_events/issue_assignment_events.rb create mode 100644 spec/factories/resource_events/merge_request_assignment_events.rb create mode 100644 spec/factories/search_index.rb create mode 100644 spec/factories/service_desk/custom_email_credential.rb create mode 100644 spec/factories/work_items/resource_link_events.rb create mode 100644 spec/features/emails/issues_spec.rb create mode 100644 spec/features/nav/pinned_nav_items_spec.rb create mode 100644 spec/features/profiles/list_users_comment_template_spec.rb delete mode 100644 spec/features/profiles/list_users_saved_replies_spec.rb create mode 100644 spec/features/profiles/user_creates_comment_template_spec.rb delete mode 100644 spec/features/profiles/user_creates_saved_reply_spec.rb create mode 100644 spec/features/profiles/user_deletes_comment_template_spec.rb delete mode 100644 spec/features/profiles/user_deletes_saved_reply_spec.rb create mode 100644 spec/features/profiles/user_updates_comment_template_spec.rb delete mode 100644 spec/features/profiles/user_updates_saved_reply_spec.rb create mode 100644 spec/features/profiles/user_uses_comment_template_spec.rb delete mode 100644 spec/features/profiles/user_uses_saved_reply_spec.rb create mode 100644 spec/finders/achievements/achievements_finder_spec.rb delete mode 100644 spec/finders/clusters/agent_authorizations_finder_spec.rb create mode 100644 spec/finders/clusters/agents/authorizations/ci_access/finder_spec.rb create mode 100644 spec/finders/data_transfer/group_data_transfer_finder_spec.rb create mode 100644 spec/finders/data_transfer/mocked_transfer_finder_spec.rb create mode 100644 spec/finders/data_transfer/project_data_transfer_finder_spec.rb create mode 
100644 spec/finders/groups/accepting_project_creations_finder_spec.rb create mode 100644 spec/finders/groups/accepting_project_shares_finder_spec.rb create mode 100644 spec/fixtures/emails/valid_reply_with_references_in_comma.eml create mode 100644 spec/fixtures/lib/gitlab/import_export/designs/tree/project.json create mode 100644 spec/fixtures/lib/gitlab/import_export/designs/tree/project/issues.ndjson create mode 100644 spec/fixtures/lib/gitlab/import_export/designs/tree/project/project_members.ndjson create mode 100644 spec/fixtures/pages_with_custom_root.zip create mode 100644 spec/fixtures/pages_with_custom_root.zip.meta create mode 100644 spec/fixtures/pages_with_custom_root.zip.meta0 create mode 100644 spec/frontend/__helpers__/assert_props.js create mode 100644 spec/frontend/admin/abuse_reports/components/abuse_report_actions_spec.js create mode 100644 spec/frontend/admin/abuse_reports/components/abuse_report_details_spec.js delete mode 100644 spec/frontend/artifacts/components/app_spec.js delete mode 100644 spec/frontend/artifacts/components/artifact_row_spec.js delete mode 100644 spec/frontend/artifacts/components/artifacts_bulk_delete_spec.js delete mode 100644 spec/frontend/artifacts/components/artifacts_table_row_details_spec.js delete mode 100644 spec/frontend/artifacts/components/feedback_banner_spec.js delete mode 100644 spec/frontend/artifacts/components/job_artifacts_table_spec.js delete mode 100644 spec/frontend/artifacts/components/job_checkbox_spec.js delete mode 100644 spec/frontend/artifacts/graphql/cache_update_spec.js create mode 100644 spec/frontend/authentication/password/components/password_input_spec.js create mode 100644 spec/frontend/ci/artifacts/components/app_spec.js create mode 100644 spec/frontend/ci/artifacts/components/artifact_row_spec.js create mode 100644 spec/frontend/ci/artifacts/components/artifacts_bulk_delete_spec.js create mode 100644 spec/frontend/ci/artifacts/components/artifacts_table_row_details_spec.js create mode 
100644 spec/frontend/ci/artifacts/components/feedback_banner_spec.js create mode 100644 spec/frontend/ci/artifacts/components/job_artifacts_table_spec.js create mode 100644 spec/frontend/ci/artifacts/components/job_checkbox_spec.js create mode 100644 spec/frontend/ci/artifacts/graphql/cache_update_spec.js create mode 100644 spec/frontend/ci/pipeline_editor/components/job_assistant_drawer/accordion_items/artifacts_and_cache_item_spec.js create mode 100644 spec/frontend/ci/pipeline_editor/components/job_assistant_drawer/accordion_items/rules_item_spec.js create mode 100644 spec/frontend/ci/runner/group_new_runner_app/group_new_runner_app_spec.js create mode 100644 spec/frontend/ci/runner/group_register_runner_app/group_register_runner_app_spec.js create mode 100644 spec/frontend/comment_templates/components/__snapshots__/list_item_spec.js.snap create mode 100644 spec/frontend/comment_templates/components/form_spec.js create mode 100644 spec/frontend/comment_templates/components/list_item_spec.js create mode 100644 spec/frontend/comment_templates/components/list_spec.js create mode 100644 spec/frontend/comment_templates/pages/index_spec.js delete mode 100644 spec/frontend/content_editor/components/__snapshots__/toolbar_link_button_spec.js.snap create mode 100644 spec/frontend/content_editor/components/toolbar_attachment_button_spec.js delete mode 100644 spec/frontend/content_editor/components/toolbar_image_button_spec.js delete mode 100644 spec/frontend/content_editor/components/toolbar_link_button_spec.js delete mode 100644 spec/frontend/content_editor/components/wrappers/label_spec.js create mode 100644 spec/frontend/content_editor/components/wrappers/reference_label_spec.js create mode 100644 spec/frontend/content_editor/components/wrappers/reference_spec.js delete mode 100644 spec/frontend/design_management/components/upload/mock_data/all_versions.js create mode 100644 spec/frontend/diffs/components/diff_code_quality_item_spec.js create mode 100644 
spec/frontend/diffs/components/shared/__snapshots__/findings_drawer_spec.js.snap create mode 100644 spec/frontend/diffs/components/shared/findings_drawer_spec.js create mode 100644 spec/frontend/diffs/mock_data/findings_drawer.js create mode 100644 spec/frontend/environments/kubernetes_pods_spec.js create mode 100644 spec/frontend/fixtures/comment_templates.rb create mode 100644 spec/frontend/fixtures/milestones.rb delete mode 100644 spec/frontend/fixtures/saved_replies.rb delete mode 100644 spec/frontend/fixtures/static/search_autocomplete.html create mode 100644 spec/frontend/fixtures/timelogs.rb create mode 100644 spec/frontend/groups/settings/components/group_settings_readme_spec.js create mode 100644 spec/frontend/groups/settings/mock_data.js create mode 100644 spec/frontend/ide/lib/languages/codeowners_spec.js create mode 100644 spec/frontend/import/details/components/import_details_app_spec.js create mode 100644 spec/frontend/import/details/components/import_details_table_spec.js create mode 100644 spec/frontend/import/details/mock_data.js create mode 100644 spec/frontend/issues/new/components/type_select_spec.js create mode 100644 spec/frontend/lib/apollo/indexed_db_persistent_storage_spec.js create mode 100644 spec/frontend/lib/utils/datetime/time_spent_utility_spec.js create mode 100644 spec/frontend/lib/utils/secret_detection_spec.js create mode 100644 spec/frontend/lib/utils/web_ide_navigator_spec.js create mode 100644 spec/frontend/ml/experiment_tracking/components/delete_button_spec.js create mode 100644 spec/frontend/ml/experiment_tracking/routes/experiments/show/components/experiment_header_spec.js create mode 100644 spec/frontend/notebook/cells/output/dataframe_spec.js create mode 100644 spec/frontend/notebook/cells/output/dataframe_util_spec.js create mode 100644 spec/frontend/oauth_application/components/oauth_secret_spec.js create mode 100644 spec/frontend/pages/admin/jobs/components/cancel_jobs_modal_spec.js create mode 100644 
spec/frontend/pages/admin/jobs/components/cancel_jobs_spec.js create mode 100644 spec/frontend/pages/admin/jobs/components/table/admin_job_table_app_spec.js create mode 100644 spec/frontend/pages/admin/jobs/components/table/graphql/cache_config_spec.js delete mode 100644 spec/frontend/pages/admin/jobs/index/components/cancel_jobs_modal_spec.js delete mode 100644 spec/frontend/pages/admin/jobs/index/components/cancel_jobs_spec.js create mode 100644 spec/frontend/pipelines/pipeline_operations_spec.js delete mode 100644 spec/frontend/pipelines/pipelines_actions_spec.js create mode 100644 spec/frontend/pipelines/pipelines_manual_actions_spec.js create mode 100644 spec/frontend/releases/components/tag_create_spec.js create mode 100644 spec/frontend/releases/components/tag_search_spec.js delete mode 100644 spec/frontend/saved_replies/components/__snapshots__/list_item_spec.js.snap delete mode 100644 spec/frontend/saved_replies/components/form_spec.js delete mode 100644 spec/frontend/saved_replies/components/list_item_spec.js delete mode 100644 spec/frontend/saved_replies/components/list_spec.js delete mode 100644 spec/frontend/saved_replies/pages/index_spec.js create mode 100644 spec/frontend/scripts/frontend/__fixtures__/locale/de/converted.json create mode 100644 spec/frontend/scripts/frontend/__fixtures__/locale/de/gitlab.po create mode 100644 spec/frontend/scripts/frontend/po_to_json_spec.js create mode 100644 spec/frontend/search/sidebar/components/scope_new_navigation_spec.js delete mode 100644 spec/frontend/search_autocomplete_spec.js delete mode 100644 spec/frontend/search_autocomplete_utils_spec.js delete mode 100644 spec/frontend/sidebar/components/severity/sidebar_severity_spec.js create mode 100644 spec/frontend/sidebar/components/severity/sidebar_severity_widget_spec.js create mode 100644 spec/frontend/super_sidebar/components/global_search/utils_spec.js create mode 100644 spec/frontend/super_sidebar/components/pinned_section_spec.js create mode 100644 
spec/frontend/super_sidebar/components/sidebar_menu_spec.js create mode 100644 spec/frontend/super_sidebar/components/super_sidebar_toggle_spec.js create mode 100644 spec/frontend/time_tracking/components/timelog_source_cell_spec.js create mode 100644 spec/frontend/time_tracking/components/timelogs_app_spec.js create mode 100644 spec/frontend/time_tracking/components/timelogs_table_spec.js delete mode 100644 spec/frontend/vue_shared/alert_details/alert_metrics_spec.js create mode 100644 spec/frontend/vue_shared/components/diff_viewer/utils_spec.js create mode 100644 spec/frontend/vue_shared/components/markdown/comment_templates_dropdown_spec.js delete mode 100644 spec/frontend/vue_shared/components/markdown/saved_replies_dropdown_spec.js create mode 100644 spec/frontend/vue_shared/components/projects_list/projects_list_item_spec.js create mode 100644 spec/frontend/vue_shared/components/projects_list/projects_list_spec.js create mode 100644 spec/graphql/mutations/achievements/delete_spec.rb create mode 100644 spec/graphql/mutations/achievements/update_spec.rb delete mode 100644 spec/graphql/mutations/concerns/mutations/finds_by_gid_spec.rb create mode 100644 spec/graphql/mutations/work_items/update_spec.rb create mode 100644 spec/graphql/resolvers/data_transfer/group_data_transfer_resolver_spec.rb create mode 100644 spec/graphql/resolvers/data_transfer/project_data_transfer_resolver_spec.rb delete mode 100644 spec/graphql/resolvers/data_transfer_resolver_spec.rb create mode 100644 spec/graphql/types/ci/catalog/resource_type_spec.rb create mode 100644 spec/graphql/types/ci/job_trace_type_spec.rb delete mode 100644 spec/graphql/types/ci/runner_machine_type_spec.rb create mode 100644 spec/graphql/types/ci/runner_manager_type_spec.rb create mode 100644 spec/graphql/types/data_transfer/project_data_transfer_type_spec.rb create mode 100644 spec/graphql/types/work_items/widgets/award_emoji_type_spec.rb create mode 100644 
spec/graphql/types/work_items/widgets/current_user_todos_input_type_spec.rb create mode 100644 spec/graphql/types/work_items/widgets/current_user_todos_type_spec.rb create mode 100644 spec/helpers/abuse_reports_helper_spec.rb create mode 100644 spec/initializers/doorkeeper_openid_connect_patch_spec.rb delete mode 100644 spec/lib/api/entities/clusters/agent_authorization_spec.rb create mode 100644 spec/lib/api/entities/clusters/agents/authorizations/ci_access_spec.rb create mode 100644 spec/lib/atlassian/jira_issue_key_extractors/branch_spec.rb create mode 100644 spec/lib/banzai/filter/markdown_engines/base_spec.rb create mode 100644 spec/lib/banzai/filter/markdown_engines/common_mark_spec.rb delete mode 100644 spec/lib/feature_groups/gitlab_team_members_spec.rb delete mode 100644 spec/lib/gitlab/auth/u2f_webauthn_converter_spec.rb delete mode 100644 spec/lib/gitlab/background_migration/backfill_ci_queuing_tables_spec.rb delete mode 100644 spec/lib/gitlab/background_migration/backfill_integrations_type_new_spec.rb delete mode 100644 spec/lib/gitlab/background_migration/backfill_namespace_traversal_ids_children_spec.rb delete mode 100644 spec/lib/gitlab/background_migration/backfill_namespace_traversal_ids_roots_spec.rb create mode 100644 spec/lib/gitlab/background_migration/backfill_partitioned_table_spec.rb delete mode 100644 spec/lib/gitlab/background_migration/backfill_upvotes_count_on_issues_spec.rb delete mode 100644 spec/lib/gitlab/background_migration/backfill_user_namespace_spec.rb delete mode 100644 spec/lib/gitlab/background_migration/cleanup_orphaned_lfs_objects_projects_spec.rb delete mode 100644 spec/lib/gitlab/background_migration/delete_orphaned_deployments_spec.rb delete mode 100644 spec/lib/gitlab/background_migration/disable_expiration_policies_linked_to_no_container_images_spec.rb delete mode 100644 spec/lib/gitlab/background_migration/drop_invalid_security_findings_spec.rb delete mode 100644 
spec/lib/gitlab/background_migration/drop_invalid_vulnerabilities_spec.rb delete mode 100644 spec/lib/gitlab/background_migration/encrypt_integration_properties_spec.rb delete mode 100644 spec/lib/gitlab/background_migration/encrypt_static_object_token_spec.rb delete mode 100644 spec/lib/gitlab/background_migration/extract_project_topics_into_separate_table_spec.rb delete mode 100644 spec/lib/gitlab/background_migration/fix_first_mentioned_in_commit_at_spec.rb delete mode 100644 spec/lib/gitlab/background_migration/fix_merge_request_diff_commit_users_spec.rb delete mode 100644 spec/lib/gitlab/background_migration/merge_topics_with_same_name_spec.rb delete mode 100644 spec/lib/gitlab/background_migration/migrate_merge_request_diff_commit_users_spec.rb delete mode 100644 spec/lib/gitlab/background_migration/migrate_project_taggings_context_from_tags_to_topics_spec.rb delete mode 100644 spec/lib/gitlab/background_migration/migrate_u2f_webauthn_spec.rb delete mode 100644 spec/lib/gitlab/background_migration/move_container_registry_enabled_to_project_feature_spec.rb delete mode 100644 spec/lib/gitlab/background_migration/populate_namespace_statistics_spec.rb delete mode 100644 spec/lib/gitlab/background_migration/populate_topics_non_private_projects_count_spec.rb delete mode 100644 spec/lib/gitlab/background_migration/populate_topics_total_projects_count_cache_spec.rb delete mode 100644 spec/lib/gitlab/background_migration/populate_vulnerability_reads_spec.rb delete mode 100644 spec/lib/gitlab/background_migration/recalculate_vulnerabilities_occurrences_uuid_spec.rb delete mode 100644 spec/lib/gitlab/background_migration/remove_all_trace_expiration_dates_spec.rb delete mode 100644 spec/lib/gitlab/background_migration/remove_duplicate_vulnerabilities_findings_spec.rb create mode 100644 spec/lib/gitlab/background_migration/remove_project_group_link_with_missing_groups_spec.rb delete mode 100644 
spec/lib/gitlab/background_migration/steal_migrate_merge_request_diff_commit_users_spec.rb delete mode 100644 spec/lib/gitlab/background_migration/update_timelogs_null_spent_at_spec.rb delete mode 100644 spec/lib/gitlab/background_migration/update_timelogs_project_id_spec.rb delete mode 100644 spec/lib/gitlab/background_migration/update_users_where_two_factor_auth_required_from_group_spec.rb create mode 100644 spec/lib/gitlab/ci/ansi2json/signed_state_spec.rb create mode 100644 spec/lib/gitlab/ci/ansi2json/state_spec.rb delete mode 100644 spec/lib/gitlab/ci/components/header_spec.rb create mode 100644 spec/lib/gitlab/ci/config/entry/publish_spec.rb create mode 100644 spec/lib/gitlab/ci/config/external/interpolator_spec.rb create mode 100644 spec/lib/gitlab/database/background_migration/health_status/indicators/patroni_apdex_spec.rb create mode 100644 spec/lib/gitlab/database/migration_helpers/wraparound_vacuum_helpers_spec.rb create mode 100644 spec/lib/gitlab/database/migrations/pg_backend_pid_spec.rb create mode 100644 spec/lib/gitlab/database/schema_validation/adapters/column_database_adapter_spec.rb create mode 100644 spec/lib/gitlab/database/schema_validation/adapters/column_structure_sql_adapter_spec.rb create mode 100644 spec/lib/gitlab/database/schema_validation/inconsistency_spec.rb create mode 100644 spec/lib/gitlab/database/schema_validation/schema_inconsistency_spec.rb create mode 100644 spec/lib/gitlab/database/schema_validation/schema_objects/column_spec.rb create mode 100644 spec/lib/gitlab/database/schema_validation/schema_objects/table_spec.rb create mode 100644 spec/lib/gitlab/database/schema_validation/track_inconsistency_spec.rb create mode 100644 spec/lib/gitlab/database/schema_validation/validators/different_definition_tables_spec.rb create mode 100644 spec/lib/gitlab/database/schema_validation/validators/extra_table_columns_spec.rb create mode 100644 spec/lib/gitlab/database/schema_validation/validators/extra_tables_spec.rb create mode 100644 
spec/lib/gitlab/database/schema_validation/validators/missing_table_columns_spec.rb create mode 100644 spec/lib/gitlab/database/schema_validation/validators/missing_tables_spec.rb create mode 100644 spec/lib/gitlab/email/hook/silent_mode_interceptor_spec.rb create mode 100644 spec/lib/gitlab/email/incoming_email_spec.rb create mode 100644 spec/lib/gitlab/email/service_desk_email_spec.rb create mode 100644 spec/lib/gitlab/git/blame_mode_spec.rb create mode 100644 spec/lib/gitlab/git/blame_pagination_spec.rb create mode 100644 spec/lib/gitlab/graphql/loaders/lazy_relation_loader/registry_spec.rb create mode 100644 spec/lib/gitlab/graphql/loaders/lazy_relation_loader/relation_proxy_spec.rb create mode 100644 spec/lib/gitlab/graphql/loaders/lazy_relation_loader_spec.rb create mode 100644 spec/lib/gitlab/graphql/subscriptions/action_cable_with_load_balancing_spec.rb delete mode 100644 spec/lib/gitlab/import_export/fork_spec.rb delete mode 100644 spec/lib/gitlab/import_export/import_export_equivalence_spec.rb delete mode 100644 spec/lib/gitlab/import_export/json/legacy_reader/file_spec.rb delete mode 100644 spec/lib/gitlab/import_export/json/legacy_reader/hash_spec.rb delete mode 100644 spec/lib/gitlab/import_export/json/legacy_reader/shared_example.rb delete mode 100644 spec/lib/gitlab/import_export/json/legacy_writer_spec.rb delete mode 100644 spec/lib/gitlab/incoming_email_spec.rb delete mode 100644 spec/lib/gitlab/kubernetes/helm/api_spec.rb delete mode 100644 spec/lib/gitlab/kubernetes/helm/pod_spec.rb delete mode 100644 spec/lib/gitlab/kubernetes/helm/v2/base_command_spec.rb delete mode 100644 spec/lib/gitlab/kubernetes/helm/v2/certificate_spec.rb delete mode 100644 spec/lib/gitlab/kubernetes/helm/v2/delete_command_spec.rb delete mode 100644 spec/lib/gitlab/kubernetes/helm/v2/init_command_spec.rb delete mode 100644 spec/lib/gitlab/kubernetes/helm/v2/install_command_spec.rb delete mode 100644 spec/lib/gitlab/kubernetes/helm/v2/patch_command_spec.rb delete mode 
100644 spec/lib/gitlab/kubernetes/helm/v2/reset_command_spec.rb delete mode 100644 spec/lib/gitlab/kubernetes/helm/v3/base_command_spec.rb delete mode 100644 spec/lib/gitlab/kubernetes/helm/v3/delete_command_spec.rb delete mode 100644 spec/lib/gitlab/kubernetes/helm/v3/install_command_spec.rb delete mode 100644 spec/lib/gitlab/kubernetes/helm/v3/patch_command_spec.rb create mode 100644 spec/lib/gitlab/resource_events/assignment_event_recorder_spec.rb delete mode 100644 spec/lib/gitlab/service_desk_email_spec.rb create mode 100644 spec/lib/gitlab/tracking/destinations/database_events_snowplow_spec.rb create mode 100644 spec/lib/gitlab/usage/metrics/instrumentations/database_mode_spec.rb create mode 100644 spec/lib/product_analytics/settings_spec.rb create mode 100644 spec/lib/sidebars/admin/menus/abuse_reports_menu_spec.rb create mode 100644 spec/lib/sidebars/admin/menus/admin_overview_menu_spec.rb create mode 100644 spec/lib/sidebars/admin/menus/admin_settings_menu_spec.rb create mode 100644 spec/lib/sidebars/admin/menus/analytics_menu_spec.rb create mode 100644 spec/lib/sidebars/admin/menus/applications_menu_spec.rb create mode 100644 spec/lib/sidebars/admin/menus/ci_cd_menu_spec.rb create mode 100644 spec/lib/sidebars/admin/menus/deploy_keys_menu_spec.rb create mode 100644 spec/lib/sidebars/admin/menus/labels_menu_spec.rb create mode 100644 spec/lib/sidebars/admin/menus/messages_menu_spec.rb create mode 100644 spec/lib/sidebars/admin/menus/monitoring_menu_spec.rb create mode 100644 spec/lib/sidebars/admin/menus/system_hooks_menu_spec.rb create mode 100644 spec/lib/sidebars/admin/panel_spec.rb create mode 100644 spec/lib/sidebars/groups/super_sidebar_menus/analyze_menu_spec.rb create mode 100644 spec/lib/sidebars/groups/super_sidebar_menus/build_menu_spec.rb create mode 100644 spec/lib/sidebars/groups/super_sidebar_menus/manage_menu_spec.rb create mode 100644 spec/lib/sidebars/groups/super_sidebar_menus/monitor_menu_spec.rb create mode 100644 
spec/lib/sidebars/groups/super_sidebar_menus/operations_menu_spec.rb create mode 100644 spec/lib/sidebars/groups/super_sidebar_menus/plan_menu_spec.rb create mode 100644 spec/lib/sidebars/groups/super_sidebar_menus/secure_menu_spec.rb create mode 100644 spec/lib/sidebars/projects/super_sidebar_menus/analyze_menu_spec.rb create mode 100644 spec/lib/sidebars/projects/super_sidebar_menus/build_menu_spec.rb create mode 100644 spec/lib/sidebars/projects/super_sidebar_menus/code_menu_spec.rb create mode 100644 spec/lib/sidebars/projects/super_sidebar_menus/manage_menu_spec.rb create mode 100644 spec/lib/sidebars/projects/super_sidebar_menus/monitor_menu_spec.rb create mode 100644 spec/lib/sidebars/projects/super_sidebar_menus/secure_menu_spec.rb create mode 100644 spec/lib/sidebars/search/panel_spec.rb create mode 100644 spec/lib/sidebars/user_settings/menus/comment_templates_menu_spec.rb delete mode 100644 spec/lib/sidebars/user_settings/menus/saved_replies_menu_spec.rb delete mode 100644 spec/migrations/20210831203408_upsert_base_work_item_types_spec.rb delete mode 100644 spec/migrations/20210902144144_drop_temporary_columns_and_triggers_for_ci_build_needs_spec.rb delete mode 100644 spec/migrations/20210906100316_drop_temporary_columns_and_triggers_for_ci_build_trace_chunks_spec.rb delete mode 100644 spec/migrations/20210906130643_drop_temporary_columns_and_triggers_for_taggings_spec.rb delete mode 100644 spec/migrations/20210907013944_cleanup_bigint_conversion_for_ci_builds_metadata_spec.rb delete mode 100644 spec/migrations/20210907211557_finalize_ci_builds_bigint_conversion_spec.rb delete mode 100644 spec/migrations/20210910194952_update_report_type_for_existing_approval_project_rules_spec.rb delete mode 100644 spec/migrations/20210914095310_cleanup_orphan_project_access_tokens_spec.rb delete mode 100644 spec/migrations/20210915022415_cleanup_bigint_conversion_for_ci_builds_spec.rb delete mode 100644 
spec/migrations/20210918201050_remove_old_pending_jobs_for_recalculate_vulnerabilities_occurrences_uuid_spec.rb delete mode 100644 spec/migrations/20210922021816_drop_int4_columns_for_ci_job_artifacts_spec.rb delete mode 100644 spec/migrations/20210922025631_drop_int4_column_for_ci_sources_pipelines_spec.rb delete mode 100644 spec/migrations/20210922082019_drop_int4_column_for_events_spec.rb delete mode 100644 spec/migrations/20210922091402_drop_int4_column_for_push_event_payloads_spec.rb delete mode 100644 spec/migrations/20211006060436_schedule_populate_topics_total_projects_count_cache_spec.rb delete mode 100644 spec/migrations/20211012134316_clean_up_migrate_merge_request_diff_commit_users_spec.rb delete mode 100644 spec/migrations/20211018152654_schedule_remove_duplicate_vulnerabilities_findings3_spec.rb delete mode 100644 spec/migrations/20211028155449_schedule_fix_merge_request_diff_commit_users_migration_spec.rb delete mode 100644 spec/migrations/20211101222614_consume_remaining_user_namespace_jobs_spec.rb delete mode 100644 spec/migrations/20211110143306_add_not_null_constraint_to_security_findings_uuid_spec.rb delete mode 100644 spec/migrations/20211110151350_schedule_drop_invalid_security_findings_spec.rb delete mode 100644 spec/migrations/20211116091751_change_namespace_type_default_to_user_spec.rb delete mode 100644 spec/migrations/20211116111644_schedule_remove_occurrence_pipelines_and_duplicate_vulnerabilities_findings_spec.rb delete mode 100644 spec/migrations/20211117084814_migrate_remaining_u2f_registrations_spec.rb delete mode 100644 spec/migrations/20211126115449_encrypt_static_objects_external_storage_auth_token_spec.rb delete mode 100644 spec/migrations/20211126204445_add_task_to_work_item_types_spec.rb delete mode 100644 spec/migrations/20211130165043_backfill_sequence_column_for_sprints_table_spec.rb delete mode 100644 spec/migrations/20211203091642_add_index_to_projects_on_marked_for_deletion_at_spec.rb delete mode 100644 
spec/migrations/20211207125331_remove_jobs_for_recalculate_vulnerabilities_occurrences_uuid_spec.rb delete mode 100644 spec/migrations/20211207135331_schedule_recalculate_uuid_on_vulnerabilities_occurrences4_spec.rb delete mode 100644 spec/migrations/20211210140629_encrypt_static_object_token_spec.rb delete mode 100644 spec/migrations/20211214012507_backfill_incident_issue_escalation_statuses_spec.rb delete mode 100644 spec/migrations/20211217174331_mark_recalculate_finding_signatures_as_completed_spec.rb delete mode 100644 spec/migrations/20220106111958_add_insert_or_update_vulnerability_reads_trigger_spec.rb delete mode 100644 spec/migrations/20220106112043_add_update_vulnerability_reads_trigger_spec.rb delete mode 100644 spec/migrations/20220106112085_add_update_vulnerability_reads_location_trigger_spec.rb delete mode 100644 spec/migrations/20220106163326_add_has_issues_on_vulnerability_reads_trigger_spec.rb delete mode 100644 spec/migrations/20220107064845_populate_vulnerability_reads_spec.rb delete mode 100644 spec/migrations/20220120094340_drop_position_from_security_findings_spec.rb delete mode 100644 spec/migrations/20220124130028_dedup_runner_projects_spec.rb delete mode 100644 spec/migrations/20220128155251_remove_dangling_running_builds_spec.rb delete mode 100644 spec/migrations/20220128155814_fix_approval_rules_code_owners_rule_type_index_spec.rb delete mode 100644 spec/migrations/20220202105733_delete_service_template_records_spec.rb delete mode 100644 spec/migrations/20220204095121_backfill_namespace_statistics_with_dependency_proxy_size_spec.rb delete mode 100644 spec/migrations/20220204194347_encrypt_integration_properties_spec.rb delete mode 100644 spec/migrations/20220208080921_schedule_migrate_personal_namespace_project_maintainer_to_owner_spec.rb delete mode 100644 spec/migrations/20220211214605_update_integrations_trigger_type_new_on_insert_null_safe_spec.rb delete mode 100644 spec/migrations/20220213103859_remove_integrations_type_spec.rb 
delete mode 100644 spec/migrations/20220222192524_create_not_null_constraint_releases_tag_spec.rb delete mode 100644 spec/migrations/20220222192525_remove_null_releases_spec.rb delete mode 100644 spec/migrations/20220223124428_schedule_merge_topics_with_same_name_spec.rb delete mode 100644 spec/migrations/20220305223212_add_security_training_providers_spec.rb delete mode 100644 spec/migrations/20220307192610_remove_duplicate_project_tag_releases_spec.rb delete mode 100644 spec/migrations/20220309084954_remove_leftover_external_pull_request_deletions_spec.rb delete mode 100644 spec/migrations/20220310141349_remove_dependency_list_usage_data_from_redis_spec.rb create mode 100644 spec/migrations/20230209222452_schedule_remove_project_group_link_with_missing_groups_spec.rb delete mode 100644 spec/migrations/20230306195007_queue_backfill_project_wiki_repositories_spec.rb create mode 100644 spec/migrations/20230313142631_backfill_ml_candidates_package_id_spec.rb create mode 100644 spec/migrations/20230314144640_reschedule_migration_for_links_spec.rb create mode 100644 spec/migrations/20230317162059_add_current_user_todos_work_item_widget_spec.rb create mode 100644 spec/migrations/20230321153035_add_package_id_created_at_desc_index_to_package_files_spec.rb create mode 100644 spec/migrations/20230321163947_backfill_ml_candidates_project_id_spec.rb create mode 100644 spec/migrations/20230321170823_backfill_ml_candidates_internal_id_spec.rb create mode 100644 spec/migrations/20230322085041_remove_user_namespace_records_from_vsa_aggregation_spec.rb create mode 100644 spec/migrations/20230322145403_add_project_id_foreign_key_to_packages_npm_metadata_caches_spec.rb create mode 100644 spec/migrations/20230323101138_add_award_emoji_work_item_widget_spec.rb create mode 100644 spec/migrations/20230327123333_backfill_product_analytics_data_collector_host_spec.rb create mode 100644 spec/migrations/20230328100534_truncate_error_tracking_tables_spec.rb create mode 100644 
spec/migrations/20230329100222_drop_software_licenses_temp_index_spec.rb create mode 100644 spec/migrations/20230330103104_reschedule_migrate_evidences_spec.rb create mode 100644 spec/migrations/20230403085957_add_tmp_partial_index_on_vulnerability_report_types2_spec.rb create mode 100644 spec/migrations/20230405200858_requeue_backfill_project_wiki_repositories_spec.rb create mode 100644 spec/migrations/20230411153310_cleanup_bigint_conversion_for_sent_notifications_spec.rb create mode 100644 spec/migrations/20230412214119_finalize_encrypt_ci_trigger_token_spec.rb delete mode 100644 spec/migrations/add_open_source_plan_spec.rb delete mode 100644 spec/migrations/backfill_all_project_namespaces_spec.rb delete mode 100644 spec/migrations/backfill_cycle_analytics_aggregations_spec.rb delete mode 100644 spec/migrations/backfill_group_features_spec.rb delete mode 100644 spec/migrations/backfill_member_namespace_id_for_group_members_spec.rb delete mode 100644 spec/migrations/backfill_namespace_id_for_namespace_routes_spec.rb delete mode 100644 spec/migrations/backfill_project_namespaces_for_group_spec.rb delete mode 100644 spec/migrations/backfill_user_namespace_spec.rb delete mode 100644 spec/migrations/disable_job_token_scope_when_unused_spec.rb create mode 100644 spec/migrations/drop_packages_events_table_spec.rb create mode 100644 spec/migrations/ensure_commit_user_mentions_note_id_bigint_backfill_is_finished_for_gitlab_dot_com_spec.rb create mode 100644 spec/migrations/ensure_design_user_mentions_note_id_bigint_backfill_is_finished_for_gitlab_dot_com_spec.rb create mode 100644 spec/migrations/ensure_epic_user_mentions_bigint_backfill_is_finished_for_gitlab_dot_com_spec.rb create mode 100644 spec/migrations/ensure_issue_user_mentions_bigint_backfill_is_finished_for_gitlab_dot_com_spec.rb create mode 100644 spec/migrations/ensure_mr_user_mentions_note_id_bigint_backfill_is_finished_for_gitlab_dot_com_spec.rb create mode 100644 
spec/migrations/ensure_note_diff_files_bigint_backfill_is_finished_for_gitlab_dot_com_spec.rb create mode 100644 spec/migrations/ensure_snippet_user_mentions_bigint_backfill_is_finished_for_gitlab_dot_com_spec.rb create mode 100644 spec/migrations/ensure_suggestions_note_id_bigint_backfill_is_finished_for_gitlab_dot_com_spec.rb create mode 100644 spec/migrations/ensure_system_note_metadata_bigint_backfill_is_finished_for_gitlab_dot_com_spec.rb create mode 100644 spec/migrations/ensure_unique_debian_packages_spec.rb create mode 100644 spec/migrations/ensure_vum_bigint_backfill_is_finished_for_gl_dot_com_spec.rb create mode 100644 spec/migrations/finalize_issues_iid_scoping_to_namespace_spec.rb delete mode 100644 spec/migrations/finalize_traversal_ids_background_migrations_spec.rb create mode 100644 spec/migrations/insert_daily_invites_trial_plan_limits_spec.rb delete mode 100644 spec/migrations/populate_audit_event_streaming_verification_token_spec.rb delete mode 100644 spec/migrations/queue_backfill_admin_mode_scope_for_personal_access_tokens_spec.rb delete mode 100644 spec/migrations/recreate_index_security_ci_builds_on_name_and_id_parser_features_spec.rb delete mode 100644 spec/migrations/recreate_index_security_ci_builds_on_name_and_id_parser_with_new_features_spec.rb delete mode 100644 spec/migrations/remove_invalid_deploy_access_level_spec.rb delete mode 100644 spec/migrations/remove_not_null_contraint_on_title_from_sprints_spec.rb create mode 100644 spec/migrations/remove_packages_events_package_id_fk_spec.rb create mode 100644 spec/migrations/remove_saml_provider_and_identities_non_root_group_spec.rb delete mode 100644 spec/migrations/remove_schedule_and_status_from_pending_alert_escalations_spec.rb create mode 100644 spec/migrations/remove_scim_token_and_scim_identity_non_root_group_spec.rb create mode 100644 spec/migrations/requeue_backfill_admin_mode_scope_for_personal_access_tokens_spec.rb create mode 100644 
spec/migrations/rerun_remove_invalid_deploy_access_level_spec.rb delete mode 100644 spec/migrations/schedule_fix_incorrect_max_seats_used2_spec.rb delete mode 100644 spec/migrations/schedule_fix_incorrect_max_seats_used_spec.rb create mode 100644 spec/migrations/schedule_migrate_shared_vulnerability_identifiers_spec.rb delete mode 100644 spec/migrations/schedule_recalculate_vulnerability_finding_signatures_for_findings_spec.rb delete mode 100644 spec/migrations/schedule_update_timelogs_null_spent_at_spec.rb create mode 100644 spec/migrations/set_email_confirmation_setting_from_soft_email_confirmation_ff_spec.rb delete mode 100644 spec/migrations/slice_merge_request_diff_commit_migrations_spec.rb delete mode 100644 spec/migrations/start_backfill_ci_queuing_tables_spec.rb create mode 100644 spec/migrations/swap_commit_user_mentions_note_id_to_bigint_for_gitlab_dot_com_spec.rb create mode 100644 spec/migrations/swap_design_user_mentions_note_id_to_bigint_for_gitlab_dot_com_spec.rb create mode 100644 spec/migrations/swap_epic_user_mentions_note_id_to_bigint_for_gitlab_dot_com_spec.rb create mode 100644 spec/migrations/swap_issue_user_mentions_note_id_to_bigint_for_gitlab_dot_com_2_spec.rb create mode 100644 spec/migrations/swap_merge_request_user_mentions_note_id_to_bigint_spec.rb create mode 100644 spec/migrations/swap_note_diff_files_note_id_to_bigint_for_gitlab_dot_com_spec.rb create mode 100644 spec/migrations/swap_sent_notifications_id_columns_spec.rb create mode 100644 spec/migrations/swap_snippet_user_mentions_note_id_to_bigint_for_gitlab_dot_com_spec.rb create mode 100644 spec/migrations/swap_suggestions_note_id_to_bigint_for_gitlab_dot_com_spec.rb create mode 100644 spec/migrations/swap_vulnerability_user_mentions_note_id_to_bigint_for_gitlab_dot_com_spec.rb delete mode 100644 spec/migrations/update_application_settings_container_registry_exp_pol_worker_capacity_default_spec.rb delete mode 100644 
spec/migrations/update_application_settings_protected_paths_spec.rb delete mode 100644 spec/migrations/update_default_scan_method_of_dast_site_profile_spec.rb delete mode 100644 spec/migrations/update_invalid_member_states_spec.rb create mode 100644 spec/models/abuse/trust_score_spec.rb delete mode 100644 spec/models/awareness_session_spec.rb create mode 100644 spec/models/ci/catalog/resource_spec.rb delete mode 100644 spec/models/ci/runner_machine_build_spec.rb delete mode 100644 spec/models/ci/runner_machine_spec.rb create mode 100644 spec/models/ci/runner_manager_build_spec.rb create mode 100644 spec/models/ci/runner_manager_spec.rb create mode 100644 spec/models/clusters/agents/authorizations/ci_access/group_authorization_spec.rb create mode 100644 spec/models/clusters/agents/authorizations/ci_access/implicit_authorization_spec.rb create mode 100644 spec/models/clusters/agents/authorizations/ci_access/project_authorization_spec.rb create mode 100644 spec/models/clusters/agents/authorizations/user_access/group_authorization_spec.rb create mode 100644 spec/models/clusters/agents/authorizations/user_access/project_authorization_spec.rb delete mode 100644 spec/models/clusters/agents/group_authorization_spec.rb delete mode 100644 spec/models/clusters/agents/implicit_authorization_spec.rb delete mode 100644 spec/models/clusters/agents/project_authorization_spec.rb delete mode 100644 spec/models/clusters/applications/helm_spec.rb delete mode 100644 spec/models/clusters/applications/ingress_spec.rb delete mode 100644 spec/models/clusters/applications/jupyter_spec.rb delete mode 100644 spec/models/clusters/applications/knative_spec.rb delete mode 100644 spec/models/clusters/applications/runner_spec.rb delete mode 100644 spec/models/concerns/awareness_spec.rb delete mode 100644 spec/models/concerns/clusters/agents/authorization_config_scopes_spec.rb create mode 100644 spec/models/concerns/clusters/agents/authorizations/ci_access/config_scopes_spec.rb create mode 100644 
spec/models/design_management/git_repository_spec.rb create mode 100644 spec/models/packages/event_spec.rb create mode 100644 spec/models/packages/npm/metadata_cache_spec.rb delete mode 100644 spec/models/preloaders/runner_machine_policy_preloader_spec.rb create mode 100644 spec/models/preloaders/runner_manager_policy_preloader_spec.rb create mode 100644 spec/models/preloaders/users_max_access_level_by_project_preloader_spec.rb delete mode 100644 spec/models/preloaders/users_max_access_level_in_projects_preloader_spec.rb create mode 100644 spec/models/resource_events/issue_assignment_event_spec.rb create mode 100644 spec/models/resource_events/merge_request_assignment_event_spec.rb create mode 100644 spec/models/service_desk/custom_email_credential_spec.rb create mode 100644 spec/models/work_items/resource_link_event_spec.rb create mode 100644 spec/models/work_items/widgets/award_emoji_spec.rb create mode 100644 spec/policies/achievements/user_achievement_policy_spec.rb delete mode 100644 spec/policies/ci/runner_machine_policy_spec.rb create mode 100644 spec/policies/ci/runner_manager_policy_spec.rb create mode 100644 spec/presenters/ml/candidates_csv_presenter_spec.rb create mode 100644 spec/requests/admin/users_controller_spec.rb create mode 100644 spec/requests/api/graphql/group/data_transfer_spec.rb delete mode 100644 spec/requests/api/graphql/group/labels_query_spec.rb create mode 100644 spec/requests/api/graphql/mutations/achievements/delete_spec.rb create mode 100644 spec/requests/api/graphql/mutations/achievements/update_spec.rb create mode 100644 spec/requests/api/graphql/mutations/work_items/convert_spec.rb create mode 100644 spec/requests/api/graphql/project/branches_tipping_at_commit_spec.rb create mode 100644 spec/requests/api/graphql/project/data_transfer_spec.rb create mode 100644 spec/requests/api/graphql/project/tags_tipping_at_commit_spec.rb create mode 100644 spec/requests/import/github_controller_spec.rb create mode 100644 
spec/requests/profiles/comment_templates_controller_spec.rb delete mode 100644 spec/requests/profiles/saved_replies_controller_spec.rb create mode 100644 spec/requests/registrations_controller_spec.rb create mode 100644 spec/requests/time_tracking/timelogs_controller_spec.rb create mode 100644 spec/requests/users/pins_spec.rb create mode 100644 spec/rubocop/cop/rspec/misspelled_aggregate_failures_spec.rb create mode 100644 spec/rubocop/cop/rspec/shared_groups_metadata_spec.rb create mode 100644 spec/rubocop/cop/search/namespaced_class_spec.rb delete mode 100644 spec/scripts/create_pipeline_failure_incident_spec.rb create mode 100644 spec/services/achievements/destroy_service_spec.rb create mode 100644 spec/services/achievements/update_service_spec.rb delete mode 100644 spec/services/ci/catalog/add_resource_service_spec.rb create mode 100644 spec/services/ci/pipeline_processing/test_cases/dag_test_on_failure_no_needs.yml create mode 100644 spec/services/ci/pipeline_processing/test_cases/stage_test_on_failure_no_prev_stage.yml delete mode 100644 spec/services/ci/runners/stale_machines_cleanup_service_spec.rb create mode 100644 spec/services/ci/runners/stale_managers_cleanup_service_spec.rb create mode 100644 spec/services/clusters/agents/authorizations/ci_access/filter_service_spec.rb create mode 100644 spec/services/clusters/agents/authorizations/ci_access/refresh_service_spec.rb create mode 100644 spec/services/clusters/agents/authorizations/user_access/refresh_service_spec.rb delete mode 100644 spec/services/clusters/agents/filter_authorizations_service_spec.rb delete mode 100644 spec/services/clusters/agents/refresh_authorization_service_spec.rb create mode 100644 spec/services/issuable/callbacks/milestone_spec.rb create mode 100644 spec/services/metrics/global_metrics_update_service_spec.rb create mode 100644 spec/services/packages/npm/deprecate_package_service_spec.rb create mode 100644 spec/services/packages/npm/generate_metadata_service_spec.rb delete mode 
100644 spec/services/projects/blame_service_spec.rb create mode 100644 spec/services/work_items/parent_links/base_service_spec.rb create mode 100644 spec/services/work_items/parent_links/reorder_service_spec.rb create mode 100644 spec/services/work_items/prepare_import_csv_service_spec.rb create mode 100644 spec/services/work_items/widgets/award_emoji_service/update_service_spec.rb create mode 100644 spec/services/work_items/widgets/current_user_todos_service/update_service_spec.rb create mode 100644 spec/services/work_items/widgets/labels_service/update_service_spec.rb delete mode 100644 spec/services/work_items/widgets/milestone_service/create_service_spec.rb delete mode 100644 spec/services/work_items/widgets/milestone_service/update_service_spec.rb delete mode 100644 spec/support/banzai/filter_timeout_shared_examples.rb delete mode 100644 spec/support/banzai/reference_filter_shared_examples.rb delete mode 100644 spec/support/chunked_io/chunked_io_helpers.rb delete mode 100644 spec/support/controllers/project_import_rate_limiter_shared_examples.rb delete mode 100644 spec/support/cycle_analytics_helpers/test_generation.rb delete mode 100644 spec/support/gitlab/usage/metrics_instrumentation_shared_examples.rb delete mode 100644 spec/support/google_api/cloud_platform_helpers.rb delete mode 100644 spec/support/graphql/arguments.rb delete mode 100644 spec/support/graphql/fake_query_type.rb delete mode 100644 spec/support/graphql/fake_tracer.rb delete mode 100644 spec/support/graphql/field_inspection.rb delete mode 100644 spec/support/graphql/field_selection.rb delete mode 100644 spec/support/graphql/resolver_factories.rb delete mode 100644 spec/support/graphql/subscriptions/action_cable/mock_action_cable.rb delete mode 100644 spec/support/graphql/subscriptions/action_cable/mock_gitlab_schema.rb delete mode 100644 spec/support/graphql/subscriptions/notes/helper.rb delete mode 100644 spec/support/graphql/var.rb create mode 100644 
spec/support/helpers/chunked_io_helpers.rb create mode 100644 spec/support/helpers/cycle_analytics_helpers/test_generation.rb create mode 100644 spec/support/helpers/every_sidekiq_worker_test_helper.rb delete mode 100644 spec/support/helpers/features/invite_members_modal_helper.rb create mode 100644 spec/support/helpers/features/invite_members_modal_helpers.rb delete mode 100644 spec/support/helpers/features/list_rows_helpers.rb delete mode 100644 spec/support/helpers/features/snippet_helpers.rb create mode 100644 spec/support/helpers/features/snippet_spec_helpers.rb create mode 100644 spec/support/helpers/google_api/cloud_platform_helpers.rb create mode 100644 spec/support/helpers/graphql/arguments.rb create mode 100644 spec/support/helpers/graphql/fake_query_type.rb create mode 100644 spec/support/helpers/graphql/fake_tracer.rb create mode 100644 spec/support/helpers/graphql/field_inspection.rb create mode 100644 spec/support/helpers/graphql/field_selection.rb create mode 100644 spec/support/helpers/graphql/resolver_factories.rb create mode 100644 spec/support/helpers/graphql/subscriptions/action_cable/mock_action_cable.rb create mode 100644 spec/support/helpers/graphql/subscriptions/action_cable/mock_gitlab_schema.rb create mode 100644 spec/support/helpers/graphql/subscriptions/notes/helper.rb create mode 100644 spec/support/helpers/graphql/var.rb create mode 100644 spec/support/helpers/http_io_helpers.rb create mode 100644 spec/support/helpers/keyset_pagination_helpers.rb create mode 100644 spec/support/helpers/migrations_helpers/cluster_helpers.rb create mode 100644 spec/support/helpers/migrations_helpers/namespaces_helper.rb create mode 100644 spec/support/helpers/migrations_helpers/schema_version_finder.rb create mode 100644 spec/support/helpers/migrations_helpers/vulnerabilities_findings_helper.rb create mode 100644 spec/support/helpers/models/ci/partitioning_testing/cascade_check.rb create mode 100644 
spec/support/helpers/models/ci/partitioning_testing/partition_identifiers.rb create mode 100644 spec/support/helpers/models/ci/partitioning_testing/rspec_hooks.rb create mode 100644 spec/support/helpers/models/ci/partitioning_testing/schema_helpers.rb create mode 100644 spec/support/helpers/models/merge_request_without_merge_request_diff.rb create mode 100644 spec/support/helpers/prometheus/metric_builders.rb create mode 100644 spec/support/helpers/redis_helpers.rb create mode 100644 spec/support/helpers/test_reports_helper.rb create mode 100644 spec/support/helpers/trace_helpers.rb delete mode 100644 spec/support/http_io/http_io_helpers.rb create mode 100644 spec/support/matchers/have_plain_text_content.rb delete mode 100644 spec/support/migrations_helpers/cluster_helpers.rb delete mode 100644 spec/support/migrations_helpers/namespaces_helper.rb delete mode 100644 spec/support/migrations_helpers/schema_version_finder.rb delete mode 100644 spec/support/migrations_helpers/vulnerabilities_findings_helper.rb delete mode 100644 spec/support/models/ci/partitioning_testing/cascade_check.rb delete mode 100644 spec/support/models/ci/partitioning_testing/partition_identifiers.rb delete mode 100644 spec/support/models/ci/partitioning_testing/rspec_hooks.rb delete mode 100644 spec/support/models/ci/partitioning_testing/schema_helpers.rb delete mode 100644 spec/support/models/merge_request_without_merge_request_diff.rb delete mode 100644 spec/support/prometheus/additional_metrics_shared_examples.rb delete mode 100644 spec/support/prometheus/metric_builders.rb delete mode 100644 spec/support/protected_tags/access_control_ce_shared_examples.rb delete mode 100644 spec/support/redis/redis_helpers.rb delete mode 100644 spec/support/redis/redis_new_instance_shared_examples.rb delete mode 100644 spec/support/redis/redis_shared_examples.rb delete mode 100644 spec/support/services/clusters/create_service_shared.rb delete mode 100644 spec/support/services/deploy_token_shared_examples.rb 
delete mode 100644 spec/support/services/import_csv_service_shared_examples.rb delete mode 100644 spec/support/services/issuable_description_quick_actions_shared_examples.rb delete mode 100644 spec/support/services/issuable_import_csv_service_shared_examples.rb delete mode 100644 spec/support/services/issuable_update_service_shared_examples.rb delete mode 100644 spec/support/services/issues/move_and_clone_services_shared_examples.rb delete mode 100644 spec/support/services/migrate_to_ghost_user_service_shared_examples.rb delete mode 100644 spec/support/services/service_response_shared_examples.rb create mode 100644 spec/support/shared_contexts/services/clusters/create_service_shared_context.rb create mode 100644 spec/support/shared_examples/banzai/filters/filter_timeout_shared_examples.rb create mode 100644 spec/support/shared_examples/banzai/filters/reference_filter_shared_examples.rb create mode 100644 spec/support/shared_examples/controllers/project_import_rate_limiter_shared_examples.rb create mode 100644 spec/support/shared_examples/features/explore/sidebar_shared_examples.rb create mode 100644 spec/support/shared_examples/graphql/resolvers/data_transfer_resolver_shared_examples.rb create mode 100644 spec/support/shared_examples/lib/gitlab/database/table_validators_shared_examples.rb create mode 100644 spec/support/shared_examples/lib/sidebars/admin/menus/admin_menus_shared_examples.rb create mode 100644 spec/support/shared_examples/metrics_instrumentation_shared_examples.rb create mode 100644 spec/support/shared_examples/migrations/add_work_item_widget_shared_examples.rb create mode 100644 spec/support/shared_examples/prometheus/additional_metrics_shared_examples.rb create mode 100644 spec/support/shared_examples/protected_tags/access_control_ce_shared_examples.rb create mode 100644 spec/support/shared_examples/redis/redis_new_instance_shared_examples.rb create mode 100644 spec/support/shared_examples/redis/redis_shared_examples.rb delete mode 100644 
spec/support/shared_examples/services/base_helm_service_shared_examples.rb create mode 100644 spec/support/shared_examples/services/clusters/create_service_shared_examples.rb create mode 100644 spec/support/shared_examples/services/deploy_token_shared_examples.rb create mode 100644 spec/support/shared_examples/services/import_csv_service_shared_examples.rb create mode 100644 spec/support/shared_examples/services/issuable/issuable_description_quick_actions_shared_examples.rb create mode 100644 spec/support/shared_examples/services/issuable/issuable_import_csv_service_shared_examples.rb create mode 100644 spec/support/shared_examples/services/issuable/issuable_update_service_shared_examples.rb delete mode 100644 spec/support/shared_examples/services/issuable/update_service_shared_examples.rb create mode 100644 spec/support/shared_examples/services/issues/move_and_clone_services_shared_examples.rb create mode 100644 spec/support/shared_examples/services/migrate_to_ghost_user_service_shared_examples.rb create mode 100644 spec/support/shared_examples/services/service_response_shared_examples.rb delete mode 100644 spec/support/shared_examples/services/work_items/widgets/milestone_service_shared_examples.rb create mode 100644 spec/support/shared_examples/work_items/export_and_import_shared_examples.rb create mode 100644 spec/support/stub_member_access_level.rb delete mode 100644 spec/support/test_reports/test_reports_helper.rb delete mode 100644 spec/support/trace/trace_helpers.rb create mode 100644 spec/support_specs/stub_member_access_level_spec.rb create mode 100644 spec/tooling/danger/multiversion_spec.rb create mode 100644 spec/tooling/danger/specs/feature_category_suggestion_spec.rb create mode 100644 spec/tooling/danger/specs/match_with_array_suggestion_spec.rb create mode 100644 spec/tooling/danger/specs/project_factory_suggestion_spec.rb create mode 100644 spec/tooling/lib/tooling/find_changes_spec.rb create mode 100644 spec/tooling/lib/tooling/find_tests_spec.rb 
create mode 100644 spec/tooling/lib/tooling/gettext_extractor_spec.rb create mode 100644 spec/tooling/lib/tooling/helpers/file_handler_spec.rb create mode 100644 spec/tooling/lib/tooling/mappings/graphql_base_type_mappings_spec.rb create mode 100644 spec/tooling/lib/tooling/mappings/partial_to_views_mappings_spec.rb create mode 100644 spec/tooling/lib/tooling/mappings/view_to_system_specs_mappings_spec.rb create mode 100644 spec/tooling/lib/tooling/predictive_tests_spec.rb create mode 100644 spec/views/admin/projects/_form.html.haml_spec.rb create mode 100644 spec/views/groups/packages/index.html.haml_spec.rb create mode 100644 spec/views/groups/settings/_general.html.haml_spec.rb create mode 100644 spec/views/groups/show.html.haml_spec.rb delete mode 100644 spec/views/layouts/_search.html.haml_spec.rb create mode 100644 spec/views/notify/new_achievement_email.html.haml_spec.rb create mode 100644 spec/views/projects/packages/index.html.haml_spec.rb create mode 100644 spec/workers/metrics/global_metrics_update_worker_spec.rb create mode 100644 spec/workers/ml/experiment_tracking/associate_ml_candidate_to_package_worker_spec.rb create mode 100644 spec/workers/packages/debian/cleanup_dangling_package_files_worker_spec.rb create mode 100644 spec/workers/packages/npm/deprecate_package_worker_spec.rb create mode 100644 spec/workers/work_items/import_work_items_csv_worker_spec.rb (limited to 'spec') diff --git a/spec/channels/awareness_channel_spec.rb b/spec/channels/awareness_channel_spec.rb deleted file mode 100644 index 47b1cd0188f..00000000000 --- a/spec/channels/awareness_channel_spec.rb +++ /dev/null @@ -1,81 +0,0 @@ -# frozen_string_literal: true - -require 'spec_helper' - -RSpec.describe AwarenessChannel, :clean_gitlab_redis_shared_state, type: :channel do - before do - stub_action_cable_connection(current_user: user) - end - - context "with user" do - let(:user) { create(:user) } - - describe "when no path parameter given" do - it "rejects subscription" do - 
subscribe path: nil - - expect(subscription).to be_rejected - end - end - - describe "with valid path parameter" do - it "successfully subscribes" do - subscribe path: "/test" - - session = AwarenessSession.for("/test") - - expect(subscription).to be_confirmed - # check if we can use session object instead - expect(subscription).to have_stream_from("awareness:#{session.to_param}") - end - - it "broadcasts set of collaborators when subscribing" do - session = AwarenessSession.for("/test") - - freeze_time do - collaborator = { - id: user.id, - name: user.name, - username: user.username, - avatar_url: user.avatar_url(size: 36), - last_activity: Time.zone.now, - last_activity_humanized: ActionController::Base.helpers.distance_of_time_in_words( - Time.zone.now, Time.zone.now - ) - } - - expect do - subscribe path: "/test" - end.to have_broadcasted_to("awareness:#{session.to_param}") - .with(collaborators: [collaborator]) - end - end - - it "transmits payload when user is touched" do - subscribe path: "/test" - - perform :touch - - expect(transmissions.size).to be 1 - end - - it "unsubscribes from channel" do - subscribe path: "/test" - session = AwarenessSession.for("/test") - - expect { subscription.unsubscribe_from_channel } - .to change { session.size }.by(-1) - end - end - end - - context "with guest" do - let(:user) { nil } - - it "rejects subscription" do - subscribe path: "/test" - - expect(subscription).to be_rejected - end - end -end diff --git a/spec/commands/sidekiq_cluster/cli_spec.rb b/spec/commands/sidekiq_cluster/cli_spec.rb index 428a0588bdd..3951ef49288 100644 --- a/spec/commands/sidekiq_cluster/cli_spec.rb +++ b/spec/commands/sidekiq_cluster/cli_spec.rb @@ -246,12 +246,12 @@ RSpec.describe Gitlab::SidekiqCluster::CLI, feature_category: :gitlab_cli, stub_ if Gitlab.ee? 
[ %w[cronjob:clusters_integrations_check_prometheus_health incident_management_close_incident status_page_publish], - %w[project_export projects_import_export_parallel_project_export projects_import_export_relation_export project_template_export] + %w[bulk_imports_pipeline bulk_imports_relation_export project_export projects_import_export_parallel_project_export projects_import_export_relation_export repository_import project_template_export] ] else [ %w[cronjob:clusters_integrations_check_prometheus_health incident_management_close_incident], - %w[project_export projects_import_export_parallel_project_export projects_import_export_relation_export] + %w[bulk_imports_pipeline bulk_imports_relation_export project_export projects_import_export_parallel_project_export projects_import_export_relation_export repository_import] ] end diff --git a/spec/controllers/admin/applications_controller_spec.rb b/spec/controllers/admin/applications_controller_spec.rb index edb17aefe86..1feda0ed36f 100644 --- a/spec/controllers/admin/applications_controller_spec.rb +++ b/spec/controllers/admin/applications_controller_spec.rb @@ -50,6 +50,12 @@ RSpec.describe Admin::ApplicationsController do it { is_expected.to have_gitlab_http_status(:ok) } it { expect { subject }.to change { application.reload.secret } } + it 'returns the secret in json format' do + subject + + expect(json_response['secret']).not_to be_nil + end + context 'when renew fails' do before do allow_next_found_instance_of(Doorkeeper::Application) do |application| @@ -58,7 +64,7 @@ RSpec.describe Admin::ApplicationsController do end it { expect { subject }.not_to change { application.reload.secret } } - it { is_expected.to redirect_to(admin_application_url(application)) } + it { is_expected.to have_gitlab_http_status(:unprocessable_entity) } end end diff --git a/spec/controllers/admin/clusters_controller_spec.rb b/spec/controllers/admin/clusters_controller_spec.rb index 8e62aeed7d0..a47bac4d2ef 100644 --- 
a/spec/controllers/admin/clusters_controller_spec.rb +++ b/spec/controllers/admin/clusters_controller_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe Admin::ClustersController, feature_category: :kubernetes_management do +RSpec.describe Admin::ClustersController, feature_category: :deployment_management do include AccessMatchersForController include GoogleApi::CloudPlatformHelpers @@ -259,14 +259,6 @@ RSpec.describe Admin::ClustersController, feature_category: :kubernetes_manageme expect(response).to have_gitlab_http_status(:ok) expect(response).to match_response_schema('cluster_status') end - - it 'invokes schedule_status_update on each application' do - expect_next_instance_of(Clusters::Applications::Ingress) do |instance| - expect(instance).to receive(:schedule_status_update) - end - - get_cluster_status - end end describe 'security' do diff --git a/spec/controllers/application_controller_spec.rb b/spec/controllers/application_controller_spec.rb index 35e374d3b7f..cdd088c2d5e 100644 --- a/spec/controllers/application_controller_spec.rb +++ b/spec/controllers/application_controller_spec.rb @@ -892,12 +892,12 @@ RSpec.describe ApplicationController, feature_category: :shared do end end - describe 'rescue_from Gitlab::Auth::IpBlacklisted' do + describe 'rescue_from Gitlab::Auth::IpBlocked' do controller(described_class) do skip_before_action :authenticate_user! 
def index - raise Gitlab::Auth::IpBlacklisted + raise Gitlab::Auth::IpBlocked end end diff --git a/spec/controllers/concerns/kas_cookie_spec.rb b/spec/controllers/concerns/kas_cookie_spec.rb index e2ca19457ff..d80df106cfd 100644 --- a/spec/controllers/concerns/kas_cookie_spec.rb +++ b/spec/controllers/concerns/kas_cookie_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe KasCookie, feature_category: :kubernetes_management do +RSpec.describe KasCookie, feature_category: :deployment_management do describe '#set_kas_cookie' do controller(ApplicationController) do include KasCookie @@ -52,4 +52,71 @@ RSpec.describe KasCookie, feature_category: :kubernetes_management do end end end + + describe '#content_security_policy' do + let_it_be(:user) { create(:user) } + + controller(ApplicationController) do + include KasCookie + + def index + render json: {}, status: :ok + end + end + + before do + stub_config_setting(host: 'gitlab.example.com') + sign_in(user) + allow(::Gitlab::Kas).to receive(:enabled?).and_return(true) + allow(::Gitlab::Kas).to receive(:tunnel_url).and_return(kas_tunnel_url) + end + + subject(:kas_csp_connect_src) do + get :index + + request.env['action_dispatch.content_security_policy'].directives['connect-src'] + end + + context "when feature flag is disabled" do + let_it_be(:kas_tunnel_url) { 'ws://gitlab.example.com/-/k8s-proxy/' } + + before do + stub_feature_flags(kas_user_access: false) + end + + it 'does not add KAS url to connect-src directives' do + expect(kas_csp_connect_src).not_to include(::Gitlab::Kas.tunnel_url) + end + end + + context 'when feature flag is enabled' do + before do + stub_feature_flags(kas_user_access: true) + end + + context 'when KAS is on same domain as rails' do + let_it_be(:kas_tunnel_url) { 'ws://gitlab.example.com/-/k8s-proxy/' } + + it 'does not add KAS url to CSP connect-src directive' do + expect(kas_csp_connect_src).not_to include(::Gitlab::Kas.tunnel_url) + end + end + + context 'when KAS is on 
subdomain' do + let_it_be(:kas_tunnel_url) { 'ws://kas.gitlab.example.com/k8s-proxy/' } + + it 'adds KAS url to CSP connect-src directive' do + expect(kas_csp_connect_src).to include(::Gitlab::Kas.tunnel_url) + end + end + + context 'when KAS tunnel url is configured without trailing slash' do + let_it_be(:kas_tunnel_url) { 'ws://kas.gitlab.example.com/k8s-proxy' } + + it 'adds KAS url to CSP connect-src directive with trailing slash' do + expect(kas_csp_connect_src).to include("#{::Gitlab::Kas.tunnel_url}/") + end + end + end + end end diff --git a/spec/controllers/concerns/product_analytics_tracking_spec.rb b/spec/controllers/concerns/product_analytics_tracking_spec.rb index b0074b52aa2..65c2c77c027 100644 --- a/spec/controllers/concerns/product_analytics_tracking_spec.rb +++ b/spec/controllers/concerns/product_analytics_tracking_spec.rb @@ -107,20 +107,6 @@ RSpec.describe ProductAnalyticsTracking, :snowplow, feature_category: :product_a expect_snowplow_tracking(user) end - context 'when FF is disabled' do - before do - stub_const("#{described_class}::MIGRATED_EVENTS", []) - allow(Feature).to receive(:enabled?).and_call_original - allow(Feature).to receive(:enabled?).with('route_hll_to_snowplow', anything).and_return(false) - end - - it 'doesnt track snowplow event' do - get :index - - expect_no_snowplow_event - end - end - it 'tracks the event if DNT is not enabled' do stub_do_not_track('0') diff --git a/spec/controllers/dashboard/projects_controller_spec.rb b/spec/controllers/dashboard/projects_controller_spec.rb index 0e4771b20f7..893546def5a 100644 --- a/spec/controllers/dashboard/projects_controller_spec.rb +++ b/spec/controllers/dashboard/projects_controller_spec.rb @@ -40,19 +40,19 @@ RSpec.describe Dashboard::ProjectsController, :aggregate_failures, feature_categ expect(assigns(:projects)).to eq(projects) end - it 'assigns the correct total_user_projects_count' do + it 'assigns the correct all_user_projects' do get :index - total_user_projects_count = 
assigns(:total_user_projects_count) + all_user_projects = assigns(:all_user_projects) - expect(total_user_projects_count.count).to eq(2) + expect(all_user_projects.count).to eq(2) end - it 'assigns the correct total_starred_projects_count' do + it 'assigns the correct all_starred_projects' do get :index - total_starred_projects_count = assigns(:total_starred_projects_count) + all_starred_projects = assigns(:all_starred_projects) - expect(total_starred_projects_count.count).to eq(1) - expect(total_starred_projects_count).to include(project2) + expect(all_starred_projects.count).to eq(1) + expect(all_starred_projects).to include(project2) end context 'project sorting' do @@ -91,20 +91,20 @@ RSpec.describe Dashboard::ProjectsController, :aggregate_failures, feature_categ expect(projects_result).to include(project) end - it 'excludes archived project from total_user_projects_count' do + it 'excludes archived project from all_user_projects' do get :index - total_user_projects_count = assigns(:total_user_projects_count) + all_user_projects = assigns(:all_user_projects) - expect(total_user_projects_count.count).to eq(1) - expect(total_user_projects_count).not_to include(archived_project) + expect(all_user_projects.count).to eq(1) + expect(all_user_projects).not_to include(archived_project) end - it 'excludes archived project from total_starred_projects_count' do + it 'excludes archived project from all_starred_projects' do get :index - total_starred_projects_count = assigns(:total_starred_projects_count) + all_starred_projects = assigns(:all_starred_projects) - expect(total_starred_projects_count.count).to eq(0) - expect(total_starred_projects_count).not_to include(archived_project) + expect(all_starred_projects.count).to eq(0) + expect(all_starred_projects).not_to include(archived_project) end end diff --git a/spec/controllers/explore/projects_controller_spec.rb b/spec/controllers/explore/projects_controller_spec.rb index c4f0feb21e2..c2bdb0171e7 100644 --- 
a/spec/controllers/explore/projects_controller_spec.rb +++ b/spec/controllers/explore/projects_controller_spec.rb @@ -239,9 +239,14 @@ RSpec.describe Explore::ProjectsController, feature_category: :projects do context 'when user is signed in' do let(:user) { create(:user) } + let_it_be(:project) { create(:project, name: 'Project 1') } + let_it_be(:project2) { create(:project, name: 'Project 2') } before do sign_in(user) + project.add_developer(user) + project2.add_developer(user) + user.toggle_star(project2) end include_examples 'explore projects' @@ -260,6 +265,21 @@ RSpec.describe Explore::ProjectsController, feature_category: :projects do let(:controller_action) { :index } let(:params_with_name) { { name: 'some project' } } + it 'assigns the correct all_user_projects' do + get :index + all_user_projects = assigns(:all_user_projects) + + expect(all_user_projects.count).to eq(2) + end + + it 'assigns the correct all_starred_projects' do + get :index + all_starred_projects = assigns(:all_starred_projects) + + expect(all_starred_projects.count).to eq(1) + expect(all_starred_projects).to include(project2) + end + context 'when disable_anonymous_project_search is enabled' do before do stub_feature_flags(disable_anonymous_project_search: true) diff --git a/spec/controllers/groups/clusters_controller_spec.rb b/spec/controllers/groups/clusters_controller_spec.rb index 01ea7101f2e..410579c0bed 100644 --- a/spec/controllers/groups/clusters_controller_spec.rb +++ b/spec/controllers/groups/clusters_controller_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe Groups::ClustersController, feature_category: :kubernetes_management do +RSpec.describe Groups::ClustersController, feature_category: :deployment_management do include AccessMatchersForController include GoogleApi::CloudPlatformHelpers @@ -322,12 +322,6 @@ RSpec.describe Groups::ClustersController, feature_category: :kubernetes_managem expect(response).to have_gitlab_http_status(:ok) expect(response).to 
match_response_schema('cluster_status') end - - it 'invokes schedule_status_update on each application' do - expect_any_instance_of(Clusters::Applications::Ingress).to receive(:schedule_status_update) - - go - end end describe 'security' do diff --git a/spec/controllers/groups/runners_controller_spec.rb b/spec/controllers/groups/runners_controller_spec.rb index 1a60f7d824e..9ae5cb6f87c 100644 --- a/spec/controllers/groups/runners_controller_spec.rb +++ b/spec/controllers/groups/runners_controller_spec.rb @@ -6,8 +6,8 @@ RSpec.describe Groups::RunnersController, feature_category: :runner_fleet do let_it_be(:user) { create(:user) } let_it_be(:group) { create(:group) } let_it_be(:project) { create(:project, group: group) } + let_it_be(:runner) { create(:ci_runner, :group, groups: [group]) } - let!(:runner) { create(:ci_runner, :group, groups: [group]) } let!(:project_runner) { create(:ci_runner, :project, projects: [project]) } let!(:instance_runner) { create(:ci_runner, :instance) } @@ -37,6 +37,12 @@ RSpec.describe Groups::RunnersController, feature_category: :runner_fleet do expect_snowplow_event(category: described_class.name, action: 'index', user: user, namespace: group) end + + it 'assigns variables' do + get :index, params: { group_id: group } + + expect(assigns(:group_new_runner_path)).to eq(new_group_runner_path(group)) + end end context 'when user is not owner' do @@ -58,6 +64,130 @@ RSpec.describe Groups::RunnersController, feature_category: :runner_fleet do end end + describe '#new' do + context 'when create_runner_workflow_for_namespace is enabled' do + before do + stub_feature_flags(create_runner_workflow_for_namespace: [group]) + end + + context 'when user is owner' do + before do + group.add_owner(user) + end + + it 'renders new with 200 status code' do + get :new, params: { group_id: group } + + expect(response).to have_gitlab_http_status(:ok) + expect(response).to render_template(:new) + end + end + + context 'when user is not owner' do + before do 
+ group.add_maintainer(user) + end + + it 'renders a 404' do + get :new, params: { group_id: group } + + expect(response).to have_gitlab_http_status(:not_found) + end + end + end + + context 'when create_runner_workflow_for_namespace is disabled' do + before do + stub_feature_flags(create_runner_workflow_for_namespace: false) + end + + context 'when user is owner' do + before do + group.add_owner(user) + end + + it 'renders a 404' do + get :new, params: { group_id: group } + + expect(response).to have_gitlab_http_status(:not_found) + end + end + end + end + + describe '#register' do + subject(:register) { get :register, params: { group_id: group, id: new_runner } } + + context 'when create_runner_workflow_for_namespace is enabled' do + before do + stub_feature_flags(create_runner_workflow_for_namespace: [group]) + end + + context 'when user is owner' do + before do + group.add_owner(user) + end + + context 'when runner can be registered after creation' do + let_it_be(:new_runner) { create(:ci_runner, :group, groups: [group], registration_type: :authenticated_user) } + + it 'renders a :register template' do + register + + expect(response).to have_gitlab_http_status(:ok) + expect(response).to render_template(:register) + end + end + + context 'when runner cannot be registered after creation' do + let_it_be(:new_runner) { runner } + + it 'returns :not_found' do + register + + expect(response).to have_gitlab_http_status(:not_found) + end + end + end + + context 'when user is not owner' do + before do + group.add_maintainer(user) + end + + context 'when runner can be registered after creation' do + let_it_be(:new_runner) { create(:ci_runner, :group, groups: [group], registration_type: :authenticated_user) } + + it 'returns :not_found' do + register + + expect(response).to have_gitlab_http_status(:not_found) + end + end + end + end + + context 'when create_runner_workflow_for_namespace is disabled' do + let_it_be(:new_runner) { create(:ci_runner, :group, groups: [group], 
registration_type: :authenticated_user) } + + before do + stub_feature_flags(create_runner_workflow_for_namespace: false) + end + + context 'when user is owner' do + before do + group.add_owner(user) + end + + it 'returns :not_found' do + register + + expect(response).to have_gitlab_http_status(:not_found) + end + end + end + end + describe '#show' do context 'when user is owner' do before do @@ -158,6 +288,8 @@ RSpec.describe Groups::RunnersController, feature_category: :runner_fleet do end describe '#update' do + let!(:runner) { create(:ci_runner, :group, groups: [group]) } + context 'when user is an owner' do before do group.add_owner(user) diff --git a/spec/controllers/groups/settings/applications_controller_spec.rb b/spec/controllers/groups/settings/applications_controller_spec.rb index 2fadac2dc17..c398fd044c2 100644 --- a/spec/controllers/groups/settings/applications_controller_spec.rb +++ b/spec/controllers/groups/settings/applications_controller_spec.rb @@ -156,6 +156,12 @@ RSpec.describe Groups::Settings::ApplicationsController do it { is_expected.to have_gitlab_http_status(:ok) } it { expect { subject }.to change { application.reload.secret } } + it 'returns the secret in json format' do + subject + + expect(json_response['secret']).not_to be_nil + end + context 'when renew fails' do before do allow_next_found_instance_of(Doorkeeper::Application) do |application| @@ -164,7 +170,7 @@ RSpec.describe Groups::Settings::ApplicationsController do end it { expect { subject }.not_to change { application.reload.secret } } - it { is_expected.to redirect_to(group_settings_application_url(group, application)) } + it { is_expected.to have_gitlab_http_status(:unprocessable_entity) } end end diff --git a/spec/controllers/groups_controller_spec.rb b/spec/controllers/groups_controller_spec.rb index 9184cd2263e..8617cc8af8f 100644 --- a/spec/controllers/groups_controller_spec.rb +++ b/spec/controllers/groups_controller_spec.rb @@ -152,29 +152,6 @@ RSpec.describe 
GroupsController, factory_default: :keep, feature_category: :code end end end - - describe 'require_verification_for_namespace_creation experiment', :experiment do - before do - sign_in(owner) - stub_experiments(require_verification_for_namespace_creation: :candidate) - end - - it 'tracks a "start_create_group" event' do - expect(experiment(:require_verification_for_namespace_creation)).to track( - :start_create_group - ).on_next_instance.with_context(user: owner) - - get :new - end - - context 'when creating a sub-group' do - it 'does not track a "start_create_group" event' do - expect(experiment(:require_verification_for_namespace_creation)).not_to track(:start_create_group) - - get :new, params: { parent_id: group.id } - end - end - end end describe 'GET #activity' do diff --git a/spec/controllers/help_controller_spec.rb b/spec/controllers/help_controller_spec.rb index ac6715bacd5..056df213209 100644 --- a/spec/controllers/help_controller_spec.rb +++ b/spec/controllers/help_controller_spec.rb @@ -181,6 +181,7 @@ RSpec.describe HelpController do context 'when requested file exists' do before do stub_doc_file_read(file_name: 'user/ssh.md', content: fixture_file('blockquote_fence_after.md')) + stub_application_setting(help_page_documentation_base_url: '') subject end @@ -223,13 +224,13 @@ RSpec.describe HelpController do context 'when gitlab_docs is disabled' do let(:docs_enabled) { false } - it_behaves_like 'documentation pages local render' + it_behaves_like 'documentation pages redirect', 'https://docs.gitlab.com' end context 'when host is missing' do let(:host) { nil } - it_behaves_like 'documentation pages local render' + it_behaves_like 'documentation pages redirect', 'https://docs.gitlab.com' end end @@ -251,6 +252,10 @@ RSpec.describe HelpController do end context 'when requested file is missing' do + before do + stub_application_setting(help_page_documentation_base_url: '') + end + it 'renders not found' do get :show, params: { path: 'foo/bar' }, format: 
:md expect(response).to be_not_found diff --git a/spec/controllers/invites_controller_spec.rb b/spec/controllers/invites_controller_spec.rb index b3b7753df61..f3b21e191c4 100644 --- a/spec/controllers/invites_controller_spec.rb +++ b/spec/controllers/invites_controller_spec.rb @@ -192,6 +192,26 @@ RSpec.describe InvitesController do expect(session[:invite_email]).to eq(member.invite_email) end + context 'with stored location for user' do + it 'stores the correct path for user' do + request + + expect(controller.stored_location_for(:user)).to eq(activity_project_path(member.source)) + end + + context 'with relative root' do + before do + stub_default_url_options(script_name: '/gitlab') + end + + it 'stores the correct path for user' do + request + + expect(controller.stored_location_for(:user)).to eq(activity_project_path(member.source)) + end + end + end + context 'when it is part of our invite email experiment' do let(:extra_params) { { invite_type: 'initial_email' } } diff --git a/spec/controllers/oauth/applications_controller_spec.rb b/spec/controllers/oauth/applications_controller_spec.rb index e7ec268a5a2..5b9fd192ad4 100644 --- a/spec/controllers/oauth/applications_controller_spec.rb +++ b/spec/controllers/oauth/applications_controller_spec.rb @@ -86,6 +86,12 @@ RSpec.describe Oauth::ApplicationsController do it_behaves_like 'redirects to login page when the user is not signed in' it_behaves_like 'redirects to 2fa setup page when the user requires it' + it 'returns the secret in json format' do + subject + + expect(json_response['secret']).not_to be_nil + end + context 'when renew fails' do before do allow_next_found_instance_of(Doorkeeper::Application) do |application| @@ -94,7 +100,7 @@ RSpec.describe Oauth::ApplicationsController do end it { expect { subject }.not_to change { application.reload.secret } } - it { is_expected.to redirect_to(oauth_application_url(application)) } + it { is_expected.to have_gitlab_http_status(:unprocessable_entity) } end end 
diff --git a/spec/controllers/projects/blame_controller_spec.rb b/spec/controllers/projects/blame_controller_spec.rb index f322c78b5e3..06c82bcb404 100644 --- a/spec/controllers/projects/blame_controller_spec.rb +++ b/spec/controllers/projects/blame_controller_spec.rb @@ -2,9 +2,9 @@ require 'spec_helper' -RSpec.describe Projects::BlameController do - let(:project) { create(:project, :repository) } - let(:user) { create(:user) } +RSpec.describe Projects::BlameController, feature_category: :source_code_management do + let_it_be(:project) { create(:project, :repository) } + let_it_be(:user) { create(:user) } before do sign_in(user) @@ -13,20 +13,14 @@ RSpec.describe Projects::BlameController do controller.instance_variable_set(:@project, project) end - describe "GET show" do - render_views - - before do - get :show, params: { namespace_id: project.namespace, project_id: project, id: id } - end - - context "valid branch, valid file" do + shared_examples 'blame_response' do + context 'valid branch, valid file' do let(:id) { 'master/files/ruby/popen.rb' } it { is_expected.to respond_with(:success) } end - context "valid branch, invalid file" do + context 'valid branch, invalid file' do let(:id) { 'master/files/ruby/invalid-path.rb' } it 'redirects' do @@ -34,10 +28,30 @@ RSpec.describe Projects::BlameController do end end - context "invalid branch, valid file" do + context 'invalid branch, valid file' do let(:id) { 'invalid-branch/files/ruby/missing_file.rb' } it { is_expected.to respond_with(:not_found) } end end + + describe 'GET show' do + render_views + + before do + get :show, params: { namespace_id: project.namespace, project_id: project, id: id } + end + + it_behaves_like 'blame_response' + end + + describe 'GET page' do + render_views + + before do + get :page, params: { namespace_id: project.namespace, project_id: project, id: id } + end + + it_behaves_like 'blame_response' + end end diff --git a/spec/controllers/projects/blob_controller_spec.rb 
b/spec/controllers/projects/blob_controller_spec.rb index 2c05521d997..b07cb7a228d 100644 --- a/spec/controllers/projects/blob_controller_spec.rb +++ b/spec/controllers/projects/blob_controller_spec.rb @@ -385,7 +385,6 @@ RSpec.describe Projects::BlobController, feature_category: :source_code_manageme let(:namespace) { project.namespace.reload } let(:property) { target_event } let(:label) { 'usage_activity_by_stage_monthly.create.action_monthly_active_users_sfe_edit' } - let(:feature_flag_name) { 'route_hll_to_snowplow_phase4' } end end end @@ -545,7 +544,6 @@ RSpec.describe Projects::BlobController, feature_category: :source_code_manageme let(:namespace) { project.namespace } let(:property) { target_event } let(:label) { 'usage_activity_by_stage_monthly.create.action_monthly_active_users_sfe_edit' } - let(:feature_flag_name) { 'route_hll_to_snowplow_phase4' } end it 'redirects to blob' do diff --git a/spec/controllers/projects/clusters_controller_spec.rb b/spec/controllers/projects/clusters_controller_spec.rb index d16e5eea2e9..ea73f62981e 100644 --- a/spec/controllers/projects/clusters_controller_spec.rb +++ b/spec/controllers/projects/clusters_controller_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe Projects::ClustersController, feature_category: :kubernetes_management do +RSpec.describe Projects::ClustersController, feature_category: :deployment_management do include AccessMatchersForController include GoogleApi::CloudPlatformHelpers include KubernetesHelpers @@ -358,12 +358,6 @@ RSpec.describe Projects::ClustersController, feature_category: :kubernetes_manag expect(response).to have_gitlab_http_status(:ok) expect(response).to match_response_schema('cluster_status') end - - it 'invokes schedule_status_update on each application' do - expect_any_instance_of(Clusters::Applications::Ingress).to receive(:schedule_status_update) - - go - end end describe 'security' do diff --git a/spec/controllers/projects/commit_controller_spec.rb 
b/spec/controllers/projects/commit_controller_spec.rb index 36206a88786..44486d0ed41 100644 --- a/spec/controllers/projects/commit_controller_spec.rb +++ b/spec/controllers/projects/commit_controller_spec.rb @@ -84,22 +84,6 @@ RSpec.describe Projects::CommitController, feature_category: :source_code_manage expect(response).to be_successful end - it 'only loads blobs in the current page' do - stub_feature_flags(async_commit_diff_files: false) - stub_const('Projects::CommitController::COMMIT_DIFFS_PER_PAGE', 1) - - commit = project.commit('1a0b36b3cdad1d2ee32457c102a8c0b7056fa863') - - expect_next_instance_of(Repository) do |repository| - # This commit contains 3 changed files but we expect only the blobs for the first one to be loaded - expect(repository).to receive(:blobs_at).with([[commit.id, '.gitignore']], anything).and_call_original - end - - go(id: commit.id) - - expect(response).to be_ok - end - shared_examples "export as" do |format| it "does generally work" do go(id: commit.id, format: format) @@ -459,6 +443,37 @@ RSpec.describe Projects::CommitController, feature_category: :source_code_manage end end + describe 'GET #diff_files' do + subject(:send_request) { get :diff_files, params: params } + + let(:format) { :html } + let(:params) do + { + namespace_id: project.namespace, + project_id: project, + id: commit.id, + format: format + } + end + + it 'renders diff files' do + send_request + + expect(assigns(:diffs)).to be_a(Gitlab::Diff::FileCollection::Commit) + expect(assigns(:environment)).to be_nil + end + + context 'when format is not html' do + let(:format) { :json } + + it 'returns 404 page' do + send_request + + expect(response).to have_gitlab_http_status(:not_found) + end + end + end + describe 'GET diff_for_path' do def diff_for_path(extra_params = {}) params = { diff --git a/spec/controllers/projects/commits_controller_spec.rb b/spec/controllers/projects/commits_controller_spec.rb index 9e03d1f315b..55ad92c517c 100644 --- 
a/spec/controllers/projects/commits_controller_spec.rb +++ b/spec/controllers/projects/commits_controller_spec.rb @@ -39,6 +39,12 @@ RSpec.describe Projects::CommitsController, feature_category: :source_code_manag it { is_expected.to respond_with(:success) } end + context "HEAD, valid file" do + let(:id) { 'HEAD/README.md' } + + it { is_expected.to respond_with(:success) } + end + context "valid branch, invalid file" do let(:id) { 'master/invalid-path.rb' } diff --git a/spec/controllers/projects/compare_controller_spec.rb b/spec/controllers/projects/compare_controller_spec.rb index 3751b89951c..a49f8b51c12 100644 --- a/spec/controllers/projects/compare_controller_spec.rb +++ b/spec/controllers/projects/compare_controller_spec.rb @@ -284,14 +284,18 @@ RSpec.describe Projects::CompareController do let(:to_ref) { '5937ac0a7beb003549fc5fd26fc247adbce4a52e' } let(:page) { 1 } - it 'shows the diff' do - show_request + shared_examples 'valid compare page' do + it 'shows the diff' do + show_request - expect(response).to be_successful - expect(assigns(:diffs).diff_files.first).to be_present - expect(assigns(:commits).length).to be >= 1 + expect(response).to be_successful + expect(assigns(:diffs).diff_files.first).to be_present + expect(assigns(:commits).length).to be >= 1 + end end + it_behaves_like 'valid compare page' + it 'only loads blobs in the current page' do stub_const('Projects::CompareController::COMMIT_DIFFS_PER_PAGE', 1) @@ -306,6 +310,19 @@ RSpec.describe Projects::CompareController do expect(response).to be_successful end + + context 'when from_ref is HEAD ref' do + let(:from_ref) { 'HEAD' } + let(:to_ref) { 'feature' } # Need to change to_ref too so there's something to compare with HEAD + + it_behaves_like 'valid compare page' + end + + context 'when to_ref is HEAD ref' do + let(:to_ref) { 'HEAD' } + + it_behaves_like 'valid compare page' + end end context 'when page is not valid' do diff --git 
a/spec/controllers/projects/design_management/designs/raw_images_controller_spec.rb b/spec/controllers/projects/design_management/designs/raw_images_controller_spec.rb index 2d39e0e5317..a7f3212a6f9 100644 --- a/spec/controllers/projects/design_management/designs/raw_images_controller_spec.rb +++ b/spec/controllers/projects/design_management/designs/raw_images_controller_spec.rb @@ -80,8 +80,12 @@ RSpec.describe Projects::DesignManagement::Designs::RawImagesController do let(:oldest_version) { design.versions.ordered.last } shared_examples 'a successful request for sha' do + before do + allow(DesignManagement::GitRepository).to receive(:new).and_call_original + end + it do - expect_next_instance_of(DesignManagement::Repository) do |repository| + expect_next_instance_of(DesignManagement::GitRepository) do |repository| expect(repository).to receive(:blob_at).with(expected_ref, design.full_path).and_call_original end diff --git a/spec/controllers/projects/imports_controller_spec.rb b/spec/controllers/projects/imports_controller_spec.rb index 65a80b9e8ec..b4704d56cd9 100644 --- a/spec/controllers/projects/imports_controller_spec.rb +++ b/spec/controllers/projects/imports_controller_spec.rb @@ -27,7 +27,7 @@ RSpec.describe Projects::ImportsController do project.add_maintainer(user) end - context 'when repository does not exists' do + context 'when repository does not exist' do it 'renders template' do get :show, params: { namespace_id: project.namespace.to_param, project_id: project } diff --git a/spec/controllers/projects/issues_controller_spec.rb b/spec/controllers/projects/issues_controller_spec.rb index f1fe1940414..6747678b6fb 100644 --- a/spec/controllers/projects/issues_controller_spec.rb +++ b/spec/controllers/projects/issues_controller_spec.rb @@ -730,7 +730,7 @@ RSpec.describe Projects::IssuesController, feature_category: :team_planning do go(id: issue.iid) expect(json_response).to include('title_text', 'description', 'description_text') - 
expect(json_response).to include('task_status', 'lock_version') + expect(json_response).to include('task_completion_status', 'lock_version') end end end diff --git a/spec/controllers/projects/jobs_controller_spec.rb b/spec/controllers/projects/jobs_controller_spec.rb index 2e29d87dadd..ede26ebd032 100644 --- a/spec/controllers/projects/jobs_controller_spec.rb +++ b/spec/controllers/projects/jobs_controller_spec.rb @@ -1,11 +1,13 @@ # frozen_string_literal: true require 'spec_helper' -RSpec.describe Projects::JobsController, :clean_gitlab_redis_shared_state, feature_category: :continuous_integration do +RSpec.describe Projects::JobsController, :clean_gitlab_redis_shared_state, feature_category: :continuous_integration, factory_default: :keep do include ApiHelpers include HttpIOHelpers + let_it_be(:namespace) { create_default(:namespace) } let_it_be(:project) { create(:project, :public, :repository) } + let_it_be(:merge_request) { create(:merge_request, source_project: project) } let_it_be(:owner) { create(:owner) } let_it_be(:admin) { create(:admin) } let_it_be(:maintainer) { create(:user) } @@ -19,11 +21,16 @@ RSpec.describe Projects::JobsController, :clean_gitlab_redis_shared_state, featu project.add_developer(developer) project.add_reporter(reporter) project.add_guest(guest) + create_default(:owner) + create_default(:user) + create_default(:ci_trigger_request) + create_default(:ci_stage) end let(:user) { developer } - let(:pipeline) { create(:ci_pipeline, project: project) } + let_it_be_with_reload(:pipeline) { create(:ci_pipeline, project: project) } + let_it_be(:default_pipeline) { create_default(:ci_pipeline) } before do stub_feature_flags(ci_enable_live_trace: true) @@ -152,7 +159,6 @@ RSpec.describe Projects::JobsController, :clean_gitlab_redis_shared_state, featu end context 'when requesting JSON' do - let(:merge_request) { create(:merge_request, source_project: project) } let(:user) { developer } before do @@ -211,9 +217,9 @@ RSpec.describe 
Projects::JobsController, :clean_gitlab_redis_shared_state, featu end context 'when job has artifacts' do - context 'with not expiry date' do - let(:job) { create(:ci_build, :success, :artifacts, pipeline: pipeline) } + let_it_be(:job) { create(:ci_build, :success, :artifacts, pipeline: pipeline) } + context 'with not expiry date' do context 'when artifacts are unlocked' do before do job.pipeline.unlocked! @@ -234,7 +240,7 @@ RSpec.describe Projects::JobsController, :clean_gitlab_redis_shared_state, featu context 'when artifacts are locked' do before do - job.pipeline.artifacts_locked! + job.pipeline.reload.artifacts_locked! end it 'exposes needed information' do @@ -252,11 +258,13 @@ RSpec.describe Projects::JobsController, :clean_gitlab_redis_shared_state, featu end context 'with expired artifacts' do - let(:job) { create(:ci_build, :success, :artifacts, :expired, pipeline: pipeline) } + before do + job.update!(artifacts_expire_at: 1.minute.ago) + end context 'when artifacts are unlocked' do before do - job.pipeline.unlocked! + job.pipeline.reload.unlocked! end it 'exposes needed information' do @@ -275,7 +283,7 @@ RSpec.describe Projects::JobsController, :clean_gitlab_redis_shared_state, featu context 'when artifacts are locked' do before do - job.pipeline.artifacts_locked! + job.pipeline.reload.artifacts_locked! 
end it 'exposes needed information' do @@ -292,19 +300,17 @@ RSpec.describe Projects::JobsController, :clean_gitlab_redis_shared_state, featu end end end - end - - context 'when job passed with no trace' do - let(:job) { create(:ci_build, :success, :artifacts, pipeline: pipeline) } - it 'exposes empty state illustrations' do - get_show_json + context 'when job passed with no trace' do + it 'exposes empty state illustrations' do + get_show_json - expect(response).to have_gitlab_http_status(:ok) - expect(response).to match_response_schema('job/job_details') - expect(json_response['status']['illustration']).to have_key('image') - expect(json_response['status']['illustration']).to have_key('size') - expect(json_response['status']['illustration']).to have_key('title') + expect(response).to have_gitlab_http_status(:ok) + expect(response).to match_response_schema('job/job_details') + expect(json_response['status']['illustration']).to have_key('image') + expect(json_response['status']['illustration']).to have_key('size') + expect(json_response['status']['illustration']).to have_key('title') + end end end @@ -320,7 +326,6 @@ RSpec.describe Projects::JobsController, :clean_gitlab_redis_shared_state, featu end context 'with deployment' do - let(:merge_request) { create(:merge_request, source_project: project) } let(:environment) { create(:environment, project: project, name: 'staging', state: :available) } let(:job) { create(:ci_build, :running, environment: environment.name, pipeline: pipeline) } @@ -512,7 +517,6 @@ RSpec.describe Projects::JobsController, :clean_gitlab_redis_shared_state, featu end context 'when requesting triggered job JSON' do - let!(:merge_request) { create(:merge_request, source_project: project) } let(:trigger) { create(:ci_trigger, project: project) } let(:trigger_request) { create(:ci_trigger_request, pipeline: pipeline, trigger: trigger) } let(:job) { create(:ci_build, pipeline: pipeline, trigger_request: trigger_request) } diff --git 
a/spec/controllers/projects/merge_requests_controller_spec.rb b/spec/controllers/projects/merge_requests_controller_spec.rb index 9e18089bb23..fd77d07705d 100644 --- a/spec/controllers/projects/merge_requests_controller_spec.rb +++ b/spec/controllers/projects/merge_requests_controller_spec.rb @@ -575,6 +575,16 @@ RSpec.describe Projects::MergeRequestsController, feature_category: :code_review it 'returns :failed' do expect(json_response).to eq('status' => 'failed') end + + context 'for logging' do + let(:expected_params) { { merge_action_status: 'failed' } } + let(:subject_proc) { proc { subject } } + + subject { post :merge, params: base_params } + + it_behaves_like 'storing arguments in the application context' + it_behaves_like 'not executing any extra queries for the application context' + end end context 'when the sha parameter does not match the source SHA' do @@ -585,6 +595,16 @@ RSpec.describe Projects::MergeRequestsController, feature_category: :code_review it 'returns :sha_mismatch' do expect(json_response).to eq('status' => 'sha_mismatch') end + + context 'for logging' do + let(:expected_params) { { merge_action_status: 'sha_mismatch' } } + let(:subject_proc) { proc { subject } } + + subject { post :merge, params: base_params.merge(sha: 'foo') } + + it_behaves_like 'storing arguments in the application context' + it_behaves_like 'not executing any extra queries for the application context' + end end context 'when the sha parameter matches the source SHA' do @@ -606,6 +626,16 @@ RSpec.describe Projects::MergeRequestsController, feature_category: :code_review merge_with_sha end + context 'for logging' do + let(:expected_params) { { merge_action_status: 'success' } } + let(:subject_proc) { proc { subject } } + + subject { merge_with_sha } + + it_behaves_like 'storing arguments in the application context' + it_behaves_like 'not executing any extra queries for the application context' + end + context 'when squash is passed as 1' do it 'updates the squash 
attribute on the MR to true' do merge_request.update!(squash: false) @@ -673,6 +703,16 @@ RSpec.describe Projects::MergeRequestsController, feature_category: :code_review merge_when_pipeline_succeeds end + context 'for logging' do + let(:expected_params) { { merge_action_status: 'merge_when_pipeline_succeeds' } } + let(:subject_proc) { proc { subject } } + + subject { merge_when_pipeline_succeeds } + + it_behaves_like 'storing arguments in the application context' + it_behaves_like 'not executing any extra queries for the application context' + end + context 'when project.only_allow_merge_if_pipeline_succeeds? is true' do before do project.update_column(:only_allow_merge_if_pipeline_succeeds, true) diff --git a/spec/controllers/projects/pipelines_controller_spec.rb b/spec/controllers/projects/pipelines_controller_spec.rb index 09b703a48d6..7027929d33a 100644 --- a/spec/controllers/projects/pipelines_controller_spec.rb +++ b/spec/controllers/projects/pipelines_controller_spec.rb @@ -199,22 +199,6 @@ RSpec.describe Projects::PipelinesController, feature_category: :continuous_inte check_pipeline_response(returned: 6, all: 6) end end - - context "with lazy_load_pipeline_dropdown_actions feature flag disabled" do - before do - stub_feature_flags(lazy_load_pipeline_dropdown_actions: false) - end - - it 'returns manual and scheduled actions' do - get_pipelines_index_json - - expect(response).to have_gitlab_http_status(:ok) - expect(response).to match_response_schema('pipeline') - - expect(json_response.dig('pipelines', 0, 'details')).to include('manual_actions') - expect(json_response.dig('pipelines', 0, 'details')).to include('scheduled_actions') - end - end end def get_pipelines_index_html(params = {}) @@ -296,23 +280,6 @@ RSpec.describe Projects::PipelinesController, feature_category: :continuous_inte end end - describe 'GET #index' do - before do - stub_application_setting(auto_devops_enabled: false) - end - - context 'with runners_availability_section experiment' do 
- it 'tracks the assignment', :experiment do - stub_experiments(runners_availability_section: true) - - expect(experiment(:runners_availability_section)) - .to track(:assignment).with_context(namespace: project.namespace).on_next_instance - - get :index, params: { namespace_id: project.namespace, project_id: project } - end - end - end - describe 'GET #show' do def get_pipeline_html get :show, params: { namespace_id: project.namespace, project_id: project, id: pipeline }, format: :html @@ -1311,148 +1278,6 @@ RSpec.describe Projects::PipelinesController, feature_category: :continuous_inte end end - describe 'GET config_variables.json', :use_clean_rails_memory_store_caching do - include ReactiveCachingHelpers - - let(:ci_config) { '' } - let(:files) { { '.gitlab-ci.yml' => YAML.dump(ci_config) } } - let(:project) { create(:project, :auto_devops_disabled, :custom_repo, files: files) } - let(:service) { Ci::ListConfigVariablesService.new(project, user) } - - before do - allow(Ci::ListConfigVariablesService) - .to receive(:new) - .and_return(service) - end - - context 'when sending a valid ref' do - let(:ref) { 'master' } - let(:ci_config) do - { - variables: { - KEY1: { value: 'val 1', description: 'description 1' } - }, - test: { - stage: 'test', - script: 'echo' - } - } - end - - before do - synchronous_reactive_cache(service) - end - - it 'returns variable list' do - get_config_variables - - expect(response).to have_gitlab_http_status(:ok) - expect(json_response['KEY1']).to eq({ 'value' => 'val 1', 'description' => 'description 1' }) - end - end - - context 'when sending an invalid ref' do - let(:ref) { 'invalid-ref' } - - before do - synchronous_reactive_cache(service) - end - - it 'returns empty json' do - get_config_variables - - expect(response).to have_gitlab_http_status(:ok) - expect(json_response).to eq({}) - end - end - - context 'when sending an invalid config' do - let(:ref) { 'master' } - let(:ci_config) do - { - variables: { - KEY1: { value: 'val 1', 
description: 'description 1' } - }, - test: { - stage: 'invalid', - script: 'echo' - } - } - end - - before do - synchronous_reactive_cache(service) - end - - it 'returns empty result' do - get_config_variables - - expect(response).to have_gitlab_http_status(:ok) - expect(json_response).to eq({}) - end - end - - context 'when the cache is empty' do - let(:ref) { 'master' } - let(:ci_config) do - { - variables: { - KEY1: { value: 'val 1', description: 'description 1' } - }, - test: { - stage: 'test', - script: 'echo' - } - } - end - - it 'returns no content' do - get_config_variables - - expect(response).to have_gitlab_http_status(:no_content) - end - end - - context 'when project uses external project ci config' do - let(:other_project) { create(:project, :custom_repo, files: other_project_files) } - let(:other_project_files) { { '.gitlab-ci.yml' => YAML.dump(other_project_ci_config) } } - let(:ref) { 'master' } - - let(:other_project_ci_config) do - { - variables: { - KEY1: { value: 'val 1', description: 'description 1' } - }, - test: { - stage: 'test', - script: 'echo' - } - } - end - - before do - other_project.add_developer(user) - project.update!(ci_config_path: ".gitlab-ci.yml@#{other_project.full_path}:master") - synchronous_reactive_cache(service) - end - - it 'returns other project config variables' do - get_config_variables - - expect(response).to have_gitlab_http_status(:ok) - expect(json_response['KEY1']).to eq({ 'value' => 'val 1', 'description' => 'description 1' }) - end - end - - private - - def get_config_variables - get :config_variables, params: { - namespace_id: project.namespace, project_id: project, sha: ref - }, format: :json - end - end - describe 'GET downloadable_artifacts.json' do context 'when pipeline is empty' do let(:pipeline) { create(:ci_empty_pipeline) } diff --git a/spec/controllers/projects/project_members_controller_spec.rb b/spec/controllers/projects/project_members_controller_spec.rb index ab33195eb83..dbea3592e24 100644 --- 
a/spec/controllers/projects/project_members_controller_spec.rb +++ b/spec/controllers/projects/project_members_controller_spec.rb @@ -560,12 +560,4 @@ RSpec.describe Projects::ProjectMembersController do end it_behaves_like 'controller actions' - - context 'when project_members_index_by_project_namespace feature flag is disabled' do - before do - stub_feature_flags(project_members_index_by_project_namespace: false) - end - - it_behaves_like 'controller actions' - end end diff --git a/spec/controllers/projects/service_desk_controller_spec.rb b/spec/controllers/projects/service_desk_controller_spec.rb index e078bf9461e..6b914ac8f19 100644 --- a/spec/controllers/projects/service_desk_controller_spec.rb +++ b/spec/controllers/projects/service_desk_controller_spec.rb @@ -12,8 +12,8 @@ RSpec.describe Projects::ServiceDeskController do let_it_be(:user) { create(:user) } before do - allow(Gitlab::IncomingEmail).to receive(:enabled?) { true } - allow(Gitlab::IncomingEmail).to receive(:supports_wildcard?) { true } + allow(Gitlab::Email::IncomingEmail).to receive(:enabled?) { true } + allow(Gitlab::Email::IncomingEmail).to receive(:supports_wildcard?) 
{ true } project.add_maintainer(user) sign_in(user) diff --git a/spec/controllers/projects/work_items_controller_spec.rb b/spec/controllers/projects/work_items_controller_spec.rb new file mode 100644 index 00000000000..e0f61a4977b --- /dev/null +++ b/spec/controllers/projects/work_items_controller_spec.rb @@ -0,0 +1,156 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe Projects::WorkItemsController, feature_category: :team_planning do + let_it_be(:reporter) { create(:user) } + let_it_be(:guest) { create(:user) } + let_it_be(:project) { create(:project) } + let_it_be(:work_item) { create(:work_item, project: project) } + + let(:file) { 'file' } + + before do + project.add_reporter(reporter) + project.add_guest(guest) + end + + shared_examples 'response with 404 status' do + it 'renders a not found message' do + expect(WorkItems::ImportWorkItemsCsvWorker).not_to receive(:perform_async) + + subject + + expect(response).to have_gitlab_http_status(:not_found) + end + end + + shared_examples 'redirects to new session path' do + it 'redirects to sign in' do + subject + + expect(response).to have_gitlab_http_status(:found) + expect(response).to redirect_to(new_user_session_path) + end + end + + describe 'GET index' do + specify do + expect( + get(:index, params: { namespace_id: project.namespace, project_id: project, work_items_path: work_item.id }) + ).to have_request_urgency(:low) + end + end + + describe 'POST authorize' do + subject do + post(:authorize, params: { namespace_id: project.namespace, project_id: project, file: file }) + end + + specify do + expect(subject).to have_request_urgency(:high) + end + + context 'when user is anonymous' do + it_behaves_like 'redirects to new session path' + end + end + + describe 'POST import_csv' do + subject { post :import_csv, params: { namespace_id: project.namespace, project_id: project, file: file } } + + let(:upload_service) { double } + let(:uploader) { double } + let(:upload) { double } + 
let(:upload_id) { 99 } + + specify do + expect(subject).to have_request_urgency(:low) + end + + context 'with authorized user' do + before do + sign_in(reporter) + allow(controller).to receive(:file_is_valid?).and_return(true) + end + + context 'when feature is available' do + context 'when the upload is processed successfully' do + before do + mock_upload + end + + it 'renders the correct message' do + expect(WorkItems::ImportWorkItemsCsvWorker).to receive(:perform_async) + .with(reporter.id, project.id, upload_id) + + subject + + expect(response).to have_gitlab_http_status(:ok) + expect(json_response['message']).to eq( + "Your work items are being imported. Once finished, you'll receive a confirmation email." + ) + end + end + + context 'when file is not valid' do + before do + allow(controller).to receive(:file_is_valid?).and_return(false) + end + + it 'renders the error message' do + expect(WorkItems::ImportWorkItemsCsvWorker).not_to receive(:perform_async) + + subject + + expect(response).to have_gitlab_http_status(:bad_request) + expect(json_response['errors']) + .to eq('The uploaded file was invalid. 
Supported file extensions are .csv.') + end + end + + context 'when service response includes errors' do + before do + mock_upload(false) + end + + it 'renders the error message' do + subject + + expect(response).to have_gitlab_http_status(:bad_request) + expect(json_response['errors']).to eq('File upload error.') + end + end + end + + context 'when feature is not available' do + before do + stub_feature_flags(import_export_work_items_csv: false) + end + + it_behaves_like 'response with 404 status' + end + end + + context 'with unauthorised user' do + before do + mock_upload + sign_in(guest) + allow(controller).to receive(:file_is_valid?).and_return(true) + end + + it_behaves_like 'response with 404 status' + end + + context 'with anonymous user' do + it 'redirects to sign in page' do + expect(WorkItems::ImportWorkItemsCsvWorker).not_to receive(:perform_async) + + subject + + expect(response).to have_gitlab_http_status(:found) + expect(response).to redirect_to(new_user_session_path) + end + end + end +end diff --git a/spec/controllers/projects_controller_spec.rb b/spec/controllers/projects_controller_spec.rb index 5ece9f09e5f..b652aba1fff 100644 --- a/spec/controllers/projects_controller_spec.rb +++ b/spec/controllers/projects_controller_spec.rb @@ -1773,8 +1773,8 @@ RSpec.describe ProjectsController, feature_category: :projects do it 'updates Service Desk attributes' do project.add_maintainer(user) sign_in(user) - allow(Gitlab::IncomingEmail).to receive(:enabled?) { true } - allow(Gitlab::IncomingEmail).to receive(:supports_wildcard?) { true } + allow(Gitlab::Email::IncomingEmail).to receive(:enabled?) { true } + allow(Gitlab::Email::IncomingEmail).to receive(:supports_wildcard?) 
{ true } params = { service_desk_enabled: true } diff --git a/spec/db/schema_spec.rb b/spec/db/schema_spec.rb index b2a62bcfbd6..e69074fd068 100644 --- a/spec/db/schema_spec.rb +++ b/spec/db/schema_spec.rb @@ -11,11 +11,28 @@ RSpec.describe 'Database schema', feature_category: :database do IGNORED_INDEXES_ON_FKS = { slack_integrations_scopes: %w[slack_api_scope_id], - p_ci_builds_metadata: %w[partition_id] # composable FK, the columns are reversed in the index definition + p_ci_builds_metadata: %w[partition_id], # composable FK, the columns are reversed in the index definition + p_ci_runner_machine_builds: %w[partition_id] # composable FK, the columns are reversed in the index definition }.with_indifferent_access.freeze TABLE_PARTITIONS = %w[ci_builds_metadata].freeze + # If splitting FK and table removal into two MRs as suggested in the docs, use this constant in the initial FK removal MR. + # In the subsequent table removal MR, remove the entries. + # See: https://docs.gitlab.com/ee/development/migration_style_guide.html#dropping-a-database-table + REMOVED_FKS = { + clusters_applications_cert_managers: %w[cluster_id], + clusters_applications_cilium: %w[cluster_id], + clusters_applications_crossplane: %w[cluster_id], + clusters_applications_helm: %w[cluster_id], + clusters_applications_ingress: %w[cluster_id], + clusters_applications_jupyter: %w[cluster_id oauth_application_id], + clusters_applications_knative: %w[cluster_id], + clusters_applications_prometheus: %w[cluster_id], + clusters_applications_runners: %w[cluster_id], + serverless_domain_cluster: %w[clusters_applications_knative_id creator_id pages_domain_id] + }.with_indifferent_access.freeze + # List of columns historically missing a FK, don't add more columns # See: https://docs.gitlab.com/ee/development/database/foreign_keys.html#naming-foreign-keys IGNORED_FK_COLUMNS = { @@ -31,6 +48,7 @@ RSpec.describe 'Database schema', feature_category: :database do award_emoji: %w[awardable_id user_id], aws_roles: 
%w[role_external_id], boards: %w[milestone_id iteration_id], + broadcast_messages: %w[namespace_id], chat_names: %w[chat_id team_id user_id integration_id], chat_teams: %w[team_id], ci_build_needs: %w[partition_id build_id], @@ -94,7 +112,6 @@ RSpec.describe 'Database schema', feature_category: :database do project_build_artifacts_size_refreshes: %w[last_job_artifact_id], project_data_transfers: %w[project_id namespace_id], project_error_tracking_settings: %w[sentry_project_id], - project_group_links: %w[group_id], project_statistics: %w[namespace_id], projects: %w[ci_id mirror_user_id], redirect_routes: %w[source_id], @@ -120,7 +137,9 @@ RSpec.describe 'Database schema', feature_category: :database do vulnerability_reads: %w[cluster_agent_id], # See: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/87584 # Fixes performance issues with the deletion of web-hooks with many log entries - web_hook_logs: %w[web_hook_id] + web_hook_logs: %w[web_hook_id], + ml_candidates: %w[internal_id] + }.with_indifferent_access.freeze context 'for table' do @@ -204,7 +223,6 @@ RSpec.describe 'Database schema', feature_category: :database do 'Ci::Processable' => %w[failure_reason], 'Ci::Runner' => %w[access_level], 'Ci::Stage' => %w[status], - 'Clusters::Applications::Ingress' => %w[ingress_type], 'Clusters::Cluster' => %w[platform_type provider_type], 'CommitStatus' => %w[failure_reason], 'GenericCommitStatus' => %w[failure_reason], @@ -312,6 +330,28 @@ RSpec.describe 'Database schema', feature_category: :database do expect(problematic_tables).to be_empty end end + + context 'for CI partitioned table' do + # Check that each partitionable model with more than 1 column has the partition_id column at the trailing + # position. Using PARTITIONABLE_MODELS instead of iterating tables since when partitioning existing tables, + # the routing table only gets created after the PK has already been created, which would be too late for a check. 
+ + skip_tables = %w[] + partitionable_models = Ci::Partitionable::Testing::PARTITIONABLE_MODELS + (partitionable_models - skip_tables).each do |klass| + model = klass.safe_constantize + table_name = model.table_name + + primary_key_columns = Array(model.connection.primary_key(table_name)) + next if primary_key_columns.count == 1 + + describe table_name do + it 'expects every PK to have partition_id at trailing position' do + expect(primary_key_columns).to match([an_instance_of(String), 'partition_id']) + end + end + end + end end context 'index names' do @@ -347,7 +387,7 @@ RSpec.describe 'Database schema', feature_category: :database do end def ignored_fk_columns(table) - IGNORED_FK_COLUMNS.fetch(table, []) + REMOVED_FKS.merge(IGNORED_FK_COLUMNS).fetch(table, []) end def ignored_index_columns(table) diff --git a/spec/experiments/require_verification_for_namespace_creation_experiment_spec.rb b/spec/experiments/require_verification_for_namespace_creation_experiment_spec.rb deleted file mode 100644 index c91a8f1950e..00000000000 --- a/spec/experiments/require_verification_for_namespace_creation_experiment_spec.rb +++ /dev/null @@ -1,49 +0,0 @@ -# frozen_string_literal: true - -require 'spec_helper' - -RSpec.describe RequireVerificationForNamespaceCreationExperiment, :experiment do - subject(:experiment) { described_class.new(user: user) } - - let(:user_created_at) { RequireVerificationForNamespaceCreationExperiment::EXPERIMENT_START_DATE + 1.hour } - let(:user) { create(:user, created_at: user_created_at) } - - describe '#candidate?' 
do - context 'when experiment subject is candidate' do - before do - stub_experiments(require_verification_for_namespace_creation: :candidate) - end - - it 'returns true' do - expect(experiment.candidate?).to eq(true) - end - end - - context 'when experiment subject is control' do - before do - stub_experiments(require_verification_for_namespace_creation: :control) - end - - it 'returns false' do - expect(experiment.candidate?).to eq(false) - end - end - end - - describe 'exclusions' do - context 'when user is new' do - it 'is not excluded' do - expect(subject).not_to exclude(user: user) - end - end - - context 'when user is NOT new' do - let(:user_created_at) { RequireVerificationForNamespaceCreationExperiment::EXPERIMENT_START_DATE - 1.day } - let(:user) { create(:user, created_at: user_created_at) } - - it 'is excluded' do - expect(subject).to exclude(user: user) - end - end - end -end diff --git a/spec/experiments/security_reports_mr_widget_prompt_experiment_spec.rb b/spec/experiments/security_reports_mr_widget_prompt_experiment_spec.rb deleted file mode 100644 index ee02fa5f1f2..00000000000 --- a/spec/experiments/security_reports_mr_widget_prompt_experiment_spec.rb +++ /dev/null @@ -1,9 +0,0 @@ -# frozen_string_literal: true - -require 'spec_helper' - -RSpec.describe SecurityReportsMrWidgetPromptExperiment do - it "defines a control and candidate" do - expect(subject.behaviors.keys).to match_array(%w[control candidate]) - end -end diff --git a/spec/factories/abuse/trust_score.rb b/spec/factories/abuse/trust_score.rb new file mode 100644 index 00000000000..a5ea7666945 --- /dev/null +++ b/spec/factories/abuse/trust_score.rb @@ -0,0 +1,10 @@ +# frozen_string_literal: true + +FactoryBot.define do + factory :abuse_trust_score, class: 'Abuse::TrustScore' do + user + score { 0.1 } + source { :spamcheck } + correlation_id_value { 'abcdefg' } + end +end diff --git a/spec/factories/abuse_reports.rb b/spec/factories/abuse_reports.rb index 9f05d183ba4..699da744fab 100644 
--- a/spec/factories/abuse_reports.rb +++ b/spec/factories/abuse_reports.rb @@ -11,5 +11,9 @@ FactoryBot.define do trait :closed do status { 'closed' } end + + trait :with_screenshot do + screenshot { fixture_file_upload('spec/fixtures/dk.png') } + end end end diff --git a/spec/factories/ci/pipelines.rb b/spec/factories/ci/pipelines.rb index d68562c0aa5..2b6bddd2f6d 100644 --- a/spec/factories/ci/pipelines.rb +++ b/spec/factories/ci/pipelines.rb @@ -21,6 +21,12 @@ FactoryBot.define do transient { name { nil } } + transient { ci_ref_presence { true } } + + before(:create) do |pipeline, evaluator| + pipeline.ensure_ci_ref! if evaluator.ci_ref_presence && pipeline.ci_ref_id.nil? + end + after(:build) do |pipeline, evaluator| if evaluator.child_of pipeline.project = evaluator.child_of.project @@ -54,12 +60,6 @@ FactoryBot.define do end factory :ci_pipeline do - transient { ci_ref_presence { true } } - - before(:create) do |pipeline, evaluator| - pipeline.ensure_ci_ref! if evaluator.ci_ref_presence && pipeline.ci_ref_id.nil? 
- end - trait :invalid do status { :failed } yaml_errors { 'invalid YAML' } diff --git a/spec/factories/ci/reports/security/findings.rb b/spec/factories/ci/reports/security/findings.rb index 78c11210f97..c57a2dd479f 100644 --- a/spec/factories/ci/reports/security/findings.rb +++ b/spec/factories/ci/reports/security/findings.rb @@ -27,6 +27,7 @@ FactoryBot.define do url: "https://crypto.stackexchange.com/questions/31428/pbewithmd5anddes-cipher-does-not-check-for-integrity-first" } ], + raw_source_code_extract: 'AES/ECB/NoPadding', evidence: { summary: 'Credit card detected', request: { diff --git a/spec/factories/ci/reports/security/reports.rb b/spec/factories/ci/reports/security/reports.rb index 5699b8fee3e..60d1f4615ac 100644 --- a/spec/factories/ci/reports/security/reports.rb +++ b/spec/factories/ci/reports/security/reports.rb @@ -19,6 +19,19 @@ FactoryBot.define do evaluator.findings.each { |o| report.add_finding(o) } end + factory :dependency_scanning_security_report do + type { :dependency_scanning } + + after :create do |report| + artifact = report.pipeline.job_artifacts.dependency_scanning.last + if artifact.present? 
+ content = File.read(artifact.file.path) + + Gitlab::Ci::Parsers::Security::DependencyScanning.parse!(content, report) + end + end + end + skip_create initialize_with do diff --git a/spec/factories/ci/runner_machine_builds.rb b/spec/factories/ci/runner_machine_builds.rb index 0181def26ba..34238760112 100644 --- a/spec/factories/ci/runner_machine_builds.rb +++ b/spec/factories/ci/runner_machine_builds.rb @@ -1,8 +1,8 @@ # frozen_string_literal: true FactoryBot.define do - factory :ci_runner_machine_build, class: 'Ci::RunnerMachineBuild' do + factory :ci_runner_machine_build, class: 'Ci::RunnerManagerBuild' do build factory: :ci_build, scheduling_type: :dag - runner_machine factory: :ci_runner_machine + runner_manager factory: :ci_runner_machine end end diff --git a/spec/factories/ci/runner_machines.rb b/spec/factories/ci/runner_machines.rb deleted file mode 100644 index 9d601caa634..00000000000 --- a/spec/factories/ci/runner_machines.rb +++ /dev/null @@ -1,13 +0,0 @@ -# frozen_string_literal: true - -FactoryBot.define do - factory :ci_runner_machine, class: 'Ci::RunnerMachine' do - runner factory: :ci_runner - system_xid { "r_#{SecureRandom.hex.slice(0, 10)}" } - - trait :stale do - created_at { 1.year.ago } - contacted_at { Ci::RunnerMachine::STALE_TIMEOUT.ago } - end - end -end diff --git a/spec/factories/ci/runner_managers.rb b/spec/factories/ci/runner_managers.rb new file mode 100644 index 00000000000..7a2b0c37215 --- /dev/null +++ b/spec/factories/ci/runner_managers.rb @@ -0,0 +1,13 @@ +# frozen_string_literal: true + +FactoryBot.define do + factory :ci_runner_machine, class: 'Ci::RunnerManager' do + runner factory: :ci_runner + system_xid { "r_#{SecureRandom.hex.slice(0, 10)}" } + + trait :stale do + created_at { 1.year.ago } + contacted_at { Ci::RunnerManager::STALE_TIMEOUT.ago } + end + end +end diff --git a/spec/factories/ci/runners.rb b/spec/factories/ci/runners.rb index a9a637b4284..f001cecd28e 100644 --- a/spec/factories/ci/runners.rb +++ 
b/spec/factories/ci/runners.rb @@ -66,9 +66,9 @@ FactoryBot.define do end end - trait :with_runner_machine do + trait :with_runner_manager do after(:build) do |runner, evaluator| - runner.runner_machines << build(:ci_runner_machine, runner: runner) + runner.runner_managers << build(:ci_runner_machine, runner: runner) end end diff --git a/spec/factories/clusters/agents/authorizations/ci_access/group_authorizations.rb b/spec/factories/clusters/agents/authorizations/ci_access/group_authorizations.rb new file mode 100644 index 00000000000..659114eef8e --- /dev/null +++ b/spec/factories/clusters/agents/authorizations/ci_access/group_authorizations.rb @@ -0,0 +1,18 @@ +# frozen_string_literal: true + +FactoryBot.define do + factory :agent_ci_access_group_authorization, class: 'Clusters::Agents::Authorizations::CiAccess::GroupAuthorization' do + association :agent, factory: :cluster_agent + group + + transient do + environments { nil } + end + + config do + { default_namespace: 'production' }.tap do |c| + c[:environments] = environments if environments + end + end + end +end diff --git a/spec/factories/clusters/agents/authorizations/ci_access/project_authorizations.rb b/spec/factories/clusters/agents/authorizations/ci_access/project_authorizations.rb new file mode 100644 index 00000000000..10d4f8fb946 --- /dev/null +++ b/spec/factories/clusters/agents/authorizations/ci_access/project_authorizations.rb @@ -0,0 +1,18 @@ +# frozen_string_literal: true + +FactoryBot.define do + factory :agent_ci_access_project_authorization, class: 'Clusters::Agents::Authorizations::CiAccess::ProjectAuthorization' do + association :agent, factory: :cluster_agent + project + + transient do + environments { nil } + end + + config do + { default_namespace: 'production' }.tap do |c| + c[:environments] = environments if environments + end + end + end +end diff --git a/spec/factories/clusters/agents/authorizations/user_access/group_authorizations.rb 
b/spec/factories/clusters/agents/authorizations/user_access/group_authorizations.rb new file mode 100644 index 00000000000..203aadbd741 --- /dev/null +++ b/spec/factories/clusters/agents/authorizations/user_access/group_authorizations.rb @@ -0,0 +1,10 @@ +# frozen_string_literal: true + +FactoryBot.define do + factory :agent_user_access_group_authorization, + class: 'Clusters::Agents::Authorizations::UserAccess::GroupAuthorization' do + association :agent, factory: :cluster_agent + config { {} } + group + end +end diff --git a/spec/factories/clusters/agents/authorizations/user_access/project_authorizations.rb b/spec/factories/clusters/agents/authorizations/user_access/project_authorizations.rb new file mode 100644 index 00000000000..8171607f578 --- /dev/null +++ b/spec/factories/clusters/agents/authorizations/user_access/project_authorizations.rb @@ -0,0 +1,10 @@ +# frozen_string_literal: true + +FactoryBot.define do + factory :agent_user_access_project_authorization, + class: 'Clusters::Agents::Authorizations::UserAccess::ProjectAuthorization' do + association :agent, factory: :cluster_agent + config { {} } + project + end +end diff --git a/spec/factories/clusters/agents/group_authorizations.rb b/spec/factories/clusters/agents/group_authorizations.rb deleted file mode 100644 index abe25794234..00000000000 --- a/spec/factories/clusters/agents/group_authorizations.rb +++ /dev/null @@ -1,18 +0,0 @@ -# frozen_string_literal: true - -FactoryBot.define do - factory :agent_group_authorization, class: 'Clusters::Agents::GroupAuthorization' do - association :agent, factory: :cluster_agent - group - - transient do - environments { nil } - end - - config do - { default_namespace: 'production' }.tap do |c| - c[:environments] = environments if environments - end - end - end -end diff --git a/spec/factories/clusters/agents/project_authorizations.rb b/spec/factories/clusters/agents/project_authorizations.rb deleted file mode 100644 index eecbfe95bfc..00000000000 --- 
a/spec/factories/clusters/agents/project_authorizations.rb +++ /dev/null @@ -1,18 +0,0 @@ -# frozen_string_literal: true - -FactoryBot.define do - factory :agent_project_authorization, class: 'Clusters::Agents::ProjectAuthorization' do - association :agent, factory: :cluster_agent - project - - transient do - environments { nil } - end - - config do - { default_namespace: 'production' }.tap do |c| - c[:environments] = environments if environments - end - end - end -end diff --git a/spec/factories/clusters/applications/helm.rb b/spec/factories/clusters/applications/helm.rb deleted file mode 100644 index 99110a9b841..00000000000 --- a/spec/factories/clusters/applications/helm.rb +++ /dev/null @@ -1,115 +0,0 @@ -# frozen_string_literal: true - -FactoryBot.define do - factory :clusters_applications_helm, class: 'Clusters::Applications::Helm' do - cluster factory: %i(cluster provided_by_gcp) - - transient do - helm_installed { true } - end - - before(:create) do |_record, evaluator| - if evaluator.helm_installed - stub_method(Gitlab::Kubernetes::Helm::V2::Certificate, :generate_root) do - OpenStruct.new( # rubocop: disable Style/OpenStructUse - key_string: File.read(Rails.root.join('spec/fixtures/clusters/sample_key.key')), - cert_string: File.read(Rails.root.join('spec/fixtures/clusters/sample_cert.pem')) - ) - end - end - end - - after(:create) do |_record, evaluator| - if evaluator.helm_installed - restore_original_methods(Gitlab::Kubernetes::Helm::V2::Certificate) - end - end - - trait :not_installable do - status { -2 } - end - - trait :errored do - status { -1 } - status_reason { 'something went wrong' } - end - - trait :installable do - status { 0 } - end - - trait :scheduled do - status { 1 } - end - - trait :installing do - status { 2 } - end - - trait :installed do - status { 3 } - end - - trait :updating do - status { 4 } - end - - trait :updated do - status { 5 } - end - - trait :update_errored do - status { 6 } - status_reason { 'something went wrong' } - 
end - - trait :uninstalling do - status { 7 } - end - - trait :uninstall_errored do - status { 8 } - status_reason { 'something went wrong' } - end - - trait :uninstalled do - status { 10 } - end - - trait :externally_installed do - status { 11 } - end - - trait :timed_out do - installing - updated_at { ClusterWaitForAppInstallationWorker::TIMEOUT.ago } - end - - # Common trait used by the apps below - trait :no_helm_installed do - cluster factory: %i(cluster provided_by_gcp) - - transient do - helm_installed { false } - end - end - - factory :clusters_applications_ingress, class: 'Clusters::Applications::Ingress' do - cluster factory: %i(cluster with_installed_helm provided_by_gcp) - end - - factory :clusters_applications_runner, class: 'Clusters::Applications::Runner' do - cluster factory: %i(cluster with_installed_helm provided_by_gcp) - end - - factory :clusters_applications_knative, class: 'Clusters::Applications::Knative' do - hostname { 'example.com' } - cluster factory: %i(cluster with_installed_helm provided_by_gcp) - end - - factory :clusters_applications_jupyter, class: 'Clusters::Applications::Jupyter' do - oauth_application factory: :oauth_application - cluster factory: %i(cluster with_installed_helm provided_by_gcp project) - end - end -end diff --git a/spec/factories/clusters/clusters.rb b/spec/factories/clusters/clusters.rb index d92ee6dcbe7..2785a8c9946 100644 --- a/spec/factories/clusters/clusters.rb +++ b/spec/factories/clusters/clusters.rb @@ -82,22 +82,10 @@ FactoryBot.define do sequence(:environment_scope) { |n| "production#{n}/*" } end - trait :with_installed_helm do - application_helm factory: %i(clusters_applications_helm installed) - end - trait :with_installed_prometheus do integration_prometheus factory: %i(clusters_integrations_prometheus) end - trait :with_all_applications do - application_helm factory: %i(clusters_applications_helm installed) - application_ingress factory: %i(clusters_applications_ingress installed) - 
application_runner factory: %i(clusters_applications_runner installed) - application_jupyter factory: %i(clusters_applications_jupyter installed) - application_knative factory: %i(clusters_applications_knative installed) - end - trait :with_domain do domain { 'example.com' } end diff --git a/spec/factories/gitlab/database/background_migration/schema_inconsistencies.rb b/spec/factories/gitlab/database/background_migration/schema_inconsistencies.rb new file mode 100644 index 00000000000..b71b0971417 --- /dev/null +++ b/spec/factories/gitlab/database/background_migration/schema_inconsistencies.rb @@ -0,0 +1,11 @@ +# frozen_string_literal: true + +FactoryBot.define do + factory :schema_inconsistency, class: '::Gitlab::Database::SchemaValidation::SchemaInconsistency' do + issue factory: :issue + + object_name { 'name' } + table_name { 'table' } + valitador_name { 'validator' } + end +end diff --git a/spec/factories/group_members.rb b/spec/factories/group_members.rb index 702db45554e..c8ee52019a4 100644 --- a/spec/factories/group_members.rb +++ b/spec/factories/group_members.rb @@ -30,6 +30,12 @@ FactoryBot.define do after(:build) { |group_member, _| group_member.user.block! } end + trait :banned do + after(:create) do |member| + create(:namespace_ban, namespace: member.member_namespace.root_ancestor, user: member.user) unless member.owner? 
+ end + end + trait :minimal_access do to_create { |instance| instance.save!(validate: false) } diff --git a/spec/factories/integrations.rb b/spec/factories/integrations.rb index caeac6e3b92..d765e5562b6 100644 --- a/spec/factories/integrations.rb +++ b/spec/factories/integrations.rb @@ -88,6 +88,8 @@ FactoryBot.define do jira_issue_transition_automatic { false } jira_issue_transition_id { '56-1' } issues_enabled { false } + jira_issue_prefix { '' } + jira_issue_regex { '' } project_key { nil } vulnerabilities_enabled { false } vulnerabilities_issuetype { nil } @@ -270,6 +272,7 @@ FactoryBot.define do active { true } type { 'Integrations::GooglePlay' } + package_name { 'com.gitlab.foo.bar' } service_account_key_file_name { 'service_account.json' } service_account_key { File.read('spec/fixtures/service_account.json') } end diff --git a/spec/factories/issues.rb b/spec/factories/issues.rb index 70a4a3ec822..67824a10288 100644 --- a/spec/factories/issues.rb +++ b/spec/factories/issues.rb @@ -66,6 +66,11 @@ FactoryBot.define do end end + trait :requirement do + issue_type { :requirement } + association :work_item_type, :default, :requirement + end + trait :task do issue_type { :task } association :work_item_type, :default, :task @@ -81,6 +86,16 @@ FactoryBot.define do association :work_item_type, :default, :key_result end + trait :incident do + issue_type { :incident } + association :work_item_type, :default, :incident + end + + trait :test_case do + issue_type { :test_case } + association :work_item_type, :default, :test_case + end + factory :incident do issue_type { :incident } association :work_item_type, :default, :incident diff --git a/spec/factories/member_roles.rb b/spec/factories/member_roles.rb deleted file mode 100644 index 503438d2521..00000000000 --- a/spec/factories/member_roles.rb +++ /dev/null @@ -1,11 +0,0 @@ -# frozen_string_literal: true - -FactoryBot.define do - factory :member_role do - namespace { association(:group) } - base_access_level { 
Gitlab::Access::DEVELOPER } - - trait(:developer) { base_access_level { Gitlab::Access::DEVELOPER } } - trait(:guest) { base_access_level { Gitlab::Access::GUEST } } - end -end diff --git a/spec/factories/ml/candidates.rb b/spec/factories/ml/candidates.rb index 1b41e39d711..9d049987cfd 100644 --- a/spec/factories/ml/candidates.rb +++ b/spec/factories/ml/candidates.rb @@ -1,9 +1,11 @@ # frozen_string_literal: true FactoryBot.define do factory :ml_candidates, class: '::Ml::Candidate' do - association :experiment, factory: :ml_experiments + association :project, factory: :project association :user + experiment { association :ml_experiments, project_id: project.id } + trait :with_metrics_and_params do after(:create) do |candidate| candidate.metrics = FactoryBot.create_list(:ml_candidate_metrics, 2, candidate: candidate ) @@ -19,10 +21,10 @@ FactoryBot.define do trait :with_artifact do after(:create) do |candidate| - FactoryBot.create(:generic_package, - name: candidate.package_name, - version: candidate.package_version, - project: candidate.project) + candidate.package = FactoryBot.create(:generic_package, + name: candidate.package_name, + version: candidate.package_version, + project: candidate.project) end end end diff --git a/spec/factories/notes.rb b/spec/factories/notes.rb index 2a21bde5436..c58e7bb2e79 100644 --- a/spec/factories/notes.rb +++ b/spec/factories/notes.rb @@ -196,6 +196,10 @@ FactoryBot.define do confidential { true } end + trait :internal do + internal { true } + end + trait :with_review do review end diff --git a/spec/factories/packages/debian/file_metadatum.rb b/spec/factories/packages/debian/file_metadatum.rb index ef6c4e1f222..6b6cd9c51f3 100644 --- a/spec/factories/packages/debian/file_metadatum.rb +++ b/spec/factories/packages/debian/file_metadatum.rb @@ -2,11 +2,18 @@ FactoryBot.define do factory :debian_file_metadatum, class: 'Packages::Debian::FileMetadatum' do - package_file { association(:debian_package_file, without_loaded_metadatum: 
true) } + package_file do + if file_type == 'unknown' + association(:debian_package_file, :unknown, without_loaded_metadatum: true) + else + association(:debian_package_file, without_loaded_metadatum: true) + end + end + file_type { 'deb' } component { 'main' } architecture { 'amd64' } - fields { { 'a': 'b' } } + fields { { 'a' => 'b' } } trait(:unknown) do file_type { 'unknown' } @@ -32,19 +39,20 @@ FactoryBot.define do 'Source' => package_file.package.name, 'Binary' => 'sample-dev, libsample0, sample-udeb, sample-ddeb', 'Architecture' => 'any', - 'Version': package_file.package.version, + 'Version' => package_file.package.version, 'Maintainer' => "#{FFaker::Name.name} <#{FFaker::Internet.email}>", 'Homepage' => FFaker::Internet.http_url, 'Standards-Version' => '4.5.0', 'Build-Depends' => 'debhelper-compat (= 13)', - 'Package-List' => <<~EOF.rstrip, - libsample0 deb libs optional arch=any', - 'sample-ddeb deb libs optional arch=any', - sample-dev deb libdevel optional arch=any', - sample-udeb udeb libs optional arch=any', - EOF + 'Package-List' => <<~PACKAGELIST.rstrip, + libsample0 deb libs optional arch=any + sample-ddeb deb libs optional arch=any + sample-dev deb libdevel optional arch=any + sample-udeb udeb libs optional arch=any + PACKAGELIST 'Checksums-Sha1' => "\n4a9cb2a7c77a68dc0fe54ba8ecef133a7c949e9d 964 sample_1.2.3~alpha2.tar.xz", - 'Checksums-Sha256' => "\nc9d05185ca158bb804977fa9d7b922e8a0f644a2da41f99d2787dd61b1e2e2c5 964 sample_1.2.3~alpha2.tar.xz", + 'Checksums-Sha256' => + "\nc9d05185ca158bb804977fa9d7b922e8a0f644a2da41f99d2787dd61b1e2e2c5 964 sample_1.2.3~alpha2.tar.xz", 'Files' => "\nadc69e57cda38d9bb7c8d59cacfb6869 964 sample_1.2.3~alpha2.tar.xz" } end @@ -56,22 +64,22 @@ FactoryBot.define do architecture { 'amd64' } fields do { - 'Package' => 'libsample0', - 'Source' => package_file.package.name, - 'Version' => package_file.package.version, - 'Architecture' => 'amd64', - 'Maintainer' => "#{FFaker::Name.name} <#{FFaker::Internet.email}>", - 
'Installed-Size' => '7', - 'Section' => 'libs', - 'Priority' => 'optional', - 'Multi-Arch' => 'same', - 'Homepage' => FFaker::Internet.http_url, - 'Description' => <<~EOF.rstrip - Some mostly empty lib - Used in GitLab tests. + 'Package' => 'libsample0', + 'Source' => package_file.package.name, + 'Version' => package_file.package.version, + 'Architecture' => 'amd64', + 'Maintainer' => "#{FFaker::NameCN.name} #{FFaker::Name.name} <#{FFaker::Internet.email}>", + 'Installed-Size' => '7', + 'Section' => 'libs', + 'Priority' => 'optional', + 'Multi-Arch' => 'same', + 'Homepage' => FFaker::Internet.http_url, + 'Description' => <<~DESCRIPTION.rstrip + Some mostly empty lib + Used in GitLab tests. - Testing another paragraph. - EOF + Testing another paragraph. + DESCRIPTION } end end @@ -93,12 +101,12 @@ FactoryBot.define do 'Priority' => 'optional', 'Multi-Arch' => 'same', 'Homepage' => FFaker::Internet.http_url, - 'Description' => <<~EOF.rstrip + 'Description' => <<~DESCRIPTION.rstrip Some mostly empty development files Used in GitLab tests. Testing another paragraph. 
- EOF + DESCRIPTION } end end @@ -107,28 +115,28 @@ FactoryBot.define do file_type { 'udeb' } component { 'main' } architecture { 'amd64' } - fields { { 'a': 'b' } } + fields { { 'a' => 'b' } } end trait(:ddeb) do file_type { 'ddeb' } component { 'main' } architecture { 'amd64' } - fields { { 'a': 'b' } } + fields { { 'a' => 'b' } } end trait(:buildinfo) do file_type { 'buildinfo' } component { 'main' } architecture { nil } - fields { { 'Architecture': 'amd64 source' } } + fields { { 'Architecture' => 'amd64 source' } } end trait(:changes) do file_type { 'changes' } component { nil } architecture { nil } - fields { { 'Architecture': 'source amd64' } } + fields { { 'Architecture' => 'source amd64' } } end end end diff --git a/spec/factories/packages/npm/metadata_cache.rb b/spec/factories/packages/npm/metadata_cache.rb new file mode 100644 index 00000000000..b06915bcb46 --- /dev/null +++ b/spec/factories/packages/npm/metadata_cache.rb @@ -0,0 +1,10 @@ +# frozen_string_literal: true + +FactoryBot.define do + factory :npm_metadata_cache, class: 'Packages::Npm::MetadataCache' do + project + sequence(:package_name) { |n| "@#{project.root_namespace.path}/package-#{n}" } + file { 'unnamed' } + size { 100.kilobytes } + end +end diff --git a/spec/factories/packages/package_files.rb b/spec/factories/packages/package_files.rb index ababa8fa7f5..4a2d412832c 100644 --- a/spec/factories/packages/package_files.rb +++ b/spec/factories/packages/package_files.rb @@ -215,6 +215,7 @@ FactoryBot.define do end trait(:keep) do + # do not override attributes end end diff --git a/spec/factories/packages/packages.rb b/spec/factories/packages/packages.rb index 1d5119638ca..283df3428db 100644 --- a/spec/factories/packages/packages.rb +++ b/spec/factories/packages/packages.rb @@ -78,13 +78,17 @@ FactoryBot.define do after :build do |package, evaluator| if evaluator.published_in == :create - create(:debian_publication, package: package) + build(:debian_publication, package: package) elsif 
!evaluator.published_in.nil? create(:debian_publication, package: package, distribution: evaluator.published_in) end end after :create do |package, evaluator| + if evaluator.published_in == :create + package.debian_publication.save! + end + unless evaluator.without_package_files create :debian_package_file, :source, evaluator.file_metadatum_trait, package: package create :debian_package_file, :dsc, evaluator.file_metadatum_trait, package: package diff --git a/spec/factories/project_members.rb b/spec/factories/project_members.rb index 57f228650a1..fb62b2ed951 100644 --- a/spec/factories/project_members.rb +++ b/spec/factories/project_members.rb @@ -26,6 +26,12 @@ FactoryBot.define do after(:build) { |project_member, _| project_member.user.block! } end + trait :banned do + after(:create) do |member| + create(:namespace_ban, namespace: member.member_namespace.root_ancestor, user: member.user) unless member.owner? + end + end + trait :awaiting do after(:create) do |member| member.update!(state: ::Member::STATE_AWAITING) diff --git a/spec/factories/projects.rb b/spec/factories/projects.rb index 299dd165807..c078514514b 100644 --- a/spec/factories/projects.rb +++ b/spec/factories/projects.rb @@ -535,4 +535,11 @@ FactoryBot.define do trait :in_subgroup do namespace factory: [:group, :nested] end + + trait :readme do + custom_repo + + name { 'gitlab-profile' } + files { { 'README.md' => 'Hello World' } } + end end diff --git a/spec/factories/projects/data_transfers.rb b/spec/factories/projects/data_transfers.rb index 4184f475663..3c335c876e4 100644 --- a/spec/factories/projects/data_transfers.rb +++ b/spec/factories/projects/data_transfers.rb @@ -5,5 +5,9 @@ FactoryBot.define do project factory: :project namespace { project.root_namespace } date { Time.current.utc.beginning_of_month } + repository_egress { 1 } + artifacts_egress { 2 } + packages_egress { 3 } + registry_egress { 4 } end end diff --git a/spec/factories/resource_events/issue_assignment_events.rb 
b/spec/factories/resource_events/issue_assignment_events.rb new file mode 100644 index 00000000000..72319905d0d --- /dev/null +++ b/spec/factories/resource_events/issue_assignment_events.rb @@ -0,0 +1,9 @@ +# frozen_string_literal: true + +FactoryBot.define do + factory :issue_assignment_event, class: 'ResourceEvents::IssueAssignmentEvent' do + action { :add } + issue + user + end +end diff --git a/spec/factories/resource_events/merge_request_assignment_events.rb b/spec/factories/resource_events/merge_request_assignment_events.rb new file mode 100644 index 00000000000..6d388543648 --- /dev/null +++ b/spec/factories/resource_events/merge_request_assignment_events.rb @@ -0,0 +1,9 @@ +# frozen_string_literal: true + +FactoryBot.define do + factory :merge_request_assignment_event, class: 'ResourceEvents::MergeRequestAssignmentEvent' do + action { :add } + merge_request + user + end +end diff --git a/spec/factories/search_index.rb b/spec/factories/search_index.rb new file mode 100644 index 00000000000..15d7024dbf1 --- /dev/null +++ b/spec/factories/search_index.rb @@ -0,0 +1,10 @@ +# frozen_string_literal: true + +FactoryBot.define do + factory :search_index, class: 'Search::Index' do + initialize_with { type.present? ? 
type.new : Search::Index.new } + sequence(:path) { |n| "index-path-#{n}" } + sequence(:bucket_number) { |n| n } + type { Search::NoteIndex } + end +end diff --git a/spec/factories/service_desk/custom_email_credential.rb b/spec/factories/service_desk/custom_email_credential.rb new file mode 100644 index 00000000000..da131dd8250 --- /dev/null +++ b/spec/factories/service_desk/custom_email_credential.rb @@ -0,0 +1,11 @@ +# frozen_string_literal: true + +FactoryBot.define do + factory :service_desk_custom_email_credential, class: '::ServiceDesk::CustomEmailCredential' do + project + smtp_address { "smtp.example.com" } + smtp_username { "text@example.com" } + smtp_port { 587 } + smtp_password { "supersecret" } + end +end diff --git a/spec/factories/users.rb b/spec/factories/users.rb index 10de7bc3b5b..368623b9aff 100644 --- a/spec/factories/users.rb +++ b/spec/factories/users.rb @@ -72,6 +72,10 @@ FactoryBot.define do user_type { :security_bot } end + trait :llm_bot do + user_type { :llm_bot } + end + trait :external do external { true } end diff --git a/spec/factories/work_items/resource_link_events.rb b/spec/factories/work_items/resource_link_events.rb new file mode 100644 index 00000000000..696f6dcc43f --- /dev/null +++ b/spec/factories/work_items/resource_link_events.rb @@ -0,0 +1,10 @@ +# frozen_string_literal: true + +FactoryBot.define do + factory :resource_link_event, class: 'WorkItems::ResourceLinkEvent' do + action { :add } + issue { association(:issue) } + user { issue&.author || association(:user) } + child_work_item { association(:work_item, :task) } + end +end diff --git a/spec/features/abuse_report_spec.rb b/spec/features/abuse_report_spec.rb index 474ab4c7b8e..272656fb4ca 100644 --- a/spec/features/abuse_report_spec.rb +++ b/spec/features/abuse_report_spec.rb @@ -124,7 +124,7 @@ RSpec.describe 'Abuse reports', :js, feature_category: :insider_threat do private def fill_and_submit_abuse_category_form(category = "They're posting spam.") - click_button 
'Report abuse to administrator' + click_button 'Report abuse' choose category click_button 'Next' diff --git a/spec/features/admin/admin_abuse_reports_spec.rb b/spec/features/admin/admin_abuse_reports_spec.rb index 9fe72b981f1..1c43faebd78 100644 --- a/spec/features/admin/admin_abuse_reports_spec.rb +++ b/spec/features/admin/admin_abuse_reports_spec.rb @@ -91,6 +91,51 @@ RSpec.describe "Admin::AbuseReports", :js, feature_category: :shared do expect(report_rows[1].text).to include(report_text(open_report2)) end + it 'can be actioned on' do + open_actions_dropdown(report_rows[0]) + + expect(page).to have_content('Remove user & report') + expect(page).to have_content('Block user') + expect(page).to have_content('Remove report') + + # Remove a report + click_button('Remove report') + wait_for_requests + + expect_displayed_reports_count(1) + expect_report_shown(open_report) + + # Block reported user + open_actions_dropdown(report_rows[0]) + + click_button('Block user') + expect(page).to have_content('USER WILL BE BLOCKED! Are you sure?') + + click_button('OK') + wait_for_requests + + expect(page).to have_content('Successfully blocked') + expect(open_report.user.reload.blocked?).to eq true + + open_actions_dropdown(report_rows[0]) + + expect(page).to have_content('Already blocked') + expect(page).not_to have_content('Block user') + + # Remove user & report + click_button('Remove user & report') + expect(page).to have_content("USER #{open_report.user.name} WILL BE REMOVED! 
Are you sure?") + + click_button('OK') + expect_displayed_reports_count(0) + end + + def open_actions_dropdown(report_row) + within(report_row) do + find('.dropdown-toggle').click + end + end + def report_rows page.all(abuse_report_row_selector) end diff --git a/spec/features/admin/admin_groups_spec.rb b/spec/features/admin/admin_groups_spec.rb index a07a5c48713..34fe98d22bd 100644 --- a/spec/features/admin/admin_groups_spec.rb +++ b/spec/features/admin/admin_groups_spec.rb @@ -3,8 +3,8 @@ require 'spec_helper' RSpec.describe 'Admin Groups', feature_category: :subgroups do - include Spec::Support::Helpers::Features::MembersHelpers - include Spec::Support::Helpers::Features::InviteMembersModalHelper + include Features::MembersHelpers + include Features::InviteMembersModalHelpers include Spec::Support::Helpers::ModalHelpers let(:internal) { Gitlab::VisibilityLevel::INTERNAL } diff --git a/spec/features/admin/admin_health_check_spec.rb b/spec/features/admin/admin_health_check_spec.rb index 23a9ab74a7a..66014e676d5 100644 --- a/spec/features/admin/admin_health_check_spec.rb +++ b/spec/features/admin/admin_health_check_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe "Admin Health Check", :js, feature_category: :continuous_verification do +RSpec.describe "Admin Health Check", :js, feature_category: :error_budgets do include StubENV include Spec::Support::Helpers::ModalHelpers let_it_be(:admin) { create(:admin) } diff --git a/spec/features/admin/admin_hook_logs_spec.rb b/spec/features/admin/admin_hook_logs_spec.rb index d6507e68692..34208cca113 100644 --- a/spec/features/admin/admin_hook_logs_spec.rb +++ b/spec/features/admin/admin_hook_logs_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe 'Admin::HookLogs', feature_category: :continuous_verification do +RSpec.describe 'Admin::HookLogs', feature_category: :integrations do let_it_be(:system_hook) { create(:system_hook) } let_it_be(:hook_log) { create(:web_hook_log, web_hook: system_hook, 
internal_error_message: 'some error') } let_it_be(:admin) { create(:admin) } diff --git a/spec/features/admin/admin_hooks_spec.rb b/spec/features/admin/admin_hooks_spec.rb index 363c152371e..a8aa2680b55 100644 --- a/spec/features/admin/admin_hooks_spec.rb +++ b/spec/features/admin/admin_hooks_spec.rb @@ -106,7 +106,7 @@ RSpec.describe 'Admin::Hooks', feature_category: :integrations do visit admin_hooks_path click_button 'Test' - click_button 'Push events' + click_link 'Push events' end it { expect(page).to have_current_path(admin_hooks_path, ignore_query: true) } @@ -142,7 +142,7 @@ RSpec.describe 'Admin::Hooks', feature_category: :integrations do visit admin_hooks_path click_button 'Test' - click_button 'Merge request events' + click_link 'Merge request events' expect(page).to have_content 'Hook executed successfully' end diff --git a/spec/features/admin/admin_mode/logout_spec.rb b/spec/features/admin/admin_mode/logout_spec.rb index 25f77da4401..a64d3f241f6 100644 --- a/spec/features/admin/admin_mode/logout_spec.rb +++ b/spec/features/admin/admin_mode/logout_spec.rb @@ -5,7 +5,7 @@ require 'spec_helper' RSpec.describe 'Admin Mode Logout', :js, feature_category: :system_access do include TermsHelper include UserLoginHelper - include Spec::Support::Helpers::Features::TopNavSpecHelpers + include Features::TopNavSpecHelpers let(:user) { create(:admin) } diff --git a/spec/features/admin/admin_mode/workers_spec.rb b/spec/features/admin/admin_mode/workers_spec.rb index 305927663e9..124c43eef9d 100644 --- a/spec/features/admin/admin_mode/workers_spec.rb +++ b/spec/features/admin/admin_mode/workers_spec.rb @@ -4,7 +4,7 @@ require 'spec_helper' # Test an operation that triggers background jobs requiring administrative rights RSpec.describe 'Admin mode for workers', :request_store, feature_category: :system_access do - include Spec::Support::Helpers::Features::AdminUsersHelpers + include Features::AdminUsersHelpers let(:user) { create(:user) } let(:user_to_delete) { 
create(:user) } diff --git a/spec/features/admin/admin_mode_spec.rb b/spec/features/admin/admin_mode_spec.rb index 3c47a991fd1..f78f32a15fa 100644 --- a/spec/features/admin/admin_mode_spec.rb +++ b/spec/features/admin/admin_mode_spec.rb @@ -4,7 +4,7 @@ require 'spec_helper' RSpec.describe 'Admin mode', :js, feature_category: :shared do include MobileHelpers - include Spec::Support::Helpers::Features::TopNavSpecHelpers + include Features::TopNavSpecHelpers include StubENV let(:admin) { create(:admin) } diff --git a/spec/features/admin/admin_projects_spec.rb b/spec/features/admin/admin_projects_spec.rb index 405a254dc84..ac2e9de7aee 100644 --- a/spec/features/admin/admin_projects_spec.rb +++ b/spec/features/admin/admin_projects_spec.rb @@ -3,8 +3,8 @@ require 'spec_helper' RSpec.describe "Admin::Projects", feature_category: :projects do - include Spec::Support::Helpers::Features::MembersHelpers - include Spec::Support::Helpers::Features::InviteMembersModalHelper + include Features::MembersHelpers + include Features::InviteMembersModalHelpers include Spec::Support::Helpers::ModalHelpers include ListboxHelpers @@ -186,4 +186,19 @@ RSpec.describe "Admin::Projects", feature_category: :projects do end end end + + describe 'project runner registration edit' do + it 'updates runner registration' do + visit edit_admin_namespace_project_path({ id: project.to_param, namespace_id: project.namespace.to_param }) + + expect(find_field('New project runners can be registered')).to be_checked + + uncheck 'New project runners can be registered' + click_button 'Save changes' + + visit edit_admin_namespace_project_path({ id: project.to_param, namespace_id: project.namespace.to_param }) + + expect(find_field('New project runners can be registered')).not_to be_checked + end + end end diff --git a/spec/features/admin/admin_runners_spec.rb b/spec/features/admin/admin_runners_spec.rb index d9867c2e704..5703ab1eaff 100644 --- a/spec/features/admin/admin_runners_spec.rb +++ 
b/spec/features/admin/admin_runners_spec.rb @@ -3,7 +3,7 @@ require 'spec_helper' RSpec.describe "Admin Runners", feature_category: :runner_fleet do - include Spec::Support::Helpers::Features::RunnersHelpers + include Features::RunnersHelpers include Spec::Support::Helpers::ModalHelpers let_it_be(:admin) { create(:admin) } @@ -371,11 +371,9 @@ RSpec.describe "Admin Runners", feature_category: :runner_fleet do it_behaves_like 'shows no runners found' - it 'shows active tab' do + it 'shows active tab with no runner' do expect(page).to have_link('Instance', class: 'active') - end - it 'shows no runner' do expect(page).not_to have_content 'runner-project' expect(page).not_to have_content 'runner-group' end @@ -469,10 +467,12 @@ RSpec.describe "Admin Runners", feature_category: :runner_fleet do it_behaves_like 'shows no runners registered' it 'shows tabs with total counts equal to 0' do - expect(page).to have_link('All 0') - expect(page).to have_link('Instance 0') - expect(page).to have_link('Group 0') - expect(page).to have_link('Project 0') + aggregate_failures do + expect(page).to have_link('All 0') + expect(page).to have_link('Instance 0') + expect(page).to have_link('Group 0') + expect(page).to have_link('Project 0') + end end end @@ -496,21 +496,8 @@ RSpec.describe "Admin Runners", feature_category: :runner_fleet do visit new_admin_runner_path end - context 'when runner is saved' do - before do - fill_in s_('Runners|Runner description'), with: 'runner-foo' - fill_in s_('Runners|Tags'), with: 'tag1' - click_on _('Submit') - wait_for_requests - end - - it 'navigates to registration page and opens install instructions drawer' do - expect(page.find('[data-testid="alert-success"]')).to have_content(s_('Runners|Runner created.')) - expect(current_url).to match(register_admin_runner_path(Ci::Runner.last)) - - click_on 'How do I install GitLab Runner?' 
- expect(page.find('[data-testid="runner-platforms-drawer"]')).to have_content('gitlab-runner install') - end + it_behaves_like 'creates runner and shows register page' do + let(:register_path_pattern) { register_admin_runner_path('.*') } end end @@ -567,11 +554,8 @@ RSpec.describe "Admin Runners", feature_category: :runner_fleet do end end - it 'deletes runner' do + it 'deletes runner and redirects to runner list' do expect(page.find('[data-testid="alert-success"]')).to have_content('deleted') - end - - it 'redirects to runner list' do expect(current_url).to match(admin_runners_path) end end @@ -614,12 +598,9 @@ RSpec.describe "Admin Runners", feature_category: :runner_fleet do wait_for_requests end - it 'show success alert' do - expect(page.find('[data-testid="alert-success"]')).to have_content('saved') - end - - it 'redirects to runner page' do + it 'show success alert and redirects to runner page' do expect(current_url).to match(admin_runner_path(project_runner)) + expect(page.find('[data-testid="alert-success"]')).to have_content('saved') end end @@ -658,7 +639,7 @@ RSpec.describe "Admin Runners", feature_category: :runner_fleet do end context 'with project runner' do - let(:project_runner) { create(:ci_runner, :project, projects: [project1]) } + let_it_be(:project_runner) { create(:ci_runner, :project, projects: [project1]) } before do visit edit_admin_runner_path(project_runner) @@ -668,7 +649,7 @@ RSpec.describe "Admin Runners", feature_category: :runner_fleet do end context 'with locked runner' do - let(:locked_runner) { create(:ci_runner, :project, projects: [project1], locked: true) } + let_it_be(:locked_runner) { create(:ci_runner, :project, projects: [project1], locked: true) } before do visit edit_admin_runner_path(locked_runner) @@ -679,7 +660,7 @@ RSpec.describe "Admin Runners", feature_category: :runner_fleet do end describe 'disable/destroy' do - let(:runner) { create(:ci_runner, :project, projects: [project1]) } + let_it_be(:runner) { 
create(:ci_runner, :project, projects: [project1]) } before do visit edit_admin_runner_path(runner) diff --git a/spec/features/admin/admin_settings_spec.rb b/spec/features/admin/admin_settings_spec.rb index 3a1aa36208e..9a0d7ea0848 100644 --- a/spec/features/admin/admin_settings_spec.rb +++ b/spec/features/admin/admin_settings_spec.rb @@ -487,7 +487,7 @@ RSpec.describe 'Admin updates settings', feature_category: :shared do container_registry_delete_tags_service_timeout: 'Container Registry delete tags service execution timeout', container_registry_expiration_policies_worker_capacity: 'Cleanup policy maximum workers running concurrently', container_registry_cleanup_tags_service_max_list_size: 'Cleanup policy maximum number of tags to be deleted', - container_registry_expiration_policies_caching: 'Enable container expiration caching' + container_registry_expiration_policies_caching: 'Enable cleanup policy caching' } end diff --git a/spec/features/admin/admin_users_impersonation_tokens_spec.rb b/spec/features/admin/admin_users_impersonation_tokens_spec.rb index 342e23d0cab..0350c8ab066 100644 --- a/spec/features/admin/admin_users_impersonation_tokens_spec.rb +++ b/spec/features/admin/admin_users_impersonation_tokens_spec.rb @@ -4,7 +4,7 @@ require 'spec_helper' RSpec.describe 'Admin > Users > Impersonation Tokens', :js, feature_category: :system_access do include Spec::Support::Helpers::ModalHelpers - include Spec::Support::Helpers::AccessTokenHelpers + include Features::AccessTokenHelpers let(:admin) { create(:admin) } let!(:user) { create(:user) } diff --git a/spec/features/admin/users/user_spec.rb b/spec/features/admin/users/user_spec.rb index 66129617220..403fd49fc65 100644 --- a/spec/features/admin/users/user_spec.rb +++ b/spec/features/admin/users/user_spec.rb @@ -3,7 +3,7 @@ require 'spec_helper' RSpec.describe 'Admin::Users::User', feature_category: :user_management do - include Spec::Support::Helpers::Features::AdminUsersHelpers + include 
Features::AdminUsersHelpers include Spec::Support::Helpers::ModalHelpers let_it_be(:user) { create(:omniauth_user, provider: 'twitter', extern_uid: '123456') } diff --git a/spec/features/admin/users/users_spec.rb b/spec/features/admin/users/users_spec.rb index 07db0750074..8e80ce5edd9 100644 --- a/spec/features/admin/users/users_spec.rb +++ b/spec/features/admin/users/users_spec.rb @@ -3,7 +3,7 @@ require 'spec_helper' RSpec.describe 'Admin::Users', feature_category: :user_management do - include Spec::Support::Helpers::Features::AdminUsersHelpers + include Features::AdminUsersHelpers include Spec::Support::Helpers::ModalHelpers include ListboxHelpers @@ -311,6 +311,40 @@ RSpec.describe 'Admin::Users', feature_category: :user_management do end end + describe 'users pending approval' do + it 'sends a welcome email and a password reset email to the user upon admin approval', :sidekiq_inline do + user = create(:user, :blocked_pending_approval, created_by_id: current_user.id) + + visit admin_users_path + + click_link 'Pending approval' + + click_user_dropdown_toggle(user.id) + + find('[data-testid="approve"]').click + + expect(page).to have_content("Approve user #{user.name}?") + + within_modal do + perform_enqueued_jobs do + click_button 'Approve' + end + end + + expect(page).to have_content('Successfully approved') + + welcome_email = ActionMailer::Base.deliveries.find { |m| m.subject == 'Welcome to GitLab!' 
} + expect(welcome_email.to).to eq([user.email]) + expect(welcome_email.text_part.body).to have_content('Your GitLab account request has been approved!') + + password_reset_email = ActionMailer::Base.deliveries.find { |m| m.subject == 'Account was created for you' } + expect(password_reset_email.to).to eq([user.email]) + expect(password_reset_email.text_part.body).to have_content('Click here to set your password') + + expect(ActionMailer::Base.deliveries.count).to eq(2) + end + end + describe 'internal users' do context 'when showing a `Ghost User`' do let_it_be(:ghost_user) { create(:user, :ghost) } diff --git a/spec/features/admin_variables_spec.rb b/spec/features/admin_variables_spec.rb index 274e62defd9..744d18a3b6d 100644 --- a/spec/features/admin_variables_spec.rb +++ b/spec/features/admin_variables_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe 'Instance variables', :js, feature_category: :pipeline_composition do +RSpec.describe 'Instance variables', :js, feature_category: :secrets_management do let(:admin) { create(:admin) } let(:page_path) { ci_cd_admin_application_settings_path } diff --git a/spec/features/boards/board_filters_spec.rb b/spec/features/boards/board_filters_spec.rb index dee63be8119..006b7ce45d4 100644 --- a/spec/features/boards/board_filters_spec.rb +++ b/spec/features/boards/board_filters_spec.rb @@ -50,7 +50,7 @@ RSpec.describe 'Issue board filters', :js, feature_category: :team_planning do set_filter('assignee') end - it_behaves_like 'loads all the users when opened' do + it_behaves_like 'loads all the users when opened', quarantine: 'https://gitlab.com/gitlab-org/gitlab/-/issues/351426' do let(:issue) { issue_2 } end end diff --git a/spec/features/canonical_link_spec.rb b/spec/features/canonical_link_spec.rb index d8f9a7584e7..0ed76c30ce4 100644 --- a/spec/features/canonical_link_spec.rb +++ b/spec/features/canonical_link_spec.rb @@ -3,7 +3,7 @@ require 'spec_helper' RSpec.describe 'Canonical link', feature_category: 
:remote_development do - include Spec::Support::Helpers::Features::CanonicalLinkHelpers + include Features::CanonicalLinkHelpers let_it_be(:user) { create(:user) } let_it_be(:project) { create(:project, :public, namespace: user.namespace) } diff --git a/spec/features/clusters/cluster_detail_page_spec.rb b/spec/features/clusters/cluster_detail_page_spec.rb index e8fb5f4105d..31dec5e38da 100644 --- a/spec/features/clusters/cluster_detail_page_spec.rb +++ b/spec/features/clusters/cluster_detail_page_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe 'Clusterable > Show page', feature_category: :kubernetes_management do +RSpec.describe 'Clusterable > Show page', feature_category: :deployment_management do include KubernetesHelpers let(:current_user) { create(:user) } diff --git a/spec/features/clusters/cluster_health_dashboard_spec.rb b/spec/features/clusters/cluster_health_dashboard_spec.rb index b557f803a99..ebbc184eaef 100644 --- a/spec/features/clusters/cluster_health_dashboard_spec.rb +++ b/spec/features/clusters/cluster_health_dashboard_spec.rb @@ -3,7 +3,7 @@ require 'spec_helper' RSpec.describe 'Cluster Health board', :js, :kubeclient, :use_clean_rails_memory_store_caching, :sidekiq_inline, -feature_category: :kubernetes_management do +feature_category: :deployment_management do include KubernetesHelpers include PrometheusHelpers diff --git a/spec/features/clusters/create_agent_spec.rb b/spec/features/clusters/create_agent_spec.rb index 01902c36e99..93a49151978 100644 --- a/spec/features/clusters/create_agent_spec.rb +++ b/spec/features/clusters/create_agent_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe 'Cluster agent registration', :js, feature_category: :kubernetes_management do +RSpec.describe 'Cluster agent registration', :js, feature_category: :deployment_management do let_it_be(:project) { create(:project, :custom_repo, files: { '.gitlab/agents/example-agent-1/config.yaml' => '' }) } let_it_be(:current_user) { create(:user, 
maintainer_projects: [project]) } let_it_be(:token) { Devise.friendly_token } diff --git a/spec/features/commits/user_uses_quick_actions_spec.rb b/spec/features/commits/user_uses_quick_actions_spec.rb index 6d043a0bb2f..c83a30c99c3 100644 --- a/spec/features/commits/user_uses_quick_actions_spec.rb +++ b/spec/features/commits/user_uses_quick_actions_spec.rb @@ -3,7 +3,7 @@ require 'spec_helper' RSpec.describe 'Commit > User uses quick actions', :js, feature_category: :source_code_management do - include Spec::Support::Helpers::Features::NotesHelpers + include Features::NotesHelpers include RepoHelpers let(:project) { create(:project, :public, :repository) } diff --git a/spec/features/commits_spec.rb b/spec/features/commits_spec.rb index eafe74f4b0b..aacba6d2af8 100644 --- a/spec/features/commits_spec.rb +++ b/spec/features/commits_spec.rb @@ -165,10 +165,24 @@ RSpec.describe 'Commits', feature_category: :source_code_management do context 'viewing commits for a branch' do let(:branch_name) { 'master' } + let(:ref_selector) { '.ref-selector' } + let(:ref_with_hash) { 'ref-#-hash' } + + def switch_ref_to(ref_name) + first(ref_selector).click + wait_for_requests + + page.within ref_selector do + fill_in 'Search by Git revision', with: ref_name + wait_for_requests + find('li', text: ref_name, match: :prefer_exact).click + end + end before do project.add_maintainer(user) sign_in(user) + project.repository.create_branch(ref_with_hash, branch_name) visit project_commits_path(project, branch_name) end @@ -180,11 +194,17 @@ RSpec.describe 'Commits', feature_category: :source_code_management do end end + it 'switches ref to ref containing a hash', :js do + switch_ref_to(ref_with_hash) + + expect(page).to have_selector ref_selector, text: ref_with_hash + end + it 'shows the ref switcher with the multi-file editor enabled', :js do set_cookie('new_repo', 'true') visit project_commits_path(project, branch_name) - expect(find('.ref-selector')).to have_content branch_name + 
expect(find(ref_selector)).to have_content branch_name end end diff --git a/spec/features/dashboard/activity_spec.rb b/spec/features/dashboard/activity_spec.rb index 2f9b7bb7e0f..2345e4be722 100644 --- a/spec/features/dashboard/activity_spec.rb +++ b/spec/features/dashboard/activity_spec.rb @@ -9,7 +9,7 @@ RSpec.describe 'Dashboard > Activity', feature_category: :user_profile do sign_in(user) end - it_behaves_like 'a dashboard page with sidebar', :activity_dashboard_path, :activity + it_behaves_like 'a "Your work" page with sidebar and breadcrumbs', :activity_dashboard_path, :activity context 'tabs' do it 'shows Your Activity' do diff --git a/spec/features/dashboard/group_dashboard_with_external_authorization_service_spec.rb b/spec/features/dashboard/group_dashboard_with_external_authorization_service_spec.rb index a5d6ba58ffa..3040c97a16f 100644 --- a/spec/features/dashboard/group_dashboard_with_external_authorization_service_spec.rb +++ b/spec/features/dashboard/group_dashboard_with_external_authorization_service_spec.rb @@ -4,7 +4,7 @@ require 'spec_helper' RSpec.describe 'The group dashboard', :js, feature_category: :subgroups do include ExternalAuthorizationServiceHelpers - include Spec::Support::Helpers::Features::TopNavSpecHelpers + include Features::TopNavSpecHelpers let(:user) { create(:user) } diff --git a/spec/features/dashboard/groups_list_spec.rb b/spec/features/dashboard/groups_list_spec.rb index 1fb393e1769..7112b30957a 100644 --- a/spec/features/dashboard/groups_list_spec.rb +++ b/spec/features/dashboard/groups_list_spec.rb @@ -19,7 +19,7 @@ RSpec.describe 'Dashboard Groups page', :js, feature_category: :subgroups do page.find("[data-testid='group-#{group.id}-dropdown-button'").click end - it_behaves_like 'a dashboard page with sidebar', :dashboard_groups_path, :groups + it_behaves_like 'a "Your work" page with sidebar and breadcrumbs', :dashboard_groups_path, :groups it 'shows groups user is member of' do group.add_owner(user) diff --git 
a/spec/features/dashboard/issues_filter_spec.rb b/spec/features/dashboard/issues_filter_spec.rb index a7734ed50c2..ee1e704c6c4 100644 --- a/spec/features/dashboard/issues_filter_spec.rb +++ b/spec/features/dashboard/issues_filter_spec.rb @@ -3,7 +3,7 @@ require 'spec_helper' RSpec.describe 'Dashboard Issues filtering', :js, feature_category: :team_planning do - include Spec::Support::Helpers::Features::SortingHelpers + include Features::SortingHelpers include FilteredSearchHelpers let(:user) { create(:user) } @@ -44,7 +44,7 @@ RSpec.describe 'Dashboard Issues filtering', :js, feature_category: :team_planni it 'updates atom feed link' do visit_issues(milestone_title: '', assignee_username: user.username) - link = find('[data-testid="rss-feed-link"]') + link = find_link('Subscribe to RSS feed') params = CGI.parse(URI.parse(link[:href]).query) auto_discovery_link = find('link[type="application/atom+xml"]', visible: false) auto_discovery_params = CGI.parse(URI.parse(auto_discovery_link[:href]).query) diff --git a/spec/features/dashboard/issues_spec.rb b/spec/features/dashboard/issues_spec.rb index 654cc9978a7..4499aa021ff 100644 --- a/spec/features/dashboard/issues_spec.rb +++ b/spec/features/dashboard/issues_spec.rb @@ -21,7 +21,7 @@ RSpec.describe 'Dashboard Issues', feature_category: :team_planning do visit issues_dashboard_path(assignee_username: current_user.username) end - it_behaves_like 'a dashboard page with sidebar', :issues_dashboard_path, :issues + it_behaves_like 'a "Your work" page with sidebar and breadcrumbs', :issues_dashboard_path, :issues describe 'issues' do it 'shows issues assigned to current user' do diff --git a/spec/features/dashboard/merge_requests_spec.rb b/spec/features/dashboard/merge_requests_spec.rb index 34bab9dffd0..d53f5affe64 100644 --- a/spec/features/dashboard/merge_requests_spec.rb +++ b/spec/features/dashboard/merge_requests_spec.rb @@ -3,7 +3,7 @@ require 'spec_helper' RSpec.describe 'Dashboard Merge Requests', feature_category: 
:code_review_workflow do - include Spec::Support::Helpers::Features::SortingHelpers + include Features::SortingHelpers include FilteredSearchHelpers include ProjectForksHelper @@ -19,7 +19,7 @@ RSpec.describe 'Dashboard Merge Requests', feature_category: :code_review_workfl sign_in(current_user) end - it_behaves_like 'a dashboard page with sidebar', :merge_requests_dashboard_path, :merge_requests + it_behaves_like 'a "Your work" page with sidebar and breadcrumbs', :merge_requests_dashboard_path, :merge_requests it 'disables target branch filter' do visit merge_requests_dashboard_path diff --git a/spec/features/dashboard/milestones_spec.rb b/spec/features/dashboard/milestones_spec.rb index 3b197bbf009..0dd25ffaa94 100644 --- a/spec/features/dashboard/milestones_spec.rb +++ b/spec/features/dashboard/milestones_spec.rb @@ -26,7 +26,7 @@ RSpec.describe 'Dashboard > Milestones', feature_category: :team_planning do visit dashboard_milestones_path end - it_behaves_like 'a dashboard page with sidebar', :dashboard_milestones_path, :milestones + it_behaves_like 'a "Your work" page with sidebar and breadcrumbs', :dashboard_milestones_path, :milestones it 'sees milestones' do expect(page).to have_current_path dashboard_milestones_path, ignore_query: true diff --git a/spec/features/dashboard/projects_spec.rb b/spec/features/dashboard/projects_spec.rb index eafc41c0f40..32bce32ec6c 100644 --- a/spec/features/dashboard/projects_spec.rb +++ b/spec/features/dashboard/projects_spec.rb @@ -18,7 +18,7 @@ RSpec.describe 'Dashboard Projects', feature_category: :projects do end end - it_behaves_like "a dashboard page with sidebar", :dashboard_projects_path, :projects + it_behaves_like 'a "Your work" page with sidebar and breadcrumbs', :dashboard_projects_path, :projects it 'links to the "Explore projects" page' do visit dashboard_projects_path @@ -112,6 +112,8 @@ RSpec.describe 'Dashboard Projects', feature_category: :projects do end context 'when on Starred projects tab', :js do + 
it_behaves_like 'a "Your work" page with sidebar and breadcrumbs', :starred_dashboard_projects_path, :projects + it 'shows the empty state when there are no starred projects' do visit(starred_dashboard_projects_path) diff --git a/spec/features/dashboard/snippets_spec.rb b/spec/features/dashboard/snippets_spec.rb index f4234b433f8..da985c6dc07 100644 --- a/spec/features/dashboard/snippets_spec.rb +++ b/spec/features/dashboard/snippets_spec.rb @@ -5,7 +5,7 @@ require 'spec_helper' RSpec.describe 'Dashboard snippets', feature_category: :source_code_management do let_it_be(:user) { create(:user) } - it_behaves_like 'a dashboard page with sidebar', :dashboard_snippets_path, :snippets + it_behaves_like 'a "Your work" page with sidebar and breadcrumbs', :dashboard_snippets_path, :snippets it 'links to the "Explore snippets" page' do sign_in(user) @@ -44,7 +44,8 @@ RSpec.describe 'Dashboard snippets', feature_category: :source_code_management d element = page.find('.row.empty-state') expect(element).to have_content("Code snippets") - expect(element.find('.svg-content img.js-lazy-loaded')['src']).to have_content('illustrations/snippets_empty') + expect(element.find('.svg-content img.js-lazy-loaded')['src']) + .to have_content('illustrations/empty-state/empty-snippets-md') end it 'shows new snippet button in main content area' do diff --git a/spec/features/dashboard/todos/todos_spec.rb b/spec/features/dashboard/todos/todos_spec.rb index 59bb1a452c9..d0003b69415 100644 --- a/spec/features/dashboard/todos/todos_spec.rb +++ b/spec/features/dashboard/todos/todos_spec.rb @@ -15,7 +15,7 @@ RSpec.describe 'Dashboard Todos', feature_category: :team_planning do project.add_developer(user) end - it_behaves_like 'a dashboard page with sidebar', :dashboard_todos_path, :todos + it_behaves_like 'a "Your work" page with sidebar and breadcrumbs', :dashboard_todos_path, :todos context 'User does not have todos' do before do diff --git a/spec/features/emails/issues_spec.rb 
b/spec/features/emails/issues_spec.rb new file mode 100644 index 00000000000..c425dad88aa --- /dev/null +++ b/spec/features/emails/issues_spec.rb @@ -0,0 +1,110 @@ +# frozen_string_literal: true + +require "spec_helper" + +RSpec.describe "E-Mails > Issues", :js, feature_category: :team_planning do + let_it_be(:project) { create(:project_empty_repo, :public, name: 'Long Earth') } + let_it_be(:author) { create(:user, username: 'author', name: 'Sally Linsay') } + let_it_be(:current_user) { create(:user, username: 'current_user', name: 'Shi-mi') } + + before do + project.add_developer(current_user) + sign_in(current_user) + end + + describe 'assignees' do + let_it_be(:assignee) { create(:user, username: 'assignee', name: 'Joshua Valienté') } + let_it_be(:issue_without_assignee) { create(:issue, project: project, author: author, title: 'No milk today!') } + + let_it_be(:issue_with_assignee) do + create( + :issue, project: project, author: author, assignees: [assignee], + title: 'All your base are belong to us') + end + + it 'sends confirmation e-mail for assigning' do + synchronous_notifications + expect(Notify).to receive(:reassigned_issue_email) + .with(author.id, issue_without_assignee.id, [], current_user.id, nil) + .once + .and_call_original + expect(Notify).to receive(:reassigned_issue_email) + .with(assignee.id, issue_without_assignee.id, [], current_user.id, NotificationReason::ASSIGNED) + .once + .and_call_original + + visit issue_path(issue_without_assignee) + assign_to(assignee) + + expect(find('#notes-list')).to have_text("Shi-mi assigned to @assignee just now") + end + + it 'sends confirmation e-mail for reassigning' do + synchronous_notifications + expect(Notify).to receive(:reassigned_issue_email) + .with(author.id, issue_with_assignee.id, [assignee.id], current_user.id, NotificationReason::ASSIGNED) + .once + .and_call_original + expect(Notify).to receive(:reassigned_issue_email) + .with(assignee.id, issue_with_assignee.id, [assignee.id], 
current_user.id, nil) + .once + .and_call_original + + visit issue_path(issue_with_assignee) + assign_to(author) + + expect(find('#notes-list')).to have_text("Shi-mi assigned to @author and unassigned @assignee just now") + end + + it 'sends confirmation e-mail for unassigning' do + synchronous_notifications + expect(Notify).to receive(:reassigned_issue_email) + .with(author.id, issue_with_assignee.id, [assignee.id], current_user.id, nil) + .once + .and_call_original + expect(Notify).to receive(:reassigned_issue_email) + .with(assignee.id, issue_with_assignee.id, [assignee.id], current_user.id, nil) + .once + .and_call_original + + visit issue_path(issue_with_assignee) + quick_action('/unassign') + + expect(find('#notes-list')).to have_text("Shi-mi unassigned @assignee just now") + end + end + + describe 'closing' do + let_it_be(:issue) { create(:issue, project: project, author: author, title: 'Public Holiday') } + + it 'sends confirmation e-mail for closing' do + synchronous_notifications + expect(Notify).to receive(:closed_issue_email) + .with(author.id, issue.id, current_user.id, { closed_via: nil, reason: nil }) + .once + .and_call_original + + visit issue_path(issue) + quick_action("/close") + + expect(find('#notes-list')).to have_text("Shi-mi closed just now") + end + end + + private + + def assign_to(user) + quick_action("/assign @#{user.username}") + end + + def quick_action(command) + fill_in 'note[note]', with: command + click_button 'Comment' + end + + def synchronous_notifications + expect_next_instance_of(NotificationService) do |service| + expect(service).to receive(:async).and_return(service) + end + end +end diff --git a/spec/features/explore/user_explores_projects_spec.rb b/spec/features/explore/user_explores_projects_spec.rb index 14fddf5d84c..f259ba6a167 100644 --- a/spec/features/explore/user_explores_projects_spec.rb +++ b/spec/features/explore/user_explores_projects_spec.rb @@ -3,6 +3,18 @@ require 'spec_helper' RSpec.describe 'User explores 
projects', feature_category: :user_profile do + describe '"All" tab' do + it_behaves_like 'an "Explore" page with sidebar and breadcrumbs', :explore_projects_path, :projects + end + + describe '"Most starred" tab' do + it_behaves_like 'an "Explore" page with sidebar and breadcrumbs', :starred_explore_projects_path, :projects + end + + describe '"Trending" tab' do + it_behaves_like 'an "Explore" page with sidebar and breadcrumbs', :trending_explore_projects_path, :projects + end + context 'when some projects exist' do let_it_be(:archived_project) { create(:project, :archived) } let_it_be(:internal_project) { create(:project, :internal) } diff --git a/spec/features/frequently_visited_projects_and_groups_spec.rb b/spec/features/frequently_visited_projects_and_groups_spec.rb index 19495230795..514b642a2d4 100644 --- a/spec/features/frequently_visited_projects_and_groups_spec.rb +++ b/spec/features/frequently_visited_projects_and_groups_spec.rb @@ -3,7 +3,7 @@ require 'spec_helper' RSpec.describe 'Frequently visited items', :js, feature_category: :shared do - include Spec::Support::Helpers::Features::TopNavSpecHelpers + include Features::TopNavSpecHelpers let_it_be(:user) { create(:user) } diff --git a/spec/features/global_search_spec.rb b/spec/features/global_search_spec.rb index 15393ec4cd6..f94f0288f99 100644 --- a/spec/features/global_search_spec.rb +++ b/spec/features/global_search_spec.rb @@ -13,64 +13,8 @@ RSpec.describe 'Global search', :js, feature_category: :global_search do sign_in(user) end - describe 'when new_header_search feature is disabled' do + describe 'when header search' do before do - # TODO: Remove this along with feature flag #339348 - stub_feature_flags(new_header_search: false) - visit dashboard_projects_path - end - - it 'increases usage ping searches counter' do - expect(Gitlab::UsageDataCounters::SearchCounter).to receive(:count).with(:navbar_searches) - expect(Gitlab::UsageDataCounters::SearchCounter).to receive(:count).with(:all_searches) 
- - submit_search('foobar') - end - - describe 'I search through the issues and I see pagination' do - before do - allow_next(SearchService).to receive(:per_page).and_return(1) - create_list(:issue, 2, project: project, title: 'initial') - end - - it "has a pagination" do - submit_search('initial') - select_search_scope('Issues') - - expect(page).to have_selector('.gl-pagination .next') - end - end - - it 'closes the dropdown on blur' do - find('#search').click - fill_in 'search', with: "a" - - expect(page).to have_selector("div[data-testid='dashboard-search-options'].show") - - find('#search').send_keys(:backspace) - find('body').click - - expect(page).to have_no_selector("div[data-testid='dashboard-search-options'].show") - end - - it 'renders legacy search bar' do - expect(page).to have_selector('.search-form') - expect(page).to have_no_selector('#js-header-search') - end - - it 'focuses search input when shortcut "s" is pressed' do - expect(page).not_to have_selector('#search:focus') - - find('body').native.send_key('s') - - expect(page).to have_selector('#search:focus') - end - end - - describe 'when new_header_search feature is enabled' do - before do - # TODO: Remove this along with feature flag #339348 - stub_feature_flags(new_header_search: true) visit dashboard_projects_path end diff --git a/spec/features/group_variables_spec.rb b/spec/features/group_variables_spec.rb index 8644a15a093..3e87c90e7dc 100644 --- a/spec/features/group_variables_spec.rb +++ b/spec/features/group_variables_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe 'Group variables', :js, feature_category: :pipeline_composition do +RSpec.describe 'Group variables', :js, feature_category: :secrets_management do let(:user) { create(:user) } let(:group) { create(:group) } let!(:variable) { create(:ci_group_variable, key: 'test_key', value: 'test_value', masked: true, group: group) } diff --git a/spec/features/groups/container_registry_spec.rb 
b/spec/features/groups/container_registry_spec.rb index 11f94967aaf..ab8d8238bdc 100644 --- a/spec/features/groups/container_registry_spec.rb +++ b/spec/features/groups/container_registry_spec.rb @@ -95,7 +95,11 @@ RSpec.describe 'Container Registry', :js, feature_category: :container_registry first('[data-testid="additional-actions"]').click first('[data-testid="single-delete-button"]').click expect(find('.modal .modal-title')).to have_content _('Remove tag') + stub_container_registry_tags(repository: %r{my/image}, tags: [], with_manifest: true) find('.modal .modal-footer .btn-danger').click + + expect(page).to have_content '0 tags' + expect(page).not_to have_content '1 tag' end end end diff --git a/spec/features/groups/group_runners_spec.rb b/spec/features/groups/group_runners_spec.rb index ae757e04716..514110d78ae 100644 --- a/spec/features/groups/group_runners_spec.rb +++ b/spec/features/groups/group_runners_spec.rb @@ -3,7 +3,7 @@ require 'spec_helper' RSpec.describe "Group Runners", feature_category: :runner_fleet do - include Spec::Support::Helpers::Features::RunnersHelpers + include Features::RunnersHelpers include Spec::Support::Helpers::ModalHelpers let_it_be(:group_owner) { create(:user) } @@ -16,10 +16,12 @@ RSpec.describe "Group Runners", feature_category: :runner_fleet do end describe "Group runners page", :js do - let!(:group_registration_token) { group.runners_token } + describe "legacy runners registration" do + let_it_be(:group_registration_token) { group.runners_token } - describe "runners registration" do before do + stub_feature_flags(create_runner_workflow_for_namespace: false) + visit group_runners_path(group) end @@ -60,15 +62,11 @@ RSpec.describe "Group Runners", feature_category: :runner_fleet do let(:runner) { group_runner } end - it 'shows a group badge' do - within_runner_row(group_runner.id) do - expect(page).to have_selector '.badge', text: s_('Runners|Group') - end - end - - it 'can edit runner information' do + it 'shows an editable 
group badge' do within_runner_row(group_runner.id) do expect(find_link('Edit')[:href]).to end_with(edit_group_runner_path(group, group_runner)) + + expect(page).to have_selector '.badge', text: s_('Runners|Group') end end @@ -102,15 +100,11 @@ RSpec.describe "Group Runners", feature_category: :runner_fleet do let(:runner) { project_runner } end - it 'shows a project badge' do - within_runner_row(project_runner.id) do - expect(page).to have_selector '.badge', text: s_('Runners|Project') - end - end - - it 'can edit runner information' do + it 'shows an editable project runner' do within_runner_row(project_runner.id) do expect(find_link('Edit')[:href]).to end_with(edit_group_runner_path(group, project_runner)) + + expect(page).to have_selector '.badge', text: s_('Runners|Project') end end end @@ -202,6 +196,16 @@ RSpec.describe "Group Runners", feature_category: :runner_fleet do end end + describe "Group runner create page", :js do + before do + visit new_group_runner_path(group) + end + + it_behaves_like 'creates runner and shows register page' do + let(:register_path_pattern) { register_group_runner_path(group, '.*') } + end + end + describe "Group runner show page", :js do let_it_be(:group_runner) do create(:ci_runner, :group, groups: [group], description: 'runner-foo') diff --git a/spec/features/groups/group_settings_spec.rb b/spec/features/groups/group_settings_spec.rb index 2aa70ec1953..bb61431d773 100644 --- a/spec/features/groups/group_settings_spec.rb +++ b/spec/features/groups/group_settings_spec.rb @@ -3,6 +3,8 @@ require 'spec_helper' RSpec.describe 'Edit group settings', feature_category: :subgroups do + include Spec::Support::Helpers::ModalHelpers + let(:user) { create(:user) } let(:group) { create(:group, path: 'foo') } @@ -148,13 +150,15 @@ RSpec.describe 'Edit group settings', feature_category: :subgroups do end it 'can successfully transfer the group' do + selected_group_path = selected_group.path + visit edit_group_path(selected_group) 
page.within('[data-testid="transfer-locations-dropdown"]') do click_button _('Select parent group') - fill_in _('Search'), with: target_group_name + fill_in _('Search'), with: target_group&.name || '' wait_for_requests - click_button(target_group_name || 'No parent group') + click_button(target_group&.name || 'No parent group') end click_button s_('GroupSettings|Transfer group') @@ -167,10 +171,15 @@ RSpec.describe 'Edit group settings', feature_category: :subgroups do end within('[data-testid="breadcrumb-links"]') do - expect(page).to have_content(target_group_name) if target_group_name + expect(page).to have_content(target_group.name) if target_group expect(page).to have_content(selected_group.name) end - expect(current_url).to include(selected_group.reload.full_path) + + if target_group + expect(current_url).to include("#{target_group.path}/#{selected_group_path}") + else + expect(current_url).to include(selected_group_path) + end end end @@ -178,14 +187,13 @@ RSpec.describe 'Edit group settings', feature_category: :subgroups do let(:selected_group) { create(:group, path: 'foo-subgroup', parent: group) } context 'when transfering to no parent group' do - let(:target_group_name) { nil } + let(:target_group) { nil } it_behaves_like 'can transfer the group' end context 'when transfering to a parent group' do let(:target_group) { create(:group, path: 'foo-parentgroup') } - let(:target_group_name) { target_group.name } before do target_group.add_owner(user) @@ -197,7 +205,7 @@ RSpec.describe 'Edit group settings', feature_category: :subgroups do context 'when transfering from a root group to a parent group' do let(:selected_group) { create(:group, path: 'foo-rootgroup') } - let(:target_group_name) { group.name } + let(:target_group) { group } it_behaves_like 'can transfer the group' end @@ -238,6 +246,67 @@ RSpec.describe 'Edit group settings', feature_category: :subgroups do end end + describe 'group README', :js do + let_it_be(:group) { create(:group) } + + context 
'with gitlab-profile project and README.md' do + let_it_be(:project) { create(:project, :readme, namespace: group) } + + it 'renders link to Group README and navigates to it on click' do + visit edit_group_path(group) + wait_for_requests + + click_link('README') + wait_for_requests + + expect(page).to have_current_path(project_blob_path(project, "#{project.default_branch}/README.md")) + expect(page).to have_text('README.md') + end + end + + context 'with gitlab-profile project and no README.md' do + let_it_be(:project) { create(:project, name: 'gitlab-profile', namespace: group) } + + it 'renders Add README button and allows user to create a README via the IDE' do + visit edit_group_path(group) + wait_for_requests + + expect(page).not_to have_selector('.ide') + + click_button('Add README') + + accept_gl_confirm("This will create a README.md for project #{group.readme_project.present.path_with_namespace}.", button_text: 'Add README') + wait_for_requests + + expect(page).to have_current_path("/-/ide/project/#{group.readme_project.present.path_with_namespace}/edit/main/-/README.md/") + + page.within('.ide') do + expect(page).to have_text('README.md') + end + end + end + + context 'with no gitlab-profile project and no README.md' do + it 'renders Add README button and allows user to create both the gitlab-profile project and README via the IDE' do + visit edit_group_path(group) + wait_for_requests + + expect(page).not_to have_selector('.ide') + + click_button('Add README') + + accept_gl_confirm("This will create a project #{group.full_path}/gitlab-profile and add a README.md.", button_text: 'Create and add README') + wait_for_requests + + expect(page).to have_current_path("/-/ide/project/#{group.full_path}/gitlab-profile/edit/main/-/README.md/") + + page.within('.ide') do + expect(page).to have_text('README.md') + end + end + end + end + def update_path(new_group_path) visit edit_group_path(group) diff --git a/spec/features/groups/issues_spec.rb 
b/spec/features/groups/issues_spec.rb index 00c0d4c3ebe..9f6fa146972 100644 --- a/spec/features/groups/issues_spec.rb +++ b/spec/features/groups/issues_spec.rb @@ -30,29 +30,15 @@ RSpec.describe 'Group issues page', feature_category: :subgroups do user_in_group end + it_behaves_like "it has an RSS link with current_user's feed token" it_behaves_like "an autodiscoverable RSS feed with current_user's feed token" - - # Note: The one from rss_shared_example.rb uses a css pseudo-class `:has` - # which is VERY experimental and only supported in Nokogiri used by Capybara - # However,`:js` option forces Capybara to use Selenium that doesn't support`:has` - context "it has an RSS button with current_user's feed token" do - it "shows the RSS button with current_user's feed token" do - expect(page).to have_link 'Subscribe to RSS feed', href: /feed_token=#{user.feed_token}/ - end - end end context 'when signed out' do let(:user) { nil } + it_behaves_like "it has an RSS link without a feed token" it_behaves_like "an autodiscoverable RSS feed without a feed token" - - # Note: please see the above - context "it has an RSS button without a feed token" do - it "shows the RSS button without a feed token" do - expect(page).not_to have_link 'Subscribe to RSS feed', href: /feed_token/ - end - end end end diff --git a/spec/features/groups/members/filter_members_spec.rb b/spec/features/groups/members/filter_members_spec.rb index dc33bb11bea..c2ec709576b 100644 --- a/spec/features/groups/members/filter_members_spec.rb +++ b/spec/features/groups/members/filter_members_spec.rb @@ -3,7 +3,7 @@ require 'spec_helper' RSpec.describe 'Groups > Members > Filter members', :js, feature_category: :subgroups do - include Spec::Support::Helpers::Features::MembersHelpers + include Features::MembersHelpers let(:user) { create(:user) } let(:nested_group_user) { create(:user) } diff --git a/spec/features/groups/members/leave_group_spec.rb b/spec/features/groups/members/leave_group_spec.rb index 
cfb1b24bccb..e1c2d8c0547 100644 --- a/spec/features/groups/members/leave_group_spec.rb +++ b/spec/features/groups/members/leave_group_spec.rb @@ -3,7 +3,7 @@ require 'spec_helper' RSpec.describe 'Groups > Members > Leave group', feature_category: :subgroups do - include Spec::Support::Helpers::Features::MembersHelpers + include Features::MembersHelpers include Spec::Support::Helpers::ModalHelpers let(:user) { create(:user) } diff --git a/spec/features/groups/members/list_members_spec.rb b/spec/features/groups/members/list_members_spec.rb index 1aea5a76b41..6e20f92c16b 100644 --- a/spec/features/groups/members/list_members_spec.rb +++ b/spec/features/groups/members/list_members_spec.rb @@ -3,7 +3,7 @@ require 'spec_helper' RSpec.describe 'Groups > Members > List members', :js, feature_category: :subgroups do - include Spec::Support::Helpers::Features::MembersHelpers + include Features::MembersHelpers let(:user1) { create(:user, name: 'John Doe') } let(:user2) { create(:user, name: 'Mary Jane') } diff --git a/spec/features/groups/members/manage_groups_spec.rb b/spec/features/groups/members/manage_groups_spec.rb index ee8786a2e36..f9c11dd0183 100644 --- a/spec/features/groups/members/manage_groups_spec.rb +++ b/spec/features/groups/members/manage_groups_spec.rb @@ -3,8 +3,8 @@ require 'spec_helper' RSpec.describe 'Groups > Members > Manage groups', :js, feature_category: :subgroups do - include Spec::Support::Helpers::Features::MembersHelpers - include Spec::Support::Helpers::Features::InviteMembersModalHelper + include Features::MembersHelpers + include Features::InviteMembersModalHelpers include Spec::Support::Helpers::ModalHelpers let_it_be(:user) { create(:user) } diff --git a/spec/features/groups/members/manage_members_spec.rb b/spec/features/groups/members/manage_members_spec.rb index 5cd5908b359..2d5a3dbb8f8 100644 --- a/spec/features/groups/members/manage_members_spec.rb +++ b/spec/features/groups/members/manage_members_spec.rb @@ -3,8 +3,8 @@ require 
'spec_helper' RSpec.describe 'Groups > Members > Manage members', feature_category: :subgroups do - include Spec::Support::Helpers::Features::MembersHelpers - include Spec::Support::Helpers::Features::InviteMembersModalHelper + include Features::MembersHelpers + include Features::InviteMembersModalHelpers include Spec::Support::Helpers::ModalHelpers let_it_be(:user1) { create(:user, name: 'John Doe') } diff --git a/spec/features/groups/members/master_adds_member_with_expiration_date_spec.rb b/spec/features/groups/members/master_adds_member_with_expiration_date_spec.rb index e9f80b05fa7..4f56c807ec8 100644 --- a/spec/features/groups/members/master_adds_member_with_expiration_date_spec.rb +++ b/spec/features/groups/members/master_adds_member_with_expiration_date_spec.rb @@ -3,8 +3,8 @@ require 'spec_helper' RSpec.describe 'Groups > Members > Owner adds member with expiration date', :js, feature_category: :subgroups do - include Spec::Support::Helpers::Features::MembersHelpers - include Spec::Support::Helpers::Features::InviteMembersModalHelper + include Features::MembersHelpers + include Features::InviteMembersModalHelpers let_it_be(:user1) { create(:user, name: 'John Doe') } let_it_be(:group) { create(:group) } diff --git a/spec/features/groups/members/search_members_spec.rb b/spec/features/groups/members/search_members_spec.rb index 6b2896b194c..80de1cabd1e 100644 --- a/spec/features/groups/members/search_members_spec.rb +++ b/spec/features/groups/members/search_members_spec.rb @@ -3,7 +3,7 @@ require 'spec_helper' RSpec.describe 'Search group member', :js, feature_category: :subgroups do - include Spec::Support::Helpers::Features::MembersHelpers + include Features::MembersHelpers let(:user) { create :user } let(:member) { create :user } diff --git a/spec/features/groups/members/sort_members_spec.rb b/spec/features/groups/members/sort_members_spec.rb index fa5a14f18b4..d2e5445deae 100644 --- a/spec/features/groups/members/sort_members_spec.rb +++ 
b/spec/features/groups/members/sort_members_spec.rb @@ -3,7 +3,7 @@ require 'spec_helper' RSpec.describe 'Groups > Members > Sort members', :js, feature_category: :subgroups do - include Spec::Support::Helpers::Features::MembersHelpers + include Features::MembersHelpers let(:owner) { create(:user, name: 'John Doe', created_at: 5.days.ago, last_activity_on: Date.today) } let(:developer) { create(:user, name: 'Mary Jane', created_at: 1.day.ago, last_sign_in_at: 5.days.ago, last_activity_on: Date.today - 5) } diff --git a/spec/features/groups/new_group_page_spec.rb b/spec/features/groups/new_group_page_spec.rb index 6d9513ce84f..1efdc3fff07 100644 --- a/spec/features/groups/new_group_page_spec.rb +++ b/spec/features/groups/new_group_page_spec.rb @@ -36,7 +36,7 @@ RSpec.describe 'New group page', :js, feature_category: :subgroups do end context 'for a new top-level group' do - it_behaves_like 'a dashboard page with sidebar', :new_group_path, :groups + it_behaves_like 'a "Your work" page with sidebar and breadcrumbs', :new_group_path, :groups end context 'for a new subgroup' do diff --git a/spec/features/groups/settings/packages_and_registries_spec.rb b/spec/features/groups/settings/packages_and_registries_spec.rb index a6c980f539c..8ea8dc9219a 100644 --- a/spec/features/groups/settings/packages_and_registries_spec.rb +++ b/spec/features/groups/settings/packages_and_registries_spec.rb @@ -61,7 +61,8 @@ RSpec.describe 'Group Package and registry settings', feature_category: :package wait_for_requests - expect(page).to be_axe_clean.within '[data-testid="packages-and-registries-group-settings"]' + expect(page).to be_axe_clean.within('[data-testid="packages-and-registries-group-settings"]') + .skipping :'link-in-text-block' end it 'has a Duplicate packages section', :js do diff --git a/spec/features/groups/show_spec.rb b/spec/features/groups/show_spec.rb index 5cab79b40cf..0f936173e5d 100644 --- a/spec/features/groups/show_spec.rb +++ b/spec/features/groups/show_spec.rb @@ 
-3,7 +3,7 @@ require 'spec_helper' RSpec.describe 'Group show page', feature_category: :subgroups do - include Spec::Support::Helpers::Features::InviteMembersModalHelper + include Features::InviteMembersModalHelpers let_it_be(:user) { create(:user) } let_it_be(:group) { create(:group) } diff --git a/spec/features/groups_spec.rb b/spec/features/groups_spec.rb index 8806d1c2219..e91e6673498 100644 --- a/spec/features/groups_spec.rb +++ b/spec/features/groups_spec.rb @@ -512,6 +512,20 @@ RSpec.describe 'Group', feature_category: :subgroups do end end + describe 'group README', :js do + context 'with gitlab-profile project and README.md' do + let_it_be(:group) { create(:group) } + let_it_be(:project) { create(:project, :readme, namespace: group) } + + it 'renders README block on group page' do + visit group_path(group) + wait_for_requests + + expect(page).to have_text('README.md') + end + end + end + def remove_with_confirm(button_text, confirm_with) click_button button_text fill_in 'confirm_name_input', with: confirm_with diff --git a/spec/features/ide_spec.rb b/spec/features/ide_spec.rb index 2ca8d3f7156..615f2a30b34 100644 --- a/spec/features/ide_spec.rb +++ b/spec/features/ide_spec.rb @@ -3,7 +3,7 @@ require 'spec_helper' RSpec.describe 'IDE', :js, feature_category: :web_ide do - include WebIdeSpecHelpers + include Features::WebIdeSpecHelpers let_it_be(:ide_iframe_selector) { '#ide iframe' } let_it_be(:normal_project) { create(:project, :repository) } diff --git a/spec/features/incidents/incident_timeline_events_spec.rb b/spec/features/incidents/incident_timeline_events_spec.rb index a4449ee2608..4d51ed652c9 100644 --- a/spec/features/incidents/incident_timeline_events_spec.rb +++ b/spec/features/incidents/incident_timeline_events_spec.rb @@ -43,9 +43,7 @@ RSpec.describe 'Incident timeline events', :js, feature_category: :incident_mana expect(page).to have_content(s_('Incident|No timeline items have been added yet.')) end - it 'submits event data on save with 
feature flag on' do - stub_feature_flags(incident_event_tags: true) - + it 'submits event data on save' do # Add event click_button(s_('Incident|Add new timeline event')) diff --git a/spec/features/incidents/user_uses_quick_actions_spec.rb b/spec/features/incidents/user_uses_quick_actions_spec.rb index 3740f2fca47..27facbcafe8 100644 --- a/spec/features/incidents/user_uses_quick_actions_spec.rb +++ b/spec/features/incidents/user_uses_quick_actions_spec.rb @@ -3,7 +3,7 @@ require 'spec_helper' RSpec.describe 'Incidents > User uses quick actions', :js, feature_category: :incident_management do - include Spec::Support::Helpers::Features::NotesHelpers + include Features::NotesHelpers describe 'incident-only commands' do let_it_be(:user) { create(:user) } diff --git a/spec/features/invites_spec.rb b/spec/features/invites_spec.rb index cb7e933e472..a4cb8a37e93 100644 --- a/spec/features/invites_spec.rb +++ b/spec/features/invites_spec.rb @@ -151,7 +151,7 @@ RSpec.describe 'Group or Project invitations', :aggregate_failures, feature_cate end end - context 'when inviting an unregistered user' do + context 'when inviting an unregistered user', :js do let(:new_user) { build_stubbed(:user) } let(:invite_email) { new_user.email } let(:group_invite) { create(:group_member, :invited, group: group, invite_email: invite_email, created_by: owner) } @@ -208,7 +208,7 @@ RSpec.describe 'Group or Project invitations', :aggregate_failures, feature_cate context 'email confirmation enabled' do context 'when user is not valid in sign up form' do - let(:new_user) { build_stubbed(:user, first_name: '', last_name: '') } + let(:new_user) { build_stubbed(:user, password: '11111111') } it 'fails sign up and redirects back to sign up', :aggregate_failures do expect { fill_in_sign_up_form(new_user) }.not_to change { User.count } diff --git a/spec/features/issuables/markdown_references/internal_references_spec.rb b/spec/features/issuables/markdown_references/internal_references_spec.rb index 
aeae76b1b77..04950c7c7d4 100644 --- a/spec/features/issuables/markdown_references/internal_references_spec.rb +++ b/spec/features/issuables/markdown_references/internal_references_spec.rb @@ -3,7 +3,7 @@ require 'spec_helper' RSpec.describe "Internal references", :js, feature_category: :team_planning do - include Spec::Support::Helpers::Features::NotesHelpers + include Features::NotesHelpers let(:private_project_user) { private_project.first_owner } let(:private_project) { create(:project, :private, :repository) } diff --git a/spec/features/issues/create_issue_for_discussions_in_merge_request_spec.rb b/spec/features/issues/create_issue_for_discussions_in_merge_request_spec.rb index 0bdb5930f30..c982052fc0e 100644 --- a/spec/features/issues/create_issue_for_discussions_in_merge_request_spec.rb +++ b/spec/features/issues/create_issue_for_discussions_in_merge_request_spec.rb @@ -20,6 +20,7 @@ RSpec.describe 'Resolving all open threads in a merge request from an issue', :j before do stub_feature_flags(moved_mr_sidebar: false) + stub_feature_flags(hide_create_issue_resolve_all: false) end describe 'as a user with access to the project' do @@ -33,7 +34,7 @@ RSpec.describe 'Resolving all open threads in a merge request from an issue', :j find('.discussions-counter .dropdown-toggle').click within('.discussions-counter') do - expect(page).to have_link(_("Create issue to resolve all threads"), href: new_project_issue_path(project, merge_request_to_resolve_discussions_of: merge_request.iid)) + expect(page).to have_link(_("Resolve all with new issue"), href: new_project_issue_path(project, merge_request_to_resolve_discussions_of: merge_request.iid)) end end @@ -44,7 +45,7 @@ RSpec.describe 'Resolving all open threads in a merge request from an issue', :j it 'hides the link for creating a new issue' do expect(page).not_to have_selector resolve_all_discussions_link_selector - expect(page).not_to have_content "Create issue to resolve all threads" + expect(page).not_to 
have_content "Resolve all with new issue" end end @@ -69,7 +70,7 @@ RSpec.describe 'Resolving all open threads in a merge request from an issue', :j end it 'does not show a link to create a new issue' do - expect(page).not_to have_link 'Create issue to resolve all threads' + expect(page).not_to have_link 'Resolve all with new issue' end end @@ -83,13 +84,13 @@ RSpec.describe 'Resolving all open threads in a merge request from an issue', :j end it 'has a link to resolve all threads by creating an issue' do - expect(page).to have_link 'Create issue to resolve all threads', href: new_project_issue_path(project, merge_request_to_resolve_discussions_of: merge_request.iid) + expect(page).to have_link 'Resolve all with new issue', href: new_project_issue_path(project, merge_request_to_resolve_discussions_of: merge_request.iid) end context 'creating an issue for threads' do before do page.within '.mr-state-widget' do - page.click_link 'Create issue to resolve all threads', href: new_project_issue_path(project, merge_request_to_resolve_discussions_of: merge_request.iid) + page.click_link 'Resolve all with new issue', href: new_project_issue_path(project, merge_request_to_resolve_discussions_of: merge_request.iid) wait_for_all_requests end diff --git a/spec/features/issues/form_spec.rb b/spec/features/issues/form_spec.rb index fa5dd8c893c..a9605b214bd 100644 --- a/spec/features/issues/form_spec.rb +++ b/spec/features/issues/form_spec.rb @@ -4,6 +4,7 @@ require 'spec_helper' RSpec.describe 'New/edit issue', :js, feature_category: :team_planning do include ActionView::Helpers::JavaScriptHelper + include ListboxHelpers let_it_be(:project) { create(:project, :repository) } let_it_be(:user) { create(:user) } @@ -249,19 +250,15 @@ RSpec.describe 'New/edit issue', :js, feature_category: :team_planning do describe 'displays issue type options in the dropdown' do shared_examples 'type option is visible' do |label:, identifier:| it "shows #{identifier} option", :aggregate_failures do 
- page.within('[data-testid="issue-type-select-dropdown"]') do - expect(page).to have_selector(%([data-testid="issue-type-#{identifier}-icon"])) - expect(page).to have_content(label) - end + wait_for_requests + expect_listbox_item(label) end end shared_examples 'type option is missing' do |label:, identifier:| it "does not show #{identifier} option", :aggregate_failures do - page.within('[data-testid="issue-type-select-dropdown"]') do - expect(page).not_to have_selector(%([data-testid="issue-type-#{identifier}-icon"])) - expect(page).not_to have_content(label) - end + wait_for_requests + expect_no_listbox_item(label) end end @@ -504,7 +501,7 @@ RSpec.describe 'New/edit issue', :js, feature_category: :team_planning do end describe 'when user has made changes' do - it 'shows a warning and can stay on page' do + it 'shows a warning and can stay on page', quarantine: 'https://gitlab.com/gitlab-org/gitlab/-/issues/397683' do content = 'new issue content' find('body').send_keys('e') diff --git a/spec/features/issues/incident_issue_spec.rb b/spec/features/issues/incident_issue_spec.rb index 41bbd79202f..145b51d207a 100644 --- a/spec/features/issues/incident_issue_spec.rb +++ b/spec/features/issues/incident_issue_spec.rb @@ -51,8 +51,8 @@ RSpec.describe 'Incident Detail', :js, feature_category: :team_planning do aggregate_failures 'when on summary tab (default tab)' do hidden_items = find_all('.js-issue-widgets') - # Linked Issues/MRs and comment box and emoji block - expect(hidden_items.count).to eq(3) + # Description footer + Linked Issues/MRs + comment box + emoji block + expect(hidden_items.count).to eq(4) expect(hidden_items).to all(be_visible) edit_button = find_all('[aria-label="Edit title and description"]') diff --git a/spec/features/issues/issue_sidebar_spec.rb b/spec/features/issues/issue_sidebar_spec.rb index 95277caf0f5..2ae347d4f9e 100644 --- a/spec/features/issues/issue_sidebar_spec.rb +++ b/spec/features/issues/issue_sidebar_spec.rb @@ -4,7 +4,7 @@ require 
'spec_helper' RSpec.describe 'Issue Sidebar', feature_category: :team_planning do include MobileHelpers - include Spec::Support::Helpers::Features::InviteMembersModalHelper + include Features::InviteMembersModalHelpers let_it_be(:group) { create(:group, :nested) } let_it_be(:project) { create(:project, :public, namespace: group) } @@ -86,12 +86,12 @@ RSpec.describe 'Issue Sidebar', feature_category: :team_planning do end within '.js-right-sidebar' do - find('.block.assignee').click(x: 0, y: 0) + find('.block.assignee').click(x: 0, y: 0, offset: 0) find('.block.assignee .edit-link').click end - expect(page.all('.dropdown-menu-user li').length).to eq(1) - expect(find('.dropdown-input-field').value).to eq(user2.name) + expect(page.all('.dropdown-menu-user li').length).to eq(6) + expect(find('.dropdown-input-field').value).to eq('') end it 'shows label text as "Apply" when assignees are changed' do diff --git a/spec/features/issues/move_spec.rb b/spec/features/issues/move_spec.rb index e2329e5e287..4512e88ae72 100644 --- a/spec/features/issues/move_spec.rb +++ b/spec/features/issues/move_spec.rb @@ -106,8 +106,8 @@ RSpec.describe 'issue move to another project', feature_category: :team_planning let(:service_desk_issue) { create(:issue, project: service_desk_project, author: ::User.support_bot) } before do - allow(Gitlab::IncomingEmail).to receive(:enabled?).and_return(true) - allow(Gitlab::IncomingEmail).to receive(:supports_wildcard?).and_return(true) + allow(Gitlab::Email::IncomingEmail).to receive(:enabled?).and_return(true) + allow(Gitlab::Email::IncomingEmail).to receive(:supports_wildcard?).and_return(true) regular_project.add_reporter(user) service_desk_project.add_reporter(user) diff --git a/spec/features/issues/rss_spec.rb b/spec/features/issues/rss_spec.rb index 36dffeded50..75e7cd03a65 100644 --- a/spec/features/issues/rss_spec.rb +++ b/spec/features/issues/rss_spec.rb @@ -25,10 +25,7 @@ RSpec.describe 'Project Issues RSS', :js, feature_category: 
:team_planning do visit path end - it "shows the RSS button with current_user's feed token" do - expect(page).to have_link 'Subscribe to RSS feed', href: /feed_token=#{user.feed_token}/ - end - + it_behaves_like "it has an RSS link with current_user's feed token" it_behaves_like "an autodiscoverable RSS feed with current_user's feed token" end @@ -37,10 +34,7 @@ RSpec.describe 'Project Issues RSS', :js, feature_category: :team_planning do visit path end - it "shows the RSS button without a feed token" do - expect(page).not_to have_link 'Subscribe to RSS feed', href: /feed_token/ - end - + it_behaves_like "it has an RSS link without a feed token" it_behaves_like "an autodiscoverable RSS feed without a feed token" end diff --git a/spec/features/issues/service_desk_spec.rb b/spec/features/issues/service_desk_spec.rb index 922ab95538b..0cadeb62fa2 100644 --- a/spec/features/issues/service_desk_spec.rb +++ b/spec/features/issues/service_desk_spec.rb @@ -10,8 +10,8 @@ RSpec.describe 'Service Desk Issue Tracker', :js, feature_category: :team_planni before do # The following two conditions equate to Gitlab::ServiceDesk.supported == true - allow(Gitlab::IncomingEmail).to receive(:enabled?).and_return(true) - allow(Gitlab::IncomingEmail).to receive(:supports_wildcard?).and_return(true) + allow(Gitlab::Email::IncomingEmail).to receive(:enabled?).and_return(true) + allow(Gitlab::Email::IncomingEmail).to receive(:supports_wildcard?).and_return(true) project.add_maintainer(user) sign_in(user) diff --git a/spec/features/issues/user_comments_on_issue_spec.rb b/spec/features/issues/user_comments_on_issue_spec.rb index 145fa3c4a9e..3ace560fb40 100644 --- a/spec/features/issues/user_comments_on_issue_spec.rb +++ b/spec/features/issues/user_comments_on_issue_spec.rb @@ -3,7 +3,7 @@ require "spec_helper" RSpec.describe "User comments on issue", :js, feature_category: :team_planning do - include Spec::Support::Helpers::Features::NotesHelpers + include Features::NotesHelpers 
let_it_be(:project) { create(:project, :public) } let_it_be(:issue) { create(:issue, project: project) } diff --git a/spec/features/issues/user_creates_branch_and_merge_request_spec.rb b/spec/features/issues/user_creates_branch_and_merge_request_spec.rb index 6325f226ccf..6d9eb3a7191 100644 --- a/spec/features/issues/user_creates_branch_and_merge_request_spec.rb +++ b/spec/features/issues/user_creates_branch_and_merge_request_spec.rb @@ -73,8 +73,8 @@ RSpec.describe 'User creates branch and merge request on issue page', :js, featu expect(page).to have_content('New merge request') expect(page).to have_content("From #{issue.to_branch_name} into #{project.default_branch}") - expect(page).to have_content("Closes ##{issue.iid}") expect(page).to have_field("Title", with: "Draft: Resolve \"Cherry-Coloured Funk\"") + expect(page).to have_field("Description", with: "Closes ##{issue.iid}") expect(page).to have_current_path(project_new_merge_request_path(project, merge_request: { source_branch: issue.to_branch_name, target_branch: project.default_branch, issue_iid: issue.iid })) end end @@ -98,8 +98,8 @@ RSpec.describe 'User creates branch and merge request on issue page', :js, featu expect(page).to have_content('New merge request') expect(page).to have_content("From #{branch_name} into #{project.default_branch}") - expect(page).to have_content("Closes ##{issue.iid}") expect(page).to have_field("Title", with: "Draft: Resolve \"Cherry-Coloured Funk\"") + expect(page).to have_field("Description", with: "Closes ##{issue.iid}") expect(page).to have_current_path(project_new_merge_request_path(project, merge_request: { source_branch: branch_name, target_branch: project.default_branch, issue_iid: issue.iid })) end end diff --git a/spec/features/issues/user_creates_issue_spec.rb b/spec/features/issues/user_creates_issue_spec.rb index c5d0791dc57..4e3968230b4 100644 --- a/spec/features/issues/user_creates_issue_spec.rb +++ b/spec/features/issues/user_creates_issue_spec.rb @@ -59,22 
+59,22 @@ RSpec.describe "User creates issue", feature_category: :team_planning do textarea = first(".gfm-form textarea") page.within(form) do - click_button("Preview") + click_link("Preview") - preview = find(".js-md-preview") # this element is findable only when the "Preview" link is clicked. + preview = find(".js-vue-md-preview") # this element is findable only when the "Preview" link is clicked. expect(preview).to have_content("Nothing to preview.") - click_button("Write") + click_link("Write") fill_in("Description", with: "Bug fixed :smile:") - click_button("Preview") + click_link("Preview") expect(preview).to have_css("gl-emoji") expect(textarea).not_to be_visible - click_button("Write") + click_link("Write") fill_in("Description", with: "/confidential") - click_button("Preview") + click_link("Preview") expect(form).to have_content('Makes this issue confidential.') end @@ -127,6 +127,8 @@ RSpec.describe "User creates issue", feature_category: :team_planning do end end + it_behaves_like 'edits content using the content editor' + context 'dropzone upload file', :js do before do visit new_project_issue_path(project) @@ -184,7 +186,7 @@ RSpec.describe "User creates issue", feature_category: :team_planning do end it 'pre-fills the issue type dropdown with issue type' do - expect(find('.js-issuable-type-filter-dropdown-wrap .dropdown-toggle-text')).to have_content('Issue') + expect(find('.js-issuable-type-filter-dropdown-wrap .gl-button-text')).to have_content('Issue') end it 'does not hide the milestone select' do @@ -200,7 +202,7 @@ RSpec.describe "User creates issue", feature_category: :team_planning do end it 'does not pre-fill the issue type dropdown with incident type' do - expect(find('.js-issuable-type-filter-dropdown-wrap .dropdown-toggle-text')).not_to have_content('Incident') + expect(find('.js-issuable-type-filter-dropdown-wrap .gl-button-text')).not_to have_content('Incident') end it 'shows the milestone select' do @@ -257,7 +259,7 @@ RSpec.describe 
"User creates issue", feature_category: :team_planning do end it 'pre-fills the issue type dropdown with incident type' do - expect(find('.js-issuable-type-filter-dropdown-wrap .dropdown-toggle-text')).to have_content('Incident') + expect(find('.js-issuable-type-filter-dropdown-wrap .gl-button-text')).to have_content('Incident') end it 'hides the epic select' do diff --git a/spec/features/issues/user_edits_issue_spec.rb b/spec/features/issues/user_edits_issue_spec.rb index 06c1b2afdb0..c6cedbc83cd 100644 --- a/spec/features/issues/user_edits_issue_spec.rb +++ b/spec/features/issues/user_edits_issue_spec.rb @@ -26,6 +26,8 @@ RSpec.describe "Issues > User edits issue", :js, feature_category: :team_plannin visit edit_project_issue_path(project, issue) end + it_behaves_like 'edits content using the content editor' + it "previews content", quarantine: 'https://gitlab.com/gitlab-org/gitlab/-/issues/391757' do form = first(".gfm-form") @@ -116,7 +118,7 @@ RSpec.describe "Issues > User edits issue", :js, feature_category: :team_plannin expect(issuable_form).to have_selector(markdown_field_focused_selector) page.within issuable_form do - click_on _('Viewing markdown') + click_on _('Editing markdown') click_on _('Rich text') end @@ -129,7 +131,7 @@ RSpec.describe "Issues > User edits issue", :js, feature_category: :team_plannin expect(issuable_form).to have_selector(content_editor_focused_selector) page.within issuable_form do - click_on _('Viewing rich text') + click_on _('Editing rich text') click_on _('Markdown') end diff --git a/spec/features/issues/user_uses_quick_actions_spec.rb b/spec/features/issues/user_uses_quick_actions_spec.rb index 963f1c56fef..e85a521e242 100644 --- a/spec/features/issues/user_uses_quick_actions_spec.rb +++ b/spec/features/issues/user_uses_quick_actions_spec.rb @@ -8,7 +8,7 @@ require 'spec_helper' # Because this kind of spec takes more time to run there is no need to add new ones # for each existing quick action unless they test something not 
tested by existing tests. RSpec.describe 'Issues > User uses quick actions', :js, feature_category: :team_planning do - include Spec::Support::Helpers::Features::NotesHelpers + include Features::NotesHelpers context "issuable common quick actions" do let(:new_url_opts) { {} } diff --git a/spec/features/jira_connect/branches_spec.rb b/spec/features/jira_connect/branches_spec.rb index c90c0d2dda9..25dc14a1dc9 100644 --- a/spec/features/jira_connect/branches_spec.rb +++ b/spec/features/jira_connect/branches_spec.rb @@ -75,7 +75,7 @@ RSpec.describe 'Create GitLab branches from Jira', :js, feature_category: :integ select_listbox_item(source_branch) fill_in 'Branch name', with: new_branch - click_on 'Create branch' + click_button 'Create branch' expect(page).to have_text('New branch was successfully created. You can now close this window and return to Jira.') diff --git a/spec/features/markdown/metrics_spec.rb b/spec/features/markdown/metrics_spec.rb index b5e42b16f87..45b5d2f78e8 100644 --- a/spec/features/markdown/metrics_spec.rb +++ b/spec/features/markdown/metrics_spec.rb @@ -120,7 +120,7 @@ RSpec.describe 'Metrics rendering', :js, :kubeclient, :use_clean_rails_memory_st allow(Grafana::ProxyService).to receive(:new).and_call_original end - it 'shows embedded metrics' do + it 'shows embedded metrics', quarantine: 'https://gitlab.com/gitlab-org/gitlab/-/issues/402973' do visit project_issue_path(project, issue) expect(page).to have_css('div.prometheus-graph') diff --git a/spec/features/merge_request/maintainer_edits_fork_spec.rb b/spec/features/merge_request/maintainer_edits_fork_spec.rb index b8dc3af8a6a..c9aa22e396b 100644 --- a/spec/features/merge_request/maintainer_edits_fork_spec.rb +++ b/spec/features/merge_request/maintainer_edits_fork_spec.rb @@ -4,7 +4,7 @@ require 'spec_helper' RSpec.describe 'a maintainer edits files on a source-branch of an MR from a fork', :js, :sidekiq_might_not_need_inline, feature_category: :code_review_workflow do - include 
Spec::Support::Helpers::Features::SourceEditorSpecHelpers + include Features::SourceEditorSpecHelpers include ProjectForksHelper let(:user) { create(:user, username: 'the-maintainer') } let(:target_project) { create(:project, :public, :repository) } diff --git a/spec/features/merge_request/user_accepts_merge_request_spec.rb b/spec/features/merge_request/user_accepts_merge_request_spec.rb index 8ff0c294b24..e3989a8a192 100644 --- a/spec/features/merge_request/user_accepts_merge_request_spec.rb +++ b/spec/features/merge_request/user_accepts_merge_request_spec.rb @@ -16,7 +16,7 @@ RSpec.describe 'User accepts a merge request', :js, :sidekiq_might_not_need_inli it 'when merge method is set to merge commit' do visit(merge_request_path(merge_request)) - click_button('Merge') + click_merge_button puts merge_request.short_merged_commit_sha @@ -31,7 +31,7 @@ RSpec.describe 'User accepts a merge request', :js, :sidekiq_might_not_need_inli visit(merge_request_path(merge_request)) - click_button('Merge') + click_merge_button expect(page).to have_content("Changes merged into #{merge_request.target_branch} with #{merge_request.short_merged_commit_sha}") end @@ -41,7 +41,7 @@ RSpec.describe 'User accepts a merge request', :js, :sidekiq_might_not_need_inli visit(merge_request_path(merge_request)) - click_button('Merge') + click_merge_button expect(page).to have_content("Changes merged into #{merge_request.target_branch} with #{merge_request.short_merged_commit_sha}") end @@ -55,7 +55,7 @@ RSpec.describe 'User accepts a merge request', :js, :sidekiq_might_not_need_inli it 'accepts a merge request' do check('Delete source branch') - click_button('Merge') + click_merge_button expect(page).to have_content('Changes merged into') expect(page).not_to have_selector('.js-remove-branch-button') @@ -72,7 +72,7 @@ RSpec.describe 'User accepts a merge request', :js, :sidekiq_might_not_need_inli end it 'accepts a merge request' do - click_button('Merge') + click_merge_button expect(page).to 
have_content('Changes merged into') expect(page).to have_selector('.js-remove-branch-button') @@ -90,7 +90,7 @@ RSpec.describe 'User accepts a merge request', :js, :sidekiq_might_not_need_inli it 'accepts a merge request' do check('Delete source branch') - click_button('Merge') + click_merge_button expect(page).to have_content('Changes merged into') expect(page).not_to have_selector('.js-remove-branch-button') @@ -112,9 +112,15 @@ RSpec.describe 'User accepts a merge request', :js, :sidekiq_might_not_need_inli find('[data-testid="widget_edit_commit_message"]').click fill_in('merge-message-edit', with: 'wow such merge') - click_button('Merge') + click_merge_button expect(page).to have_selector('.gl-badge', text: 'Merged') end end + + def click_merge_button + page.within('.mr-state-widget') do + click_button 'Merge' + end + end end diff --git a/spec/features/merge_request/user_edits_assignees_sidebar_spec.rb b/spec/features/merge_request/user_edits_assignees_sidebar_spec.rb index becbf0ccfa7..faef4f6f517 100644 --- a/spec/features/merge_request/user_edits_assignees_sidebar_spec.rb +++ b/spec/features/merge_request/user_edits_assignees_sidebar_spec.rb @@ -3,7 +3,7 @@ require 'spec_helper' RSpec.describe 'Merge request > User edits assignees sidebar', :js, feature_category: :code_review_workflow do - include Spec::Support::Helpers::Features::InviteMembersModalHelper + include Features::InviteMembersModalHelpers let(:project) { create(:project, :public, :repository) } let(:protected_branch) { create(:protected_branch, :maintainers_can_push, name: 'master', project: project) } diff --git a/spec/features/merge_request/user_edits_merge_request_spec.rb b/spec/features/merge_request/user_edits_merge_request_spec.rb index 839081d00dc..584a17ae33d 100644 --- a/spec/features/merge_request/user_edits_merge_request_spec.rb +++ b/spec/features/merge_request/user_edits_merge_request_spec.rb @@ -108,4 +108,6 @@ RSpec.describe 'User edits a merge request', :js, feature_category: 
:code_review end end end + + it_behaves_like 'edits content using the content editor' end diff --git a/spec/features/merge_request/user_merges_only_if_pipeline_succeeds_spec.rb b/spec/features/merge_request/user_merges_only_if_pipeline_succeeds_spec.rb index cdc00017ab3..19b5ad0fa84 100644 --- a/spec/features/merge_request/user_merges_only_if_pipeline_succeeds_spec.rb +++ b/spec/features/merge_request/user_merges_only_if_pipeline_succeeds_spec.rb @@ -13,11 +13,29 @@ RSpec.describe 'Merge request > User merges only if pipeline succeeds', :js, fea context 'project does not have CI enabled' do it 'allows MR to be merged' do + stub_feature_flags(auto_merge_labels_mr_widget: false) + visit project_merge_request_path(project, merge_request) wait_for_requests - expect(page).to have_button 'Merge' + page.within('.mr-state-widget') do + expect(page).to have_button 'Merge' + end + end + end + + context 'project does not have CI enabled and auto_merge_labels_mr_widget on' do + it 'allows MR to be merged' do + stub_feature_flags(auto_merge_labels_mr_widget: true) + + visit project_merge_request_path(project, merge_request) + + wait_for_requests + + page.within('.mr-state-widget') do + expect(page).to have_button 'Merge' + end end end @@ -33,6 +51,8 @@ RSpec.describe 'Merge request > User merges only if pipeline succeeds', :js, fea context 'when merge requests can only be merged if the pipeline succeeds' do before do project.update_attribute(:only_allow_merge_if_pipeline_succeeds, true) + + stub_feature_flags(auto_merge_labels_mr_widget: false) end context 'when CI is running' do @@ -56,7 +76,78 @@ RSpec.describe 'Merge request > User merges only if pipeline succeeds', :js, fea wait_for_requests - expect(page).not_to have_button('Merge') + expect(page).not_to have_button('Merge', exact: true) + expect(page).to have_content('Merge blocked: pipeline must succeed. 
Push a commit that fixes the failure or learn about other solutions.') + end + end + + context 'when CI canceled' do + let(:status) { :canceled } + + it 'does not allow MR to be merged' do + visit project_merge_request_path(project, merge_request) + + wait_for_requests + + expect(page).not_to have_button('Merge', exact: true) + expect(page).to have_content('Merge blocked: pipeline must succeed. Push a commit that fixes the failure or learn about other solutions.') + end + end + + context 'when CI succeeded' do + let(:status) { :success } + + it 'allows MR to be merged' do + visit project_merge_request_path(project, merge_request) + + wait_for_requests + + expect(page).to have_button('Merge', exact: true) + end + end + + context 'when CI skipped' do + let(:status) { :skipped } + + it 'does not allow MR to be merged' do + visit project_merge_request_path(project, merge_request) + + wait_for_requests + + expect(page).not_to have_button('Merge', exact: true) + end + end + end + + context 'when merge requests can only be merged if the pipeline succeeds with auto_merge_labels_mr_widget on' do + before do + project.update_attribute(:only_allow_merge_if_pipeline_succeeds, true) + + stub_feature_flags(auto_merge_labels_mr_widget: true) + end + + context 'when CI is running' do + let(:status) { :running } + + it 'does not allow to merge immediately' do + visit project_merge_request_path(project, merge_request) + + wait_for_requests + + expect(page).to have_button 'Set to auto-merge' + expect(page).not_to have_button '.js-merge-moment' + end + end + + context 'when CI failed' do + let(:status) { :failed } + + it 'does not allow MR to be merged' do + visit project_merge_request_path(project, merge_request) + + wait_for_requests + + expect(page).not_to have_button('Merge', exact: true) expect(page).to have_content('Merge blocked: pipeline must succeed. 
Push a commit that fixes the failure or learn about other solutions.') end end @@ -69,7 +160,7 @@ RSpec.describe 'Merge request > User merges only if pipeline succeeds', :js, fea wait_for_requests - expect(page).not_to have_button 'Merge' + expect(page).not_to have_button('Merge', exact: true) expect(page).to have_content('Merge blocked: pipeline must succeed. Push a commit that fixes the failure or learn about other solutions.') end end @@ -82,7 +173,7 @@ RSpec.describe 'Merge request > User merges only if pipeline succeeds', :js, fea wait_for_requests - expect(page).to have_button 'Merge' + expect(page).to have_button('Merge', exact: true) end end @@ -94,7 +185,7 @@ RSpec.describe 'Merge request > User merges only if pipeline succeeds', :js, fea wait_for_requests - expect(page).not_to have_button 'Merge' + expect(page).not_to have_button('Merge', exact: true) end end end @@ -102,6 +193,8 @@ RSpec.describe 'Merge request > User merges only if pipeline succeeds', :js, fea context 'when merge requests can be merged when the build failed' do before do project.update_attribute(:only_allow_merge_if_pipeline_succeeds, false) + + stub_feature_flags(auto_merge_labels_mr_widget: false) end context 'when CI is running' do @@ -126,8 +219,59 @@ RSpec.describe 'Merge request > User merges only if pipeline succeeds', :js, fea visit project_merge_request_path(project, merge_request) wait_for_requests + page.within('.mr-state-widget') do + expect(page).to have_button 'Merge' + end + end + end - expect(page).to have_button 'Merge' + context 'when CI succeeded' do + let(:status) { :success } + + it 'allows MR to be merged' do + visit project_merge_request_path(project, merge_request) + + wait_for_requests + + page.within('.mr-state-widget') do + expect(page).to have_button 'Merge' + end + end + end + end + + context 'when merge requests can be merged when the build failed with auto_merge_labels_mr_widget on' do + before do + 
project.update_attribute(:only_allow_merge_if_pipeline_succeeds, false) + + stub_feature_flags(auto_merge_labels_mr_widget: true) + end + + context 'when CI is running' do + let(:status) { :running } + + it 'allows MR to be merged immediately' do + visit project_merge_request_path(project, merge_request) + + wait_for_requests + + expect(page).to have_button 'Set to auto-merge' + + page.find('.js-merge-moment').click + expect(page).to have_content 'Merge immediately' + end + end + + context 'when CI failed' do + let(:status) { :failed } + + it 'allows MR to be merged' do + visit project_merge_request_path(project, merge_request) + + wait_for_requests + page.within('.mr-state-widget') do + expect(page).to have_button 'Merge' + end end end @@ -139,7 +283,9 @@ RSpec.describe 'Merge request > User merges only if pipeline succeeds', :js, fea wait_for_requests - expect(page).to have_button 'Merge' + page.within('.mr-state-widget') do + expect(page).to have_button 'Merge' + end end end end diff --git a/spec/features/merge_request/user_merges_when_pipeline_succeeds_spec.rb b/spec/features/merge_request/user_merges_when_pipeline_succeeds_spec.rb index 6d2c8f15a82..28a994545bd 100644 --- a/spec/features/merge_request/user_merges_when_pipeline_succeeds_spec.rb +++ b/spec/features/merge_request/user_merges_when_pipeline_succeeds_spec.rb @@ -26,6 +26,8 @@ RSpec.describe 'Merge request > User merges when pipeline succeeds', :js, featur context 'when there is active pipeline for merge request' do before do create(:ci_build, pipeline: pipeline) + stub_feature_flags(auto_merge_labels_mr_widget: false) + sign_in(user) visit project_merge_request_path(project, merge_request) end @@ -98,6 +100,69 @@ RSpec.describe 'Merge request > User merges when pipeline succeeds', :js, featur end end + context 'when there is active pipeline for merge request with auto_merge_labels_mr_widget on' do + before do + create(:ci_build, pipeline: pipeline) + stub_feature_flags(auto_merge_labels_mr_widget: 
true) + + sign_in(user) + visit project_merge_request_path(project, merge_request) + end + + describe 'enabling Merge when pipeline succeeds' do + shared_examples 'Set to auto-merge activator' do + it 'activates the Merge when pipeline succeeds feature' do + click_button "Set to auto-merge" + + expect(page).to have_content "Set by #{user.name} to be merged automatically when the pipeline succeeds" + expect(page).to have_content "Source branch will not be deleted" + expect(page).to have_selector ".js-cancel-auto-merge" + visit project_merge_request_path(project, merge_request) # Needed to refresh the page + expect(page).to have_content /enabled an automatic merge when the pipeline for \h{8} succeeds/i + end + end + + context "when enabled immediately" do + it_behaves_like 'Set to auto-merge activator' + end + + context 'when enabled after it was previously canceled' do + before do + click_button "Set to auto-merge" + + wait_for_requests + + click_button "Cancel auto-merge" + + wait_for_requests + + expect(page).to have_content 'Set to auto-merge' + end + + it_behaves_like 'Set to auto-merge activator' + end + + context 'when it was enabled and then canceled' do + let(:merge_request) do + create(:merge_request_with_diffs, + :merge_when_pipeline_succeeds, + source_project: project, + title: 'Bug NS-04', + author: user, + merge_user: user) + end + + before do + merge_request.merge_params['force_remove_source_branch'] = '0' + merge_request.save! 
+ click_button "Cancel auto-merge" + end + + it_behaves_like 'Set to auto-merge activator' + end + end + end + context 'when merge when pipeline succeeds is enabled' do let(:merge_request) do create(:merge_request_with_diffs, :simple, :merge_when_pipeline_succeeds, @@ -112,6 +177,7 @@ RSpec.describe 'Merge request > User merges when pipeline succeeds', :js, featur end before do + stub_feature_flags(auto_merge_labels_mr_widget: false) sign_in user visit project_merge_request_path(project, merge_request) end @@ -177,11 +243,53 @@ RSpec.describe 'Merge request > User merges when pipeline succeeds', :js, featur end end + context 'when merge when pipeline succeeds is enabled and auto_merge_labels_mr_widget on' do + let(:merge_request) do + create(:merge_request_with_diffs, :simple, :merge_when_pipeline_succeeds, + source_project: project, + author: user, + merge_user: user, + title: 'MepMep') + end + + let!(:build) do + create(:ci_build, pipeline: pipeline) + end + + before do + stub_feature_flags(auto_merge_labels_mr_widget: true) + sign_in user + visit project_merge_request_path(project, merge_request) + end + + it 'allows to cancel the automatic merge' do + click_button "Cancel auto-merge" + + expect(page).to have_button "Set to auto-merge" + + refresh + + expect(page).to have_content "canceled the automatic merge" + end + end + context 'when pipeline is not active' do it 'does not allow to enable merge when pipeline succeeds' do + stub_feature_flags(auto_merge_labels_mr_widget: false) + visit project_merge_request_path(project, merge_request) expect(page).not_to have_link 'Merge when pipeline succeeds' end end + + context 'when pipeline is not active and auto_merge_labels_mr_widget on' do + it 'does not allow to enable merge when pipeline succeeds' do + stub_feature_flags(auto_merge_labels_mr_widget: true) + + visit project_merge_request_path(project, merge_request) + + expect(page).not_to have_link 'Set to auto-merge' + end + end end diff --git 
a/spec/features/merge_request/user_resolves_conflicts_spec.rb b/spec/features/merge_request/user_resolves_conflicts_spec.rb index 7b1afd786f7..0f283f1194f 100644 --- a/spec/features/merge_request/user_resolves_conflicts_spec.rb +++ b/spec/features/merge_request/user_resolves_conflicts_spec.rb @@ -3,7 +3,7 @@ require 'spec_helper' RSpec.describe 'Merge request > User resolves conflicts', :js, feature_category: :code_review_workflow do - include Spec::Support::Helpers::Features::SourceEditorSpecHelpers + include Features::SourceEditorSpecHelpers let(:project) { create(:project, :repository) } let(:user) { project.creator } diff --git a/spec/features/merge_request/user_reverts_merge_request_spec.rb b/spec/features/merge_request/user_reverts_merge_request_spec.rb index e09a4569caf..da48a31abbd 100644 --- a/spec/features/merge_request/user_reverts_merge_request_spec.rb +++ b/spec/features/merge_request/user_reverts_merge_request_spec.rb @@ -13,7 +13,9 @@ RSpec.describe 'User reverts a merge request', :js, feature_category: :code_revi visit(merge_request_path(merge_request)) - click_button('Merge') + page.within('.mr-state-widget') do + click_button 'Merge' + end wait_for_requests diff --git a/spec/features/merge_request/user_sees_diff_spec.rb b/spec/features/merge_request/user_sees_diff_spec.rb index 12fdcf4859e..3fb3ef12fcc 100644 --- a/spec/features/merge_request/user_sees_diff_spec.rb +++ b/spec/features/merge_request/user_sees_diff_spec.rb @@ -60,7 +60,7 @@ RSpec.describe 'Merge request > User sees diff', :js, feature_category: :code_re visit diffs_project_merge_request_path(project, merge_request) page.within('.gl-alert') do - expect(page).to have_text("Too many changes to show. To preserve performance only 3 of 3+ files are displayed. Plain diff Email patch") + expect(page).to have_text("Some changes are not shown. For a faster browsing experience, only 3 of 3+ files are shown. Download one of the files below to see all changes. 
Plain diff Patches") end end end diff --git a/spec/features/merge_request/user_sees_merge_button_depending_on_unresolved_discussions_spec.rb b/spec/features/merge_request/user_sees_merge_button_depending_on_unresolved_discussions_spec.rb index b83580565e4..476be5ab599 100644 --- a/spec/features/merge_request/user_sees_merge_button_depending_on_unresolved_discussions_spec.rb +++ b/spec/features/merge_request/user_sees_merge_button_depending_on_unresolved_discussions_spec.rb @@ -21,7 +21,7 @@ feature_category: :code_review_workflow do context 'with unresolved threads' do it 'does not allow to merge' do - expect(page).not_to have_button 'Merge' + expect(page).not_to have_button('Merge', exact: true) expect(page).to have_content('all threads must be resolved') end end @@ -33,7 +33,7 @@ feature_category: :code_review_workflow do end it 'allows MR to be merged' do - expect(page).to have_button 'Merge' + expect(page).to have_button('Merge', exact: true) end end end @@ -46,7 +46,7 @@ feature_category: :code_review_workflow do context 'with unresolved threads' do it 'does not allow to merge' do - expect(page).to have_button 'Merge' + expect(page).to have_button('Merge', exact: true) end end @@ -57,7 +57,7 @@ feature_category: :code_review_workflow do end it 'allows MR to be merged' do - expect(page).to have_button 'Merge' + expect(page).to have_button('Merge', exact: true) end end end diff --git a/spec/features/merge_request/user_sees_merge_request_pipelines_spec.rb b/spec/features/merge_request/user_sees_merge_request_pipelines_spec.rb index 458746f0854..7d024103943 100644 --- a/spec/features/merge_request/user_sees_merge_request_pipelines_spec.rb +++ b/spec/features/merge_request/user_sees_merge_request_pipelines_spec.rb @@ -56,6 +56,8 @@ RSpec.describe 'Merge request > User sees pipelines triggered by merge request', end before do + stub_feature_flags(auto_merge_labels_mr_widget: false) + visit project_merge_request_path(project, merge_request) 
page.within('.merge-request-tabs') do @@ -185,6 +187,48 @@ RSpec.describe 'Merge request > User sees pipelines triggered by merge request', end end + context 'when a user created a merge request in the parent project with auto_merge_labels_mr_widget on' do + before do + stub_feature_flags(auto_merge_labels_mr_widget: true) + + visit project_merge_request_path(project, merge_request) + + page.within('.merge-request-tabs') do + click_link('Pipelines') + end + end + + context 'when a user merges a merge request in the parent project', :sidekiq_might_not_need_inline do + before do + click_link 'Overview' + click_button 'Set to auto-merge' + + wait_for_requests + end + + context 'when detached merge request pipeline is pending' do + it 'waits the head pipeline' do + expect(page).to have_content('to be merged automatically when the pipeline succeeds') + expect(page).to have_button('Cancel auto-merge') + end + end + + context 'when branch pipeline succeeds' do + before do + click_link 'Overview' + push_pipeline.reload.succeed! 
+ + wait_for_requests + end + + it 'waits the head pipeline' do + expect(page).to have_content('to be merged automatically when the pipeline succeeds') + expect(page).to have_button('Cancel auto-merge') + end + end + end + end + context 'when there are no `merge_requests` keyword in .gitlab-ci.yml' do let(:config) do { @@ -244,6 +288,8 @@ RSpec.describe 'Merge request > User sees pipelines triggered by merge request', before do forked_project.add_maintainer(user2) + stub_feature_flags(auto_merge_labels_mr_widget: false) + visit project_merge_request_path(project, merge_request) page.within('.merge-request-tabs') do diff --git a/spec/features/merge_request/user_sees_merge_widget_spec.rb b/spec/features/merge_request/user_sees_merge_widget_spec.rb index acf2893b513..eb293fbbd20 100644 --- a/spec/features/merge_request/user_sees_merge_widget_spec.rb +++ b/spec/features/merge_request/user_sees_merge_widget_spec.rb @@ -396,7 +396,9 @@ RSpec.describe 'Merge request > User sees merge widget', :js, feature_category: end it 'updates the MR widget', :sidekiq_might_not_need_inline do - click_button 'Merge' + page.within('.mr-state-widget') do + click_button 'Merge' + end expect(page).to have_content('An error occurred while merging') end @@ -452,7 +454,7 @@ RSpec.describe 'Merge request > User sees merge widget', :js, feature_category: wait_for_requests - expect(page).not_to have_button('Merge') + expect(page).not_to have_button('Merge', exact: true) expect(page).to have_content('Merging!') end end diff --git a/spec/features/merge_request/user_uses_quick_actions_spec.rb b/spec/features/merge_request/user_uses_quick_actions_spec.rb index 1a88918da65..1ec86948065 100644 --- a/spec/features/merge_request/user_uses_quick_actions_spec.rb +++ b/spec/features/merge_request/user_uses_quick_actions_spec.rb @@ -9,7 +9,7 @@ require 'spec_helper' # for each existing quick action unless they test something not tested by existing tests. 
RSpec.describe 'Merge request > User uses quick actions', :js, :use_clean_rails_redis_caching, feature_category: :code_review_workflow do - include Spec::Support::Helpers::Features::NotesHelpers + include Features::NotesHelpers let(:project) { create(:project, :public, :repository) } let(:user) { project.creator } diff --git a/spec/features/merge_request/user_views_open_merge_request_spec.rb b/spec/features/merge_request/user_views_open_merge_request_spec.rb index afa57cb0f8f..095607b61fb 100644 --- a/spec/features/merge_request/user_views_open_merge_request_spec.rb +++ b/spec/features/merge_request/user_views_open_merge_request_spec.rb @@ -56,25 +56,25 @@ RSpec.describe 'User views an open merge request', feature_category: :code_revie end it 'renders empty description preview' do - find('.gfm-form').fill_in(:merge_request_description, with: '') + fill_in(:merge_request_description, with: '') - page.within('.gfm-form') do - click_button('Preview') + page.within('.js-vue-markdown-field') do + click_link('Preview') - expect(find('.js-md-preview')).to have_content('Nothing to preview.') + expect(find('.js-vue-md-preview')).to have_content('Nothing to preview.') end end it 'renders description preview' do - find('.gfm-form').fill_in(:merge_request_description, with: ':+1: Nice') + fill_in(:merge_request_description, with: ':+1: Nice') - page.within('.gfm-form') do - click_button('Preview') + page.within('.js-vue-markdown-field') do + click_link('Preview') - expect(find('.js-md-preview')).to have_css('gl-emoji') + expect(find('.js-vue-md-preview')).to have_css('gl-emoji') end - expect(find('.gfm-form')).to have_css('.js-md-preview').and have_button('Write') + expect(find('.js-vue-markdown-field')).to have_css('.js-vue-md-preview').and have_link('Write') expect(find('#merge_request_description', visible: false)).not_to be_visible end end diff --git a/spec/features/merge_requests/user_sorts_merge_requests_spec.rb 
b/spec/features/merge_requests/user_sorts_merge_requests_spec.rb index 58d796f8288..5ccc24ebca1 100644 --- a/spec/features/merge_requests/user_sorts_merge_requests_spec.rb +++ b/spec/features/merge_requests/user_sorts_merge_requests_spec.rb @@ -4,7 +4,7 @@ require 'spec_helper' RSpec.describe 'User sorts merge requests', :js, feature_category: :code_review_workflow do include CookieHelper - include Spec::Support::Helpers::Features::SortingHelpers + include Features::SortingHelpers let!(:merge_request) { create(:merge_request, source_project: project, target_project: project) } let!(:merge_request2) do diff --git a/spec/features/milestones/user_deletes_milestone_spec.rb b/spec/features/milestones/user_deletes_milestone_spec.rb index 141e626c6f3..a7f2457de04 100644 --- a/spec/features/milestones/user_deletes_milestone_spec.rb +++ b/spec/features/milestones/user_deletes_milestone_spec.rb @@ -18,6 +18,7 @@ RSpec.describe "User deletes milestone", :js, feature_category: :team_planning d project.add_developer(user) visit(project_milestones_path(project)) click_link(milestone.title) + click_button("Milestone actions") click_button("Delete") click_button("Delete milestone") @@ -38,6 +39,7 @@ RSpec.describe "User deletes milestone", :js, feature_category: :team_planning d visit(group_milestones_path(group)) click_link(milestone_to_be_deleted.title) + click_button("Milestone actions") click_button("Delete") click_button("Delete milestone") diff --git a/spec/features/nav/pinned_nav_items_spec.rb b/spec/features/nav/pinned_nav_items_spec.rb new file mode 100644 index 00000000000..fa8224848f9 --- /dev/null +++ b/spec/features/nav/pinned_nav_items_spec.rb @@ -0,0 +1,179 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe 'Navigation menu item pinning', :js, feature_category: :navigation do + let_it_be(:user) { create(:user, use_new_navigation: true) } + + before do + sign_in(user) + end + + describe 'non-pinnable navigation menu' do + before do + visit 
explore_projects_path + end + + it 'does not show the Pinned section' do + within '#super-sidebar' do + expect(page).not_to have_content 'Pinned' + end + end + + it 'does not show the buttons to pin items' do + within '#super-sidebar' do + expect(page).not_to have_css 'button svg[data-testid="thumbtack-icon"]' + end + end + end + + describe 'pinnable navigation menu' do + let_it_be(:project) { create(:project) } + + before do + project.add_member(user, :owner) + visit project_path(project) + end + + it 'shows the Pinned section' do + within '#super-sidebar' do + expect(page).to have_content 'Pinned' + end + end + + it 'allows to pin items' do + within '#super-sidebar' do + click_on 'Manage' + add_pin('Activity') + add_pin('Members') + end + + within '[data-testid="pinned-nav-items"]' do + expect(page).to have_link 'Activity' + expect(page).to have_link 'Members' + end + end + + describe 'collapsible section' do + it 'shows the Pinned section as expanded by default' do + within '#super-sidebar' do + expect(page).to have_content 'Your pinned items appear here.' + end + end + + it 'maintains the collapsed/expanded state between page loads' do + within '#super-sidebar' do + click_on 'Pinned' + visit project_path(project) + expect(page).not_to have_content 'Your pinned items appear here.' + + click_on 'Pinned' + visit project_path(project) + expect(page).to have_content 'Your pinned items appear here.' 
+ end + end + end + + describe 'pinned items' do + before do + within '#super-sidebar' do + click_on 'Operate' + add_pin('Package Registry') + add_pin('Terraform modules') + wait_for_requests + end + end + + it 'can be unpinned from within the pinned section' do + within '[data-testid="pinned-nav-items"]' do + remove_pin('Package Registry') + expect(page).not_to have_content 'Package Registry' + end + end + + it 'can be unpinned from within its section' do + section = find("[data-testid=\"nav-item-link\"]", text: 'Operate') + + within(section.sibling('ul')) do + remove_pin('Terraform modules') + end + + within '[data-testid="pinned-nav-items"]' do + expect(page).not_to have_content 'Terraform modules' + end + end + + it 'can be reordered' do + within '[data-testid="pinned-nav-items"]' do + pinned_items = page.find_all('a').map(&:text) + item1 = page.find('a', text: 'Package Registry') + item2 = page.find('a', text: 'Terraform modules') + expect(pinned_items).to eq [item1.text, item2.text] + + drag_item(item2, to: item1) + + pinned_items = page.find_all('a').map(&:text) + expect(pinned_items).to eq [item2.text, item1.text] + end + end + end + end + + describe 'reordering pins with hidden pins from non-available features' do + let_it_be(:project_with_repo) { create(:project, :repository) } + let_it_be(:project_without_repo) { create(:project, :repository_disabled) } + + before do + project_with_repo.add_member(user, :owner) + project_without_repo.add_member(user, :owner) + + visit project_path(project_with_repo) + within '#super-sidebar' do + click_on 'Code' + add_pin('Commits') + click_on 'Manage' + add_pin('Activity') + add_pin('Members') + end + + visit project_path(project_without_repo) + within '[data-testid="pinned-nav-items"]' do + activity_item = page.find('a', text: 'Activity') + members_item = page.find('a', text: 'Members') + drag_item(members_item, to: activity_item) + end + + visit project_path(project_with_repo) + end + + it 'keeps pins of non-available 
features' do + within '[data-testid="pinned-nav-items"]' do + pinned_items = page.find_all('a').map(&:text) + expect(pinned_items).to eq %w[Commits Members Activity] + end + end + end + + private + + def add_pin(menu_item_title) + menu_item = find("[data-testid=\"nav-item-link\"]", text: menu_item_title) + menu_item.hover + menu_item.find("[data-testid=\"thumbtack-icon\"]").click + wait_for_requests + end + + def remove_pin(menu_item_title) + menu_item = find("[data-testid=\"nav-item-link\"]", text: menu_item_title) + menu_item.hover + menu_item.find("[data-testid=\"thumbtack-solid-icon\"]").click + wait_for_requests + end + + def drag_item(item, to:) + item.hover + drag_handle = item.find('[data-testid="grip-icon"]') + drag_handle.drag_to(to) + wait_for_requests + end +end diff --git a/spec/features/nav/top_nav_responsive_spec.rb b/spec/features/nav/top_nav_responsive_spec.rb index 9ac63c26ba0..ff8132dc087 100644 --- a/spec/features/nav/top_nav_responsive_spec.rb +++ b/spec/features/nav/top_nav_responsive_spec.rb @@ -4,7 +4,7 @@ require 'spec_helper' RSpec.describe 'top nav responsive', :js, feature_category: :navigation do include MobileHelpers - include Spec::Support::Helpers::Features::InviteMembersModalHelper + include Features::InviteMembersModalHelpers let_it_be(:user) { create(:user) } @@ -22,7 +22,7 @@ RSpec.describe 'top nav responsive', :js, feature_category: :navigation do context 'when menu is closed' do it 'has page content and hides responsive menu', :aggregate_failures do expect(page).to have_css('.page-title', text: 'Explore projects') - expect(page).to have_link('Dashboard', id: 'logo') + expect(page).to have_link('Homepage', id: 'logo') expect(page).to have_no_css('.top-nav-responsive') end @@ -35,7 +35,7 @@ RSpec.describe 'top nav responsive', :js, feature_category: :navigation do it 'hides everything and shows responsive menu', :aggregate_failures do expect(page).to have_no_css('.page-title', text: 'Explore projects') - expect(page).to 
have_no_link('Dashboard', id: 'logo') + expect(page).to have_no_link('Homepage', id: 'logo') within '.top-nav-responsive' do expect(page).to have_link(nil, href: search_path) diff --git a/spec/features/nav/top_nav_spec.rb b/spec/features/nav/top_nav_spec.rb index d2c0286cb4d..74022a4a976 100644 --- a/spec/features/nav/top_nav_spec.rb +++ b/spec/features/nav/top_nav_spec.rb @@ -3,7 +3,7 @@ require 'spec_helper' RSpec.describe 'top nav responsive', :js, feature_category: :navigation do - include Spec::Support::Helpers::Features::InviteMembersModalHelper + include Features::InviteMembersModalHelpers let_it_be(:user) { create(:user) } diff --git a/spec/features/populate_new_pipeline_vars_with_params_spec.rb b/spec/features/populate_new_pipeline_vars_with_params_spec.rb index b3ba0a874e9..bcda30ccb84 100644 --- a/spec/features/populate_new_pipeline_vars_with_params_spec.rb +++ b/spec/features/populate_new_pipeline_vars_with_params_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe "Populate new pipeline CI variables with url params", :js, feature_category: :pipeline_composition do +RSpec.describe "Populate new pipeline CI variables with url params", :js, feature_category: :secrets_management do let(:user) { create(:user) } let(:project) { create(:project) } let(:page_path) { new_project_pipeline_path(project) } diff --git a/spec/features/profiles/chat_names_spec.rb b/spec/features/profiles/chat_names_spec.rb index 9e1bd69a239..105d7d4ec16 100644 --- a/spec/features/profiles/chat_names_spec.rb +++ b/spec/features/profiles/chat_names_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe 'Profile > Chat', feature_category: :user_profile do +RSpec.describe 'Profile > Chat', feature_category: :integrations do let_it_be(:user) { create(:user) } before do @@ -11,7 +11,12 @@ RSpec.describe 'Profile > Chat', feature_category: :user_profile do describe 'uses authorization link' do let(:params) do - { team_id: 'T00', team_domain: 'my_chat_team', user_id: 'U01', 
user_name: 'my_chat_user' } + { + team_id: 'f1924a8db44ff3bb41c96424cdc20676', + team_domain: 'my_chat_team', + user_id: 'ay5sq51sebfh58ktrce5ijtcwy', + user_name: 'my_chat_user' + } end let!(:authorize_url) { ChatNames::AuthorizeUserService.new(params).execute } @@ -21,6 +26,13 @@ RSpec.describe 'Profile > Chat', feature_category: :user_profile do visit authorize_path end + it 'names the integration correctly' do + expect(page).to have_content( + 'An application called Mattermost slash commands is requesting access to your GitLab account' + ) + expect(page).to have_content('Authorize Mattermost slash commands') + end + context 'clicks authorize' do before do click_button 'Authorize' diff --git a/spec/features/profiles/list_users_comment_template_spec.rb b/spec/features/profiles/list_users_comment_template_spec.rb new file mode 100644 index 00000000000..85e455ba988 --- /dev/null +++ b/spec/features/profiles/list_users_comment_template_spec.rb @@ -0,0 +1,21 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe 'Profile > Comment templates > List users comment templates', :js, + feature_category: :user_profile do + let_it_be(:user) { create(:user) } + let_it_be(:saved_reply) { create(:saved_reply, user: user) } + + before do + sign_in(user) + end + + it 'shows the user a list of their comment templates' do + visit profile_comment_templates_path + + expect(page).to have_content('My comment templates (1)') + expect(page).to have_content(saved_reply.name) + expect(page).to have_content(saved_reply.content) + end +end diff --git a/spec/features/profiles/list_users_saved_replies_spec.rb b/spec/features/profiles/list_users_saved_replies_spec.rb deleted file mode 100644 index 4f3678f8051..00000000000 --- a/spec/features/profiles/list_users_saved_replies_spec.rb +++ /dev/null @@ -1,21 +0,0 @@ -# frozen_string_literal: true - -require 'spec_helper' - -RSpec.describe 'Profile > Notifications > List users saved replies', :js, - feature_category: 
:user_profile do - let_it_be(:user) { create(:user) } - let_it_be(:saved_reply) { create(:saved_reply, user: user) } - - before do - sign_in(user) - end - - it 'shows the user a list of their saved replies' do - visit profile_saved_replies_path - - expect(page).to have_content('My saved replies (1)') - expect(page).to have_content(saved_reply.name) - expect(page).to have_content(saved_reply.content) - end -end diff --git a/spec/features/profiles/personal_access_tokens_spec.rb b/spec/features/profiles/personal_access_tokens_spec.rb index a050e87241b..65fe1330be2 100644 --- a/spec/features/profiles/personal_access_tokens_spec.rb +++ b/spec/features/profiles/personal_access_tokens_spec.rb @@ -4,7 +4,7 @@ require 'spec_helper' RSpec.describe 'Profile > Personal Access Tokens', :js, feature_category: :user_profile do include Spec::Support::Helpers::ModalHelpers - include Spec::Support::Helpers::AccessTokenHelpers + include Features::AccessTokenHelpers let(:user) { create(:user) } let(:pat_create_service) { double('PersonalAccessTokens::CreateService', execute: ServiceResponse.error(message: 'error', payload: { personal_access_token: PersonalAccessToken.new })) } diff --git a/spec/features/profiles/user_creates_comment_template_spec.rb b/spec/features/profiles/user_creates_comment_template_spec.rb new file mode 100644 index 00000000000..44e2b932c00 --- /dev/null +++ b/spec/features/profiles/user_creates_comment_template_spec.rb @@ -0,0 +1,29 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe 'Profile > Comment templates > User creates comment template', :js, + feature_category: :user_profile do + let_it_be(:user) { create(:user) } + + before do + sign_in(user) + + visit profile_comment_templates_path + + wait_for_requests + end + + it 'shows the user a list of their saved replies' do + find('[data-testid="comment-template-name-input"]').set('test') + find('[data-testid="comment-template-content-input"]').set('Test content') + + click_button 
'Save' + + wait_for_requests + + expect(page).to have_content('My comment templates (1)') + expect(page).to have_content('test') + expect(page).to have_content('Test content') + end +end diff --git a/spec/features/profiles/user_creates_saved_reply_spec.rb b/spec/features/profiles/user_creates_saved_reply_spec.rb deleted file mode 100644 index 1d851b5cea0..00000000000 --- a/spec/features/profiles/user_creates_saved_reply_spec.rb +++ /dev/null @@ -1,29 +0,0 @@ -# frozen_string_literal: true - -require 'spec_helper' - -RSpec.describe 'Profile > Saved replies > User creates saved reply', :js, - feature_category: :user_profile do - let_it_be(:user) { create(:user) } - - before do - sign_in(user) - - visit profile_saved_replies_path - - wait_for_requests - end - - it 'shows the user a list of their saved replies' do - find('[data-testid="saved-reply-name-input"]').set('test') - find('[data-testid="saved-reply-content-input"]').set('Test content') - - click_button 'Save' - - wait_for_requests - - expect(page).to have_content('My saved replies (1)') - expect(page).to have_content('test') - expect(page).to have_content('Test content') - end -end diff --git a/spec/features/profiles/user_deletes_comment_template_spec.rb b/spec/features/profiles/user_deletes_comment_template_spec.rb new file mode 100644 index 00000000000..7ef857e9622 --- /dev/null +++ b/spec/features/profiles/user_deletes_comment_template_spec.rb @@ -0,0 +1,28 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe 'Profile > Comment templates > User deletes comment template', :js, + feature_category: :user_profile do + let_it_be(:user) { create(:user) } + let_it_be(:saved_reply) { create(:saved_reply, user: user) } + + before do + sign_in(user) + end + + it 'shows the user a list of their comment template' do + visit profile_comment_templates_path + + click_button 'Comment template actions' + find('[data-testid="comment-template-delete-btn"]').click + + page.within('.gl-modal') do + 
click_button 'Delete' + end + + wait_for_requests + + expect(page).not_to have_content(saved_reply.name) + end +end diff --git a/spec/features/profiles/user_deletes_saved_reply_spec.rb b/spec/features/profiles/user_deletes_saved_reply_spec.rb deleted file mode 100644 index 35bd6018ee3..00000000000 --- a/spec/features/profiles/user_deletes_saved_reply_spec.rb +++ /dev/null @@ -1,27 +0,0 @@ -# frozen_string_literal: true - -require 'spec_helper' - -RSpec.describe 'Profile > Saved replies > User deletes saved reply', :js, - feature_category: :user_profile do - let_it_be(:user) { create(:user) } - let_it_be(:saved_reply) { create(:saved_reply, user: user) } - - before do - sign_in(user) - end - - it 'shows the user a list of their saved replies' do - visit profile_saved_replies_path - - find('[data-testid="saved-reply-delete-btn"]').click - - page.within('.gl-modal') do - click_button 'Delete' - end - - wait_for_requests - - expect(page).not_to have_content(saved_reply.name) - end -end diff --git a/spec/features/profiles/user_edit_profile_spec.rb b/spec/features/profiles/user_edit_profile_spec.rb index 196134a0bda..8c9d73f9c78 100644 --- a/spec/features/profiles/user_edit_profile_spec.rb +++ b/spec/features/profiles/user_edit_profile_spec.rb @@ -3,7 +3,7 @@ require 'spec_helper' RSpec.describe 'User edit profile', feature_category: :user_profile do - include Spec::Support::Helpers::Features::NotesHelpers + include Features::NotesHelpers let_it_be(:user) { create(:user) } @@ -97,8 +97,8 @@ RSpec.describe 'User edit profile', feature_category: :user_profile do expect(page).to have_content('Website url is not a valid URL') end - it 'validates that the dicord id has a valid length', :js do - valid_dicord_id = '123456789123456789' + it 'validates that the discord id has a valid length', :js do + valid_discord_id = '123456789123456789' too_short_discord_id = '123456' too_long_discord_id = '123456789abcdefghijkl' @@ -108,12 +108,12 @@ RSpec.describe 'User edit profile', 
feature_category: :user_profile do fill_in 'user_discord', with: too_long_discord_id expect(page).to have_content('Discord ID is too long') - fill_in 'user_discord', with: valid_dicord_id + fill_in 'user_discord', with: valid_discord_id submit_settings expect(user.reload).to have_attributes( - discord: valid_dicord_id + discord: valid_discord_id ) end @@ -297,7 +297,7 @@ RSpec.describe 'User edit profile', feature_category: :user_profile do end page.within '.dropdown-menu-user' do - expect(page).to have_content("#{user.name} (Busy)") + expect(page).to have_content("#{user.name} Busy") end end @@ -308,7 +308,7 @@ RSpec.describe 'User edit profile', feature_category: :user_profile do visit project_issue_path(project, issue) wait_for_requests - expect(page.find('.issuable-assignees')).to have_content("#{user.name} (Busy)") + expect(page.find('.issuable-assignees')).to have_content("#{user.name} Busy") end end end diff --git a/spec/features/profiles/user_updates_comment_template_spec.rb b/spec/features/profiles/user_updates_comment_template_spec.rb new file mode 100644 index 00000000000..2e6bfdcc407 --- /dev/null +++ b/spec/features/profiles/user_updates_comment_template_spec.rb @@ -0,0 +1,30 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe 'Profile > Comment templates > User updated comment template', :js, + feature_category: :user_profile do + let_it_be(:user) { create(:user) } + let_it_be(:saved_reply) { create(:saved_reply, user: user) } + + before do + sign_in(user) + + visit profile_comment_templates_path + + wait_for_requests + end + + it 'shows the user a list of their comment template' do + click_button 'Comment template actions' + + find('[data-testid="comment-template-edit-btn"]').click + find('[data-testid="comment-template-name-input"]').set('test') + + click_button 'Save' + + wait_for_requests + + expect(page).to have_selector('[data-testid="comment-template-name"]', text: 'test') + end +end diff --git 
a/spec/features/profiles/user_updates_saved_reply_spec.rb b/spec/features/profiles/user_updates_saved_reply_spec.rb deleted file mode 100644 index e341076ed0a..00000000000 --- a/spec/features/profiles/user_updates_saved_reply_spec.rb +++ /dev/null @@ -1,28 +0,0 @@ -# frozen_string_literal: true - -require 'spec_helper' - -RSpec.describe 'Profile > Saved replies > User updated saved reply', :js, - feature_category: :user_profile do - let_it_be(:user) { create(:user) } - let_it_be(:saved_reply) { create(:saved_reply, user: user) } - - before do - sign_in(user) - - visit profile_saved_replies_path - - wait_for_requests - end - - it 'shows the user a list of their saved replies' do - find('[data-testid="saved-reply-edit-btn"]').click - find('[data-testid="saved-reply-name-input"]').set('test') - - click_button 'Save' - - wait_for_requests - - expect(page).to have_selector('[data-testid="saved-reply-name"]', text: 'test') - end -end diff --git a/spec/features/profiles/user_uses_comment_template_spec.rb b/spec/features/profiles/user_uses_comment_template_spec.rb new file mode 100644 index 00000000000..b426e3fb433 --- /dev/null +++ b/spec/features/profiles/user_uses_comment_template_spec.rb @@ -0,0 +1,29 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe 'User uses comment template', :js, + feature_category: :user_profile do + let_it_be(:project) { create(:project, :repository) } + let_it_be(:merge_request) { create(:merge_request, source_project: project) } + let_it_be(:user) { create(:user) } + let_it_be(:saved_reply) { create(:saved_reply, user: user) } + + before do + project.add_owner(user) + + sign_in(user) + end + + it 'applies comment template' do + visit project_merge_request_path(merge_request.project, merge_request) + + find('[data-testid="comment-template-dropdown-toggle"]').click + + wait_for_requests + + find('.gl-new-dropdown-item').click + + expect(find('.note-textarea').value).to eq(saved_reply.content) + end +end diff --git 
a/spec/features/profiles/user_uses_saved_reply_spec.rb b/spec/features/profiles/user_uses_saved_reply_spec.rb deleted file mode 100644 index f9a4f4a7fa6..00000000000 --- a/spec/features/profiles/user_uses_saved_reply_spec.rb +++ /dev/null @@ -1,29 +0,0 @@ -# frozen_string_literal: true - -require 'spec_helper' - -RSpec.describe 'User uses saved reply', :js, - feature_category: :user_profile do - let_it_be(:project) { create(:project, :repository) } - let_it_be(:merge_request) { create(:merge_request, source_project: project) } - let_it_be(:user) { create(:user) } - let_it_be(:saved_reply) { create(:saved_reply, user: user) } - - before do - project.add_owner(user) - - sign_in(user) - end - - it 'applies saved reply' do - visit project_merge_request_path(merge_request.project, merge_request) - - find('[data-testid="saved-replies-dropdown-toggle"]').click - - wait_for_requests - - find('[data-testid="saved-reply-dropdown-item"]').click - - expect(find('.note-textarea').value).to eq(saved_reply.content) - end -end diff --git a/spec/features/project_group_variables_spec.rb b/spec/features/project_group_variables_spec.rb index 8d600edadde..966c05bb4cb 100644 --- a/spec/features/project_group_variables_spec.rb +++ b/spec/features/project_group_variables_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe 'Project group variables', :js, feature_category: :pipeline_composition do +RSpec.describe 'Project group variables', :js, feature_category: :secrets_management do let(:user) { create(:user) } let(:group) { create(:group) } let(:subgroup) { create(:group, parent: group) } diff --git a/spec/features/project_variables_spec.rb b/spec/features/project_variables_spec.rb index 69b8408dcd6..c4f78bf4ea3 100644 --- a/spec/features/project_variables_spec.rb +++ b/spec/features/project_variables_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe 'Project variables', :js, feature_category: :pipeline_composition do +RSpec.describe 'Project variables', :js, 
feature_category: :secrets_management do let(:user) { create(:user) } let(:project) { create(:project) } let(:variable) { create(:ci_variable, key: 'test_key', value: 'test_value', masked: true) } diff --git a/spec/features/projects/blobs/blame_spec.rb b/spec/features/projects/blobs/blame_spec.rb index d3558af81b8..6f5bf8ac26e 100644 --- a/spec/features/projects/blobs/blame_spec.rb +++ b/spec/features/projects/blobs/blame_spec.rb @@ -44,7 +44,7 @@ RSpec.describe 'File blame', :js, feature_category: :projects do context 'when blob length is over the blame range limit' do before do - stub_const('Projects::BlameService::PER_PAGE', 2) + stub_const('Gitlab::Git::BlamePagination::PAGINATION_PER_PAGE', 2) end it 'displays two first lines of the file with pagination' do @@ -112,7 +112,7 @@ RSpec.describe 'File blame', :js, feature_category: :projects do context 'when streaming is enabled' do before do - stub_const('Projects::BlameService::STREAMING_PER_PAGE', 50) + stub_const('Gitlab::Git::BlamePagination::STREAMING_PER_PAGE', 50) end it_behaves_like 'a full blame page' @@ -143,7 +143,7 @@ RSpec.describe 'File blame', :js, feature_category: :projects do context 'when blob length is over global max page limit' do before do - stub_const('Projects::BlameService::PER_PAGE', 200) + stub_const('Gitlab::Git::BlamePagination::PAGINATION_PER_PAGE', 200) end let(:path) { 'files/markdown/ruby-style-guide.md' } diff --git a/spec/features/projects/blobs/edit_spec.rb b/spec/features/projects/blobs/edit_spec.rb index 144b4ed85cd..2b6b09ccc10 100644 --- a/spec/features/projects/blobs/edit_spec.rb +++ b/spec/features/projects/blobs/edit_spec.rb @@ -3,9 +3,9 @@ require 'spec_helper' RSpec.describe 'Editing file blob', :js, feature_category: :projects do - include Spec::Support::Helpers::Features::SourceEditorSpecHelpers + include Features::SourceEditorSpecHelpers include TreeHelper - include BlobSpecHelpers + include Features::BlobSpecHelpers let_it_be(:project) { create(:project, :public, 
:repository) } let_it_be(:merge_request) { create(:merge_request, source_project: project, source_branch: 'feature', target_branch: 'master') } diff --git a/spec/features/projects/blobs/user_views_pipeline_editor_button_spec.rb b/spec/features/projects/blobs/user_views_pipeline_editor_button_spec.rb index 2f67e909543..3b383793de2 100644 --- a/spec/features/projects/blobs/user_views_pipeline_editor_button_spec.rb +++ b/spec/features/projects/blobs/user_views_pipeline_editor_button_spec.rb @@ -3,7 +3,7 @@ require 'spec_helper' RSpec.describe 'User views pipeline editor button on root ci config file', :js, feature_category: :projects do - include BlobSpecHelpers + include Features::BlobSpecHelpers let_it_be(:user) { create(:user) } let_it_be(:project) { create(:project, :public, :repository) } diff --git a/spec/features/projects/branches/user_creates_branch_spec.rb b/spec/features/projects/branches/user_creates_branch_spec.rb index 60bd77393e9..5aa10a8d4b0 100644 --- a/spec/features/projects/branches/user_creates_branch_spec.rb +++ b/spec/features/projects/branches/user_creates_branch_spec.rb @@ -3,7 +3,7 @@ require 'spec_helper' RSpec.describe 'User creates branch', :js, feature_category: :projects do - include Spec::Support::Helpers::Features::BranchesHelpers + include Features::BranchesHelpers let_it_be(:group) { create(:group, :public) } let_it_be(:user) { create(:user) } @@ -81,7 +81,9 @@ RSpec.describe 'User creates branch', :js, feature_category: :projects do it 'does not create new branch' do invalid_branch_name = '1.0 stable' - create_branch(invalid_branch_name) + fill_in("branch_name", with: invalid_branch_name) + find('body').click + click_button("Create branch") expect(page).to have_content('Branch name is invalid') expect(page).to have_content("can't contain spaces") diff --git a/spec/features/projects/ci/editor_spec.rb b/spec/features/projects/ci/editor_spec.rb index ed03491d69a..20c1ef1b21f 100644 --- a/spec/features/projects/ci/editor_spec.rb +++ 
b/spec/features/projects/ci/editor_spec.rb @@ -3,7 +3,7 @@ require 'spec_helper' RSpec.describe 'Pipeline Editor', :js, feature_category: :pipeline_composition do - include Spec::Support::Helpers::Features::SourceEditorSpecHelpers + include Features::SourceEditorSpecHelpers let(:project) { create(:project_empty_repo, :public) } let(:user) { create(:user) } diff --git a/spec/features/projects/ci/lint_spec.rb b/spec/features/projects/ci/lint_spec.rb index aa9556761c6..bc370a296e4 100644 --- a/spec/features/projects/ci/lint_spec.rb +++ b/spec/features/projects/ci/lint_spec.rb @@ -3,7 +3,7 @@ require 'spec_helper' RSpec.describe 'CI Lint', :js, feature_category: :pipeline_composition do - include Spec::Support::Helpers::Features::SourceEditorSpecHelpers + include Features::SourceEditorSpecHelpers let_it_be(:project) { create(:project, :repository) } let_it_be(:user) { create(:user) } diff --git a/spec/features/projects/clusters/gcp_spec.rb b/spec/features/projects/clusters/gcp_spec.rb index 114182982e2..f9195904ea3 100644 --- a/spec/features/projects/clusters/gcp_spec.rb +++ b/spec/features/projects/clusters/gcp_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe 'Gcp Cluster', :js, feature_category: :kubernetes_management do +RSpec.describe 'Gcp Cluster', :js, feature_category: :deployment_management do include GoogleApi::CloudPlatformHelpers let(:project) { create(:project) } diff --git a/spec/features/projects/clusters/user_spec.rb b/spec/features/projects/clusters/user_spec.rb index 34fc0a76c7f..eb2601bb85f 100644 --- a/spec/features/projects/clusters/user_spec.rb +++ b/spec/features/projects/clusters/user_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe 'User Cluster', :js, feature_category: :kubernetes_management do +RSpec.describe 'User Cluster', :js, feature_category: :deployment_management do include GoogleApi::CloudPlatformHelpers let(:project) { create(:project) } diff --git 
a/spec/features/projects/commit/comments/user_adds_comment_spec.rb b/spec/features/projects/commit/comments/user_adds_comment_spec.rb index c53ac27bb5f..91b838116e9 100644 --- a/spec/features/projects/commit/comments/user_adds_comment_spec.rb +++ b/spec/features/projects/commit/comments/user_adds_comment_spec.rb @@ -3,7 +3,7 @@ require "spec_helper" RSpec.describe "User adds a comment on a commit", :js, feature_category: :source_code_management do - include Spec::Support::Helpers::Features::NotesHelpers + include Features::NotesHelpers include RepoHelpers let(:comment_text) { "XML attached" } diff --git a/spec/features/projects/commit/comments/user_deletes_comments_spec.rb b/spec/features/projects/commit/comments/user_deletes_comments_spec.rb index a1e7ddb4d6e..e23eb1cada8 100644 --- a/spec/features/projects/commit/comments/user_deletes_comments_spec.rb +++ b/spec/features/projects/commit/comments/user_deletes_comments_spec.rb @@ -3,7 +3,7 @@ require "spec_helper" RSpec.describe "User deletes comments on a commit", :js, feature_category: :source_code_management do - include Spec::Support::Helpers::Features::NotesHelpers + include Features::NotesHelpers include Spec::Support::Helpers::ModalHelpers include RepoHelpers diff --git a/spec/features/projects/commit/comments/user_edits_comments_spec.rb b/spec/features/projects/commit/comments/user_edits_comments_spec.rb index 9019a981a18..b0b963de91b 100644 --- a/spec/features/projects/commit/comments/user_edits_comments_spec.rb +++ b/spec/features/projects/commit/comments/user_edits_comments_spec.rb @@ -3,7 +3,7 @@ require "spec_helper" RSpec.describe "User edits a comment on a commit", :js, feature_category: :source_code_management do - include Spec::Support::Helpers::Features::NotesHelpers + include Features::NotesHelpers include RepoHelpers let(:project) { create(:project, :repository) } diff --git a/spec/features/projects/commit/diff_notes_spec.rb b/spec/features/projects/commit/diff_notes_spec.rb index 
f29e0803f61..1f4358db9cd 100644 --- a/spec/features/projects/commit/diff_notes_spec.rb +++ b/spec/features/projects/commit/diff_notes_spec.rb @@ -8,18 +8,15 @@ RSpec.describe 'Commit diff', :js, feature_category: :source_code_management do let(:user) { create(:user) } let(:project) { create(:project, :public, :repository) } - using RSpec::Parameterized::TableSyntax - - where(:view, :async_diff_file_loading) do - 'inline' | true - 'inline' | false - 'parallel' | true - 'parallel' | false + where(:view) do + [ + ['inline'], + ['parallel'] + ] end with_them do before do - stub_feature_flags(async_commit_diff_files: async_diff_file_loading) project.add_maintainer(user) sign_in user visit project_commit_path(project, sample_commit.id, view: view) diff --git a/spec/features/projects/commit/user_comments_on_commit_spec.rb b/spec/features/projects/commit/user_comments_on_commit_spec.rb index 66a407b5ff6..c4019b4d123 100644 --- a/spec/features/projects/commit/user_comments_on_commit_spec.rb +++ b/spec/features/projects/commit/user_comments_on_commit_spec.rb @@ -3,7 +3,7 @@ require "spec_helper" RSpec.describe "User comments on commit", :js, feature_category: :source_code_management do - include Spec::Support::Helpers::Features::NotesHelpers + include Features::NotesHelpers include Spec::Support::Helpers::ModalHelpers include RepoHelpers diff --git a/spec/features/projects/compare_spec.rb b/spec/features/projects/compare_spec.rb index 8284299443f..4c13d23559b 100644 --- a/spec/features/projects/compare_spec.rb +++ b/spec/features/projects/compare_spec.rb @@ -11,7 +11,7 @@ RSpec.describe "Compare", :js, feature_category: :projects do sign_in user end - describe "branches" do + shared_examples "compare view of branches" do shared_examples 'compares branches' do it 'compares branches' do visit project_compare_index_path(project, from: 'master', to: 'master') @@ -114,7 +114,7 @@ RSpec.describe "Compare", :js, feature_category: :projects do click_button('Compare') 
page.within('[data-testid="too-many-changes-alert"]') do - expect(page).to have_text("Too many changes to show. To preserve performance only 3 of 3+ files are displayed.") + expect(page).to have_text("Some changes are not shown. For a faster browsing experience, only 3 of 3+ files are shown. Download one of the files below to see all changes.") end end end @@ -148,7 +148,7 @@ RSpec.describe "Compare", :js, feature_category: :projects do end end - describe "tags" do + shared_examples "compare view of tags" do it "compares tags" do visit project_compare_index_path(project, from: "master", to: "master") @@ -182,4 +182,17 @@ RSpec.describe "Compare", :js, feature_category: :projects do dropdown.all(".js-compare-#{dropdown_type}-dropdown .dropdown-item", text: selection).first.click end end + + it_behaves_like "compare view of branches" + it_behaves_like "compare view of tags" + + context "when super sidebar is enabled" do + before do + user.update!(use_new_navigation: true) + stub_feature_flags(super_sidebar_nav: true) + end + + it_behaves_like "compare view of branches" + it_behaves_like "compare view of tags" + end end diff --git a/spec/features/projects/container_registry_spec.rb b/spec/features/projects/container_registry_spec.rb index 98cf024afa8..08e6b097eb4 100644 --- a/spec/features/projects/container_registry_spec.rb +++ b/spec/features/projects/container_registry_spec.rb @@ -101,7 +101,11 @@ RSpec.describe 'Container Registry', :js, feature_category: :projects do first('[data-testid="additional-actions"]').click first('[data-testid="single-delete-button"]').click expect(find('.modal .modal-title')).to have_content _('Remove tag') + stub_container_registry_tags(repository: %r{my/image}, tags: ('1'..'19').to_a, with_manifest: true) find('.modal .modal-footer .btn-danger').click + + expect(page).to have_content '19 tags' + expect(page).not_to have_content '20 tags' end it('pagination navigate to the second page') do diff --git 
a/spec/features/projects/environments/environment_spec.rb b/spec/features/projects/environments/environment_spec.rb index 91401d19fd1..527a146ff73 100644 --- a/spec/features/projects/environments/environment_spec.rb +++ b/spec/features/projects/environments/environment_spec.rb @@ -94,6 +94,36 @@ RSpec.describe 'Environment', feature_category: :projects do expect(page).to have_link("#{build.name} (##{build.id})") end end + + context 'with related deployable present' do + let_it_be(:previous_pipeline) { create(:ci_pipeline, project: project) } + + let_it_be(:previous_build) do + create(:ci_build, :success, pipeline: previous_pipeline, environment: environment.name) + end + + let_it_be(:previous_deployment) do + create(:deployment, :success, environment: environment, deployable: previous_build) + end + + let_it_be(:pipeline) { create(:ci_pipeline, project: project) } + let_it_be(:build) { create(:ci_build, pipeline: pipeline, environment: environment.name) } + + let_it_be(:deployment) do + create(:deployment, :success, environment: environment, deployable: build) + end + + before do + visit_environment(environment) + end + + it 'shows deployment information and buttons', :js do + wait_for_requests + expect(page).to have_button('Re-deploy to environment') + expect(page).to have_button('Rollback environment') + expect(page).to have_link("#{build.name} (##{build.id})") + end + end end end diff --git a/spec/features/projects/files/dockerfile_dropdown_spec.rb b/spec/features/projects/files/dockerfile_dropdown_spec.rb index 1e05bdae204..ec1f03570d9 100644 --- a/spec/features/projects/files/dockerfile_dropdown_spec.rb +++ b/spec/features/projects/files/dockerfile_dropdown_spec.rb @@ -3,7 +3,7 @@ require 'spec_helper' RSpec.describe 'Projects > Files > User wants to add a Dockerfile file', :js, feature_category: :projects do - include Spec::Support::Helpers::Features::SourceEditorSpecHelpers + include Features::SourceEditorSpecHelpers before do project = create(:project, 
:repository) diff --git a/spec/features/projects/files/editing_a_file_spec.rb b/spec/features/projects/files/editing_a_file_spec.rb index 04f45de42cc..1f928da0427 100644 --- a/spec/features/projects/files/editing_a_file_spec.rb +++ b/spec/features/projects/files/editing_a_file_spec.rb @@ -3,7 +3,8 @@ require 'spec_helper' RSpec.describe 'Projects > Files > User wants to edit a file', feature_category: :projects do - let(:project) { create(:project, :repository) } + include ProjectForksHelper + let(:project) { create(:project, :repository, :public) } let(:user) { project.first_owner } let(:commit_params) do { @@ -17,17 +18,48 @@ RSpec.describe 'Projects > Files > User wants to edit a file', feature_category: } end - before do - sign_in user - visit project_edit_blob_path(project, - File.join(project.default_branch, '.gitignore')) + context 'when the user has write access' do + before do + sign_in user + visit project_edit_blob_path(project, + File.join(project.default_branch, '.gitignore')) + end + + it 'file has been updated since the user opened the edit page' do + Files::UpdateService.new(project, user, commit_params).execute + + click_button 'Commit changes' + + expect(page).to have_content 'Someone edited the file the same time you did.' 
+ end end - it 'file has been updated since the user opened the edit page' do - Files::UpdateService.new(project, user, commit_params).execute + context 'when the user does not have write access' do + let(:user) { create(:user) } + + context 'and the user has a fork of the project' do + let(:forked_project) { fork_project(project, user, namespace: user.namespace, repository: true) } + + before do + forked_project + sign_in user + visit project_edit_blob_path(project, + File.join(project.default_branch, '.gitignore')) + end + + context 'and the forked project is ahead of the upstream project' do + before do + Files::UpdateService.new(forked_project, user, commit_params).execute + end - click_button 'Commit changes' + it 'renders an error message' do + click_button 'Commit changes' - expect(page).to have_content 'Someone edited the file the same time you did.' + expect(page).to have_content( + %(Error: Can't edit this file. The fork and upstream project have diverged. Edit the file on the fork) + ) + end + end + end end end diff --git a/spec/features/projects/files/gitignore_dropdown_spec.rb b/spec/features/projects/files/gitignore_dropdown_spec.rb index 5e11a94e65b..eedb79167bd 100644 --- a/spec/features/projects/files/gitignore_dropdown_spec.rb +++ b/spec/features/projects/files/gitignore_dropdown_spec.rb @@ -3,7 +3,7 @@ require 'spec_helper' RSpec.describe 'Projects > Files > User wants to add a .gitignore file', :js, feature_category: :projects do - include Spec::Support::Helpers::Features::SourceEditorSpecHelpers + include Features::SourceEditorSpecHelpers before do project = create(:project, :repository) diff --git a/spec/features/projects/files/gitlab_ci_yml_dropdown_spec.rb b/spec/features/projects/files/gitlab_ci_yml_dropdown_spec.rb index 67678a937e5..f2d657b3513 100644 --- a/spec/features/projects/files/gitlab_ci_yml_dropdown_spec.rb +++ b/spec/features/projects/files/gitlab_ci_yml_dropdown_spec.rb @@ -3,7 +3,7 @@ require 'spec_helper' RSpec.describe 
'Projects > Files > User wants to add a .gitlab-ci.yml file', :js, feature_category: :projects do - include Spec::Support::Helpers::Features::SourceEditorSpecHelpers + include Features::SourceEditorSpecHelpers let(:params) { {} } let(:filename) { '.gitlab-ci.yml' } diff --git a/spec/features/projects/files/project_owner_sees_link_to_create_license_file_in_empty_project_spec.rb b/spec/features/projects/files/project_owner_sees_link_to_create_license_file_in_empty_project_spec.rb index 8d64151e680..cfa55eba188 100644 --- a/spec/features/projects/files/project_owner_sees_link_to_create_license_file_in_empty_project_spec.rb +++ b/spec/features/projects/files/project_owner_sees_link_to_create_license_file_in_empty_project_spec.rb @@ -4,7 +4,7 @@ require 'spec_helper' RSpec.describe 'Projects > Files > Project owner sees a link to create a license file in empty project', :js, feature_category: :projects do - include WebIdeSpecHelpers + include Features::WebIdeSpecHelpers let(:project) { create(:project_empty_repo) } let(:project_maintainer) { project.first_owner } diff --git a/spec/features/projects/files/user_creates_files_spec.rb b/spec/features/projects/files/user_creates_files_spec.rb index 97ccb45dfc6..42aceef256a 100644 --- a/spec/features/projects/files/user_creates_files_spec.rb +++ b/spec/features/projects/files/user_creates_files_spec.rb @@ -3,8 +3,8 @@ require 'spec_helper' RSpec.describe 'Projects > Files > User creates files', :js, feature_category: :projects do - include Spec::Support::Helpers::Features::SourceEditorSpecHelpers - include BlobSpecHelpers + include Features::SourceEditorSpecHelpers + include Features::BlobSpecHelpers let(:fork_message) do "You're not allowed to make changes to this project directly. 
"\ diff --git a/spec/features/projects/files/user_edits_files_spec.rb b/spec/features/projects/files/user_edits_files_spec.rb index 5a61aa146a2..779257b2e2b 100644 --- a/spec/features/projects/files/user_edits_files_spec.rb +++ b/spec/features/projects/files/user_edits_files_spec.rb @@ -3,9 +3,9 @@ require 'spec_helper' RSpec.describe 'Projects > Files > User edits files', :js, feature_category: :projects do - include Spec::Support::Helpers::Features::SourceEditorSpecHelpers + include Features::SourceEditorSpecHelpers include ProjectForksHelper - include BlobSpecHelpers + include Features::BlobSpecHelpers let(:project) { create(:project, :repository, name: 'Shop') } let(:project2) { create(:project, :repository, name: 'Another Project', path: 'another-project') } diff --git a/spec/features/projects/fork_spec.rb b/spec/features/projects/fork_spec.rb index 8b484141a95..39cdc8faa85 100644 --- a/spec/features/projects/fork_spec.rb +++ b/spec/features/projects/fork_spec.rb @@ -119,16 +119,6 @@ RSpec.describe 'Project fork', feature_category: :projects do end end - shared_examples "increments the fork counter on the source project's page" do - specify :sidekiq_might_not_need_inline do - create_forks - - visit project_path(project) - - expect(page).to have_css('.fork-count', text: 2) - end - end - it_behaves_like 'fork button on project page' it_behaves_like 'create fork page', 'Fork project' @@ -185,25 +175,17 @@ RSpec.describe 'Project fork', feature_category: :projects do end end - context 'with cache_home_panel feature flag' do + context 'when user is a maintainer in multiple groups' do before do create(:group_member, :maintainer, user: user, group: group2) end - context 'when caching is enabled' do - before do - stub_feature_flags(cache_home_panel: project) - end - - it_behaves_like "increments the fork counter on the source project's page" - end + it "increments the fork counter on the source project's page", :sidekiq_might_not_need_inline do + create_forks - 
context 'when caching is disabled' do - before do - stub_feature_flags(cache_home_panel: false) - end + visit project_path(project) - it_behaves_like "increments the fork counter on the source project's page" + expect(page).to have_css('.fork-count', text: 2) end end end diff --git a/spec/features/projects/import_export/export_file_spec.rb b/spec/features/projects/import_export/export_file_spec.rb index 6630956f835..3c39d8745a4 100644 --- a/spec/features/projects/import_export/export_file_spec.rb +++ b/spec/features/projects/import_export/export_file_spec.rb @@ -40,59 +40,28 @@ RSpec.describe 'Import/Export - project export integration test', :js, feature_c sign_in(user) end - context "with streaming serializer" do - before do - stub_feature_flags(project_export_as_ndjson: false) - end - - it 'exports a project successfully', :sidekiq_inline do - export_project_and_download_file(page, project) - - in_directory_with_expanded_export(project) do |exit_status, tmpdir| - expect(exit_status).to eq(0) + it 'exports a project successfully', :sidekiq_inline do + export_project_and_download_file(page, project) - project_json_path = File.join(tmpdir, 'project.json') - expect(File).to exist(project_json_path) + in_directory_with_expanded_export(project) do |exit_status, tmpdir| + expect(exit_status).to eq(0) - project_hash = Gitlab::Json.parse(File.read(project_json_path)) - - sensitive_words.each do |sensitive_word| - found = find_sensitive_attributes(sensitive_word, project_hash) + project_json_path = File.join(tmpdir, 'tree', 'project.json') + expect(File).to exist(project_json_path) - expect(found).to be_nil, failure_message(found.try(:key_found), found.try(:parent), sensitive_word) + relations = [] + relations << Gitlab::Json.parse(File.read(project_json_path)) + Dir.glob(File.join(tmpdir, 'tree/project', '*.ndjson')) do |rb_filename| + File.foreach(rb_filename) do |line| + relations << Gitlab::Json.parse(line) end end - end - end - context "with ndjson" do - before do - 
stub_feature_flags(project_export_as_ndjson: true) - end - - it 'exports a project successfully', :sidekiq_inline do - export_project_and_download_file(page, project) - - in_directory_with_expanded_export(project) do |exit_status, tmpdir| - expect(exit_status).to eq(0) - - project_json_path = File.join(tmpdir, 'tree', 'project.json') - expect(File).to exist(project_json_path) - - relations = [] - relations << Gitlab::Json.parse(File.read(project_json_path)) - Dir.glob(File.join(tmpdir, 'tree/project', '*.ndjson')) do |rb_filename| - File.foreach(rb_filename) do |line| - relations << Gitlab::Json.parse(line) - end - end - - relations.each do |relation_hash| - sensitive_words.each do |sensitive_word| - found = find_sensitive_attributes(sensitive_word, relation_hash) + relations.each do |relation_hash| + sensitive_words.each do |sensitive_word| + found = find_sensitive_attributes(sensitive_word, relation_hash) - expect(found).to be_nil, failure_message(found.try(:key_found), found.try(:parent), sensitive_word) - end + expect(found).to be_nil, failure_message(found.try(:key_found), found.try(:parent), sensitive_word) end end end diff --git a/spec/features/projects/import_export/test_project_export.tar.gz b/spec/features/projects/import_export/test_project_export.tar.gz index b93da033aea..d34d72920dd 100644 Binary files a/spec/features/projects/import_export/test_project_export.tar.gz and b/spec/features/projects/import_export/test_project_export.tar.gz differ diff --git a/spec/features/projects/integrations/apple_app_store_spec.rb b/spec/features/projects/integrations/apple_app_store_spec.rb index b6dc6557e20..a5ae7df4a89 100644 --- a/spec/features/projects/integrations/apple_app_store_spec.rb +++ b/spec/features/projects/integrations/apple_app_store_spec.rb @@ -14,9 +14,9 @@ RSpec.describe 'Upload Dropzone Field', feature_category: :integrations do find("input[name='service[dropzone_file_name]']", visible: false).set(Rails.root.join('spec/fixtures/auth_key.p8')) - 
expect(find("input[name='service[app_store_private_key]']", - visible: false).value).to eq(File.read(Rails.root.join('spec/fixtures/auth_key.p8'))) - expect(find("input[name='service[app_store_private_key_file_name]']", visible: false).value).to eq('auth_key.p8') + expect(page).to have_field("service[app_store_private_key]", type: :hidden, + with: File.read(Rails.root.join('spec/fixtures/auth_key.p8'))) + expect(page).to have_field("service[app_store_private_key_file_name]", type: :hidden, with: 'auth_key.p8') expect(page).not_to have_content('Drag your Private Key file here or click to upload.') expect(page).to have_content('auth_key.p8') diff --git a/spec/features/projects/integrations/google_play_spec.rb b/spec/features/projects/integrations/google_play_spec.rb index 5db4bc8809f..db867fc40d7 100644 --- a/spec/features/projects/integrations/google_play_spec.rb +++ b/spec/features/projects/integrations/google_play_spec.rb @@ -14,10 +14,9 @@ RSpec.describe 'Upload Dropzone Field', feature_category: :integrations do find("input[name='service[dropzone_file_name]']", visible: false).set(Rails.root.join('spec/fixtures/service_account.json')) - expect(find("input[name='service[service_account_key]']", - visible: false).value).to eq(File.read(Rails.root.join('spec/fixtures/service_account.json'))) - expect(find("input[name='service[service_account_key_file_name]']", - visible: false).value).to eq('service_account.json') + expect(page).to have_field("service[service_account_key]", type: :hidden, + with: File.read(Rails.root.join('spec/fixtures/service_account.json'))) + expect(page).to have_field("service[service_account_key_file_name]", type: :hidden, with: 'service_account.json') expect(page).not_to have_content('Drag your key file here or click to upload.') expect(page).to have_content('service_account.json') diff --git a/spec/features/projects/issues/viewing_relocated_issues_spec.rb b/spec/features/projects/issues/viewing_relocated_issues_spec.rb index 
abd36b3ceef..f86f7bfacbd 100644 --- a/spec/features/projects/issues/viewing_relocated_issues_spec.rb +++ b/spec/features/projects/issues/viewing_relocated_issues_spec.rb @@ -3,7 +3,7 @@ require 'spec_helper' RSpec.describe 'issues canonical link', feature_category: :team_planning do - include Spec::Support::Helpers::Features::CanonicalLinkHelpers + include Features::CanonicalLinkHelpers let_it_be(:original_project) { create(:project, :public) } let_it_be(:original_issue) { create(:issue, project: original_project) } diff --git a/spec/features/projects/jobs_spec.rb b/spec/features/projects/jobs_spec.rb index 07b8f8339eb..5f0f1255001 100644 --- a/spec/features/projects/jobs_spec.rb +++ b/spec/features/projects/jobs_spec.rb @@ -269,13 +269,15 @@ RSpec.describe 'Jobs', :clean_gitlab_redis_shared_state, feature_category: :proj let(:resource_group) { create(:ci_resource_group, project: project) } before do + resource_group.assign_resource_to(create(:ci_build)) + visit project_job_path(project, job) wait_for_requests end it 'shows correct UI components' do expect(page).to have_content("This job is waiting for resource: #{resource_group.key}") - expect(page).to have_link("Cancel this job") + expect(page).to have_link("View job currently using resource") end end diff --git a/spec/features/projects/members/anonymous_user_sees_members_spec.rb b/spec/features/projects/members/anonymous_user_sees_members_spec.rb index 6b92581d704..0b8661cce82 100644 --- a/spec/features/projects/members/anonymous_user_sees_members_spec.rb +++ b/spec/features/projects/members/anonymous_user_sees_members_spec.rb @@ -3,7 +3,7 @@ require 'spec_helper' RSpec.describe 'Projects > Members > Anonymous user sees members' do - include Spec::Support::Helpers::Features::MembersHelpers + include Features::MembersHelpers let(:user) { create(:user) } let(:group) { create(:group, :public) } diff --git a/spec/features/projects/members/group_members_spec.rb b/spec/features/projects/members/group_members_spec.rb 
index 416b96ab668..c0257446a37 100644 --- a/spec/features/projects/members/group_members_spec.rb +++ b/spec/features/projects/members/group_members_spec.rb @@ -3,7 +3,7 @@ require 'spec_helper' RSpec.describe 'Projects members', :js, feature_category: :subgroups do - include Spec::Support::Helpers::Features::MembersHelpers + include Features::MembersHelpers let(:user) { create(:user) } let(:developer) { create(:user) } diff --git a/spec/features/projects/members/groups_with_access_list_spec.rb b/spec/features/projects/members/groups_with_access_list_spec.rb index 51acba246c5..8238f95fd47 100644 --- a/spec/features/projects/members/groups_with_access_list_spec.rb +++ b/spec/features/projects/members/groups_with_access_list_spec.rb @@ -3,9 +3,9 @@ require 'spec_helper' RSpec.describe 'Projects > Members > Groups with access list', :js, feature_category: :subgroups do - include Spec::Support::Helpers::Features::MembersHelpers + include Features::MembersHelpers include Spec::Support::Helpers::ModalHelpers - include Spec::Support::Helpers::Features::InviteMembersModalHelper + include Features::InviteMembersModalHelpers let_it_be(:user) { create(:user) } let_it_be(:group) { create(:group, :public) } diff --git a/spec/features/projects/members/manage_groups_spec.rb b/spec/features/projects/members/manage_groups_spec.rb index b78bfacf171..5efb5abefc6 100644 --- a/spec/features/projects/members/manage_groups_spec.rb +++ b/spec/features/projects/members/manage_groups_spec.rb @@ -4,8 +4,8 @@ require 'spec_helper' RSpec.describe 'Project > Members > Manage groups', :js, feature_category: :subgroups do include ActionView::Helpers::DateHelper - include Spec::Support::Helpers::Features::MembersHelpers - include Spec::Support::Helpers::Features::InviteMembersModalHelper + include Features::MembersHelpers + include Features::InviteMembersModalHelpers let_it_be(:maintainer) { create(:user) } diff --git a/spec/features/projects/members/manage_members_spec.rb 
b/spec/features/projects/members/manage_members_spec.rb index 615ef1b03dd..5ae6eb83b6b 100644 --- a/spec/features/projects/members/manage_members_spec.rb +++ b/spec/features/projects/members/manage_members_spec.rb @@ -3,8 +3,8 @@ require 'spec_helper' RSpec.describe 'Projects > Members > Manage members', :js, feature_category: :onboarding do - include Spec::Support::Helpers::Features::MembersHelpers - include Spec::Support::Helpers::Features::InviteMembersModalHelper + include Features::MembersHelpers + include Features::InviteMembersModalHelpers include Spec::Support::Helpers::ModalHelpers let_it_be(:user1) { create(:user, name: 'John Doe') } diff --git a/spec/features/projects/members/master_adds_member_with_expiration_date_spec.rb b/spec/features/projects/members/master_adds_member_with_expiration_date_spec.rb index 31c8237aacc..be778def833 100644 --- a/spec/features/projects/members/master_adds_member_with_expiration_date_spec.rb +++ b/spec/features/projects/members/master_adds_member_with_expiration_date_spec.rb @@ -4,8 +4,8 @@ require 'spec_helper' RSpec.describe 'Projects > Members > Maintainer adds member with expiration date', :js, feature_category: :subgroups do include ActiveSupport::Testing::TimeHelpers - include Spec::Support::Helpers::Features::MembersHelpers - include Spec::Support::Helpers::Features::InviteMembersModalHelper + include Features::MembersHelpers + include Features::InviteMembersModalHelpers let_it_be(:maintainer) { create(:user) } let_it_be(:project) { create(:project, :with_namespace_settings) } diff --git a/spec/features/projects/members/member_leaves_project_spec.rb b/spec/features/projects/members/member_leaves_project_spec.rb index 2632bc2f5bd..91e30b3396e 100644 --- a/spec/features/projects/members/member_leaves_project_spec.rb +++ b/spec/features/projects/members/member_leaves_project_spec.rb @@ -3,7 +3,7 @@ require 'spec_helper' RSpec.describe 'Projects > Members > Member leaves project', feature_category: :subgroups do - 
include Spec::Support::Helpers::Features::MembersHelpers + include Features::MembersHelpers include Spec::Support::Helpers::ModalHelpers let(:user) { create(:user) } diff --git a/spec/features/projects/members/sorting_spec.rb b/spec/features/projects/members/sorting_spec.rb index 78fad9b0b55..85bf381404c 100644 --- a/spec/features/projects/members/sorting_spec.rb +++ b/spec/features/projects/members/sorting_spec.rb @@ -3,7 +3,7 @@ require 'spec_helper' RSpec.describe 'Projects > Members > Sorting', :js, feature_category: :subgroups do - include Spec::Support::Helpers::Features::MembersHelpers + include Features::MembersHelpers let(:maintainer) { create(:user, name: 'John Doe', created_at: 5.days.ago, last_activity_on: Date.today) } let(:developer) { create(:user, name: 'Mary Jane', created_at: 1.day.ago, last_sign_in_at: 5.days.ago, last_activity_on: Date.today - 5) } diff --git a/spec/features/projects/members/tabs_spec.rb b/spec/features/projects/members/tabs_spec.rb index 232420224fc..9ee06edc0c1 100644 --- a/spec/features/projects/members/tabs_spec.rb +++ b/spec/features/projects/members/tabs_spec.rb @@ -3,7 +3,7 @@ require 'spec_helper' RSpec.describe 'Projects > Members > Tabs', :js, feature_category: :subgroups do - include Spec::Support::Helpers::Features::MembersHelpers + include Features::MembersHelpers using RSpec::Parameterized::TableSyntax let_it_be(:user) { create(:user) } diff --git a/spec/features/projects/navbar_spec.rb b/spec/features/projects/navbar_spec.rb index 03ad5f9a292..31f4e9dcf95 100644 --- a/spec/features/projects/navbar_spec.rb +++ b/spec/features/projects/navbar_spec.rb @@ -19,6 +19,7 @@ RSpec.describe 'Project navbar', :with_license, feature_category: :projects do stub_config(registry: { enabled: false }) stub_feature_flags(harbor_registry_integration: false) + stub_feature_flags(ml_experiment_tracking: false) insert_package_nav(_('Deployments')) insert_infrastructure_registry_nav insert_infrastructure_google_cloud_nav @@ -91,7 +92,19 
@@ RSpec.describe 'Project navbar', :with_license, feature_category: :projects do before do stub_feature_flags(harbor_registry_integration: true) - insert_harbor_registry_nav(_('Infrastructure Registry')) + insert_harbor_registry_nav(_('Terraform modules')) + + visit project_path(project) + end + + it_behaves_like 'verified navigation bar' + end + + context 'when models experiments is available' do + before do + stub_feature_flags(ml_experiment_tracking: true) + + insert_model_experiments_nav(_('Terraform modules')) visit project_path(project) end diff --git a/spec/features/projects/network_graph_spec.rb b/spec/features/projects/network_graph_spec.rb index a29c9f58195..af976b8ffb0 100644 --- a/spec/features/projects/network_graph_spec.rb +++ b/spec/features/projects/network_graph_spec.rb @@ -6,10 +6,13 @@ RSpec.describe 'Project Network Graph', :js, feature_category: :projects do let(:user) { create :user } let(:project) { create :project, :repository, namespace: user.namespace } let(:ref_selector) { '.ref-selector' } + let(:ref_with_hash) { 'ref-#-hash' } before do sign_in(user) + project.repository.create_branch(ref_with_hash, 'master') + # Stub Graph max_size to speed up test (10 commits vs. 
650) allow(Network::Graph).to receive(:max_count).and_return(10) end @@ -52,6 +55,12 @@ RSpec.describe 'Project Network Graph', :js, feature_category: :projects do end end + it 'switches ref to branch containing a hash' do + switch_ref_to(ref_with_hash) + + expect(page).to have_selector ref_selector, text: ref_with_hash + end + it 'switches ref to tag' do switch_ref_to('v1.0.0') diff --git a/spec/features/projects/new_project_spec.rb b/spec/features/projects/new_project_spec.rb index 439ae4275ae..e9e8c0e2386 100644 --- a/spec/features/projects/new_project_spec.rb +++ b/spec/features/projects/new_project_spec.rb @@ -3,7 +3,7 @@ require 'spec_helper' RSpec.describe 'New project', :js, feature_category: :projects do - include Spec::Support::Helpers::Features::TopNavSpecHelpers + include Features::TopNavSpecHelpers context 'as a user' do let_it_be(:user) { create(:user) } @@ -594,7 +594,7 @@ RSpec.describe 'New project', :js, feature_category: :projects do end context 'for a new top-level project' do - it_behaves_like 'a dashboard page with sidebar', :new_project_path, :projects + it_behaves_like 'a "Your work" page with sidebar and breadcrumbs', :new_project_path, :projects end context 'for a new group project' do diff --git a/spec/features/projects/pipeline_schedules_spec.rb b/spec/features/projects/pipeline_schedules_spec.rb index acb2af07e50..81e003d7d1c 100644 --- a/spec/features/projects/pipeline_schedules_spec.rb +++ b/spec/features/projects/pipeline_schedules_spec.rb @@ -7,295 +7,394 @@ RSpec.describe 'Pipeline Schedules', :js, feature_category: :projects do let!(:project) { create(:project, :repository) } let!(:pipeline_schedule) { create(:ci_pipeline_schedule, :nightly, project: project) } - let!(:pipeline) { create(:ci_pipeline, pipeline_schedule: pipeline_schedule) } + let!(:pipeline) { create(:ci_pipeline, pipeline_schedule: pipeline_schedule, project: project) } let(:scope) { nil } let!(:user) { create(:user) } + let!(:maintainer) { create(:user) } - 
before do - stub_feature_flags(pipeline_schedules_vue: false) - end - - context 'logged in as the pipeline schedule owner' do + context 'with pipeline_schedules_vue feature flag turned off' do before do - project.add_developer(user) - pipeline_schedule.update!(owner: user) - gitlab_sign_in(user) + stub_feature_flags(pipeline_schedules_vue: false) end - describe 'GET /projects/pipeline_schedules' do + context 'logged in as the pipeline schedule owner' do before do - visit_pipelines_schedules + project.add_developer(user) + pipeline_schedule.update!(owner: user) + gitlab_sign_in(user) end - it 'edits the pipeline' do - page.within('.pipeline-schedule-table-row') do - click_link 'Edit' + describe 'GET /projects/pipeline_schedules' do + before do + visit_pipelines_schedules end - expect(page).to have_content('Edit Pipeline Schedule') - end - end + it 'edits the pipeline' do + page.within('.pipeline-schedule-table-row') do + click_link 'Edit' + end - describe 'PATCH /projects/pipelines_schedules/:id/edit' do - before do - edit_pipeline_schedule + expect(page).to have_content('Edit Pipeline Schedule') + end end - it 'displays existing properties' do - description = find_field('schedule_description').value - expect(description).to eq('pipeline schedule') - expect(page).to have_button('master') - expect(page).to have_button('Select timezone') - end + describe 'PATCH /projects/pipelines_schedules/:id/edit' do + before do + edit_pipeline_schedule + end - it 'edits the scheduled pipeline' do - fill_in 'schedule_description', with: 'my brand new description' + it 'displays existing properties' do + description = find_field('schedule_description').value + expect(description).to eq('pipeline schedule') + expect(page).to have_button('master') + expect(page).to have_button('Select timezone') + end - save_pipeline_schedule + it 'edits the scheduled pipeline' do + fill_in 'schedule_description', with: 'my brand new description' - expect(page).to have_content('my brand new 
description') - end + save_pipeline_schedule - context 'when ref is nil' do - before do - pipeline_schedule.update_attribute(:ref, nil) - edit_pipeline_schedule + expect(page).to have_content('my brand new description') end - it 'shows the pipeline schedule with default ref' do - page.within('[data-testid="schedule-target-ref"]') do - expect(first('.gl-button-text').text).to eq('master') + context 'when ref is nil' do + before do + pipeline_schedule.update_attribute(:ref, nil) + edit_pipeline_schedule end - end - end - context 'when ref is empty' do - before do - pipeline_schedule.update_attribute(:ref, '') - edit_pipeline_schedule + it 'shows the pipeline schedule with default ref' do + page.within('[data-testid="schedule-target-ref"]') do + expect(first('.gl-button-text').text).to eq('master') + end + end end - it 'shows the pipeline schedule with default ref' do - page.within('[data-testid="schedule-target-ref"]') do - expect(first('.gl-button-text').text).to eq('master') + context 'when ref is empty' do + before do + pipeline_schedule.update_attribute(:ref, '') + edit_pipeline_schedule + end + + it 'shows the pipeline schedule with default ref' do + page.within('[data-testid="schedule-target-ref"]') do + expect(first('.gl-button-text').text).to eq('master') + end end end end end - end - context 'logged in as a project maintainer' do - before do - project.add_maintainer(user) - gitlab_sign_in(user) - end - - describe 'GET /projects/pipeline_schedules' do + context 'logged in as a project maintainer' do before do - visit_pipelines_schedules + project.add_maintainer(user) + gitlab_sign_in(user) end - describe 'The view' do - it 'displays the required information description' do - page.within('.pipeline-schedule-table-row') do - expect(page).to have_content('pipeline schedule') - expect(find("[data-testid='next-run-cell'] time")['title']) - .to include(pipeline_schedule.real_next_run.strftime('%b %-d, %Y')) - expect(page).to have_link('master') - expect(page).to 
have_link("##{pipeline.id}") - end + describe 'GET /projects/pipeline_schedules' do + before do + visit_pipelines_schedules end - it 'creates a new scheduled pipeline' do - click_link 'New schedule' + describe 'The view' do + it 'displays the required information description' do + page.within('.pipeline-schedule-table-row') do + expect(page).to have_content('pipeline schedule') + expect(find("[data-testid='next-run-cell'] time")['title']) + .to include(pipeline_schedule.real_next_run.strftime('%b %-d, %Y')) + expect(page).to have_link('master') + expect(page).to have_link("##{pipeline.id}") + end + end - expect(page).to have_content('Schedule a new pipeline') - end + it 'creates a new scheduled pipeline' do + click_link 'New schedule' + + expect(page).to have_content('Schedule a new pipeline') + end - it 'changes ownership of the pipeline' do - click_button 'Take ownership' + it 'changes ownership of the pipeline' do + click_button 'Take ownership' - page.within('#pipeline-take-ownership-modal') do - click_link 'Take ownership' + page.within('#pipeline-take-ownership-modal') do + click_link 'Take ownership' + end + + page.within('.pipeline-schedule-table-row') do + expect(page).not_to have_content('No owner') + expect(page).to have_link('Sidney Jones') + end end - page.within('.pipeline-schedule-table-row') do - expect(page).not_to have_content('No owner') - expect(page).to have_link('Sidney Jones') + it 'deletes the pipeline' do + click_link 'Delete' + + accept_gl_confirm(button_text: 'Delete pipeline schedule') + + expect(page).not_to have_css(".pipeline-schedule-table-row") end end - it 'deletes the pipeline' do - click_link 'Delete' + context 'when ref is nil' do + before do + pipeline_schedule.update_attribute(:ref, nil) + visit_pipelines_schedules + end + + it 'shows a list of the pipeline schedules with empty ref column' do + expect(first('.branch-name-cell').text).to eq('') + end + end - accept_gl_confirm(button_text: 'Delete pipeline schedule') + context 
'when ref is empty' do + before do + pipeline_schedule.update_attribute(:ref, '') + visit_pipelines_schedules + end - expect(page).not_to have_css(".pipeline-schedule-table-row") + it 'shows a list of the pipeline schedules with empty ref column' do + expect(first('.branch-name-cell').text).to eq('') + end end end - context 'when ref is nil' do + describe 'POST /projects/pipeline_schedules/new' do before do - pipeline_schedule.update_attribute(:ref, nil) - visit_pipelines_schedules + visit_new_pipeline_schedule + end + + it 'sets defaults for timezone and target branch' do + expect(page).to have_button('master') + expect(page).to have_button('Select timezone') end - it 'shows a list of the pipeline schedules with empty ref column' do - expect(first('.branch-name-cell').text).to eq('') + it 'creates a new scheduled pipeline' do + fill_in_schedule_form + save_pipeline_schedule + + expect(page).to have_content('my fancy description') + end + + it 'prevents an invalid form from being submitted' do + save_pipeline_schedule + + expect(page).to have_content('This field is required') end end - context 'when ref is empty' do + context 'when user creates a new pipeline schedule with variables' do before do - pipeline_schedule.update_attribute(:ref, '') visit_pipelines_schedules + click_link 'New schedule' + fill_in_schedule_form + all('[name="schedule[variables_attributes][][key]"]')[0].set('AAA') + all('[name="schedule[variables_attributes][][secret_value]"]')[0].set('AAA123') + all('[name="schedule[variables_attributes][][key]"]')[1].set('BBB') + all('[name="schedule[variables_attributes][][secret_value]"]')[1].set('BBB123') + save_pipeline_schedule end - it 'shows a list of the pipeline schedules with empty ref column' do - expect(first('.branch-name-cell').text).to eq('') + it 'user sees the new variable in edit window', quarantine: 'https://gitlab.com/gitlab-org/gitlab/-/issues/397040' do + find(".content-list .pipeline-schedule-table-row:nth-child(1) .btn-group 
a[title='Edit']").click + page.within('.ci-variable-list') do + expect(find(".ci-variable-row:nth-child(1) .js-ci-variable-input-key").value).to eq('AAA') + expect(find(".ci-variable-row:nth-child(1) .js-ci-variable-input-value", visible: false).value).to eq('AAA123') + expect(find(".ci-variable-row:nth-child(2) .js-ci-variable-input-key").value).to eq('BBB') + expect(find(".ci-variable-row:nth-child(2) .js-ci-variable-input-value", visible: false).value).to eq('BBB123') + end end end - end - describe 'POST /projects/pipeline_schedules/new' do - before do - visit_new_pipeline_schedule - end + context 'when user edits a variable of a pipeline schedule' do + before do + create(:ci_pipeline_schedule, project: project, owner: user).tap do |pipeline_schedule| + create(:ci_pipeline_schedule_variable, key: 'AAA', value: 'AAA123', pipeline_schedule: pipeline_schedule) + end - it 'sets defaults for timezone and target branch' do - expect(page).to have_button('master') - expect(page).to have_button('Select timezone') + visit_pipelines_schedules + find(".content-list .pipeline-schedule-table-row:nth-child(1) .btn-group a[title='Edit']").click + find('.js-ci-variable-list-section .js-secret-value-reveal-button').click + first('.js-ci-variable-input-key').set('foo') + first('.js-ci-variable-input-value').set('bar') + click_button 'Save pipeline schedule' + end + + it 'user sees the updated variable in edit window' do + find(".content-list .pipeline-schedule-table-row:nth-child(1) .btn-group a[title='Edit']").click + page.within('.ci-variable-list') do + expect(find(".ci-variable-row:nth-child(1) .js-ci-variable-input-key").value).to eq('foo') + expect(find(".ci-variable-row:nth-child(1) .js-ci-variable-input-value", visible: false).value).to eq('bar') + end + end end - it 'creates a new scheduled pipeline' do - fill_in_schedule_form - save_pipeline_schedule + context 'when user removes a variable of a pipeline schedule' do + before do + create(:ci_pipeline_schedule, project: 
project, owner: user).tap do |pipeline_schedule| + create(:ci_pipeline_schedule_variable, key: 'AAA', value: 'AAA123', pipeline_schedule: pipeline_schedule) + end - expect(page).to have_content('my fancy description') + visit_pipelines_schedules + find(".content-list .pipeline-schedule-table-row:nth-child(1) .btn-group a[title='Edit']").click + find('.ci-variable-list .ci-variable-row-remove-button').click + click_button 'Save pipeline schedule' + end + + it 'user does not see the removed variable in edit window' do + find(".content-list .pipeline-schedule-table-row:nth-child(1) .btn-group a[title='Edit']").click + page.within('.ci-variable-list') do + expect(find(".ci-variable-row:nth-child(1) .js-ci-variable-input-key").value).to eq('') + expect(find(".ci-variable-row:nth-child(1) .js-ci-variable-input-value", visible: false).value).to eq('') + end + end end - it 'prevents an invalid form from being submitted' do - save_pipeline_schedule + context 'when active is true and next_run_at is NULL' do + before do + create(:ci_pipeline_schedule, project: project, owner: user).tap do |pipeline_schedule| + pipeline_schedule.update_attribute(:next_run_at, nil) # Consequently next_run_at will be nil + end + end + + it 'user edit and recover the problematic pipeline schedule' do + visit_pipelines_schedules + find(".content-list .pipeline-schedule-table-row:nth-child(1) .btn-group a[title='Edit']").click + fill_in 'schedule_cron', with: '* 1 2 3 4' + click_button 'Save pipeline schedule' - expect(page).to have_content('This field is required') + page.within('.pipeline-schedule-table-row:nth-child(1)') do + expect(page).to have_css("[data-testid='next-run-cell'] time") + end + end end end - context 'when user creates a new pipeline schedule with variables' do + context 'logged in as non-member' do before do - visit_pipelines_schedules - click_link 'New schedule' - fill_in_schedule_form - all('[name="schedule[variables_attributes][][key]"]')[0].set('AAA') - 
all('[name="schedule[variables_attributes][][secret_value]"]')[0].set('AAA123') - all('[name="schedule[variables_attributes][][key]"]')[1].set('BBB') - all('[name="schedule[variables_attributes][][secret_value]"]')[1].set('BBB123') - save_pipeline_schedule + gitlab_sign_in(user) end - it 'user sees the new variable in edit window', quarantine: 'https://gitlab.com/gitlab-org/gitlab/-/issues/397040' do - find(".content-list .pipeline-schedule-table-row:nth-child(1) .btn-group a[title='Edit']").click - page.within('.ci-variable-list') do - expect(find(".ci-variable-row:nth-child(1) .js-ci-variable-input-key").value).to eq('AAA') - expect(find(".ci-variable-row:nth-child(1) .js-ci-variable-input-value", visible: false).value).to eq('AAA123') - expect(find(".ci-variable-row:nth-child(2) .js-ci-variable-input-key").value).to eq('BBB') - expect(find(".ci-variable-row:nth-child(2) .js-ci-variable-input-value", visible: false).value).to eq('BBB123') + describe 'GET /projects/pipeline_schedules' do + before do + visit_pipelines_schedules + end + + describe 'The view' do + it 'does not show create schedule button' do + expect(page).not_to have_link('New schedule') + end end end end - context 'when user edits a variable of a pipeline schedule' do - before do - create(:ci_pipeline_schedule, project: project, owner: user).tap do |pipeline_schedule| - create(:ci_pipeline_schedule_variable, key: 'AAA', value: 'AAA123', pipeline_schedule: pipeline_schedule) + context 'not logged in' do + describe 'GET /projects/pipeline_schedules' do + before do + visit_pipelines_schedules end - visit_pipelines_schedules - find(".content-list .pipeline-schedule-table-row:nth-child(1) .btn-group a[title='Edit']").click - - find('.js-ci-variable-list-section .js-secret-value-reveal-button').click - first('.js-ci-variable-input-key').set('foo') - first('.js-ci-variable-input-value').set('bar') - click_button 'Save pipeline schedule' - end - - it 'user sees the updated variable in edit window' do - 
find(".content-list .pipeline-schedule-table-row:nth-child(1) .btn-group a[title='Edit']").click - page.within('.ci-variable-list') do - expect(find(".ci-variable-row:nth-child(1) .js-ci-variable-input-key").value).to eq('foo') - expect(find(".ci-variable-row:nth-child(1) .js-ci-variable-input-value", visible: false).value).to eq('bar') + describe 'The view' do + it 'does not show create schedule button' do + expect(page).not_to have_link('New schedule') + end end end end + end - context 'when user removes a variable of a pipeline schedule' do + context 'with pipeline_schedules_vue feature flag turned on' do + context 'logged in as a project maintainer' do before do - create(:ci_pipeline_schedule, project: project, owner: user).tap do |pipeline_schedule| - create(:ci_pipeline_schedule_variable, key: 'AAA', value: 'AAA123', pipeline_schedule: pipeline_schedule) - end - - visit_pipelines_schedules - find(".content-list .pipeline-schedule-table-row:nth-child(1) .btn-group a[title='Edit']").click - find('.ci-variable-list .ci-variable-row-remove-button').click - click_button 'Save pipeline schedule' + project.add_maintainer(maintainer) + pipeline_schedule.update!(owner: user) + gitlab_sign_in(maintainer) end - it 'user does not see the removed variable in edit window' do - find(".content-list .pipeline-schedule-table-row:nth-child(1) .btn-group a[title='Edit']").click - page.within('.ci-variable-list') do - expect(find(".ci-variable-row:nth-child(1) .js-ci-variable-input-key").value).to eq('') - expect(find(".ci-variable-row:nth-child(1) .js-ci-variable-input-value", visible: false).value).to eq('') - end - end - end + describe 'GET /projects/pipeline_schedules' do + before do + visit_pipelines_schedules - context 'when active is true and next_run_at is NULL' do - before do - create(:ci_pipeline_schedule, project: project, owner: user).tap do |pipeline_schedule| - pipeline_schedule.update_attribute(:next_run_at, nil) # Consequently next_run_at will be nil + 
wait_for_requests end - end - it 'user edit and recover the problematic pipeline schedule' do - visit_pipelines_schedules - find(".content-list .pipeline-schedule-table-row:nth-child(1) .btn-group a[title='Edit']").click - fill_in 'schedule_cron', with: '* 1 2 3 4' - click_button 'Save pipeline schedule' + describe 'The view' do + it 'displays the required information description' do + page.within('[data-testid="pipeline-schedule-table-row"]') do + expect(page).to have_content('pipeline schedule') + expect(find("[data-testid='next-run-cell'] time")['title']) + .to include(pipeline_schedule.real_next_run.strftime('%b %-d, %Y')) + expect(page).to have_link('master') + expect(find("[data-testid='last-pipeline-status'] a")['href']).to include(pipeline.id.to_s) + end + end + + it 'changes ownership of the pipeline' do + click_button 'Take ownership' + + page.within('#pipeline-take-ownership-modal') do + click_button 'Take ownership' + + wait_for_requests + end + + page.within('[data-testid="pipeline-schedule-table-row"]') do + expect(page).not_to have_content('No owner') + expect(page).to have_link('Sidney Jones') + end + end - page.within('.pipeline-schedule-table-row:nth-child(1)') do - expect(page).to have_css("[data-testid='next-run-cell'] time") + it 'runs the pipeline' do + click_button 'Run pipeline schedule' + + wait_for_requests + + expect(page).to have_content("Successfully scheduled a pipeline to run. 
Go to the Pipelines page for details.") + end + + it 'deletes the pipeline' do + click_button 'Delete pipeline schedule' + + accept_gl_confirm(button_text: 'Delete pipeline schedule') + + expect(page).not_to have_css('[data-testid="pipeline-schedule-table-row"]') + end end end end - end - - context 'logged in as non-member' do - before do - gitlab_sign_in(user) - end - describe 'GET /projects/pipeline_schedules' do + context 'logged in as non-member' do before do - visit_pipelines_schedules + gitlab_sign_in(user) end - describe 'The view' do - it 'does not show create schedule button' do - expect(page).not_to have_link('New schedule') + describe 'GET /projects/pipeline_schedules' do + before do + visit_pipelines_schedules + + wait_for_requests + end + + describe 'The view' do + it 'does not show create schedule button' do + expect(page).not_to have_link('New schedule') + end end end end - end - context 'not logged in' do - describe 'GET /projects/pipeline_schedules' do - before do - visit_pipelines_schedules - end + context 'not logged in' do + describe 'GET /projects/pipeline_schedules' do + before do + visit_pipelines_schedules + + wait_for_requests + end - describe 'The view' do - it 'does not show create schedule button' do - expect(page).not_to have_link('New schedule') + describe 'The view' do + it 'does not show create schedule button' do + expect(page).not_to have_link('New schedule') + end end end end @@ -332,5 +431,6 @@ RSpec.describe 'Pipeline Schedules', :js, feature_category: :projects do select_timezone select_target_branch + find('body').click # close dropdown end end diff --git a/spec/features/projects/pipelines/pipeline_spec.rb b/spec/features/projects/pipelines/pipeline_spec.rb index 098d1201939..01d3d81deb6 100644 --- a/spec/features/projects/pipelines/pipeline_spec.rb +++ b/spec/features/projects/pipelines/pipeline_spec.rb @@ -133,17 +133,6 @@ RSpec.describe 'Pipeline', :js, feature_category: :projects do expect(page).to have_content("and was 
queued for #{finished_pipeline.queued_duration} seconds") end end - - it 'shows pipeline stats with flag off' do - stub_feature_flags(refactor_ci_minutes_consumption: false) - - visit project_pipeline_path(project, finished_pipeline) - - within '.pipeline-info' do - expect(page).to have_content("in #{finished_pipeline.duration} seconds " \ - "and was queued for #{finished_pipeline.queued_duration} seconds") - end - end end context 'pipeline has not finished' do diff --git a/spec/features/projects/pipelines/pipelines_spec.rb b/spec/features/projects/pipelines/pipelines_spec.rb index c46605fa9a8..637f1843e86 100644 --- a/spec/features/projects/pipelines/pipelines_spec.rb +++ b/spec/features/projects/pipelines/pipelines_spec.rb @@ -278,7 +278,6 @@ RSpec.describe 'Pipelines', :js, feature_category: :projects do end before do - stub_feature_flags(lazy_load_pipeline_dropdown_actions: false) visit_project_pipelines end @@ -289,12 +288,17 @@ RSpec.describe 'Pipelines', :js, feature_category: :projects do it 'has link to the manual action' do find('[data-testid="pipelines-manual-actions-dropdown"]').click + wait_for_requests + expect(page).to have_button('manual build') end context 'when manual action was played' do before do find('[data-testid="pipelines-manual-actions-dropdown"]').click + + wait_for_requests + click_button('manual build') end @@ -309,11 +313,11 @@ RSpec.describe 'Pipelines', :js, feature_category: :projects do create(:ci_build, :scheduled, pipeline: pipeline, name: 'delayed job 1', - stage: 'test') + stage: 'test', + scheduled_at: 2.hours.since + 2.minutes) end before do - stub_feature_flags(lazy_load_pipeline_dropdown_actions: false) visit_project_pipelines end @@ -324,9 +328,12 @@ RSpec.describe 'Pipelines', :js, feature_category: :projects do it "has link to the delayed job's action" do find('[data-testid="pipelines-manual-actions-dropdown"]').click - time_diff = [0, delayed_job.scheduled_at - Time.zone.now].max + wait_for_requests + expect(page).to 
have_button('delayed job 1') - expect(page).to have_content(Time.at(time_diff).utc.strftime("%H:%M:%S")) + + time_diff = [0, delayed_job.scheduled_at - Time.zone.now].max + expect(page).to have_content(Time.at(time_diff).utc.strftime("%H:%M")) end context 'when delayed job is expired already' do @@ -340,6 +347,8 @@ RSpec.describe 'Pipelines', :js, feature_category: :projects do it "shows 00:00:00 as the remaining time" do find('[data-testid="pipelines-manual-actions-dropdown"]').click + wait_for_requests + expect(page).to have_content("00:00:00") end end diff --git a/spec/features/projects/releases/user_creates_release_spec.rb b/spec/features/projects/releases/user_creates_release_spec.rb index f678d77b002..c282067f3ad 100644 --- a/spec/features/projects/releases/user_creates_release_spec.rb +++ b/spec/features/projects/releases/user_creates_release_spec.rb @@ -3,7 +3,7 @@ require 'spec_helper' RSpec.describe 'User creates release', :js, feature_category: :continuous_delivery do - include Spec::Support::Helpers::Features::ReleasesHelpers + include Features::ReleasesHelpers let_it_be(:project) { create(:project, :repository) } let_it_be(:milestone_1) { create(:milestone, project: project, title: '1.1') } @@ -36,6 +36,7 @@ RSpec.describe 'User creates release', :js, feature_category: :continuous_delive it 'defaults the "Create from" dropdown to the project\'s default branch' do select_new_tag_name(tag_name) + expect(page).to have_button(project.default_branch) expect(page.find('[data-testid="create-from-field"] .ref-selector button')).to have_content(project.default_branch) end @@ -123,13 +124,12 @@ RSpec.describe 'User creates release', :js, feature_category: :continuous_delive let(:new_page_url) { new_project_release_path(project, tag_name: 'v1.1.0') } it 'creates release with preselected tag' do - page.within '[data-testid="tag-name-field"]' do - expect(page).to have_text('v1.1.0') - end + expect(page).to have_button 'v1.1.0' + + open_tag_popover 'v1.1.0' 
expect(page).not_to have_selector('[data-testid="create-from-field"]') - fill_release_title("test release") click_button('Create release') wait_for_all_requests diff --git a/spec/features/projects/settings/branch_rules_settings_spec.rb b/spec/features/projects/settings/branch_rules_settings_spec.rb index 71d9c559b77..59609fecd93 100644 --- a/spec/features/projects/settings/branch_rules_settings_spec.rb +++ b/spec/features/projects/settings/branch_rules_settings_spec.rb @@ -28,6 +28,17 @@ RSpec.describe 'Projects > Settings > Repository > Branch rules settings', featu let(:role) { :maintainer } context 'Branch rules', :js do + it 'renders breadcrumbs' do + request + + page.within '.breadcrumbs' do + expect(page).to have_link('Repository Settings', href: project_settings_repository_path(project)) + expect(page).to have_link('Branch rules', + href: project_settings_repository_path(project, anchor: 'branch-rules')) + expect(page).to have_link('Details', href: '#') + end + end + it 'renders branch rules page' do request diff --git a/spec/features/projects/settings/registry_settings_cleanup_tags_spec.rb b/spec/features/projects/settings/registry_settings_cleanup_tags_spec.rb index 57aa3a56c6d..bdfe6a06dd1 100644 --- a/spec/features/projects/settings/registry_settings_cleanup_tags_spec.rb +++ b/spec/features/projects/settings/registry_settings_cleanup_tags_spec.rb @@ -32,10 +32,10 @@ feature_category: :projects do it 'shows available section' do subject - expect(find('.breadcrumbs')).to have_content('Clean up image tags') + expect(find('.breadcrumbs')).to have_content('Cleanup policies') section = find('[data-testid="container-expiration-policy-project-settings"]') - expect(section).to have_text 'Clean up image tags' + expect(section).to have_text 'Cleanup policies' end it 'passes axe automated accessibility testing' do diff --git a/spec/features/projects/settings/registry_settings_spec.rb b/spec/features/projects/settings/registry_settings_spec.rb index 
628fa23afdc..68e9b0225ea 100644 --- a/spec/features/projects/settings/registry_settings_spec.rb +++ b/spec/features/projects/settings/registry_settings_spec.rb @@ -42,10 +42,10 @@ feature_category: :projects do subject settings_block = find('[data-testid="container-expiration-policy-project-settings"]') - expect(settings_block).to have_text 'Clean up image tags' + expect(settings_block).to have_text 'Cleanup policies' end - it 'contains link to clean up image tags page' do + it 'contains link to cleanup policies page' do subject expect(page).to have_link('Edit cleanup rules', href: cleanup_image_tags_project_settings_packages_and_registries_path(project)) diff --git a/spec/features/projects/settings/repository_settings_spec.rb b/spec/features/projects/settings/repository_settings_spec.rb index a0625c93b1a..5e45d1683e7 100644 --- a/spec/features/projects/settings/repository_settings_spec.rb +++ b/spec/features/projects/settings/repository_settings_spec.rb @@ -61,6 +61,10 @@ RSpec.describe 'Projects > Settings > Repository settings', feature_category: :p let(:new_ssh_key) { attributes_for(:key)[:key] } + around do |example| + travel_to Time.zone.local(2022, 3, 1, 1, 0, 0) { example.run } + end + it 'get list of keys' do project.deploy_keys << private_deploy_key project.deploy_keys << public_deploy_key @@ -83,6 +87,21 @@ RSpec.describe 'Projects > Settings > Repository settings', feature_category: :p expect(page).to have_content('Grant write permissions to this key') end + it 'add a new deploy key with expiration' do + one_month = Time.zone.local(2022, 4, 1, 1, 0, 0) + visit project_settings_repository_path(project) + + fill_in 'deploy_key_title', with: 'new_deploy_key_with_expiry' + fill_in 'deploy_key_key', with: new_ssh_key + fill_in 'deploy_key_expires_at', with: one_month.to_s + check 'deploy_key_deploy_keys_projects_attributes_0_can_push' + click_button 'Add key' + + expect(page).to have_content('new_deploy_key_with_expiry') + expect(page).to have_content('in 1 
month') + expect(page).to have_content('Grant write permissions to this key') + end + it 'edit an existing deploy key' do project.deploy_keys << private_deploy_key visit project_settings_repository_path(project) diff --git a/spec/features/projects/settings/service_desk_setting_spec.rb b/spec/features/projects/settings/service_desk_setting_spec.rb index 859c738731b..74139aa0d7f 100644 --- a/spec/features/projects/settings/service_desk_setting_spec.rb +++ b/spec/features/projects/settings/service_desk_setting_spec.rb @@ -12,8 +12,8 @@ RSpec.describe 'Service Desk Setting', :js, :clean_gitlab_redis_cache, feature_c sign_in(user) allow_any_instance_of(Project).to receive(:present).with(current_user: user).and_return(presenter) - allow(::Gitlab::IncomingEmail).to receive(:enabled?) { true } - allow(::Gitlab::IncomingEmail).to receive(:supports_wildcard?) { true } + allow(::Gitlab::Email::IncomingEmail).to receive(:enabled?) { true } + allow(::Gitlab::Email::IncomingEmail).to receive(:supports_wildcard?) { true } end it 'shows activation checkbox' do @@ -24,7 +24,7 @@ RSpec.describe 'Service Desk Setting', :js, :clean_gitlab_redis_cache, feature_c context 'when service_desk_email is disabled' do before do - allow(::Gitlab::ServiceDeskEmail).to receive(:enabled?).and_return(false) + allow(::Gitlab::Email::ServiceDeskEmail).to receive(:enabled?).and_return(false) visit edit_project_path(project) end @@ -43,8 +43,8 @@ RSpec.describe 'Service Desk Setting', :js, :clean_gitlab_redis_cache, feature_c context 'when service_desk_email is enabled' do before do - allow(::Gitlab::ServiceDeskEmail).to receive(:enabled?) { true } - allow(::Gitlab::ServiceDeskEmail).to receive(:address_for_key) { 'address-suffix@example.com' } + allow(::Gitlab::Email::ServiceDeskEmail).to receive(:enabled?) 
{ true } + allow(::Gitlab::Email::ServiceDeskEmail).to receive(:address_for_key) { 'address-suffix@example.com' } visit edit_project_path(project) end diff --git a/spec/features/projects/settings/user_manages_project_members_spec.rb b/spec/features/projects/settings/user_manages_project_members_spec.rb index 159a83a261d..b7463537fb2 100644 --- a/spec/features/projects/settings/user_manages_project_members_spec.rb +++ b/spec/features/projects/settings/user_manages_project_members_spec.rb @@ -3,7 +3,7 @@ require 'spec_helper' RSpec.describe 'Projects > Settings > User manages project members', feature_category: :projects do - include Spec::Support::Helpers::Features::MembersHelpers + include Features::MembersHelpers include Spec::Support::Helpers::ModalHelpers include ListboxHelpers diff --git a/spec/features/projects/settings/webhooks_settings_spec.rb b/spec/features/projects/settings/webhooks_settings_spec.rb index 3b8b982b621..e527d0c9c74 100644 --- a/spec/features/projects/settings/webhooks_settings_spec.rb +++ b/spec/features/projects/settings/webhooks_settings_spec.rb @@ -83,7 +83,7 @@ RSpec.describe 'Projects > Settings > Webhook Settings', feature_category: :proj visit webhooks_path click_button 'Test' - click_button 'Push events' + click_link 'Push events' expect(page).to have_current_path(webhooks_path, ignore_query: true) end diff --git a/spec/features/projects/snippets/create_snippet_spec.rb b/spec/features/projects/snippets/create_snippet_spec.rb index 06e48bc82c0..a28416f3ca3 100644 --- a/spec/features/projects/snippets/create_snippet_spec.rb +++ b/spec/features/projects/snippets/create_snippet_spec.rb @@ -4,7 +4,7 @@ require 'spec_helper' RSpec.describe 'Projects > Snippets > Create Snippet', :js, feature_category: :source_code_management do include DropzoneHelper - include Spec::Support::Helpers::Features::SnippetSpecHelpers + include Features::SnippetSpecHelpers let_it_be(:user) { create(:user) } let_it_be(:project) do diff --git 
a/spec/features/projects/snippets/user_updates_snippet_spec.rb b/spec/features/projects/snippets/user_updates_snippet_spec.rb index 014bf63c696..dda9a556d17 100644 --- a/spec/features/projects/snippets/user_updates_snippet_spec.rb +++ b/spec/features/projects/snippets/user_updates_snippet_spec.rb @@ -3,7 +3,7 @@ require 'spec_helper' RSpec.describe 'Projects > Snippets > User updates a snippet', :js, feature_category: :source_code_management do - include Spec::Support::Helpers::Features::SnippetSpecHelpers + include Features::SnippetSpecHelpers let_it_be(:user) { create(:user) } let_it_be(:project) { create(:project, namespace: user.namespace) } diff --git a/spec/features/projects/tree/create_directory_spec.rb b/spec/features/projects/tree/create_directory_spec.rb index 58f572bc021..8fae8f38025 100644 --- a/spec/features/projects/tree/create_directory_spec.rb +++ b/spec/features/projects/tree/create_directory_spec.rb @@ -3,7 +3,7 @@ require 'spec_helper' RSpec.describe 'Multi-file editor new directory', :js, feature_category: :web_ide do - include WebIdeSpecHelpers + include Features::WebIdeSpecHelpers let(:user) { create(:user) } let(:project) { create(:project, :repository) } diff --git a/spec/features/projects/tree/create_file_spec.rb b/spec/features/projects/tree/create_file_spec.rb index 674aef8e6f4..2f8935b9ce3 100644 --- a/spec/features/projects/tree/create_file_spec.rb +++ b/spec/features/projects/tree/create_file_spec.rb @@ -3,7 +3,7 @@ require 'spec_helper' RSpec.describe 'Multi-file editor new file', :js, feature_category: :web_ide do - include WebIdeSpecHelpers + include Features::WebIdeSpecHelpers let(:user) { create(:user) } let(:project) { create(:project, :repository) } diff --git a/spec/features/projects/tree/tree_show_spec.rb b/spec/features/projects/tree/tree_show_spec.rb index 52c6cb2192b..3becc48d450 100644 --- a/spec/features/projects/tree/tree_show_spec.rb +++ b/spec/features/projects/tree/tree_show_spec.rb @@ -3,7 +3,7 @@ require 
'spec_helper' RSpec.describe 'Projects tree', :js, feature_category: :web_ide do - include WebIdeSpecHelpers + include Features::WebIdeSpecHelpers include RepoHelpers include ListboxHelpers diff --git a/spec/features/projects/tree/upload_file_spec.rb b/spec/features/projects/tree/upload_file_spec.rb index 42fa88a0d3e..6ec57af2590 100644 --- a/spec/features/projects/tree/upload_file_spec.rb +++ b/spec/features/projects/tree/upload_file_spec.rb @@ -3,7 +3,7 @@ require 'spec_helper' RSpec.describe 'Multi-file editor upload file', :js, feature_category: :web_ide do - include WebIdeSpecHelpers + include Features::WebIdeSpecHelpers let(:user) { create(:user) } let(:project) { create(:project, :repository) } diff --git a/spec/features/projects/user_sees_user_popover_spec.rb b/spec/features/projects/user_sees_user_popover_spec.rb index 5badcd99dff..9d8d06c514e 100644 --- a/spec/features/projects/user_sees_user_popover_spec.rb +++ b/spec/features/projects/user_sees_user_popover_spec.rb @@ -3,7 +3,7 @@ require 'spec_helper' RSpec.describe 'User sees user popover', :js, feature_category: :projects do - include Spec::Support::Helpers::Features::NotesHelpers + include Features::NotesHelpers let_it_be(:user) { create(:user, pronouns: 'they/them') } let_it_be(:project) { create(:project, :repository, creator: user) } diff --git a/spec/features/projects/user_views_empty_project_spec.rb b/spec/features/projects/user_views_empty_project_spec.rb index e2b56e8ced6..e38cfc2273a 100644 --- a/spec/features/projects/user_views_empty_project_spec.rb +++ b/spec/features/projects/user_views_empty_project_spec.rb @@ -3,7 +3,7 @@ require 'spec_helper' RSpec.describe 'User views an empty project', feature_category: :projects do - include Spec::Support::Helpers::Features::InviteMembersModalHelper + include Features::InviteMembersModalHelpers let_it_be(:project) { create(:project, :empty_repo) } let_it_be(:user) { create(:user) } diff --git a/spec/features/projects/work_items/work_item_spec.rb 
b/spec/features/projects/work_items/work_item_spec.rb index d0d458350b5..b731c462f0c 100644 --- a/spec/features/projects/work_items/work_item_spec.rb +++ b/spec/features/projects/work_items/work_item_spec.rb @@ -8,40 +8,32 @@ RSpec.describe 'Work item', :js, feature_category: :team_planning do let_it_be(:work_item) { create(:work_item, project: project) } let_it_be(:milestone) { create(:milestone, project: project) } let_it_be(:milestones) { create_list(:milestone, 25, project: project) } + let_it_be(:note) { create(:note, noteable: work_item, project: work_item.project) } + let(:work_items_path) { project_work_items_path(project, work_items_path: work_item.iid, iid_path: true) } context 'for signed in user' do before do project.add_developer(user) sign_in(user) - end - - context 'with internal id' do - before do - visit project_work_items_path(project, work_items_path: work_item.iid, iid_path: true) - end - it_behaves_like 'work items title' - it_behaves_like 'work items status' - it_behaves_like 'work items assignees' - it_behaves_like 'work items labels' - it_behaves_like 'work items comments' - it_behaves_like 'work items description' - it_behaves_like 'work items milestone' + visit work_items_path end - context 'with global id' do - before do - stub_feature_flags(use_iid_in_work_items_path: false) - visit project_work_items_path(project, work_items_path: work_item.id) + it 'uses IID path in breadcrumbs' do + within('[data-testid="breadcrumb-current-link"]') do + expect(page).to have_link('Work Items', href: work_items_path) end - - it_behaves_like 'work items status' - it_behaves_like 'work items assignees' - it_behaves_like 'work items labels' - it_behaves_like 'work items comments' - it_behaves_like 'work items description' end + + it_behaves_like 'work items title' + it_behaves_like 'work items status' + it_behaves_like 'work items assignees' + it_behaves_like 'work items labels' + it_behaves_like 'work items comments', :issue + it_behaves_like 'work items 
description' + it_behaves_like 'work items milestone' + it_behaves_like 'work items notifications' end context 'for signed in owner' do @@ -50,9 +42,31 @@ RSpec.describe 'Work item', :js, feature_category: :team_planning do sign_in(user) - visit project_work_items_path(project, work_items_path: work_item.id) + visit work_items_path end it_behaves_like 'work items invite members' end + + context 'for guest users' do + before do + project.add_guest(user) + + sign_in(user) + + visit work_items_path + end + + it_behaves_like 'work items comment actions for guest users' + end + + context 'for user not signed in' do + before do + visit work_items_path + end + + it 'actions dropdown is not displayed' do + expect(page).not_to have_selector('[data-testid="work-item-actions-dropdown"]') + end + end end diff --git a/spec/features/protected_branches_spec.rb b/spec/features/protected_branches_spec.rb index 04096b3e4f9..e4a64d391b0 100644 --- a/spec/features/protected_branches_spec.rb +++ b/spec/features/protected_branches_spec.rb @@ -96,6 +96,15 @@ RSpec.describe 'Protected Branches', :js, feature_category: :source_code_managem expect(ProtectedBranch.last.name).to eq('some->branch') end + it "shows success alert once protected branch is created" do + visit project_protected_branches_path(project) + set_defaults + set_protected_branch_name('some->branch') + click_on "Protect" + wait_for_requests + expect(page).to have_content(s_('ProtectedBranch|View protected branches as branch rules')) + end + it "displays the last commit on the matching branch if it exists" do commit = create(:commit, project: project) project.repository.add_branch(admin, 'some-branch', commit.id) diff --git a/spec/features/search/user_searches_for_code_spec.rb b/spec/features/search/user_searches_for_code_spec.rb index b7d06a3a962..976324a5032 100644 --- a/spec/features/search/user_searches_for_code_spec.rb +++ b/spec/features/search/user_searches_for_code_spec.rb @@ -99,64 +99,11 @@ RSpec.describe 'User 
searches for code', :js, :disable_rate_limiter, feature_cat end end - context 'when :new_header_search is true' do + context 'when header search' do context 'search code within refs' do let(:ref_name) { 'v1.0.0' } before do - # This feature is disabled by default in spec_helper.rb. - # We missed a feature breaking bug, so to prevent this regression, testing both scenarios for this spec. - # This can be removed as part of closing https://gitlab.com/gitlab-org/gitlab/-/issues/339348. - stub_feature_flags(new_header_search: true) - visit(project_tree_path(project, ref_name)) - - submit_search('gitlab-grack') - select_search_scope('Code') - end - - it 'shows ref switcher in code result summary' do - expect(find('.ref-selector')).to have_text(ref_name) - end - - it 'persists branch name across search' do - find('.gl-search-box-by-click-search-button').click - expect(find('.ref-selector')).to have_text(ref_name) - end - - # this example is use to test the design that the refs is not - # only represent the branch as well as the tags. - it 'ref switcher list all the branches and tags' do - find('.ref-selector').click - wait_for_requests - - page.within('.ref-selector') do - expect(page).to have_selector('li', text: 'add-ipython-files') - expect(page).to have_selector('li', text: 'v1.0.0') - end - end - - it 'search result changes when refs switched' do - expect(find('.results')).not_to have_content('path = gitlab-grack') - - find('.ref-selector').click - wait_for_requests - - select_listbox_item('add-ipython-files') - - expect(page).to have_selector('.results', text: 'path = gitlab-grack') - end - end - end - - context 'when :new_header_search is false' do - context 'search code within refs' do - let(:ref_name) { 'v1.0.0' } - - before do - # This feature is disabled by default in spec_helper.rb. - # We missed a feature breaking bug, so to prevent this regression, testing both scenarios for this spec. 
- # This can be removed as part of closing https://gitlab.com/gitlab-org/gitlab/-/issues/339348. - stub_feature_flags(new_header_search: false) visit(project_tree_path(project, ref_name)) submit_search('gitlab-grack') diff --git a/spec/features/search/user_uses_header_search_field_spec.rb b/spec/features/search/user_uses_header_search_field_spec.rb index 127176da3fb..71d0f8d6d7f 100644 --- a/spec/features/search/user_uses_header_search_field_spec.rb +++ b/spec/features/search/user_uses_header_search_field_spec.rb @@ -38,7 +38,7 @@ RSpec.describe 'User uses header search field', :js, :disable_rate_limiter, feat end it 'renders breadcrumbs' do - page.within('.breadcrumbs-links') do + page.within('.breadcrumbs') do expect(page).to have_content('Search') end end diff --git a/spec/features/snippets/notes_on_personal_snippets_spec.rb b/spec/features/snippets/notes_on_personal_snippets_spec.rb index c281e5906ad..5aac27a71e4 100644 --- a/spec/features/snippets/notes_on_personal_snippets_spec.rb +++ b/spec/features/snippets/notes_on_personal_snippets_spec.rb @@ -119,20 +119,36 @@ RSpec.describe 'Comments on personal snippets', :js, feature_category: :source_c end context 'when editing a note' do - it 'changes the text' do - find('.js-note-edit').click + context 'when note is empty' do + before do + find('.js-note-edit').click - page.within('.current-note-edit-form') do - fill_in 'note[note]', with: 'new content' - find('.btn-confirm').click + page.within('.current-note-edit-form') do + fill_in 'note[note]', with: '' + end end - page.within("#notes-list li#note_#{snippet_notes[0].id}") do - edited_text = find('.edited-text') + it 'disables save button' do + expect(page).to have_button('Save comment', disabled: true) + end + end + + context 'when note is not empty' do + it 'changes the text' do + find('.js-note-edit').click + + page.within('.current-note-edit-form') do + fill_in 'note[note]', with: 'new content' + find('.btn-confirm').click + end + + page.within("#notes-list 
li#note_#{snippet_notes[0].id}") do + edited_text = find('.edited-text') - expect(page).to have_css('.note_edited_ago') - expect(page).to have_content('new content') - expect(edited_text).to have_selector('.note_edited_ago') + expect(page).to have_css('.note_edited_ago') + expect(page).to have_content('new content') + expect(edited_text).to have_selector('.note_edited_ago') + end end end end diff --git a/spec/features/snippets/show_spec.rb b/spec/features/snippets/show_spec.rb index d6ff8c066c4..2673ad5e1d7 100644 --- a/spec/features/snippets/show_spec.rb +++ b/spec/features/snippets/show_spec.rb @@ -25,7 +25,7 @@ RSpec.describe 'Snippet', :js, feature_category: :source_code_management do subject { visit snippet_path(snippet) } end - it_behaves_like 'a dashboard page with sidebar', :dashboard_snippets_path, :snippets + it_behaves_like 'a "Your work" page with sidebar and breadcrumbs', :dashboard_snippets_path, :snippets context 'when unauthenticated' do it 'shows the "Explore" sidebar' do @@ -42,6 +42,6 @@ RSpec.describe 'Snippet', :js, feature_category: :source_code_management do sign_in(different_user) end - it_behaves_like 'a dashboard page with sidebar', :dashboard_snippets_path, :snippets + it_behaves_like 'a "Your work" page with sidebar and breadcrumbs', :dashboard_snippets_path, :snippets end end diff --git a/spec/features/snippets/user_creates_snippet_spec.rb b/spec/features/snippets/user_creates_snippet_spec.rb index 03f569fe4b0..945785cf1c3 100644 --- a/spec/features/snippets/user_creates_snippet_spec.rb +++ b/spec/features/snippets/user_creates_snippet_spec.rb @@ -4,7 +4,7 @@ require 'spec_helper' RSpec.describe 'User creates snippet', :js, feature_category: :source_code_management do include DropzoneHelper - include Spec::Support::Helpers::Features::SnippetSpecHelpers + include Features::SnippetSpecHelpers let_it_be(:user) { create(:user) } @@ -21,7 +21,7 @@ RSpec.describe 'User creates snippet', :js, feature_category: :source_code_manag visit 
new_snippet_path end - it_behaves_like 'a dashboard page with sidebar', :new_snippet_path, :snippets + it_behaves_like 'a "Your work" page with sidebar and breadcrumbs', :new_snippet_path, :snippets def fill_form snippet_fill_in_form(title: title, content: file_content, description: md_description) diff --git a/spec/features/snippets/user_edits_snippet_spec.rb b/spec/features/snippets/user_edits_snippet_spec.rb index 5096472ebe1..f58fda67b59 100644 --- a/spec/features/snippets/user_edits_snippet_spec.rb +++ b/spec/features/snippets/user_edits_snippet_spec.rb @@ -4,7 +4,7 @@ require 'spec_helper' RSpec.describe 'User edits snippet', :js, feature_category: :source_code_management do include DropzoneHelper - include Spec::Support::Helpers::Features::SnippetSpecHelpers + include Features::SnippetSpecHelpers let_it_be(:file_name) { 'test.rb' } let_it_be(:content) { 'puts "test"' } diff --git a/spec/features/tags/developer_views_tags_spec.rb b/spec/features/tags/developer_views_tags_spec.rb index dc9f38f1d83..81a41951377 100644 --- a/spec/features/tags/developer_views_tags_spec.rb +++ b/spec/features/tags/developer_views_tags_spec.rb @@ -60,7 +60,6 @@ RSpec.describe 'Developer views tags', feature_category: :source_code_management expect(page).to have_current_path( project_tag_path(project, 'v1.0.0'), ignore_query: true) expect(page).to have_content 'v1.0.0' - expect(page).to have_content 'This tag has no release notes.' 
end describe 'links on the tag page' do diff --git a/spec/features/user_can_display_performance_bar_spec.rb b/spec/features/user_can_display_performance_bar_spec.rb index 4f6ce6e8f71..caf13c4111b 100644 --- a/spec/features/user_can_display_performance_bar_spec.rb +++ b/spec/features/user_can_display_performance_bar_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe 'User can display performance bar', :js, feature_category: :continuous_verification do +RSpec.describe 'User can display performance bar', :js, feature_category: :application_performance do shared_examples 'performance bar cannot be displayed' do it 'does not show the performance bar by default' do expect(page).not_to have_css('#js-peek') diff --git a/spec/features/user_sees_revert_modal_spec.rb b/spec/features/user_sees_revert_modal_spec.rb index ae3158e4270..1c754943acb 100644 --- a/spec/features/user_sees_revert_modal_spec.rb +++ b/spec/features/user_sees_revert_modal_spec.rb @@ -21,7 +21,9 @@ feature_category: :code_review_workflow do before do sign_in(user) visit(project_merge_request_path(project, merge_request)) - click_button('Merge') + page.within('.mr-state-widget') do + click_button 'Merge' + end wait_for_requests end diff --git a/spec/features/user_sorts_things_spec.rb b/spec/features/user_sorts_things_spec.rb index b45de88832c..bc377fb1f8f 100644 --- a/spec/features/user_sorts_things_spec.rb +++ b/spec/features/user_sorts_things_spec.rb @@ -7,7 +7,7 @@ require "spec_helper" # The `it`s are named here by convention `starting point -> some pages -> final point`. # All those specs are moved out to this spec intentionally to keep them all in one place. 
RSpec.describe "User sorts things", :js do - include Spec::Support::Helpers::Features::SortingHelpers + include Features::SortingHelpers include DashboardHelper let_it_be(:project) { create(:project_empty_repo, :public) } diff --git a/spec/features/users/login_spec.rb b/spec/features/users/login_spec.rb index 37b5d80ed61..e6e5a1f9894 100644 --- a/spec/features/users/login_spec.rb +++ b/spec/features/users/login_spec.rb @@ -208,17 +208,14 @@ RSpec.describe 'Login', :clean_gitlab_redis_sessions, feature_category: :system_ end describe 'with two-factor authentication', :js do - def enter_code(code) - if page.has_content?("Sign in via 2FA code") - click_on("Sign in via 2FA code") - enter_code(code) - else - fill_in 'user_otp_attempt', with: code - click_button 'Verify code' - end + def enter_code(code, only_two_factor_webauthn_enabled: false) + click_on("Sign in via 2FA code") if only_two_factor_webauthn_enabled + + fill_in 'user_otp_attempt', with: code + click_button 'Verify code' end - shared_examples_for 'can login with recovery codes' do + shared_examples_for 'can login with recovery codes' do |only_two_factor_webauthn_enabled: false| context 'using backup code' do let(:codes) { user.generate_otp_backup_codes! 
} @@ -235,7 +232,7 @@ RSpec.describe 'Login', :clean_gitlab_redis_sessions, feature_category: :system_ .to increment(:user_authenticated_counter) .and increment(:user_two_factor_authenticated_counter) - enter_code(codes.sample) + enter_code(codes.sample, only_two_factor_webauthn_enabled: only_two_factor_webauthn_enabled) expect(page).to have_current_path root_path, ignore_query: true end @@ -245,7 +242,7 @@ RSpec.describe 'Login', :clean_gitlab_redis_sessions, feature_category: :system_ .to increment(:user_authenticated_counter) .and increment(:user_two_factor_authenticated_counter) - expect { enter_code(codes.sample) } + expect { enter_code(codes.sample, only_two_factor_webauthn_enabled: only_two_factor_webauthn_enabled) } .to change { user.reload.otp_backup_codes.size }.by(-1) end @@ -256,13 +253,13 @@ RSpec.describe 'Login', :clean_gitlab_redis_sessions, feature_category: :system_ .and increment(:user_session_destroyed_counter) random_code = codes.delete(codes.sample) - expect { enter_code(random_code) } + expect { enter_code(random_code, only_two_factor_webauthn_enabled: only_two_factor_webauthn_enabled) } .to change { user.reload.otp_backup_codes.size }.by(-1) gitlab_sign_out gitlab_sign_in(user) - expect { enter_code(codes.sample) } + expect { enter_code(codes.sample, only_two_factor_webauthn_enabled: only_two_factor_webauthn_enabled) } .to change { user.reload.otp_backup_codes.size }.by(-1) end @@ -272,7 +269,7 @@ RSpec.describe 'Login', :clean_gitlab_redis_sessions, feature_category: :system_ .and increment(:user_two_factor_authenticated_counter) expect(ActiveSession).to receive(:cleanup).with(user).once.and_call_original - enter_code(codes.sample) + enter_code(codes.sample, only_two_factor_webauthn_enabled: only_two_factor_webauthn_enabled) end end @@ -287,14 +284,16 @@ RSpec.describe 'Login', :clean_gitlab_redis_sessions, feature_category: :system_ user.save!(touch: false) expect(user.reload.otp_backup_codes.size).to eq 9 - enter_code(code) + 
enter_code(code, only_two_factor_webauthn_enabled: only_two_factor_webauthn_enabled) expect(page).to have_content('Invalid two-factor code.') end end end end - context 'with valid username/password' do + # Freeze time to prevent failures when time between code being entered and + # validated greater than otp_allowed_drift + context 'with valid username/password', :freeze_time do let(:user) { create(:user, :two_factor) } before do @@ -380,7 +379,7 @@ RSpec.describe 'Login', :clean_gitlab_redis_sessions, feature_category: :system_ context 'when user with only Webauthn enabled' do let(:user) { create(:user, :two_factor_via_webauthn, registrations_count: 1) } - include_examples 'can login with recovery codes' + include_examples 'can login with recovery codes', only_two_factor_webauthn_enabled: true end end @@ -418,7 +417,9 @@ RSpec.describe 'Login', :clean_gitlab_redis_sessions, feature_category: :system_ end end - context 'when two factor authentication is required' do + # Freeze time to prevent failures when time between code being entered and + # validated greater than otp_allowed_drift + context 'when two factor authentication is required', :freeze_time do it 'shows 2FA prompt after OAuth login' do expect(authentication_metrics) .to increment(:user_authenticated_counter) @@ -610,23 +611,21 @@ RSpec.describe 'Login', :clean_gitlab_redis_sessions, feature_category: :system_ end context 'within the grace period' do - it 'redirects to two-factor configuration page' do - freeze_time do - expect(authentication_metrics) - .to increment(:user_authenticated_counter) - - gitlab_sign_in(user) - - expect(page).to have_current_path profile_two_factor_auth_path, ignore_query: true - expect(page).to have_content( - 'The group settings for Group 1 and Group 2 require you to enable '\ - 'Two-Factor Authentication for your account. '\ - 'You can leave Group 1 and leave Group 2. 
'\ - 'You need to do this '\ - 'before '\ - "#{(Time.zone.now + 2.days).strftime("%a, %d %b %Y %H:%M:%S %z")}" - ) - end + it 'redirects to two-factor configuration page', :freeze_time do + expect(authentication_metrics) + .to increment(:user_authenticated_counter) + + gitlab_sign_in(user) + + expect(page).to have_current_path profile_two_factor_auth_path, ignore_query: true + expect(page).to have_content( + 'The group settings for Group 1 and Group 2 require you to enable '\ + 'Two-Factor Authentication for your account. '\ + 'You can leave Group 1 and leave Group 2. '\ + 'You need to do this '\ + 'before '\ + "#{(Time.zone.now + 2.days).strftime("%a, %d %b %Y %H:%M:%S %z")}" + ) end it 'allows skipping two-factor configuration', :js do diff --git a/spec/features/users/show_spec.rb b/spec/features/users/show_spec.rb index 9aef3ed7cd6..9c4a1b36ecc 100644 --- a/spec/features/users/show_spec.rb +++ b/spec/features/users/show_spec.rb @@ -15,6 +15,14 @@ RSpec.describe 'User page', feature_category: :user_profile do expect(page).to have_content("User ID: #{user.id}") end + it 'shows name on breadcrumbs' do + subject + + page.within '.breadcrumbs' do + expect(page).to have_content(user.name) + end + end + context 'with public profile' do context 'with `profile_tabs_vue` feature flag disabled' do before do @@ -538,4 +546,36 @@ RSpec.describe 'User page', feature_category: :user_profile do end end end + + context 'achievements' do + it 'renders the user achievements mount point' do + subject + + expect(page).to have_selector('#js-user-achievements') + end + + context 'when the user has chosen not to display achievements' do + let(:user) { create(:user) } + + before do + user.update!(achievements_enabled: false) + end + + it 'does not render the user achievements mount point' do + subject + + expect(page).not_to have_selector('#js-user-achievements') + end + end + + context 'when the profile is private' do + let(:user) { create(:user, private_profile: true) } + + it 'does 
not render the user achievements mount point' do + subject + + expect(page).not_to have_selector('#js-user-achievements') + end + end + end end diff --git a/spec/features/users/signup_spec.rb b/spec/features/users/signup_spec.rb index a762198d3c3..8820d29ced4 100644 --- a/spec/features/users/signup_spec.rb +++ b/spec/features/users/signup_spec.rb @@ -10,7 +10,7 @@ RSpec.shared_examples 'Signup name validation' do |field, max_length, label| visit new_user_registration_path end - describe "#{field} validation", :js do + describe "#{field} validation" do it "does not show an error border if the user's fullname length is not longer than #{max_length} characters" do fill_in field, with: 'u' * max_length @@ -44,7 +44,7 @@ RSpec.shared_examples 'Signup name validation' do |field, max_length, label| end end -RSpec.describe 'Signup', feature_category: :user_profile do +RSpec.describe 'Signup', :js, feature_category: :user_profile do include TermsHelper let(:new_user) { build_stubbed(:user) } @@ -71,7 +71,7 @@ RSpec.describe 'Signup', feature_category: :user_profile do stub_application_setting(require_admin_approval_after_user_signup: false) end - describe 'username validation', :js do + describe 'username validation' do before do visit new_user_registration_path end @@ -356,6 +356,8 @@ RSpec.describe 'Signup', feature_category: :user_profile do visit new_user_registration_path fill_in_signup_form + wait_for_all_requests + click_button 'Register' visit new_project_path @@ -383,7 +385,7 @@ RSpec.describe 'Signup', feature_category: :user_profile do expect(page.body).not_to match(/#{new_user.password}/) end - context 'with invalid email', :js do + context 'with invalid email' do it_behaves_like 'user email validation' do let(:path) { new_user_registration_path } end diff --git a/spec/features/webauthn_spec.rb b/spec/features/webauthn_spec.rb index fbbc746c0b0..5c42facfa8b 100644 --- a/spec/features/webauthn_spec.rb +++ b/spec/features/webauthn_spec.rb @@ -3,7 +3,7 @@ require 
'spec_helper' RSpec.describe 'Using WebAuthn Devices for Authentication', :js, feature_category: :system_access do - include Spec::Support::Helpers::Features::TwoFactorHelpers + include Features::TwoFactorHelpers let(:app_id) { "http://#{Capybara.current_session.server.host}:#{Capybara.current_session.server.port}" } before do diff --git a/spec/finders/abuse_reports_finder_spec.rb b/spec/finders/abuse_reports_finder_spec.rb index d3b148375d4..ee93d042ca2 100644 --- a/spec/finders/abuse_reports_finder_spec.rb +++ b/spec/finders/abuse_reports_finder_spec.rb @@ -78,6 +78,24 @@ RSpec.describe AbuseReportsFinder, '#execute' do expect(subject).to match_array([abuse_report_2]) end end + + context 'when value is not a valid status' do + let(:params) { { status: 'partial' } } + + it 'defaults to returning open abuse reports' do + expect(subject).to match_array([abuse_report_1]) + end + end + + context 'when abuse_reports_list feature flag is disabled' do + before do + stub_feature_flags(abuse_reports_list: false) + end + + it 'does not filter by status' do + expect(subject).to match_array([abuse_report_1, abuse_report_2]) + end + end end context 'when params[:category] is present' do diff --git a/spec/finders/access_requests_finder_spec.rb b/spec/finders/access_requests_finder_spec.rb index b82495d55fd..5d7f35581ee 100644 --- a/spec/finders/access_requests_finder_spec.rb +++ b/spec/finders/access_requests_finder_spec.rb @@ -96,13 +96,4 @@ RSpec.describe AccessRequestsFinder do it_behaves_like '#execute' it_behaves_like '#execute!' - - context 'when project_members_index_by_project_namespace feature flag is disabled' do - before do - stub_feature_flags(project_members_index_by_project_namespace: false) - end - - it_behaves_like '#execute' - it_behaves_like '#execute!' 
- end end diff --git a/spec/finders/achievements/achievements_finder_spec.rb b/spec/finders/achievements/achievements_finder_spec.rb new file mode 100644 index 00000000000..3ac18c27494 --- /dev/null +++ b/spec/finders/achievements/achievements_finder_spec.rb @@ -0,0 +1,26 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe Achievements::AchievementsFinder, feature_category: :user_profile do + let_it_be(:group) { create(:group) } + let_it_be(:achievements) { create_list(:achievement, 3, namespace: group) } + + let(:params) { {} } + + describe '#execute' do + subject { described_class.new(group, params).execute } + + it 'returns all achievements' do + expect(subject).to match_array(achievements) + end + + context 'when ids param provided' do + let(:params) { { ids: [achievements[0].id, achievements[1].id] } } + + it 'returns specified achievements' do + expect(subject).to contain_exactly(achievements[0], achievements[1]) + end + end + end +end diff --git a/spec/finders/alert_management/alerts_finder_spec.rb b/spec/finders/alert_management/alerts_finder_spec.rb index 7fcbc7b20a1..3c37d52d6c3 100644 --- a/spec/finders/alert_management/alerts_finder_spec.rb +++ b/spec/finders/alert_management/alerts_finder_spec.rb @@ -222,14 +222,15 @@ RSpec.describe AlertManagement::AlertsFinder, '#execute' do context 'search query given' do let_it_be(:alert) do - create(:alert_management_alert, - :with_fingerprint, - project: project, - title: 'Title', - description: 'Desc', - service: 'Service', - monitoring_tool: 'Monitor' - ) + create( + :alert_management_alert, + :with_fingerprint, + project: project, + title: 'Title', + description: 'Desc', + service: 'Service', + monitoring_tool: 'Monitor' + ) end context 'searching title' do diff --git a/spec/finders/ci/pipelines_for_merge_request_finder_spec.rb b/spec/finders/ci/pipelines_for_merge_request_finder_spec.rb index 6e218db1254..35effc265c4 100644 --- 
a/spec/finders/ci/pipelines_for_merge_request_finder_spec.rb +++ b/spec/finders/ci/pipelines_for_merge_request_finder_spec.rb @@ -17,8 +17,10 @@ RSpec.describe Ci::PipelinesForMergeRequestFinder do let_it_be(:forked_project) { fork_project(parent_project, nil, repository: true, target_project: create(:project, :private, :repository)) } let(:merge_request) do - create(:merge_request, source_project: forked_project, source_branch: 'feature', - target_project: parent_project, target_branch: 'master') + create( + :merge_request, source_project: forked_project, source_branch: 'feature', + target_project: parent_project, target_branch: 'master' + ) end let!(:pipeline_in_parent) do @@ -125,8 +127,10 @@ RSpec.describe Ci::PipelinesForMergeRequestFinder do let(:merge_request) { build(:merge_request, source_project: create(:project, :repository)) } let!(:pipeline) do - create(:ci_empty_pipeline, project: project, - sha: merge_request.diff_head_sha, ref: merge_request.source_branch) + create( + :ci_empty_pipeline, project: project, + sha: merge_request.diff_head_sha, ref: merge_request.source_branch + ) end it 'returns pipelines from diff_head_sha' do @@ -139,8 +143,10 @@ RSpec.describe Ci::PipelinesForMergeRequestFinder do let(:target_ref) { 'master' } let!(:branch_pipeline) do - create(:ci_pipeline, source: :push, project: project, - ref: source_ref, sha: merge_request.merge_request_diff.head_commit_sha) + create( + :ci_pipeline, source: :push, project: project, + ref: source_ref, sha: merge_request.merge_request_diff.head_commit_sha + ) end let!(:tag_pipeline) do @@ -148,13 +154,17 @@ RSpec.describe Ci::PipelinesForMergeRequestFinder do end let!(:detached_merge_request_pipeline) do - create(:ci_pipeline, source: :merge_request_event, project: project, - ref: source_ref, sha: shas.second, merge_request: merge_request) + create( + :ci_pipeline, source: :merge_request_event, project: project, + ref: source_ref, sha: shas.second, merge_request: merge_request + ) end 
let(:merge_request) do - create(:merge_request, source_project: project, source_branch: source_ref, - target_project: project, target_branch: target_ref) + create( + :merge_request, source_project: project, source_branch: source_ref, + target_project: project, target_branch: target_ref + ) end let(:project) { create(:project, :repository) } @@ -166,13 +176,14 @@ RSpec.describe Ci::PipelinesForMergeRequestFinder do context 'when there are a branch pipeline and a merge request pipeline' do let!(:branch_pipeline_2) do - create(:ci_pipeline, source: :push, project: project, - ref: source_ref, sha: shas.first) + create(:ci_pipeline, source: :push, project: project, ref: source_ref, sha: shas.first) end let!(:detached_merge_request_pipeline_2) do - create(:ci_pipeline, source: :merge_request_event, project: project, - ref: source_ref, sha: shas.first, merge_request: merge_request) + create( + :ci_pipeline, source: :merge_request_event, project: project, + ref: source_ref, sha: shas.first, merge_request: merge_request + ) end it 'returns merge request pipelines first' do @@ -183,8 +194,7 @@ RSpec.describe Ci::PipelinesForMergeRequestFinder do context 'when there are multiple merge request pipelines from the same branch' do let!(:branch_pipeline_2) do - create(:ci_pipeline, source: :push, project: project, - ref: source_ref, sha: shas.first) + create(:ci_pipeline, source: :push, project: project, ref: source_ref, sha: shas.first) end let!(:branch_pipeline_with_sha_not_belonging_to_merge_request) do @@ -192,20 +202,26 @@ RSpec.describe Ci::PipelinesForMergeRequestFinder do end let!(:detached_merge_request_pipeline_2) do - create(:ci_pipeline, source: :merge_request_event, project: project, - ref: source_ref, sha: shas.first, merge_request: merge_request_2) + create( + :ci_pipeline, source: :merge_request_event, project: project, + ref: source_ref, sha: shas.first, merge_request: merge_request_2 + ) end let(:merge_request_2) do - create(:merge_request, source_project: 
project, source_branch: source_ref, - target_project: project, target_branch: 'stable') + create( + :merge_request, source_project: project, source_branch: source_ref, + target_project: project, target_branch: 'stable' + ) end before do shas.each.with_index do |sha, index| - create(:merge_request_diff_commit, - merge_request_diff: merge_request_2.merge_request_diff, - sha: sha, relative_order: index) + create( + :merge_request_diff_commit, + merge_request_diff: merge_request_2.merge_request_diff, + sha: sha, relative_order: index + ) end end @@ -219,8 +235,10 @@ RSpec.describe Ci::PipelinesForMergeRequestFinder do context 'when detached merge request pipeline is run on head ref of the merge request' do let!(:detached_merge_request_pipeline) do - create(:ci_pipeline, source: :merge_request_event, project: project, - ref: merge_request.ref_path, sha: shas.second, merge_request: merge_request) + create( + :ci_pipeline, source: :merge_request_event, project: project, + ref: merge_request.ref_path, sha: shas.second, merge_request: merge_request + ) end it 'sets the head ref of the merge request to the pipeline ref' do diff --git a/spec/finders/clusters/agent_authorizations_finder_spec.rb b/spec/finders/clusters/agent_authorizations_finder_spec.rb deleted file mode 100644 index f680792d6c4..00000000000 --- a/spec/finders/clusters/agent_authorizations_finder_spec.rb +++ /dev/null @@ -1,140 +0,0 @@ -# frozen_string_literal: true - -require 'spec_helper' - -RSpec.describe Clusters::AgentAuthorizationsFinder do - describe '#execute' do - let_it_be(:top_level_group) { create(:group) } - let_it_be(:subgroup1) { create(:group, parent: top_level_group) } - let_it_be(:subgroup2) { create(:group, parent: subgroup1) } - let_it_be(:bottom_level_group) { create(:group, parent: subgroup2) } - - let_it_be(:non_ancestor_group) { create(:group, parent: top_level_group) } - let_it_be(:non_ancestor_project) { create(:project, namespace: non_ancestor_group) } - 
let_it_be(:non_ancestor_agent) { create(:cluster_agent, project: non_ancestor_project) } - - let_it_be(:agent_configuration_project) { create(:project, namespace: subgroup1) } - let_it_be(:requesting_project, reload: true) { create(:project, namespace: bottom_level_group) } - - let_it_be(:staging_agent) { create(:cluster_agent, project: agent_configuration_project) } - let_it_be(:production_agent) { create(:cluster_agent, project: agent_configuration_project) } - - subject { described_class.new(requesting_project).execute } - - shared_examples_for 'access_as' do - let(:config) { { access_as: { access_as => {} } } } - - context 'agent' do - let(:access_as) { :agent } - - it { is_expected.to match_array [authorization] } - end - - context 'impersonate' do - let(:access_as) { :impersonate } - - it { is_expected.to be_empty } - end - - context 'ci_user' do - let(:access_as) { :ci_user } - - it { is_expected.to be_empty } - end - - context 'ci_job' do - let(:access_as) { :ci_job } - - it { is_expected.to be_empty } - end - end - - describe 'project authorizations' do - context 'agent configuration project does not share a root namespace with the given project' do - let(:unrelated_agent) { create(:cluster_agent) } - - before do - create(:agent_project_authorization, agent: unrelated_agent, project: requesting_project) - end - - it { is_expected.to be_empty } - end - - context 'agent configuration project shares a root namespace, but does not belong to an ancestor of the given project' do - let!(:project_authorization) { create(:agent_project_authorization, agent: non_ancestor_agent, project: requesting_project) } - - it { is_expected.to match_array([project_authorization]) } - end - - context 'with project authorizations present' do - let!(:authorization) { create(:agent_project_authorization, agent: production_agent, project: requesting_project) } - - it { is_expected.to match_array [authorization] } - end - - context 'with overlapping authorizations' do - let!(:agent) 
{ create(:cluster_agent, project: requesting_project) } - let!(:project_authorization) { create(:agent_project_authorization, agent: agent, project: requesting_project) } - let!(:group_authorization) { create(:agent_group_authorization, agent: agent, group: bottom_level_group) } - - it { is_expected.to match_array [project_authorization] } - end - - it_behaves_like 'access_as' do - let!(:authorization) { create(:agent_project_authorization, agent: production_agent, project: requesting_project, config: config) } - end - end - - describe 'implicit authorizations' do - let!(:associated_agent) { create(:cluster_agent, project: requesting_project) } - - it 'returns authorizations for agents directly associated with the project' do - expect(subject.count).to eq(1) - - authorization = subject.first - expect(authorization).to be_a(Clusters::Agents::ImplicitAuthorization) - expect(authorization.agent).to eq(associated_agent) - end - end - - describe 'authorized groups' do - context 'agent configuration project is outside the requesting project hierarchy' do - let(:unrelated_agent) { create(:cluster_agent) } - - before do - create(:agent_group_authorization, agent: unrelated_agent, group: top_level_group) - end - - it { is_expected.to be_empty } - end - - context 'multiple agents are authorized for the same group' do - let!(:staging_auth) { create(:agent_group_authorization, agent: staging_agent, group: bottom_level_group) } - let!(:production_auth) { create(:agent_group_authorization, agent: production_agent, group: bottom_level_group) } - - it 'returns authorizations for all agents' do - expect(subject).to contain_exactly(staging_auth, production_auth) - end - end - - context 'a single agent is authorized to more than one matching group' do - let!(:bottom_level_auth) { create(:agent_group_authorization, agent: production_agent, group: bottom_level_group) } - let!(:top_level_auth) { create(:agent_group_authorization, agent: production_agent, group: top_level_group) } - - it 
'picks the authorization for the closest group to the requesting project' do - expect(subject).to contain_exactly(bottom_level_auth) - end - end - - context 'agent configuration project does not belong to an ancestor of the authorized group' do - let!(:group_authorization) { create(:agent_group_authorization, agent: non_ancestor_agent, group: bottom_level_group) } - - it { is_expected.to match_array([group_authorization]) } - end - - it_behaves_like 'access_as' do - let!(:authorization) { create(:agent_group_authorization, agent: production_agent, group: top_level_group, config: config) } - end - end - end -end diff --git a/spec/finders/clusters/agents/authorizations/ci_access/finder_spec.rb b/spec/finders/clusters/agents/authorizations/ci_access/finder_spec.rb new file mode 100644 index 00000000000..0d010729d5c --- /dev/null +++ b/spec/finders/clusters/agents/authorizations/ci_access/finder_spec.rb @@ -0,0 +1,140 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe Clusters::Agents::Authorizations::CiAccess::Finder, feature_category: :deployment_management do + describe '#execute' do + let_it_be(:top_level_group) { create(:group) } + let_it_be(:subgroup1) { create(:group, parent: top_level_group) } + let_it_be(:subgroup2) { create(:group, parent: subgroup1) } + let_it_be(:bottom_level_group) { create(:group, parent: subgroup2) } + + let_it_be(:non_ancestor_group) { create(:group, parent: top_level_group) } + let_it_be(:non_ancestor_project) { create(:project, namespace: non_ancestor_group) } + let_it_be(:non_ancestor_agent) { create(:cluster_agent, project: non_ancestor_project) } + + let_it_be(:agent_configuration_project) { create(:project, namespace: subgroup1) } + let_it_be(:requesting_project, reload: true) { create(:project, namespace: bottom_level_group) } + + let_it_be(:staging_agent) { create(:cluster_agent, project: agent_configuration_project) } + let_it_be(:production_agent) { create(:cluster_agent, project: 
agent_configuration_project) } + + subject { described_class.new(requesting_project).execute } + + shared_examples_for 'access_as' do + let(:config) { { access_as: { access_as => {} } } } + + context 'agent' do + let(:access_as) { :agent } + + it { is_expected.to match_array [authorization] } + end + + context 'impersonate' do + let(:access_as) { :impersonate } + + it { is_expected.to be_empty } + end + + context 'ci_user' do + let(:access_as) { :ci_user } + + it { is_expected.to be_empty } + end + + context 'ci_job' do + let(:access_as) { :ci_job } + + it { is_expected.to be_empty } + end + end + + describe 'project authorizations' do + context 'agent configuration project does not share a root namespace with the given project' do + let(:unrelated_agent) { create(:cluster_agent) } + + before do + create(:agent_ci_access_project_authorization, agent: unrelated_agent, project: requesting_project) + end + + it { is_expected.to be_empty } + end + + context 'agent configuration project shares a root namespace, but does not belong to an ancestor of the given project' do + let!(:project_authorization) { create(:agent_ci_access_project_authorization, agent: non_ancestor_agent, project: requesting_project) } + + it { is_expected.to match_array([project_authorization]) } + end + + context 'with project authorizations present' do + let!(:authorization) { create(:agent_ci_access_project_authorization, agent: production_agent, project: requesting_project) } + + it { is_expected.to match_array [authorization] } + end + + context 'with overlapping authorizations' do + let!(:agent) { create(:cluster_agent, project: requesting_project) } + let!(:project_authorization) { create(:agent_ci_access_project_authorization, agent: agent, project: requesting_project) } + let!(:group_authorization) { create(:agent_ci_access_group_authorization, agent: agent, group: bottom_level_group) } + + it { is_expected.to match_array [project_authorization] } + end + + it_behaves_like 'access_as' do + 
let!(:authorization) { create(:agent_ci_access_project_authorization, agent: production_agent, project: requesting_project, config: config) } + end + end + + describe 'implicit authorizations' do + let!(:associated_agent) { create(:cluster_agent, project: requesting_project) } + + it 'returns authorizations for agents directly associated with the project' do + expect(subject.count).to eq(1) + + authorization = subject.first + expect(authorization).to be_a(Clusters::Agents::Authorizations::CiAccess::ImplicitAuthorization) + expect(authorization.agent).to eq(associated_agent) + end + end + + describe 'authorized groups' do + context 'agent configuration project is outside the requesting project hierarchy' do + let(:unrelated_agent) { create(:cluster_agent) } + + before do + create(:agent_ci_access_group_authorization, agent: unrelated_agent, group: top_level_group) + end + + it { is_expected.to be_empty } + end + + context 'multiple agents are authorized for the same group' do + let!(:staging_auth) { create(:agent_ci_access_group_authorization, agent: staging_agent, group: bottom_level_group) } + let!(:production_auth) { create(:agent_ci_access_group_authorization, agent: production_agent, group: bottom_level_group) } + + it 'returns authorizations for all agents' do + expect(subject).to contain_exactly(staging_auth, production_auth) + end + end + + context 'a single agent is authorized to more than one matching group' do + let!(:bottom_level_auth) { create(:agent_ci_access_group_authorization, agent: production_agent, group: bottom_level_group) } + let!(:top_level_auth) { create(:agent_ci_access_group_authorization, agent: production_agent, group: top_level_group) } + + it 'picks the authorization for the closest group to the requesting project' do + expect(subject).to contain_exactly(bottom_level_auth) + end + end + + context 'agent configuration project does not belong to an ancestor of the authorized group' do + let!(:group_authorization) { 
create(:agent_ci_access_group_authorization, agent: non_ancestor_agent, group: bottom_level_group) } + + it { is_expected.to match_array([group_authorization]) } + end + + it_behaves_like 'access_as' do + let!(:authorization) { create(:agent_ci_access_group_authorization, agent: production_agent, group: top_level_group, config: config) } + end + end + end +end diff --git a/spec/finders/context_commits_finder_spec.rb b/spec/finders/context_commits_finder_spec.rb index c22675bc67d..3de1d29b695 100644 --- a/spec/finders/context_commits_finder_spec.rb +++ b/spec/finders/context_commits_finder_spec.rb @@ -26,27 +26,30 @@ RSpec.describe ContextCommitsFinder do end it 'returns commits based in author filter' do - params = { search: 'test text', author: 'Job van der Voort' } + params = { author: 'Job van der Voort' } commits = described_class.new(project, merge_request, params).execute expect(commits.length).to eq(1) expect(commits[0].id).to eq('b83d6e391c22777fca1ed3012fce84f633d7fed0') end - it 'returns commits based in before filter' do - params = { search: 'test text', committed_before: 1474828200 } + it 'returns commits based in committed before and after filter' do + params = { committed_before: 1471631400, committed_after: 1471458600 } # August 18, 2016 - # August 20, 2016 commits = described_class.new(project, merge_request, params).execute - expect(commits.length).to eq(1) - expect(commits[0].id).to eq('498214de67004b1da3d820901307bed2a68a8ef6') + expect(commits.length).to eq(2) + expect(commits[0].id).to eq('1b12f15a11fc6e62177bef08f47bc7b5ce50b141') + expect(commits[1].id).to eq('38008cb17ce1466d8fec2dfa6f6ab8dcfe5cf49e') end - it 'returns commits based in after filter' do - params = { search: 'test text', committed_after: 1474828200 } - commits = described_class.new(project, merge_request, params).execute + it 'returns commits from target branch if no filter is applied' do + expect(project.repository).to receive(:commits).with(merge_request.target_branch, 
anything).and_call_original - expect(commits.length).to eq(1) + commits = described_class.new(project, merge_request).execute + + expect(commits.length).to eq(37) expect(commits[0].id).to eq('b83d6e391c22777fca1ed3012fce84f633d7fed0') + expect(commits[1].id).to eq('498214de67004b1da3d820901307bed2a68a8ef6') end end end diff --git a/spec/finders/data_transfer/group_data_transfer_finder_spec.rb b/spec/finders/data_transfer/group_data_transfer_finder_spec.rb new file mode 100644 index 00000000000..0c54e6504e8 --- /dev/null +++ b/spec/finders/data_transfer/group_data_transfer_finder_spec.rb @@ -0,0 +1,84 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe DataTransfer::GroupDataTransferFinder, feature_category: :source_code_management do + let_it_be(:user) { create(:user) } + let_it_be(:namespace_1) { create(:group) } + let_it_be(:project_1) { create(:project, group: namespace_1) } + let_it_be(:project_2) { create(:project, group: namespace_1) } + let(:from_date) { Date.new(2022, 2, 1) } + let(:to_date) { Date.new(2023, 1, 1) } + + before_all do + namespace_1.add_owner(user) + end + + describe '#execute' do + let(:subject) { described_class.new(group: namespace_1, from: from_date, to: to_date, user: user) } + + before do + create(:project_data_transfer, project: project_1, date: '2022-01-01') + create(:project_data_transfer, project: project_1, date: '2022-02-01') + create(:project_data_transfer, project: project_2, date: '2022-02-01') + end + + it 'returns the correct number of egress' do + expect(subject.execute.to_a.size).to eq(1) + end + + it 'returns the correct values grouped by date' do + first_result = subject.execute.first + expect(first_result.attributes).to include( + { + 'namespace_id' => namespace_1.id, + 'date' => from_date, + 'repository_egress' => 2, + 'artifacts_egress' => 4, + 'packages_egress' => 6, + 'registry_egress' => 8, + 'total_egress' => 20 + } + ) + end + + context 'when there are no results for specified namespace' 
do + let_it_be(:namespace_2) { create(:group) } + let(:subject) { described_class.new(group: namespace_2, from: from_date, to: to_date, user: user) } + + it 'returns nothing' do + expect(subject.execute).to be_empty + end + end + + context 'when there are no results for specified dates' do + let(:from_date) { Date.new(2021, 1, 1) } + let(:to_date) { Date.new(2021, 1, 1) } + + it 'returns nothing' do + expect(subject.execute).to be_empty + end + end + + context 'when dates are not provided' do + let(:from_date) { nil } + let(:to_date) { nil } + + it 'return all values for a namespace', :aggregate_failures do + results = subject.execute + expect(results.to_a.size).to eq(2) + results.each do |result| + expect(result.namespace).to eq(namespace_1) + end + end + end + + context 'when user does not have permissions' do + let(:user) { build(:user) } + + it 'returns nothing' do + expect(subject.execute).to be_empty + end + end + end +end diff --git a/spec/finders/data_transfer/mocked_transfer_finder_spec.rb b/spec/finders/data_transfer/mocked_transfer_finder_spec.rb new file mode 100644 index 00000000000..f60bc98f587 --- /dev/null +++ b/spec/finders/data_transfer/mocked_transfer_finder_spec.rb @@ -0,0 +1,22 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe DataTransfer::MockedTransferFinder, feature_category: :source_code_management do + describe '#execute' do + subject(:execute) { described_class.new.execute } + + it 'returns mock data' do + expect(execute.first).to include( + date: '2023-01-01', + repository_egress: be_a(Integer), + artifacts_egress: be_a(Integer), + packages_egress: be_a(Integer), + registry_egress: be_a(Integer), + total_egress: be_a(Integer) + ) + + expect(execute.size).to eq(12) + end + end +end diff --git a/spec/finders/data_transfer/project_data_transfer_finder_spec.rb b/spec/finders/data_transfer/project_data_transfer_finder_spec.rb new file mode 100644 index 00000000000..1d5cd0f3339 --- /dev/null +++ 
b/spec/finders/data_transfer/project_data_transfer_finder_spec.rb @@ -0,0 +1,80 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe DataTransfer::ProjectDataTransferFinder, feature_category: :source_code_management do + let_it_be(:project_1) { create(:project) } + let_it_be(:project_2) { create(:project) } + let_it_be(:user) { project_1.first_owner } + let(:from_date) { Date.new(2022, 2, 1) } + let(:to_date) { Date.new(2023, 1, 1) } + + describe '#execute' do + let(:subject) { described_class.new(project: project_1, from: from_date, to: to_date, user: user) } + + before do + create(:project_data_transfer, project: project_1, date: '2022-01-01') + create(:project_data_transfer, project: project_1, date: '2022-02-01') + create(:project_data_transfer, project: project_1, date: '2022-03-01') + create(:project_data_transfer, project: project_2, date: '2022-01-01') + end + + it 'returns the correct number of egress' do + expect(subject.execute.size).to eq(2) + end + + it 'returns the correct values' do + first_result = subject.execute.first + expect(first_result.attributes).to include( + { + 'project_id' => project_1.id, + 'date' => from_date, + 'repository_egress' => 1, + 'artifacts_egress' => 2, + 'packages_egress' => 3, + 'registry_egress' => 4, + 'total_egress' => 10 + } + ) + end + + context 'when there are no results for specified dates' do + let(:from_date) { Date.new(2021, 1, 1) } + let(:to_date) { Date.new(2021, 1, 1) } + + it 'returns nothing' do + expect(subject.execute).to be_empty + end + end + + context 'when there are no results for specified project' do + let_it_be(:project_3) { create(:project, :repository) } + let(:subject) { described_class.new(project: project_3, from: from_date, to: to_date, user: user) } + + it 'returns nothing' do + expect(subject.execute).to be_empty + end + end + + context 'when dates are not provided' do + let(:from_date) { nil } + let(:to_date) { nil } + + it 'return all values for a project', 
:aggregate_failures do + results = subject.execute + expect(results.size).to eq(3) + results.each do |result| + expect(result.project).to eq(project_1) + end + end + end + + context 'when user does not have permissions' do + let(:user) { build(:user) } + + it 'returns nothing' do + expect(subject.execute).to be_empty + end + end + end +end diff --git a/spec/finders/deployments_finder_spec.rb b/spec/finders/deployments_finder_spec.rb index efb739c3d2f..90cd6283130 100644 --- a/spec/finders/deployments_finder_spec.rb +++ b/spec/finders/deployments_finder_spec.rb @@ -260,15 +260,25 @@ RSpec.describe DeploymentsFinder do end describe 'enforce sorting to `updated_at` sorting' do - let(:params) { { **base_params, updated_before: 1.day.ago, order_by: 'id', sort: 'asc', raise_for_inefficient_updated_at_query: false } } + let(:params) { { **base_params, updated_before: 1.day.ago, order_by: 'id', sort: 'asc' } } - it 'sorts by only one column' do - expect(subject.order_values.size).to eq(2) + it 'raises an error' do + expect { subject }.to raise_error(DeploymentsFinder::InefficientQueryError) end - it 'sorts by `updated_at`' do - expect(subject.order_values.first.to_sql).to eq(Deployment.arel_table[:updated_at].asc.to_sql) - expect(subject.order_values.second.to_sql).to eq(Deployment.arel_table[:id].asc.to_sql) + context 'when deployments_raise_updated_at_inefficient_error is disabled' do + before do + stub_feature_flags(deployments_raise_updated_at_inefficient_error: false) + end + + it 'sorts by only one column' do + expect(subject.order_values.size).to eq(2) + end + + it 'sorts by `updated_at`' do + expect(subject.order_values.first.to_sql).to eq(Deployment.arel_table[:updated_at].asc.to_sql) + expect(subject.order_values.second.to_sql).to eq(Deployment.arel_table[:id].asc.to_sql) + end end end @@ -331,9 +341,11 @@ RSpec.describe DeploymentsFinder do with_them do it 'returns the deployments unordered' do - expect(subject.to_a).to 
contain_exactly(group_project_1_deployment, - group_project_2_deployment, - subgroup_project_1_deployment) + expect(subject.to_a).to contain_exactly( + group_project_1_deployment, + group_project_2_deployment, + subgroup_project_1_deployment + ) end end end diff --git a/spec/finders/fork_targets_finder_spec.rb b/spec/finders/fork_targets_finder_spec.rb index 41651513f18..746c48a8fab 100644 --- a/spec/finders/fork_targets_finder_spec.rb +++ b/spec/finders/fork_targets_finder_spec.rb @@ -29,17 +29,38 @@ RSpec.describe ForkTargetsFinder do create(:group).tap { |g| g.add_guest(user) } end + let_it_be(:shared_group_to_group_with_owner_access) do + create(:group) + end + before do project.namespace.add_owner(user) + create(:group_group_link, :maintainer, + shared_with_group: owned_group, + shared_group: shared_group_to_group_with_owner_access + ) end shared_examples 'returns namespaces and groups' do it 'returns all user manageable namespaces' do - expect(finder.execute).to match_array([user.namespace, maintained_group, owned_group, project.namespace, developer_group]) + expect(finder.execute).to match_array([ + user.namespace, + maintained_group, + owned_group, + project.namespace, + developer_group, + shared_group_to_group_with_owner_access + ]) end it 'returns only groups when only_groups option is passed' do - expect(finder.execute(only_groups: true)).to match_array([maintained_group, owned_group, project.namespace, developer_group]) + expect(finder.execute(only_groups: true)).to match_array([ + maintained_group, + owned_group, + project.namespace, + developer_group, + shared_group_to_group_with_owner_access + ]) end it 'returns groups relation when only_groups option is passed' do diff --git a/spec/finders/group_descendants_finder_spec.rb b/spec/finders/group_descendants_finder_spec.rb index 2a9e887450c..9d528355f54 100644 --- a/spec/finders/group_descendants_finder_spec.rb +++ b/spec/finders/group_descendants_finder_spec.rb @@ -130,8 +130,10 @@ RSpec.describe 
GroupDescendantsFinder do it 'does not include projects shared with the group' do project = create(:project, namespace: group) other_project = create(:project) - other_project.project_group_links.create!(group: group, - group_access: Gitlab::Access::MAINTAINER) + other_project.project_group_links.create!( + group: group, + group_access: Gitlab::Access::MAINTAINER + ) expect(finder.execute).to contain_exactly(project) end @@ -140,9 +142,11 @@ RSpec.describe GroupDescendantsFinder do context 'with shared groups' do let_it_be(:other_group) { create(:group) } let_it_be(:shared_group_link) do - create(:group_group_link, - shared_group: group, - shared_with_group: other_group) + create( + :group_group_link, + shared_group: group, + shared_with_group: other_group + ) end context 'without common ancestor' do @@ -230,9 +234,11 @@ RSpec.describe GroupDescendantsFinder do other_user = create(:user) other_subgroup.add_developer(other_user) - finder = described_class.new(current_user: other_user, - parent_group: group, - params: params) + finder = described_class.new( + current_user: other_user, + parent_group: group, + params: params + ) expect(finder.execute).to contain_exactly(other_subgroup, public_subgroup, other_subsubgroup) end diff --git a/spec/finders/group_members_finder_spec.rb b/spec/finders/group_members_finder_spec.rb index 5d748f71816..4fc49289fa4 100644 --- a/spec/finders/group_members_finder_spec.rb +++ b/spec/finders/group_members_finder_spec.rb @@ -56,44 +56,67 @@ RSpec.describe GroupMembersFinder, '#execute', feature_category: :subgroups do } end - it 'raises an error if a non-supported relation type is used' do - expect do - described_class.new(group).execute(include_relations: [:direct, :invalid_relation_type]) - end.to raise_error(ArgumentError, "invalid_relation_type is not a valid relation type. 
Valid relation types are direct, inherited, descendants, shared_from_groups.") - end + shared_examples 'member relations' do + it 'raises an error if a non-supported relation type is used' do + expect do + described_class.new(group).execute(include_relations: [:direct, :invalid_relation_type]) + end.to raise_error(ArgumentError, "invalid_relation_type is not a valid relation type. Valid relation types are direct, inherited, descendants, shared_from_groups.") + end + + using RSpec::Parameterized::TableSyntax + + where(:subject_relations, :subject_group, :expected_members) do + [] | :group | [] + GroupMembersFinder::DEFAULT_RELATIONS | :group | [:user1_group, :user2_group, :user3_group, :user4_group] + [:direct] | :group | [:user1_group, :user2_group, :user3_group, :user4_group] + [:inherited] | :group | [] + [:descendants] | :group | [:user1_sub_sub_group, :user2_sub_group, :user3_sub_group, :user4_sub_group] + [:shared_from_groups] | :group | [:user1_public_shared_group, :user2_public_shared_group, :user3_public_shared_group, :user4_public_shared_group] + [:direct, :inherited, :descendants, :shared_from_groups] | :group | [:user1_sub_sub_group, :user2_group, :user3_sub_group, :user4_public_shared_group] + [] | :sub_group | [] + GroupMembersFinder::DEFAULT_RELATIONS | :sub_group | [:user1_sub_group, :user2_group, :user3_sub_group, :user4_group] + [:direct] | :sub_group | [:user1_sub_group, :user2_sub_group, :user3_sub_group, :user4_sub_group] + [:inherited] | :sub_group | [:user1_group, :user2_group, :user3_group, :user4_group] + [:descendants] | :sub_group | [:user1_sub_sub_group, :user2_sub_sub_group, :user3_sub_sub_group, :user4_sub_sub_group] + [:shared_from_groups] | :sub_group | [:user1_public_shared_group, :user2_public_shared_group, :user3_public_shared_group, :user4_public_shared_group] + [:direct, :inherited, :descendants, :shared_from_groups] | :sub_group | [:user1_sub_sub_group, :user2_group, :user3_sub_group, :user4_public_shared_group] + [] | 
:sub_sub_group | [] + GroupMembersFinder::DEFAULT_RELATIONS | :sub_sub_group | [:user1_sub_sub_group, :user2_group, :user3_sub_group, :user4_group] + [:direct] | :sub_sub_group | [:user1_sub_sub_group, :user2_sub_sub_group, :user3_sub_sub_group, :user4_sub_sub_group] + [:inherited] | :sub_sub_group | [:user1_sub_group, :user2_group, :user3_sub_group, :user4_group] + [:descendants] | :sub_sub_group | [] + [:shared_from_groups] | :sub_sub_group | [:user1_public_shared_group, :user2_public_shared_group, :user3_public_shared_group, :user4_public_shared_group] + [:direct, :inherited, :descendants, :shared_from_groups] | :sub_sub_group | [:user1_sub_sub_group, :user2_group, :user3_sub_group, :user4_public_shared_group] + end + + with_them do + it 'returns correct members' do + result = described_class.new(groups[subject_group]).execute(include_relations: subject_relations) - using RSpec::Parameterized::TableSyntax - - where(:subject_relations, :subject_group, :expected_members) do - [] | :group | [] - GroupMembersFinder::DEFAULT_RELATIONS | :group | [:user1_group, :user2_group, :user3_group, :user4_group] - [:direct] | :group | [:user1_group, :user2_group, :user3_group, :user4_group] - [:inherited] | :group | [] - [:descendants] | :group | [:user1_sub_sub_group, :user2_sub_group, :user3_sub_group, :user4_sub_group] - [:shared_from_groups] | :group | [:user1_public_shared_group, :user2_public_shared_group, :user3_public_shared_group, :user4_public_shared_group] - [:direct, :inherited, :descendants, :shared_from_groups] | :group | [:user1_sub_sub_group, :user2_group, :user3_sub_group, :user4_public_shared_group] - [] | :sub_group | [] - GroupMembersFinder::DEFAULT_RELATIONS | :sub_group | [:user1_sub_group, :user2_group, :user3_sub_group, :user4_group] - [:direct] | :sub_group | [:user1_sub_group, :user2_sub_group, :user3_sub_group, :user4_sub_group] - [:inherited] | :sub_group | [:user1_group, :user2_group, :user3_group, :user4_group] - [:descendants] | :sub_group | 
[:user1_sub_sub_group, :user2_sub_sub_group, :user3_sub_sub_group, :user4_sub_sub_group] - [:shared_from_groups] | :sub_group | [:user1_public_shared_group, :user2_public_shared_group, :user3_public_shared_group, :user4_public_shared_group] - [:direct, :inherited, :descendants, :shared_from_groups] | :sub_group | [:user1_sub_sub_group, :user2_group, :user3_sub_group, :user4_public_shared_group] - [] | :sub_sub_group | [] - GroupMembersFinder::DEFAULT_RELATIONS | :sub_sub_group | [:user1_sub_sub_group, :user2_group, :user3_sub_group, :user4_group] - [:direct] | :sub_sub_group | [:user1_sub_sub_group, :user2_sub_sub_group, :user3_sub_sub_group, :user4_sub_sub_group] - [:inherited] | :sub_sub_group | [:user1_sub_group, :user2_group, :user3_sub_group, :user4_group] - [:descendants] | :sub_sub_group | [] - [:shared_from_groups] | :sub_sub_group | [:user1_public_shared_group, :user2_public_shared_group, :user3_public_shared_group, :user4_public_shared_group] - [:direct, :inherited, :descendants, :shared_from_groups] | :sub_sub_group | [:user1_sub_sub_group, :user2_group, :user3_sub_group, :user4_public_shared_group] + expect(result.to_a).to match_array(expected_members.map { |name| members[name] }) + end + end end - with_them do - it 'returns correct members' do - result = described_class.new(groups[subject_group]).execute(include_relations: subject_relations) + it_behaves_like 'member relations' + + it 'returns the correct access level of the members shared through group sharing' do + shared_members_access = described_class + .new(groups[:group]) + .execute(include_relations: [:shared_from_groups]) + .to_a + .map(&:access_level) + + correct_access_levels = ([Gitlab::Access::DEVELOPER] * 3) << Gitlab::Access::REPORTER + expect(shared_members_access).to match_array(correct_access_levels) + end - expect(result.to_a).to match_array(expected_members.map { |name| members[name] }) + context 'when members_with_shared_group_access feature flag is disabled' do + before do + 
stub_feature_flags(members_with_shared_group_access: false) end + + it_behaves_like 'member relations' end end diff --git a/spec/finders/groups/accepting_group_transfers_finder_spec.rb b/spec/finders/groups/accepting_group_transfers_finder_spec.rb index 06e6fa05892..18407dd0196 100644 --- a/spec/finders/groups/accepting_group_transfers_finder_spec.rb +++ b/spec/finders/groups/accepting_group_transfers_finder_spec.rb @@ -39,14 +39,16 @@ RSpec.describe Groups::AcceptingGroupTransfersFinder do describe '#execute' do before_all do - create(:group_group_link, :owner, - shared_with_group: group_where_user_has_owner_access, - shared_group: shared_with_group_where_direct_owner_as_owner + create( + :group_group_link, :owner, + shared_with_group: group_where_user_has_owner_access, + shared_group: shared_with_group_where_direct_owner_as_owner ) - create(:group_group_link, :guest, - shared_with_group: group_where_user_has_owner_access, - shared_group: shared_with_group_where_direct_owner_as_guest + create( + :group_group_link, :guest, + shared_with_group: group_where_user_has_owner_access, + shared_group: shared_with_group_where_direct_owner_as_guest ) end diff --git a/spec/finders/groups/accepting_project_creations_finder_spec.rb b/spec/finders/groups/accepting_project_creations_finder_spec.rb new file mode 100644 index 00000000000..b1b9403748d --- /dev/null +++ b/spec/finders/groups/accepting_project_creations_finder_spec.rb @@ -0,0 +1,119 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe Groups::AcceptingProjectCreationsFinder, feature_category: :subgroups do + let_it_be(:user) { create(:user) } + let_it_be(:group_where_direct_owner) { create(:group) } + let_it_be(:subgroup_of_group_where_direct_owner) { create(:group, parent: group_where_direct_owner) } + let_it_be(:group_where_direct_maintainer) { create(:group) } + let_it_be(:group_where_direct_maintainer_but_cant_create_projects) do + create(:group, project_creation_level: 
Gitlab::Access::NO_ONE_PROJECT_ACCESS) + end + + let_it_be(:group_where_direct_developer_but_developers_cannot_create_projects) { create(:group) } + let_it_be(:group_where_direct_developer) do + create(:group, project_creation_level: Gitlab::Access::DEVELOPER_MAINTAINER_PROJECT_ACCESS) + end + + let_it_be(:shared_with_group_where_direct_owner_as_owner) { create(:group) } + + let_it_be(:shared_with_group_where_direct_owner_as_developer) do + create(:group, project_creation_level: Gitlab::Access::DEVELOPER_MAINTAINER_PROJECT_ACCESS) + end + + let_it_be(:shared_with_group_where_direct_owner_as_developer_but_developers_cannot_create_projects) do + create(:group) + end + + let_it_be(:shared_with_group_where_direct_developer_as_maintainer) do + create(:group, project_creation_level: Gitlab::Access::DEVELOPER_MAINTAINER_PROJECT_ACCESS) + end + + let_it_be(:shared_with_group_where_direct_owner_as_guest) { create(:group) } + let_it_be(:shared_with_group_where_direct_owner_as_maintainer) { create(:group) } + let_it_be(:shared_with_group_where_direct_developer_as_owner) do + create(:group, project_creation_level: Gitlab::Access::DEVELOPER_MAINTAINER_PROJECT_ACCESS) + end + + let_it_be(:subgroup_of_shared_with_group_where_direct_owner_as_maintainer) do + create(:group, parent: shared_with_group_where_direct_owner_as_maintainer) + end + + before do + group_where_direct_owner.add_owner(user) + group_where_direct_maintainer.add_maintainer(user) + group_where_direct_developer_but_developers_cannot_create_projects.add_developer(user) + group_where_direct_developer.add_developer(user) + + create(:group_group_link, :owner, + shared_with_group: group_where_direct_owner, + shared_group: shared_with_group_where_direct_owner_as_owner + ) + + create(:group_group_link, :developer, + shared_with_group: group_where_direct_owner, + shared_group: shared_with_group_where_direct_owner_as_developer_but_developers_cannot_create_projects + ) + + create(:group_group_link, :maintainer, + 
shared_with_group: group_where_direct_developer, + shared_group: shared_with_group_where_direct_developer_as_maintainer + ) + + create(:group_group_link, :developer, + shared_with_group: group_where_direct_owner, + shared_group: shared_with_group_where_direct_owner_as_developer + ) + + create(:group_group_link, :guest, + shared_with_group: group_where_direct_owner, + shared_group: shared_with_group_where_direct_owner_as_guest + ) + + create(:group_group_link, :maintainer, + shared_with_group: group_where_direct_owner, + shared_group: shared_with_group_where_direct_owner_as_maintainer + ) + + create(:group_group_link, :owner, + shared_with_group: group_where_direct_developer_but_developers_cannot_create_projects, + shared_group: shared_with_group_where_direct_developer_as_owner + ) + end + + describe '#execute' do + subject(:result) { described_class.new(user).execute } + + it 'only returns groups where the user has access to create projects' do + expect(result).to match_array([ + group_where_direct_owner, + subgroup_of_group_where_direct_owner, + group_where_direct_maintainer, + group_where_direct_developer, + # groups arising from group shares + shared_with_group_where_direct_owner_as_owner, + shared_with_group_where_direct_owner_as_maintainer, + subgroup_of_shared_with_group_where_direct_owner_as_maintainer, + shared_with_group_where_direct_developer_as_owner, + shared_with_group_where_direct_developer_as_maintainer, + shared_with_group_where_direct_owner_as_developer + ]) + end + + context 'when `include_groups_from_group_shares_in_project_creation_locations` flag is disabled' do + before do + stub_feature_flags(include_groups_from_group_shares_in_project_creation_locations: false) + end + + it 'returns only groups accessible via direct membership where user has access to create projects' do + expect(result).to match_array([ + group_where_direct_owner, + subgroup_of_group_where_direct_owner, + group_where_direct_maintainer, + group_where_direct_developer + ]) + 
end + end + end +end diff --git a/spec/finders/groups/accepting_project_shares_finder_spec.rb b/spec/finders/groups/accepting_project_shares_finder_spec.rb new file mode 100644 index 00000000000..6af3fad2110 --- /dev/null +++ b/spec/finders/groups/accepting_project_shares_finder_spec.rb @@ -0,0 +1,122 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe Groups::AcceptingProjectSharesFinder, feature_category: :subgroups do + subject(:result) { described_class.new(current_user, project, params).execute } + + let_it_be_with_reload(:current_user) { create(:user) } + let_it_be(:group_1) { create(:group) } + let_it_be(:group_1_subgroup) { create(:group, parent: group_1) } + let_it_be(:group_2) { create(:group, name: 'hello-world-group') } + let_it_be(:group_3) { create(:group) } + let_it_be_with_reload(:group) { create(:group) } + let_it_be_with_reload(:project) { create(:project, group: group) } + + let(:params) { {} } + + context 'when admin', :enable_admin_mode do + let_it_be(:current_user) { create(:admin) } + + it 'returns all groups' do + expect(result).to match_array([group_1, group_1_subgroup, group_2, group_3]) + end + end + + context 'when normal user' do + context 'when the user has no access to the project to be shared' do + it 'does not return any group' do + expect(result).to be_empty + end + end + + context 'when the user has no access to any group' do + before do + project.add_maintainer(current_user) + end + + it 'does not return any group' do + expect(result).to be_empty + end + end + + context "when the project's group has enabled lock on group sharing" do + before do + project.add_maintainer(current_user) + project.namespace.update!(share_with_group_lock: true) + group_1.add_maintainer(current_user) + end + + it 'does not return any group' do + expect(result).to be_empty + end + end + + context 'when the user has access to groups' do + before do + project.add_maintainer(current_user) + + group_1.add_guest(current_user) + 
group_2.add_guest(current_user) + end + + it 'returns groups where the user has at least guest access' do + expect(result).to match_array([group_1, group_1_subgroup, group_2]) + end + + context 'when searching' do + let(:params) { { search: 'hello' } } + + it 'returns groups where the search term matches' do + expect(result).to match_array([group_2]) + end + end + end + + context 'for sharing outside hierarchy' do + let_it_be_with_reload(:grandparent_group) { create(:group) } + let_it_be(:child_group) { create(:group, parent: grandparent_group) } + let_it_be(:grandchild_group) { create(:group, parent: child_group) } + let_it_be(:grandchild_group_subgroup) { create(:group, parent: grandchild_group) } + let_it_be(:unrelated_group) { create(:group) } + let_it_be_with_reload(:project) { create(:project, group: child_group) } + + before do + project.add_maintainer(current_user) + + grandparent_group.add_guest(current_user) + unrelated_group.add_guest(current_user) + end + + context 'when sharing outside hierarchy is allowed' do + before do + grandparent_group.namespace_settings.update!(prevent_sharing_groups_outside_hierarchy: false) + end + + it 'returns all groups where the user has at least guest access' do + expect(result).to match_array([grandchild_group, grandchild_group_subgroup, unrelated_group]) + end + end + + context 'when sharing outside hierarchy is not allowed' do + before do + grandparent_group.namespace_settings.update!(prevent_sharing_groups_outside_hierarchy: true) + end + + it 'returns groups where the user has at least guest access, but only from within the hierarchy' do + expect(result).to match_array([grandchild_group, grandchild_group_subgroup]) + end + + context 'when groups are already linked to the project' do + before do + create(:project_group_link, project: project, group: grandchild_group_subgroup) + end + + it 'does not appear in the result' do + expect(result).to match_array([grandchild_group]) + end + end + end + end + end +end diff 
--git a/spec/finders/groups/accepting_project_transfers_finder_spec.rb b/spec/finders/groups/accepting_project_transfers_finder_spec.rb index e73318c763f..bb6731abbba 100644 --- a/spec/finders/groups/accepting_project_transfers_finder_spec.rb +++ b/spec/finders/groups/accepting_project_transfers_finder_spec.rb @@ -25,24 +25,28 @@ RSpec.describe Groups::AcceptingProjectTransfersFinder do group_where_direct_maintainer.add_maintainer(user) group_where_direct_developer.add_developer(user) - create(:group_group_link, :owner, - shared_with_group: group_where_direct_owner, - shared_group: shared_with_group_where_direct_owner_as_owner + create( + :group_group_link, :owner, + shared_with_group: group_where_direct_owner, + shared_group: shared_with_group_where_direct_owner_as_owner ) - create(:group_group_link, :guest, - shared_with_group: group_where_direct_owner, - shared_group: shared_with_group_where_direct_owner_as_guest + create( + :group_group_link, :guest, + shared_with_group: group_where_direct_owner, + shared_group: shared_with_group_where_direct_owner_as_guest ) - create(:group_group_link, :maintainer, - shared_with_group: group_where_direct_owner, - shared_group: shared_with_group_where_direct_owner_as_maintainer + create( + :group_group_link, :maintainer, + shared_with_group: group_where_direct_owner, + shared_group: shared_with_group_where_direct_owner_as_maintainer ) - create(:group_group_link, :owner, - shared_with_group: group_where_direct_developer, - shared_group: shared_with_group_where_direct_developer_as_owner + create( + :group_group_link, :owner, + shared_with_group: group_where_direct_developer, + shared_group: shared_with_group_where_direct_developer_as_owner ) end @@ -51,13 +55,13 @@ RSpec.describe Groups::AcceptingProjectTransfersFinder do it 'only returns groups where the user has access to transfer projects to' do expect(result).to match_array([ - group_where_direct_owner, - subgroup_of_group_where_direct_owner, - group_where_direct_maintainer, 
- shared_with_group_where_direct_owner_as_owner, - shared_with_group_where_direct_owner_as_maintainer, - subgroup_of_shared_with_group_where_direct_owner_as_maintainer - ]) + group_where_direct_owner, + subgroup_of_group_where_direct_owner, + group_where_direct_maintainer, + shared_with_group_where_direct_owner_as_owner, + shared_with_group_where_direct_owner_as_maintainer, + subgroup_of_shared_with_group_where_direct_owner_as_maintainer + ]) end end end diff --git a/spec/finders/members_finder_spec.rb b/spec/finders/members_finder_spec.rb index c48a0271471..dd995e99b9f 100644 --- a/spec/finders/members_finder_spec.rb +++ b/spec/finders/members_finder_spec.rb @@ -207,12 +207,4 @@ RSpec.describe MembersFinder, feature_category: :subgroups do end it_behaves_like '#execute' - - context 'when project_members_index_by_project_namespace feature flag is disabled' do - before do - stub_feature_flags(project_members_index_by_project_namespace: false) - end - - it_behaves_like '#execute' - end end diff --git a/spec/finders/merge_requests_finder_spec.rb b/spec/finders/merge_requests_finder_spec.rb index 306acb9391d..aa167fe7aba 100644 --- a/spec/finders/merge_requests_finder_spec.rb +++ b/spec/finders/merge_requests_finder_spec.rb @@ -582,24 +582,28 @@ RSpec.describe MergeRequestsFinder, feature_category: :code_review_workflow do let_it_be(:new_project) { create(:project, forked_from_project: project1) } let!(:new_merge_request) do - create(:merge_request, - :simple, - author: user, - created_at: 1.week.from_now, - updated_at: 1.week.from_now, - source_project: new_project, - target_project: new_project) + create( + :merge_request, + :simple, + author: user, + created_at: 1.week.from_now, + updated_at: 1.week.from_now, + source_project: new_project, + target_project: new_project + ) end let!(:old_merge_request) do - create(:merge_request, - :simple, - author: user, - source_branch: 'feature_1', - created_at: 1.week.ago, - updated_at: 1.week.ago, - source_project: new_project, 
- target_project: new_project) + create( + :merge_request, + :simple, + author: user, + source_branch: 'feature_1', + created_at: 1.week.ago, + updated_at: 1.week.ago, + source_project: new_project, + target_project: new_project + ) end before_all do diff --git a/spec/finders/notes_finder_spec.rb b/spec/finders/notes_finder_spec.rb index 1255a882114..e93c0c790c2 100644 --- a/spec/finders/notes_finder_spec.rb +++ b/spec/finders/notes_finder_spec.rb @@ -74,11 +74,13 @@ RSpec.describe NotesFinder do context 'on restricted projects' do let(:project) do - create(:project, - :public, - :issues_private, - :snippets_private, - :merge_requests_private) + create( + :project, + :public, + :issues_private, + :snippets_private, + :merge_requests_private + ) end it 'publicly excludes notes on merge requests' do @@ -126,6 +128,51 @@ RSpec.describe NotesFinder do end end + context 'for notes from users who have been banned', :enable_admin_mode, feature_category: :instance_resiliency do + subject(:finder) { described_class.new(user, project: project).execute } + + let_it_be(:banned_user) { create(:banned_user).user } + let!(:banned_note) { create(:note_on_issue, project: project, author: banned_user) } + + context 'when :hidden_notes feature is not enabled' do + before do + stub_feature_flags(hidden_notes: false) + end + + context 'when user is not an admin' do + it { is_expected.to include(banned_note) } + end + + context 'when @current_user is nil' do + let(:user) { nil } + + it { is_expected.to be_empty } + end + end + + context 'when :hidden_notes feature is enabled' do + before do + stub_feature_flags(hidden_notes: true) + end + + context 'when user is an admin' do + let(:user) { create(:admin) } + + it { is_expected.to include(banned_note) } + end + + context 'when user is not an admin' do + it { is_expected.not_to include(banned_note) } + end + + context 'when @current_user is nil' do + let(:user) { nil } + + it { is_expected.to be_empty } + end + end + end + context 'for 
target type' do let(:project) { create(:project, :repository) } let!(:note1) { create :note_on_issue, project: project } diff --git a/spec/finders/packages/npm/package_finder_spec.rb b/spec/finders/packages/npm/package_finder_spec.rb index 8c9149a5a2d..e11b33f71e9 100644 --- a/spec/finders/packages/npm/package_finder_spec.rb +++ b/spec/finders/packages/npm/package_finder_spec.rb @@ -71,6 +71,14 @@ RSpec.describe ::Packages::Npm::PackageFinder do context 'enabled' do it { is_expected.to contain_exactly(package2) } end + + context 'with npm_allow_packages_in_multiple_projects disabled' do + before do + stub_feature_flags(npm_allow_packages_in_multiple_projects: false) + end + + it { is_expected.to contain_exactly(package2) } + end end context 'with a project' do diff --git a/spec/finders/projects_finder_spec.rb b/spec/finders/projects_finder_spec.rb index 297c6f84cef..13263698cfe 100644 --- a/spec/finders/projects_finder_spec.rb +++ b/spec/finders/projects_finder_spec.rb @@ -14,7 +14,7 @@ RSpec.describe ProjectsFinder do end let_it_be(:internal_project) do - create(:project, :internal, :merge_requests_disabled, group: group, name: 'B', path: 'B') + create(:project, :internal, :merge_requests_disabled, group: group, name: 'B', path: 'B', updated_at: 4.days.ago) end let_it_be(:public_project) do @@ -133,6 +133,52 @@ RSpec.describe ProjectsFinder do end end + describe 'filter by updated_at' do + context 'when updated_before is present' do + let(:params) { { updated_before: 2.days.ago } } + + it { is_expected.to contain_exactly(internal_project) } + end + + context 'when updated_after is present' do + let(:params) { { updated_after: 2.days.ago } } + + it { is_expected.not_to include(internal_project) } + end + + context 'when both updated_before and updated_after are present' do + let(:params) { { updated_before: 2.days.ago, updated_after: 6.days.ago } } + + it { is_expected.to contain_exactly(internal_project) } + + context 'when updated_after > updated_before' do + 
let(:params) { { updated_after: 2.days.ago, updated_before: 6.days.ago } } + + it { is_expected.to be_empty } + + it 'does not query the DB' do + expect { subject.to_a }.to make_queries(0) + end + end + + context 'when updated_after equals updated_before' do + let(:params) { { updated_after: internal_project.updated_at, updated_before: internal_project.updated_at } } + + it 'allows an exact match' do + expect(subject).to contain_exactly(internal_project) + end + end + + context 'when arguments are invalid datetimes' do + let(:params) { { updated_after: 'invalid', updated_before: 'inavlid' } } + + it 'does not filter by updated_at' do + expect(subject).to contain_exactly(internal_project, public_project) + end + end + end + end + describe 'filter by tags (deprecated)' do before do public_project.reload diff --git a/spec/finders/snippets_finder_spec.rb b/spec/finders/snippets_finder_spec.rb index 48880ec2c1f..9f4b7612be5 100644 --- a/spec/finders/snippets_finder_spec.rb +++ b/spec/finders/snippets_finder_spec.rb @@ -237,25 +237,28 @@ RSpec.describe SnippetsFinder do it 'returns all personal snippets for the admin' do snippets = described_class.new(admin, only_personal: true).execute - expect(snippets).to contain_exactly(admin_private_personal_snippet, - private_personal_snippet, - internal_personal_snippet, - public_personal_snippet) + expect(snippets).to contain_exactly( + admin_private_personal_snippet, + private_personal_snippet, + internal_personal_snippet, + public_personal_snippet + ) end it 'returns only personal snippets visible by user' do snippets = described_class.new(user, only_personal: true).execute - expect(snippets).to contain_exactly(private_personal_snippet, - internal_personal_snippet, - public_personal_snippet) + expect(snippets).to contain_exactly( + private_personal_snippet, + internal_personal_snippet, + public_personal_snippet + ) end it 'returns only internal or public personal snippets for user without snippets' do snippets = 
described_class.new(user_without_snippets, only_personal: true).execute - expect(snippets).to contain_exactly(internal_personal_snippet, - public_personal_snippet) + expect(snippets).to contain_exactly(internal_personal_snippet, public_personal_snippet) end end end diff --git a/spec/finders/users_finder_spec.rb b/spec/finders/users_finder_spec.rb index 5cf845a87b2..2e94ca5757a 100644 --- a/spec/finders/users_finder_spec.rb +++ b/spec/finders/users_finder_spec.rb @@ -61,9 +61,11 @@ RSpec.describe UsersFinder do filtered_user_before = create(:user, created_at: 3.days.ago) filtered_user_after = create(:user, created_at: Time.now + 3.days) - users = described_class.new(user, - created_after: 2.days.ago, - created_before: Time.now + 2.days).execute + users = described_class.new( + user, + created_after: 2.days.ago, + created_before: Time.now + 2.days + ).execute expect(users.map(&:username)).not_to include([filtered_user_before.username, filtered_user_after.username]) end diff --git a/spec/fixtures/api/schemas/entities/diff_viewer.json b/spec/fixtures/api/schemas/entities/diff_viewer.json index ae0fb32d3ac..b16f8d8b1a2 100644 --- a/spec/fixtures/api/schemas/entities/diff_viewer.json +++ b/spec/fixtures/api/schemas/entities/diff_viewer.json @@ -25,6 +25,12 @@ "type": [ "boolean" ] + }, + "whitespace_only": { + "type": [ + "boolean", + "null" + ] } }, "additionalProperties": false diff --git a/spec/fixtures/api/schemas/internal/pages/lookup_path.json b/spec/fixtures/api/schemas/internal/pages/lookup_path.json index 8ca71870911..fba3efc4ded 100644 --- a/spec/fixtures/api/schemas/internal/pages/lookup_path.json +++ b/spec/fixtures/api/schemas/internal/pages/lookup_path.json @@ -8,23 +8,62 @@ "prefix" ], "properties": { - "project_id": { "type": "integer" }, - "https_only": { "type": "boolean" }, - "access_control": { "type": "boolean" }, - "source": { "type": "object", - "required": ["type", "path"], - "properties" : { - "type": { "type": "string", "enum": ["file", "zip"] 
}, - "path": { "type": "string" }, - "global_id": { "type": "string" }, - "sha256": { "type": "string" }, - "file_size": { "type": "integer" }, - "file_count": { "type": ["integer", "null"] } + "project_id": { + "type": "integer" + }, + "https_only": { + "type": "boolean" + }, + "access_control": { + "type": "boolean" + }, + "source": { + "type": "object", + "required": [ + "type", + "path" + ], + "properties": { + "type": { + "type": "string", + "enum": [ + "file", + "zip" + ] + }, + "path": { + "type": "string" + }, + "global_id": { + "type": "string" + }, + "sha256": { + "type": "string" + }, + "file_size": { + "type": "integer" + }, + "file_count": { + "type": [ + "integer", + "null" + ] + } }, "additionalProperties": false }, - "prefix": { "type": "string" }, - "unique_domain": { "type": ["string", "null"] } + "prefix": { + "type": "string" + }, + "unique_host": { + "type": [ + "string", + "null" + ] + }, + "root_directory": { + "type": "string" + } }, "additionalProperties": false } diff --git a/spec/fixtures/emails/valid_reply_with_references_in_comma.eml b/spec/fixtures/emails/valid_reply_with_references_in_comma.eml new file mode 100644 index 00000000000..4a2d213f4cc --- /dev/null +++ b/spec/fixtures/emails/valid_reply_with_references_in_comma.eml @@ -0,0 +1,42 @@ +Return-Path: +Received: from iceking.adventuretime.ooo ([unix socket]) by iceking (Cyrus v2.2.13-Debian-2.2.13-19+squeeze3) with LMTPA; Thu, 13 Jun 2013 17:03:50 -0400 +Received: from mail-ie0-x234.google.com (mail-ie0-x234.google.com [IPv6:2607:f8b0:4001:c03::234]) by iceking.adventuretime.ooo (8.14.3/8.14.3/Debian-9.4) with ESMTP id r5DL3nFJ016967 (version=TLSv1/SSLv3 cipher=RC4-SHA bits=128 verify=NOT) for ; Thu, 13 Jun 2013 17:03:50 -0400 +Received: by mail-ie0-f180.google.com with SMTP id f4so21977375iea.25 for ; Thu, 13 Jun 2013 14:03:48 -0700 +Received: by 10.0.0.1 with HTTP; Thu, 13 Jun 2013 14:03:48 -0700 +Date: Thu, 13 Jun 2013 17:03:48 -0400 +From: Jake the Dog +To: 
reply+59d8df8370b7e95c5a49fbf86aeb2c93@appmail.adventuretime.ooo +Message-ID: +In-Reply-To: +References: ",," +Subject: re: [Discourse Meta] eviltrout posted in 'Adventure Time Sux' +Mime-Version: 1.0 +Content-Type: text/plain; + charset=ISO-8859-1 +Content-Transfer-Encoding: 7bit +X-Sieve: CMU Sieve 2.2 +X-Received: by 10.0.0.1 with SMTP id n7mr11234144ipb.85.1371157428600; Thu, + 13 Jun 2013 14:03:48 -0700 (PDT) +X-Scanned-By: MIMEDefang 2.69 on IPv6:2001:470:1d:165::1 + +I could not disagree more. I am obviously biased but adventure time is the +greatest show ever created. Everyone should watch it. + +- Jake out + + +On Sun, Jun 9, 2013 at 1:39 PM, eviltrout via Discourse Meta + wrote: +> +> +> +> eviltrout posted in 'Adventure Time Sux' on Discourse Meta: +> +> --- +> hey guys everyone knows adventure time sucks! +> +> --- +> Please visit this link to respond: http://localhost:3000/t/adventure-time-sux/1234/3 +> +> To unsubscribe from these emails, visit your [user preferences](http://localhost:3000/user_preferences). 
+> diff --git a/spec/fixtures/gitlab/import_export/corrupted_project_export.tar.gz b/spec/fixtures/gitlab/import_export/corrupted_project_export.tar.gz index d6632c5121a..1ecfa5a80f9 100644 Binary files a/spec/fixtures/gitlab/import_export/corrupted_project_export.tar.gz and b/spec/fixtures/gitlab/import_export/corrupted_project_export.tar.gz differ diff --git a/spec/fixtures/gitlab/import_export/lightweight_project_export.tar.gz b/spec/fixtures/gitlab/import_export/lightweight_project_export.tar.gz index e5f6f195fe5..71a0ade3eba 100644 Binary files a/spec/fixtures/gitlab/import_export/lightweight_project_export.tar.gz and b/spec/fixtures/gitlab/import_export/lightweight_project_export.tar.gz differ diff --git a/spec/fixtures/lib/generators/gitlab/usage_metric_definition_generator/sample_metric.yml b/spec/fixtures/lib/generators/gitlab/usage_metric_definition_generator/sample_metric.yml index 520328f1041..42f9cc31c3a 100644 --- a/spec/fixtures/lib/generators/gitlab/usage_metric_definition_generator/sample_metric.yml +++ b/spec/fixtures/lib/generators/gitlab/usage_metric_definition_generator/sample_metric.yml @@ -5,7 +5,6 @@ description: product_section: product_stage: product_group: -product_category: value_type: number status: active milestone: "13.9" diff --git a/spec/fixtures/lib/generators/gitlab/usage_metric_definition_generator/sample_metric_with_ee.yml b/spec/fixtures/lib/generators/gitlab/usage_metric_definition_generator/sample_metric_with_ee.yml index 1942f33e043..e123056d771 100644 --- a/spec/fixtures/lib/generators/gitlab/usage_metric_definition_generator/sample_metric_with_ee.yml +++ b/spec/fixtures/lib/generators/gitlab/usage_metric_definition_generator/sample_metric_with_ee.yml @@ -5,7 +5,6 @@ description: product_section: product_stage: product_group: -product_category: value_type: number status: active milestone: "13.9" diff --git a/spec/fixtures/lib/generators/gitlab/usage_metric_definition_generator/sample_metric_with_name_suggestions.yml 
b/spec/fixtures/lib/generators/gitlab/usage_metric_definition_generator/sample_metric_with_name_suggestions.yml index a72ba5109cc..87c4e68f19e 100644 --- a/spec/fixtures/lib/generators/gitlab/usage_metric_definition_generator/sample_metric_with_name_suggestions.yml +++ b/spec/fixtures/lib/generators/gitlab/usage_metric_definition_generator/sample_metric_with_name_suggestions.yml @@ -6,7 +6,6 @@ description: product_section: product_stage: product_group: -product_category: value_type: number status: active milestone: "13.9" diff --git a/spec/fixtures/lib/gitlab/import_export/complex/project.json b/spec/fixtures/lib/gitlab/import_export/complex/project.json index a0ac70d7d9c..cdf9395fbe6 100644 --- a/spec/fixtures/lib/gitlab/import_export/complex/project.json +++ b/spec/fixtures/lib/gitlab/import_export/complex/project.json @@ -8309,5 +8309,38 @@ "reject_unsigned_commits": true, "commit_committer_check": true, "regexp_uses_re2": true - } + }, + "approval_rules": [ + { + "approvals_required": 1, + "name": "MustContain", + "rule_type": "regular", + "scanners": [ + + ], + "vulnerabilities_allowed": 0, + "severity_levels": [ + "unknown", + "high", + "critical" + ], + "report_type": null, + "vulnerability_states": [ + "newly_detected" + ], + "orchestration_policy_idx": null, + "applies_to_all_protected_branches": false, + "approval_project_rules_protected_branches": [ + { + "protected_branch_id": 1, + "branch_name": "master" + } + ], + "approval_project_rules_users": [ + { + "user_id": 35 + } + ] + } + ] } diff --git a/spec/fixtures/lib/gitlab/import_export/complex/tree/project/protected_environments.ndjson b/spec/fixtures/lib/gitlab/import_export/complex/tree/project/protected_environments.ndjson index 55afaa8bcf6..f87fdd860c7 100644 --- a/spec/fixtures/lib/gitlab/import_export/complex/tree/project/protected_environments.ndjson +++ b/spec/fixtures/lib/gitlab/import_export/complex/tree/project/protected_environments.ndjson @@ -1 +1 @@ -{ "id": 1, "project_id": 9, 
"created_at": "2017-10-19T15:36:23.466Z", "updated_at": "2017-10-19T15:36:23.466Z", "name": "production", "deploy_access_levels": [ { "id": 1, "protected_environment_id": 1, "created_at": "2017-10-19T15:36:23.466Z", "updated_at": "2017-10-19T15:36:23.466Z", "access_level": 40, "user_id": 1, "group_id": null } ] } +{ "id": 1, "project_id": 9, "created_at": "2017-10-19T15:36:23.466Z", "updated_at": "2017-10-19T15:36:23.466Z", "name": "production", "deploy_access_levels": [ { "id": 1, "protected_environment_id": 1, "created_at": "2017-10-19T15:36:23.466Z", "updated_at": "2017-10-19T15:36:23.466Z", "access_level": null, "user_id": 1, "group_id": null } ] } diff --git a/spec/fixtures/lib/gitlab/import_export/designs/tree/project.json b/spec/fixtures/lib/gitlab/import_export/designs/tree/project.json new file mode 100644 index 00000000000..3adcb693aeb --- /dev/null +++ b/spec/fixtures/lib/gitlab/import_export/designs/tree/project.json @@ -0,0 +1,15 @@ +{ + "description": "Nisi et repellendus ut enim quo accusamus vel magnam.", + "import_type": "gitlab_project", + "creator_id": 123, + "visibility_level": 10, + "archived": false, + "deploy_keys": [ + + ], + "hooks": [ + + ], + "shared_runners_enabled": true, + "ci_config_path": "config/path" +} diff --git a/spec/fixtures/lib/gitlab/import_export/designs/tree/project/issues.ndjson b/spec/fixtures/lib/gitlab/import_export/designs/tree/project/issues.ndjson new file mode 100644 index 00000000000..3f767505bfb --- /dev/null +++ b/spec/fixtures/lib/gitlab/import_export/designs/tree/project/issues.ndjson @@ -0,0 +1,2 @@ +{"id":469,"title":"issue 
1","author_id":1,"project_id":30,"created_at":"2019-08-07T03:57:55.007Z","updated_at":"2019-08-07T03:57:55.007Z","description":"","state":"opened","iid":1,"updated_by_id":null,"weight":null,"confidential":false,"due_date":null,"moved_to_id":null,"lock_version":0,"time_estimate":0,"relative_position":1073742323,"external_author":null,"last_edited_at":null,"last_edited_by_id":null,"discussion_locked":null,"closed_at":null,"closed_by_id":null,"state_id":1,"events":[{"id":1775,"project_id":30,"author_id":1,"target_id":469,"created_at":"2019-08-07T03:57:55.158Z","updated_at":"2019-08-07T03:57:55.158Z","target_type":"Issue","action":1}],"timelogs":[],"notes":[],"label_links":[],"resource_label_events":[],"issue_assignees":[],"designs":[{"id":38,"iid":1,"project_id":30,"issue_id":469,"filename":"chirrido3.jpg","notes":[]},{"id":39,"iid":2,"project_id":30,"issue_id":469,"filename":"jonathan_richman.jpg","notes":[]},{"id":40,"iid":3,"project_id":30,"issue_id":469,"filename":"mariavontrap.jpeg","notes":[]}],"design_versions":[{"id":24,"sha":"9358d1bac8ff300d3d2597adaa2572a20f7f8703","issue_id":469,"author_id":1,"actions":[{"design_id":38,"version_id":24,"event":0,"design":{"id":38,"iid":1,"project_id":30,"issue_id":469,"filename":"chirrido3.jpg"}}]},{"id":25,"sha":"e1a4a501bcb42f291f84e5d04c8f927821542fb6","issue_id":469,"author_id":2,"actions":[{"design_id":38,"version_id":25,"event":1,"design":{"id":38,"iid":1,"project_id":30,"issue_id":469,"filename":"chirrido3.jpg"}},{"design_id":39,"version_id":25,"event":0,"design":{"id":39,"iid":2,"project_id":30,"issue_id":469,"filename":"jonathan_richman.jpg"}}]},{"id":26,"sha":"27702d08f5ee021ae938737f84e8fe7c38599e85","issue_id":469,"author_id":1,"actions":[{"design_id":38,"version_id":26,"event":1,"design":{"id":38,"iid":1,"project_id":30,"issue_id":469,"filename":"chirrido3.jpg"}},{"design_id":39,"version_id":26,"event":2,"design":{"id":39,"iid":2,"project_id":30,"issue_id":469,"filename":"jonathan_richman.jpg"}},{"design_id":40,
"version_id":26,"event":0,"design":{"id":40,"iid":3,"project_id":30,"issue_id":469,"filename":"mariavontrap.jpeg"}}]}]} +{"id":470,"title":"issue 2","author_id":1,"project_id":30,"created_at":"2019-08-07T04:15:57.607Z","updated_at":"2019-08-07T04:15:57.607Z","description":"","state":"opened","iid":2,"updated_by_id":null,"weight":null,"confidential":false,"due_date":null,"moved_to_id":null,"lock_version":0,"time_estimate":0,"relative_position":1073742823,"external_author":null,"last_edited_at":null,"last_edited_by_id":null,"discussion_locked":null,"closed_at":null,"closed_by_id":null,"state_id":1,"events":[{"id":1776,"project_id":30,"author_id":1,"target_id":470,"created_at":"2019-08-07T04:15:57.789Z","updated_at":"2019-08-07T04:15:57.789Z","target_type":"Issue","action":1}],"timelogs":[],"notes":[],"label_links":[],"resource_label_events":[],"issue_assignees":[],"designs":[{"id":42,"project_id":30,"issue_id":470,"filename":"1 (1).jpeg","notes":[]},{"id":43,"project_id":30,"issue_id":470,"filename":"2099743.jpg","notes":[]},{"id":44,"project_id":30,"issue_id":470,"filename":"a screenshot (1).jpg","notes":[]},{"id":41,"project_id":30,"issue_id":470,"filename":"chirrido3.jpg","notes":[]}],"design_versions":[{"id":27,"sha":"8587e78ab6bda3bc820a9f014c3be4a21ad4fcc8","issue_id":470,"author_id":1,"actions":[{"design_id":41,"version_id":27,"event":0,"design":{"id":41,"project_id":30,"issue_id":470,"filename":"chirrido3.jpg"}}]},{"id":28,"sha":"73f871b4c8c1d65c62c460635e023179fb53abc4","issue_id":470,"author_id":2,"actions":[{"design_id":42,"version_id":28,"event":0,"design":{"id":42,"project_id":30,"issue_id":470,"filename":"1 (1).jpeg"}},{"design_id":43,"version_id":28,"event":0,"design":{"id":43,"project_id":30,"issue_id":470,"filename":"2099743.jpg"}}]},{"id":29,"sha":"c9b5f067f3e892122a4b12b0a25a8089192f3ac8","issue_id":470,"author_id":2,"actions":[{"design_id":42,"version_id":29,"event":1,"design":{"id":42,"project_id":30,"issue_id":470,"filename":"1 
(1).jpeg"}},{"design_id":44,"version_id":29,"event":0,"design":{"id":44,"project_id":30,"issue_id":470,"filename":"a screenshot (1).jpg"}}]}]} \ No newline at end of file diff --git a/spec/fixtures/lib/gitlab/import_export/designs/tree/project/project_members.ndjson b/spec/fixtures/lib/gitlab/import_export/designs/tree/project/project_members.ndjson new file mode 100644 index 00000000000..570fd4a0c05 --- /dev/null +++ b/spec/fixtures/lib/gitlab/import_export/designs/tree/project/project_members.ndjson @@ -0,0 +1,2 @@ +{"id":95,"access_level":40,"source_id":30,"source_type":"Project","user_id":1,"notification_level":3,"created_at":"2019-08-07T03:57:32.825Z","updated_at":"2019-08-07T03:57:32.825Z","created_by_id":1,"invite_email":null,"invite_token":null,"invite_accepted_at":null,"requested_at":null,"expires_at":null,"ldap":false,"override":false,"user":{"id":1,"public_email":"admin@example.com","username":"root"}} +{"id":96,"access_level":40,"source_id":30,"source_type":"Project","user_id":2,"notification_level":3,"created_at":"2019-08-07T03:57:32.825Z","updated_at":"2019-08-07T03:57:32.825Z","created_by_id":null,"invite_email":null,"invite_token":null,"invite_accepted_at":null,"requested_at":null,"expires_at":null,"ldap":false,"override":false,"user":{"id":2,"public_email":"user_2@gitlabexample.com","username":"user_2"}} \ No newline at end of file diff --git a/spec/fixtures/markdown.md.erb b/spec/fixtures/markdown.md.erb index 979e96e6e8e..26e5f110687 100644 --- a/spec/fixtures/markdown.md.erb +++ b/spec/fixtures/markdown.md.erb @@ -299,6 +299,32 @@ References should be parseable even inside _<%= merge_request.to_reference %>_ e v^2 + w^2 = x^2 ``` +Parsed correctly when between code blocks + +```ruby +x = 1 +``` + +$$ +a^2+b^2=c^2 +$$ + +``` +plaintext +``` + +Parsed correctly with a mixture of HTML comments and HTML blocks + + + +$$ +a^2+b^2=c^2 +$$ + +

+html +

+ ### Gollum Tags - [[linked-resource]] diff --git a/spec/fixtures/pages_with_custom_root.zip b/spec/fixtures/pages_with_custom_root.zip new file mode 100644 index 00000000000..40dea253245 Binary files /dev/null and b/spec/fixtures/pages_with_custom_root.zip differ diff --git a/spec/fixtures/pages_with_custom_root.zip.meta b/spec/fixtures/pages_with_custom_root.zip.meta new file mode 100644 index 00000000000..2cb04e0c33b Binary files /dev/null and b/spec/fixtures/pages_with_custom_root.zip.meta differ diff --git a/spec/fixtures/pages_with_custom_root.zip.meta0 b/spec/fixtures/pages_with_custom_root.zip.meta0 new file mode 100644 index 00000000000..9b348055b5f Binary files /dev/null and b/spec/fixtures/pages_with_custom_root.zip.meta0 differ diff --git a/spec/fixtures/scripts/test_report.json b/spec/fixtures/scripts/test_report.json index 29fd9a4bcb5..520ab3a8578 100644 --- a/spec/fixtures/scripts/test_report.json +++ b/spec/fixtures/scripts/test_report.json @@ -1,7 +1,7 @@ { "suites": [ { - "name": "rspec unit pg12", + "name": "rspec unit pg13", "total_time": 975.6635620000018, "total_count": 3811, "success_count": 3800, diff --git a/spec/fixtures/security_reports/feature-branch/gl-sast-report.json b/spec/fixtures/security_reports/feature-branch/gl-sast-report.json index 083042e19ff..f153192fed7 100644 --- a/spec/fixtures/security_reports/feature-branch/gl-sast-report.json +++ b/spec/fixtures/security_reports/feature-branch/gl-sast-report.json @@ -1,7 +1,8 @@ { - "version": "14.0.0", + "version": "15.0.0", "vulnerabilities": [ { + "id": "1", "category": "sast", "name": "Predictable pseudorandom number generator", "message": "Predictable pseudorandom number generator", @@ -29,6 +30,7 @@ ] }, { + "id": "2", "category": "sast", "name": "Predictable pseudorandom number generator", "message": "Predictable pseudorandom number generator", @@ -56,6 +58,7 @@ ] }, { + "id": "3", "category": "sast", "name": "ECB mode is insecure", "message": "ECB mode is insecure", @@ -90,6 
+93,7 @@ ] }, { + "id": "4", "category": "sast", "name": "Hard coded key", "message": "Hard coded key", @@ -124,6 +128,7 @@ ] }, { + "id": "5", "category": "sast", "name": "ECB mode is insecure", "message": "ECB mode is insecure", @@ -158,8 +163,19 @@ ] } ], - "remediations": [], + "remediations": [ + + ], "scan": { + "analyzer": { + "id": "find_sec_bugs_analyzer", + "name": "Find Security Bugs Analyzer", + "url": "https://gitlab.com", + "vendor": { + "name": "GitLab" + }, + "version": "1.0.0" + }, "scanner": { "id": "find_sec_bugs", "name": "Find Security Bugs", @@ -174,4 +190,4 @@ "start_time": "2022-08-10T22:37:00", "end_time": "2022-08-10T22:38:00" } -} \ No newline at end of file +} diff --git a/spec/fixtures/security_reports/feature-branch/gl-secret-detection-report.json b/spec/fixtures/security_reports/feature-branch/gl-secret-detection-report.json index 4862a504cec..c75b9bfb9de 100644 --- a/spec/fixtures/security_reports/feature-branch/gl-secret-detection-report.json +++ b/spec/fixtures/security_reports/feature-branch/gl-secret-detection-report.json @@ -1,5 +1,33 @@ { - "version": "14.1.2", - "vulnerabilities": [], - "remediations": [] -} \ No newline at end of file + "version": "15.0.0", + "vulnerabilities": [ + + ], + "remediations": [ + + ], + "scan": { + "analyzer": { + "id": "secret_detection_analyzer", + "name": "Secret Detection Analyzer", + "url": "https://gitlab.com", + "vendor": { + "name": "GitLab" + }, + "version": "1.0.0" + }, + "scanner": { + "id": "secret_detection", + "name": "Secret Detection", + "url": "https://gitlab.com", + "vendor": { + "name": "GitLab" + }, + "version": "0.1.0" + }, + "type": "sast", + "start_time": "2022-03-11T18:48:16", + "end_time": "2022-03-11T18:48:22", + "status": "success" + } +} diff --git a/spec/fixtures/security_reports/master/gl-sast-missing-scanner.json b/spec/fixtures/security_reports/master/gl-sast-missing-scanner.json index fcfd9b831f4..16d02490156 100644 --- 
a/spec/fixtures/security_reports/master/gl-sast-missing-scanner.json +++ b/spec/fixtures/security_reports/master/gl-sast-missing-scanner.json @@ -1,7 +1,23 @@ { - "version": "14.1.2", + "version": "15.0.0", + "scan": { + "analyzer": { + "id": "sast_analyzer", + "name": "SAST Analyzer", + "url": "https://gitlab.com", + "vendor": { + "name": "GitLab" + }, + "version": "1.0.0" + }, + "type": "sast", + "start_time": "2022-03-11T18:48:16", + "end_time": "2022-03-11T18:48:22", + "status": "success" + }, "vulnerabilities": [ { + "id": "1", "category": "sast", "message": "Probable insecure usage of temp file/directory.", "cve": "python/hardcoded/hardcoded-tmp.py:52865813c884a507be1f152d654245af34aba8a391626d01f1ab6d3f52ec8779:B108", @@ -26,6 +42,7 @@ "url": "https://docs.openstack.org/bandit/latest/plugins/b108_hardcoded_tmp_directory.html" }, { + "id": "2", "category": "sast", "name": "Predictable pseudorandom number generator", "message": "Predictable pseudorandom number generator", @@ -53,6 +70,7 @@ "url": "https://find-sec-bugs.github.io/bugs.htm#PREDICTABLE_RANDOM" }, { + "id": "3", "category": "sast", "name": "Predictable pseudorandom number generator", "message": "Predictable pseudorandom number generator", @@ -80,6 +98,7 @@ "url": "https://find-sec-bugs.github.io/bugs.htm#PREDICTABLE_RANDOM" }, { + "id": "4", "category": "sast", "message": "Use of insecure MD2, MD4, or MD5 hash function.", "cve": "python/imports/imports-aliases.py:cb203b465dffb0cb3a8e8bd8910b84b93b0a5995a938e4b903dbb0cd6ffa1254:B303", @@ -102,6 +121,7 @@ "line": 11 }, { + "id": "5", "category": "sast", "message": "Use of insecure MD2, MD4, or MD5 hash function.", "cve": "python/imports/imports-aliases.py:a7173c43ae66bd07466632d819d450e0071e02dbf782763640d1092981f9631b:B303", @@ -124,6 +144,7 @@ "line": 12 }, { + "id": "6", "category": "sast", "message": "Use of insecure MD2, MD4, or MD5 hash function.", "cve": 
"python/imports/imports-aliases.py:017017b77deb0b8369b6065947833eeea752a92ec8a700db590fece3e934cf0d:B303", @@ -146,6 +167,7 @@ "line": 13 }, { + "id": "6", "category": "sast", "message": "Use of insecure MD2, MD4, or MD5 hash function.", "cve": "python/imports/imports-aliases.py:45fc8c53aea7b84f06bc4e590cc667678d6073c4c8a1d471177ca2146fb22db2:B303", @@ -168,6 +190,7 @@ "line": 14 }, { + "id": "7", "category": "sast", "message": "Pickle library appears to be in use, possible security issue.", "cve": "python/imports/imports-aliases.py:5f200d47291e7bbd8352db23019b85453ca048dd98ea0c291260fa7d009963a4:B301", @@ -190,6 +213,7 @@ "line": 15 }, { + "id": "8", "category": "sast", "name": "ECB mode is insecure", "message": "ECB mode is insecure", @@ -217,6 +241,7 @@ "url": "https://find-sec-bugs.github.io/bugs.htm#ECB_MODE" }, { + "id": "9", "category": "sast", "name": "Cipher with no integrity", "message": "Cipher with no integrity", @@ -244,6 +269,7 @@ "url": "https://find-sec-bugs.github.io/bugs.htm#CIPHER_INTEGRITY" }, { + "id": "10", "category": "sast", "message": "Probable insecure usage of temp file/directory.", "cve": "python/hardcoded/hardcoded-tmp.py:63dd4d626855555b816985d82c4614a790462a0a3ada89dc58eb97f9c50f3077:B108", @@ -268,6 +294,7 @@ "url": "https://docs.openstack.org/bandit/latest/plugins/b108_hardcoded_tmp_directory.html" }, { + "id": "11", "category": "sast", "message": "Probable insecure usage of temp file/directory.", "cve": "python/hardcoded/hardcoded-tmp.py:4ad6d4c40a8c263fc265f3384724014e0a4f8dd6200af83e51ff120420038031:B108", @@ -292,6 +319,7 @@ "url": "https://docs.openstack.org/bandit/latest/plugins/b108_hardcoded_tmp_directory.html" }, { + "id": "12", "category": "sast", "message": "Consider possible security implications associated with Popen module.", "cve": "python/imports/imports-aliases.py:2c3e1fa1e54c3c6646e8bcfaee2518153c6799b77587ff8d9a7b0631f6d34785:B404", @@ -314,6 +342,7 @@ "line": 1 }, { + "id": "13", "category": "sast", "message": 
"Consider possible security implications associated with pickle module.", "cve": "python/imports/imports.py:af58d07f6ad519ef5287fcae65bf1a6999448a1a3a8bc1ac2a11daa80d0b96bf:B403", @@ -336,6 +365,7 @@ "line": 2 }, { + "id": "14", "category": "sast", "message": "Consider possible security implications associated with subprocess module.", "cve": "python/imports/imports.py:8de9bc98029d212db530785a5f6780cfa663548746ff228ab8fa96c5bb82f089:B404", @@ -358,6 +388,7 @@ "line": 4 }, { + "id": "15", "category": "sast", "message": "Possible hardcoded password: 'blerg'", "cve": "python/hardcoded/hardcoded-passwords.py:97c30f1d76d2a88913e3ce9ae74087874d740f87de8af697a9c455f01119f633:B106", @@ -382,6 +413,7 @@ "url": "https://docs.openstack.org/bandit/latest/plugins/b106_hardcoded_password_funcarg.html" }, { + "id": "16", "category": "sast", "message": "Possible hardcoded password: 'root'", "cve": "python/hardcoded/hardcoded-passwords.py:7431c73a0bc16d94ece2a2e75ef38f302574d42c37ac0c3c38ad0b3bf8a59f10:B105", @@ -406,6 +438,7 @@ "url": "https://docs.openstack.org/bandit/latest/plugins/b105_hardcoded_password_string.html" }, { + "id": "17", "category": "sast", "message": "Possible hardcoded password: ''", "cve": "python/hardcoded/hardcoded-passwords.py:d2d1857c27caedd49c57bfbcdc23afcc92bd66a22701fcdc632869aab4ca73ee:B105", @@ -430,6 +463,7 @@ "url": "https://docs.openstack.org/bandit/latest/plugins/b105_hardcoded_password_string.html" }, { + "id": "18", "category": "sast", "message": "Possible hardcoded password: 'ajklawejrkl42348swfgkg'", "cve": "python/hardcoded/hardcoded-passwords.py:fb3866215a61393a5c9c32a3b60e2058171a23219c353f722cbd3567acab21d2:B105", @@ -454,6 +488,7 @@ "url": "https://docs.openstack.org/bandit/latest/plugins/b105_hardcoded_password_string.html" }, { + "id": "19", "category": "sast", "message": "Possible hardcoded password: 'blerg'", "cve": "python/hardcoded/hardcoded-passwords.py:63c62a8b7e1e5224439bd26b28030585ac48741e28ca64561a6071080c560a5f:B105", @@ 
-478,6 +513,7 @@ "url": "https://docs.openstack.org/bandit/latest/plugins/b105_hardcoded_password_string.html" }, { + "id": "20", "category": "sast", "message": "Possible hardcoded password: 'blerg'", "cve": "python/hardcoded/hardcoded-passwords.py:4311b06d08df8fa58229b341c531da8e1a31ec4520597bdff920cd5c098d86f9:B105", @@ -502,6 +538,7 @@ "url": "https://docs.openstack.org/bandit/latest/plugins/b105_hardcoded_password_string.html" }, { + "id": "21", "category": "sast", "message": "Consider possible security implications associated with subprocess module.", "cve": "python/imports/imports-function.py:5858400c2f39047787702de44d03361ef8d954c9d14bd54ee1c2bef9e6a7df93:B404", @@ -524,6 +561,7 @@ "line": 4 }, { + "id": "22", "category": "sast", "message": "Consider possible security implications associated with pickle module.", "cve": "python/imports/imports-function.py:dbda3cf4190279d30e0aad7dd137eca11272b0b225e8af4e8bf39682da67d956:B403", @@ -546,6 +584,7 @@ "line": 2 }, { + "id": "23", "category": "sast", "message": "Consider possible security implications associated with Popen module.", "cve": "python/imports/imports-from.py:eb8a0db9cd1a8c1ab39a77e6025021b1261cc2a0b026b2f4a11fca4e0636d8dd:B404", @@ -568,6 +607,7 @@ "line": 7 }, { + "id": "24", "category": "sast", "message": "subprocess call with shell=True seems safe, but may be changed in the future, consider rewriting without shell", "cve": "python/imports/imports-aliases.py:f99f9721e27537fbcb6699a4cf39c6740d6234d2c6f06cfc2d9ea977313c483d:B602", @@ -592,6 +632,7 @@ "url": "https://docs.openstack.org/bandit/latest/plugins/b602_subprocess_popen_with_shell_equals_true.html" }, { + "id": "25", "category": "sast", "message": "Consider possible security implications associated with subprocess module.", "cve": "python/imports/imports-from.py:332a12ab1146698f614a905ce6a6a5401497a12281aef200e80522711c69dcf4:B404", @@ -614,6 +655,7 @@ "line": 6 }, { + "id": "26", "category": "sast", "message": "Consider possible security 
implications associated with Popen module.", "cve": "python/imports/imports-from.py:0a48de4a3d5348853a03666cb574697e3982998355e7a095a798bd02a5947276:B404", @@ -636,6 +678,7 @@ "line": 1 }, { + "id": "27", "category": "sast", "message": "Consider possible security implications associated with pickle module.", "cve": "python/imports/imports-aliases.py:51b71661dff994bde3529639a727a678c8f5c4c96f00d300913f6d5be1bbdf26:B403", @@ -658,6 +701,7 @@ "line": 7 }, { + "id": "28", "category": "sast", "message": "Consider possible security implications associated with loads module.", "cve": "python/imports/imports-aliases.py:6ff02aeb3149c01ab68484d794a94f58d5d3e3bb0d58557ef4153644ea68ea54:B403", @@ -680,6 +724,7 @@ "line": 6 }, { + "id": "29", "category": "sast", "message": "Statically-sized arrays can be improperly restricted, leading to potential overflows or other issues (CWE-119!/CWE-120)", "cve": "c/subdir/utils.c:b466873101951fe96e1332f6728eb7010acbbd5dfc3b65d7d53571d091a06d9e:CWE-119!/CWE-120", @@ -713,6 +758,7 @@ "url": "https://cwe.mitre.org/data/definitions/119.html" }, { + "id": "30", "category": "sast", "message": "Check when opening files - can an attacker redirect it (via symlinks), force the opening of special file type (e.g., device files), move things around to create a race condition, control its ancestors, or change its contents? 
(CWE-362)", "cve": "c/subdir/utils.c:bab681140fcc8fc3085b6bba74081b44ea145c1c98b5e70cf19ace2417d30770:CWE-362", @@ -739,6 +785,7 @@ "url": "https://cwe.mitre.org/data/definitions/362.html" }, { + "id": "31", "category": "sast", "message": "Statically-sized arrays can be improperly restricted, leading to potential overflows or other issues (CWE-119!/CWE-120)", "cve": "cplusplus/src/hello.cpp:c8c6dd0afdae6814194cf0930b719f757ab7b379cf8f261e7f4f9f2f323a818a:CWE-119!/CWE-120", @@ -772,6 +819,7 @@ "url": "https://cwe.mitre.org/data/definitions/119.html" }, { + "id": "32", "category": "sast", "message": "Does not check for buffer overflows when copying to destination [MS-banned] (CWE-120)", "cve": "cplusplus/src/hello.cpp:331c04062c4fe0c7c486f66f59e82ad146ab33cdd76ae757ca41f392d568cbd0:CWE-120", @@ -799,4 +847,4 @@ "url": "https://cwe.mitre.org/data/definitions/120.html" } ] -} \ No newline at end of file +} diff --git a/spec/fixtures/security_reports/master/gl-sast-report-bandit.json b/spec/fixtures/security_reports/master/gl-sast-report-bandit.json index d0346479b85..690c58d049b 100644 --- a/spec/fixtures/security_reports/master/gl-sast-report-bandit.json +++ b/spec/fixtures/security_reports/master/gl-sast-report-bandit.json @@ -1,5 +1,5 @@ { - "version": "14.0.4", + "version": "15.0.4", "vulnerabilities": [ { "id": "985a5666dcae22adef5ac12f8a8a2dacf9b9b481ae5d87cd0ac1712b0fd64864", @@ -26,6 +26,15 @@ } ], "scan": { + "analyzer": { + "id": "find_sec_bugs_analyzer", + "name": "Find Security Bugs Analyzer", + "url": "https://gitlab.com", + "vendor": { + "name": "GitLab" + }, + "version": "1.0.0" + }, "scanner": { "id": "bandit", "name": "Bandit", @@ -40,4 +49,4 @@ "end_time": "2022-03-11T00:21:50", "status": "success" } -} \ No newline at end of file +} diff --git a/spec/fixtures/security_reports/master/gl-sast-report-gosec.json b/spec/fixtures/security_reports/master/gl-sast-report-gosec.json index 4c385326c8c..ef1d06d2e4f 100644 --- 
a/spec/fixtures/security_reports/master/gl-sast-report-gosec.json +++ b/spec/fixtures/security_reports/master/gl-sast-report-gosec.json @@ -1,5 +1,5 @@ { - "version": "14.0.4", + "version": "15.0.4", "vulnerabilities": [ { "id": "2e5656ff30e2e7cc93c36b4845c8a689ddc47fdbccf45d834c67442fbaa89be0", @@ -51,6 +51,15 @@ } ], "scan": { + "analyzer": { + "id": "find_sec_bugs_analyzer", + "name": "Find Security Bugs Analyzer", + "url": "https://gitlab.com", + "vendor": { + "name": "GitLab" + }, + "version": "1.0.0" + }, "scanner": { "id": "gosec", "name": "Gosec", @@ -65,4 +74,4 @@ "end_time": "2022-03-15T20:33:17", "status": "success" } -} \ No newline at end of file +} diff --git a/spec/fixtures/security_reports/master/gl-sast-report-minimal.json b/spec/fixtures/security_reports/master/gl-sast-report-minimal.json index 53d15224b30..d29571638ff 100644 --- a/spec/fixtures/security_reports/master/gl-sast-report-minimal.json +++ b/spec/fixtures/security_reports/master/gl-sast-report-minimal.json @@ -1,7 +1,8 @@ { - "version": "14.0.0", + "version": "15.0.0", "vulnerabilities": [ { + "id": "1", "category": "sast", "name": "Cipher with no integrity", "message": "Cipher with no integrity", @@ -49,8 +50,19 @@ } } ], - "remediations": [], + "remediations": [ + + ], "scan": { + "analyzer": { + "id": "find_sec_bugs_analyzer", + "name": "Find Security Bugs Analyzer", + "url": "https://gitlab.com", + "vendor": { + "name": "GitLab" + }, + "version": "1.0.0" + }, "scanner": { "id": "find_sec_bugs", "name": "Find Security Bugs", @@ -65,4 +77,4 @@ "start_time": "2022-08-10T21:37:00", "end_time": "2022-08-10T21:38:00" } -} \ No newline at end of file +} diff --git a/spec/fixtures/security_reports/master/gl-sast-report-semgrep-for-bandit.json b/spec/fixtures/security_reports/master/gl-sast-report-semgrep-for-bandit.json index 037b9fb8d3e..c51abf46c13 100644 --- a/spec/fixtures/security_reports/master/gl-sast-report-semgrep-for-bandit.json +++ 
b/spec/fixtures/security_reports/master/gl-sast-report-semgrep-for-bandit.json @@ -1,5 +1,5 @@ { - "version": "14.0.4", + "version": "15.0.4", "vulnerabilities": [ { "id": "985a5666dcae22adef5ac12f8a8a2dacf9b9b481ae5d87cd0ac1712b0fd64864", @@ -54,6 +54,15 @@ } ], "scan": { + "analyzer": { + "id": "find_sec_bugs_analyzer", + "name": "Find Security Bugs Analyzer", + "url": "https://gitlab.com", + "vendor": { + "name": "GitLab" + }, + "version": "1.0.0" + }, "scanner": { "id": "semgrep", "name": "Semgrep", @@ -68,4 +77,4 @@ "end_time": "2022-03-11T18:48:22", "status": "success" } -} \ No newline at end of file +} diff --git a/spec/fixtures/security_reports/master/gl-sast-report-semgrep-for-gosec.json b/spec/fixtures/security_reports/master/gl-sast-report-semgrep-for-gosec.json index 8fa85c30b56..9a6dd4190c5 100644 --- a/spec/fixtures/security_reports/master/gl-sast-report-semgrep-for-gosec.json +++ b/spec/fixtures/security_reports/master/gl-sast-report-semgrep-for-gosec.json @@ -1,5 +1,5 @@ { - "version": "14.0.4", + "version": "15.0.4", "vulnerabilities": [ { "id": "79f6537b7ec83c7717f5bd1a4f12645916caafefe2e4359148d889855505aa67", @@ -53,6 +53,15 @@ } ], "scan": { + "analyzer": { + "id": "find_sec_bugs_analyzer", + "name": "Find Security Bugs Analyzer", + "url": "https://gitlab.com", + "vendor": { + "name": "GitLab" + }, + "version": "1.0.0" + }, "scanner": { "id": "semgrep", "name": "Semgrep", @@ -74,4 +83,4 @@ "end_time": "2022-03-15T20:37:05", "status": "success" } -} \ No newline at end of file +} diff --git a/spec/fixtures/security_reports/master/gl-sast-report-semgrep-for-multiple-findings.json b/spec/fixtures/security_reports/master/gl-sast-report-semgrep-for-multiple-findings.json index cbdfdb86f6b..e3659c70710 100644 --- a/spec/fixtures/security_reports/master/gl-sast-report-semgrep-for-multiple-findings.json +++ b/spec/fixtures/security_reports/master/gl-sast-report-semgrep-for-multiple-findings.json @@ -1,5 +1,5 @@ { - "version": "14.0.4", + "version": 
"15.0.4", "vulnerabilities": [ { "id": "985a5666dcae22adef5ac12f8a8a2dacf9b9b481ae5d87cd0ac1712b0fd64864", @@ -104,6 +104,15 @@ } ], "scan": { + "analyzer": { + "id": "semgrep_analyzer", + "name": "Semgrep Analyzer", + "url": "https://gitlab.com/", + "vendor": { + "name": "GitLab" + }, + "version": "1.0.0" + }, "scanner": { "id": "semgrep", "name": "Semgrep", @@ -131,4 +140,4 @@ "end_time": "2022-03-15T20:37:05", "status": "success" } -} \ No newline at end of file +} diff --git a/spec/fixtures/security_reports/master/gl-sast-report.json b/spec/fixtures/security_reports/master/gl-sast-report.json index 0ec31252e97..1bd1f241a6d 100644 --- a/spec/fixtures/security_reports/master/gl-sast-report.json +++ b/spec/fixtures/security_reports/master/gl-sast-report.json @@ -1,7 +1,8 @@ { - "version": "14.0.0", + "version": "15.0.0", "vulnerabilities": [ { + "id": "1_481ae5d87cd0ac1712b0fd64864", "category": "sast", "name": "Predictable pseudorandom number generator", "message": "Predictable pseudorandom number generator", @@ -39,6 +40,7 @@ ] }, { + "id": "2_481ae5d87cd0ac1712b0fd64864", "category": "sast", "name": "Predictable pseudorandom number generator", "message": "Predictable pseudorandom number generator", @@ -66,6 +68,7 @@ ] }, { + "id": "3_481ae5d87cd0ac1712b0fd64864", "category": "sast", "name": "ECB mode is insecure", "message": "ECB mode is insecure", @@ -100,6 +103,7 @@ ] }, { + "id": "4_481ae5d87cd0ac1712b0fd64864", "category": "sast", "name": "Hard coded key", "message": "Hard coded key", @@ -134,6 +138,7 @@ ] }, { + "id": "5_481ae5d87cd0ac1712b0fd64864", "category": "sast", "name": "Cipher with no integrity", "message": "Cipher with no integrity", @@ -181,8 +186,19 @@ } } ], - "remediations": [], + "remediations": [ + + ], "scan": { + "analyzer": { + "id": "find_sec_bugs_analyzer", + "name": "Find Security Bugs Analyzer", + "url": "https://gitlab.com", + "vendor": { + "name": "GitLab" + }, + "version": "1.0.0" + }, "scanner": { "id": "find_sec_bugs", "name": 
"Find Security Bugs", @@ -197,4 +213,4 @@ "start_time": "2022-08-10T21:37:00", "end_time": "2022-08-10T21:38:00" } -} \ No newline at end of file +} diff --git a/spec/fixtures/security_reports/master/gl-secret-detection-report.json b/spec/fixtures/security_reports/master/gl-secret-detection-report.json index cb97b60ced1..43c079e8769 100644 --- a/spec/fixtures/security_reports/master/gl-secret-detection-report.json +++ b/spec/fixtures/security_reports/master/gl-secret-detection-report.json @@ -1,5 +1,29 @@ { - "version": "14.1.2", + "version": "15.0.0", + "scan": { + "analyzer": { + "id": "secret_detection_analyzer", + "name": "Secret Detection Analyzer", + "url": "https://gitlab.com", + "vendor": { + "name": "GitLab" + }, + "version": "1.0.0" + }, + "scanner": { + "id": "secret_detection", + "name": "Secret Detection", + "url": "https://gitlab.com", + "vendor": { + "name": "GitLab" + }, + "version": "0.1.0" + }, + "type": "sast", + "start_time": "2022-03-11T18:48:16", + "end_time": "2022-03-11T18:48:22", + "status": "success" + }, "vulnerabilities": [ { "id": "27d2322d519c94f803ffed1cf6d14e455df97e5a0668e229eb853fdb0d277d2c", @@ -17,7 +41,8 @@ "location": { "file": "aws-key.py", "dependency": { - "package": {} + "package": { + } }, "commit": { "sha": "e9c3a56590d5bed4155c0d128f1552d52fdcc7ae" @@ -32,5 +57,7 @@ ] } ], - "remediations": [] -} \ No newline at end of file + "remediations": [ + + ] +} diff --git a/spec/fixtures/service_account.json b/spec/fixtures/service_account.json index 9f7f5526cf5..31ef182f8c2 100644 --- a/spec/fixtures/service_account.json +++ b/spec/fixtures/service_account.json @@ -2,7 +2,7 @@ "type": "service_account", "project_id": "demo-app-123", "private_key_id": "47f0b1700983da548af6fcd37007f42996099999", - "private_key": "-----BEGIN PRIVATE 
KEY-----\nABCDEFIBADANBgkqhkiG9w0BAQEFAASCBKgwggSkAgEAAoIBAQDJn8w20WcN+fi5\nIhO1BEFCv7ExK8J5rW5Pc8XpJgpQoL5cfv6qC6aS+x4maI7S4AG7diqXBLCfjlnA\nqBzXwCRnnPtQhu+v1ehAj5fGNa7F51f9aacRNmKdHzNmWZEPDuLqq0I/Ewcsotu+\nnb+tCYk1o2ahyPZau8JtXFZs7oZb7SrfgoSJemccxeVreGm1Dt6SM74/3qJAeHN/\niK/v0IiQP1GS4Jxgz38XQGo+jiTpNrFcf4S0RNxKcNf+tuuEBDi57LBLwdotM7E5\nF1l9pZZMWkmQKQIxeER6+2HuE56V6QPITwkQ/u9XZFQSgl4SBIw2sHr5D/xaUxjw\n+kMy2Jt9AgMBAAECggEACL7E34rRIWbP043cv3ZQs1RiWzY2mvWmCiMEzkz0rRRv\nyqNv0yXVYtzVV7KjdpY56leLgjM1Sv0PEQoUUtpWFJAXSXdKLaewSXPrpXCoz5OD\nekMgeItnQcE7nECdyAKsCSQw/SXg4t4p0a3WGsCwt3If2TwWIrov9R4zGcn1wMZn\n922WtZDmh2NqdTZIKElWZLxNlIr/1v88mAp7oSa1DLfqWkwEEnxK7GGAiwN8ARIF\nkvgiuKdsHBf5aNKg70xN6AcZx/Z4+KZxXxyKKF5VkjCtDzA97EjJqftDPwGTkela\n2bgiDSJs0Un0wQpFFRDrlfyo7rr9Ey/Gf4rR66NWeQKBgQD7qPP55xoWHCDvoK9P\nMN67qFLNDPWcKVKr8siwUlZ6/+acATXjfNUjsJLM7vBxYLjdtFxQ/vojJTQyMxHt\n80wARDk1DTu2zhltL2rKo6LfbwjQsot1MLZFXAMwqtHTLfURaj8kO1JDV/j+4a94\nP0gzNMiBYAKWm6z08akEz2TrhQKBgQDNGfFvtxo4Mf6AA3iYXCwc0CJXb+cqZkW/\n7glnV+vDqYVo23HJaKHFD+Xqaj+cUrOUNglWgT9WSCZR++Hzw1OCPZvX2V9Z6eQh\ngqOBX6D19q9jfShfxLywEAD5pk7LMINumsNm6H+6shJQK5c67bsM9/KQbSnIlWhw\n7JBe8OlFmQKBgQDREyF2mb/7ZG0ch8N9qB0zjHkV79FRZqdPQUnn6s/8KgO90eei\nUkCFARpE9bF+kBul3UTg6aSIdE0z82fO51VZ11Qrtg3JJtrK8hznsyEKPaX2NI9V\n0h1r7DCeSxw9NS4nxLwmbr4+QqUTpA3yeaiTGiQGD+y2kSkU6nxACclPPQKBgFkb\nkVqg6YJKrjB90ZIYUY3/GzxzwLIaFumpCGretu6eIvkIhiokDExqeNBccuB+ych1\npZ7wrkzVMdjinythzFFEZQXlSdjtlhC9Cj52Bp92GoMV6EmbVwMDIPlVuNvsat3N\n3WFDV+ML5IryNVUD3gVnX/pBgyrDRsnw7VRiRGbZAoGBANxZwGKZo0zpyb5O5hS6\nxVrgJtIySlV5BOEjFXKeLwzByht8HmrHhSWix6WpPejfK1RHhl3boU6t9yeC0cre\nvUI/Y9LBhHXjSwWCWlqVe9yYqsde+xf0UYRS8IoaoJjus7YVJr9yPpCboEF28ZmQ\ndVBlpZYg6oLIar6waaLMz/1B\n-----END PRIVATE KEY-----\n", + "private_key": "-----BEGIN PRIVATE 
KEY-----\nMIIEvgIBADANBgkqhkiG9w0BAQEFAASCBKgwggSkAgEAAoIBAQDJn8w20WcN+fi5\nIhO1BEFCv7ExK8J5rW5Pc8XpJgpQoL5cfv6qC6aS+x4maI7S4AG7diqXBLCfjlnA\nqBzXwCRnnPtQhu+v1ehAj5fGNa7F51f9aacRNmKdHzNmWZEPDuLqq0I/Ewcsotu+\nnb+tCYk1o2ahyPZau8JtXFZs7oZb7SrfgoSJemccxeVreGm1Dt6SM74/3qJAeHN/\niK/v0IiQP1GS4Jxgz38XQGo+jiTpNrFcf4S0RNxKcNf+tuuEBDi57LBLwdotM7E5\nF1l9pZZMWkmQKQIxeER6+2HuE56V6QPITwkQ/u9XZFQSgl4SBIw2sHr5D/xaUxjw\n+kMy2Jt9AgMBAAECggEACL7E34rRIWbP043cv3ZQs1RiWzY2mvWmCiMEzkz0rRRv\nyqNv0yXVYtzVV7KjdpY56leLgjM1Sv0PEQoUUtpWFJAXSXdKLaewSXPrpXCoz5OD\nekMgeItnQcE7nECdyAKsCSQw/SXg4t4p0a3WGsCwt3If2TwWIrov9R4zGcn1wMZn\n922WtZDmh2NqdTZIKElWZLxNlIr/1v88mAp7oSa1DLfqWkwEEnxK7GGAiwN8ARIF\nkvgiuKdsHBf5aNKg70xN6AcZx/Z4+KZxXxyKKF5VkjCtDzA97EjJqftDPwGTkela\n2bgiDSJs0Un0wQpFFRDrlfyo7rr9Ey/Gf4rR66NWeQKBgQD7qPP55xoWHCDvoK9P\nMN67qFLNDPWcKVKr8siwUlZ6/+acATXjfNUjsJLM7vBxYLjdtFxQ/vojJTQyMxHt\n80wARDk1DTu2zhltL2rKo6LfbwjQsot1MLZFXAMwqtHTLfURaj8kO1JDV/j+4a94\nP0gzNMiBYAKWm6z08akEz2TrhQKBgQDNGfFvtxo4Mf6AA3iYXCwc0CJXb+cqZkW/\n7glnV+vDqYVo23HJaKHFD+Xqaj+cUrOUNglWgT9WSCZR++Hzw1OCPZvX2V9Z6eQh\ngqOBX6D19q9jfShfxLywEAD5pk7LMINumsNm6H+6shJQK5c67bsM9/KQbSnIlWhw\n7JBe8OlFmQKBgQDREyF2mb/7ZG0ch8N9qB0zjHkV79FRZqdPQUnn6s/8KgO90eei\nUkCFARpE9bF+kBul3UTg6aSIdE0z82fO51VZ11Qrtg3JJtrK8hznsyEKPaX2NI9V\n0h1r7DCeSxw9NS4nxLwmbr4+QqUTpA3yeaiTGiQGD+y2kSkU6nxACclPPQKBgFkb\nkVqg6YJKrjB90ZIYUY3/GzxzwLIaFumpCGretu6eIvkIhiokDExqeNBccuB+ych1\npZ7wrkzVMdjinythzFFEZQXlSdjtlhC9Cj52Bp92GoMV6EmbVwMDIPlVuNvsat3N\n3WFDV+ML5IryNVUD3gVnX/pBgyrDRsnw7VRiRGbZAoGBANxZwGKZo0zpyb5O5hS6\nxVrgJtIySlV5BOEjFXKeLwzByht8HmrHhSWix6WpPejfK1RHhl3boU6t9yeC0cre\nvUI/Y9LBhHXjSwWCWlqVe9yYqsde+xf0UYRS8IoaoJjus7YVJr9yPpCboEF28ZmQ\ndVBlpZYg6oLIar6waaLMz/1B\n-----END PRIVATE KEY-----\n", "client_email": "demo-app-account@demo-app-374914.iam.gserviceaccount.com", "client_id": "111111116847110173051", "auth_uri": "https://accounts.google.com/o/oauth2/auth", diff --git a/spec/fixtures/structure.sql b/spec/fixtures/structure.sql index 800c33bb9b9..5a7deb4fadf 100644 --- 
a/spec/fixtures/structure.sql +++ b/spec/fixtures/structure.sql @@ -13,12 +13,81 @@ CREATE INDEX index_users_on_public_email_excluding_null_and_empty ON users USING ALTER TABLE ONLY bulk_import_configurations ADD CONSTRAINT fk_rails_536b96bff1 FOREIGN KEY (bulk_import_id) REFERENCES bulk_imports(id) ON DELETE CASCADE; +CREATE TABLE test_table ( + id bigint NOT NULL, + integer_column integer, + integer_with_default_column integer DEFAULT 1, + smallint_column smallint, + smallint_with_default_column smallint DEFAULT 0 NOT NULL, + numeric_column numeric NOT NULL, + numeric_with_default_column numeric DEFAULT 1.0 NOT NULL, + boolean_colum boolean, + boolean_with_default_colum boolean DEFAULT true NOT NULL, + double_precision_column double precision, + double_precision_with_default_column double precision DEFAULT 1.0, + varying_column character varying, + varying_with_default_column character varying DEFAULT 'DEFAULT'::character varying NOT NULL, + varying_with_limit_column character varying(255), + varying_with_limit_and_default_column character varying(255) DEFAULT 'DEFAULT'::character varying, + text_column text NOT NULL, + text_with_default_column text DEFAULT ''::text NOT NULL, + array_column character varying(255)[] NOT NULL, + array_with_default_column character varying(255)[] DEFAULT '{one,two}'::character varying[] NOT NULL, + jsonb_column jsonb, + jsonb_with_default_column jsonb DEFAULT '[]'::jsonb NOT NULL, + timestamptz_column timestamp with time zone, + timestamptz_with_default_column timestamp(6) with time zone DEFAULT now(), + timestamp_column timestamp(6) without time zone NOT NULL, + timestamp_with_default_column timestamp(6) without time zone DEFAULT '2022-01-23 00:00:00+00'::timestamp without time zone NOT NULL, + date_column date, + date_with_default_column date DEFAULT '2023-04-05', + inet_column inet NOT NULL, + inet_with_default_column inet DEFAULT '0.0.0.0'::inet NOT NULL, + macaddr_column macaddr, + macaddr_with_default_column macaddr DEFAULT 
'00-00-00-00-00-000'::macaddr NOT NULL, + uuid_column uuid NOT NULL, + uuid_with_default_column uuid DEFAULT '00000000-0000-0000-0000-000000000000'::uuid NOT NULL, + bytea_column bytea, + bytea_with_default_column bytea DEFAULT '\xDEADBEEF'::bytea, + unmapped_column_type anyarray +); + CREATE TABLE ci_project_mirrors ( id bigint NOT NULL, project_id integer NOT NULL, namespace_id integer NOT NULL ); +CREATE TABLE wrong_table ( + id bigint NOT NULL, + description character varying(255) NOT NULL +); + +CREATE TABLE extra_table_columns ( + id bigint NOT NULL, + name character varying(255) NOT NULL +); + +CREATE TABLE missing_table ( + id bigint NOT NULL, + description text NOT NULL +); + +CREATE TABLE missing_table_columns ( + id bigint NOT NULL, + email character varying(255) NOT NULL +); + +CREATE TABLE operations_user_lists ( + id bigint NOT NULL, + project_id bigint NOT NULL, + created_at timestamp with time zone NOT NULL, + updated_at timestamp with time zone NOT NULL, + iid integer NOT NULL, + name character varying(255) NOT NULL, + user_xids text DEFAULT ''::text NOT NULL +); + CREATE TRIGGER trigger AFTER INSERT ON public.t1 FOR EACH ROW EXECUTE FUNCTION t1(); CREATE TRIGGER wrong_trigger BEFORE UPDATE ON public.t2 FOR EACH ROW EXECUTE FUNCTION my_function(); diff --git a/spec/frontend/__helpers__/assert_props.js b/spec/frontend/__helpers__/assert_props.js new file mode 100644 index 00000000000..3e372454bf5 --- /dev/null +++ b/spec/frontend/__helpers__/assert_props.js @@ -0,0 +1,24 @@ +import { mount } from '@vue/test-utils'; +import { ErrorWithStack } from 'jest-util'; + +export function assertProps(Component, props, extraMountArgs = {}) { + const originalConsoleError = global.console.error; + global.console.error = function error(...args) { + throw new ErrorWithStack( + `Unexpected call of console.error() with:\n\n${args.join(', ')}`, + this.error, + ); + }; + const ComponentWithoutRenderFn = { + ...Component, + render() { + return ''; + }, + }; + + try { + 
mount(ComponentWithoutRenderFn, { propsData: props, ...extraMountArgs }); + } finally { + global.console.error = originalConsoleError; + } +} diff --git a/spec/frontend/__helpers__/wait_for_text.js b/spec/frontend/__helpers__/wait_for_text.js index 6bed8a90a98..991adc5d6c0 100644 --- a/spec/frontend/__helpers__/wait_for_text.js +++ b/spec/frontend/__helpers__/wait_for_text.js @@ -1,3 +1,3 @@ import { findByText } from '@testing-library/dom'; -export const waitForText = async (text, container = document) => findByText(container, text); +export const waitForText = (text, container = document) => findByText(container, text); diff --git a/spec/frontend/access_tokens/index_spec.js b/spec/frontend/access_tokens/index_spec.js index 1157e44f41a..c1158e0d124 100644 --- a/spec/frontend/access_tokens/index_spec.js +++ b/spec/frontend/access_tokens/index_spec.js @@ -112,7 +112,7 @@ describe('access tokens', () => { ); }); - it('mounts component and sets `inputAttrs` prop', async () => { + it('mounts component and sets `inputAttrs` prop', () => { wrapper = createWrapper(initExpiresAtField()); const component = wrapper.findComponent(ExpiresAtField); diff --git a/spec/frontend/add_context_commits_modal/components/__snapshots__/add_context_commits_modal_spec.js.snap b/spec/frontend/add_context_commits_modal/components/__snapshots__/add_context_commits_modal_spec.js.snap index 2c2151bfb41..e379aba094c 100644 --- a/spec/frontend/add_context_commits_modal/components/__snapshots__/add_context_commits_modal_spec.js.snap +++ b/spec/frontend/add_context_commits_modal/components/__snapshots__/add_context_commits_modal_spec.js.snap @@ -6,8 +6,9 @@ exports[`AddContextCommitsModal renders modal with 2 tabs 1`] = ` body-class="add-review-item pt-0" cancel-variant="light" dismisslabel="Close" - modalclass="" + modalclass="add-review-item-modal" modalid="add-review-item" + nofocusonshow="true" ok-disabled="true" ok-title="Save changes" scrollable="true" @@ -27,9 +28,13 @@ 
exports[`AddContextCommitsModal renders modal with 2 tabs 1`] = `
- diff --git a/spec/frontend/add_context_commits_modal/components/add_context_commits_modal_spec.js b/spec/frontend/add_context_commits_modal/components/add_context_commits_modal_spec.js index 5e96da9af7e..27fe010c354 100644 --- a/spec/frontend/add_context_commits_modal/components/add_context_commits_modal_spec.js +++ b/spec/frontend/add_context_commits_modal/components/add_context_commits_modal_spec.js @@ -1,4 +1,4 @@ -import { GlModal, GlSearchBoxByType } from '@gitlab/ui'; +import { GlModal, GlFilteredSearch } from '@gitlab/ui'; import { shallowMount } from '@vue/test-utils'; import Vue, { nextTick } from 'vue'; import Vuex from 'vuex'; @@ -49,7 +49,7 @@ describe('AddContextCommitsModal', () => { }; const findModal = () => wrapper.findComponent(GlModal); - const findSearch = () => wrapper.findComponent(GlSearchBoxByType); + const findSearch = () => wrapper.findComponent(GlFilteredSearch); beforeEach(() => { wrapper = createWrapper(); @@ -68,12 +68,29 @@ describe('AddContextCommitsModal', () => { expect(findSearch().exists()).toBe(true); }); - it('when user starts entering text in search box, it calls action "searchCommits" after waiting for 500s', () => { - const searchText = 'abcd'; - findSearch().vm.$emit('input', searchText); - expect(searchCommits).not.toHaveBeenCalled(); - jest.advanceTimersByTime(500); - expect(searchCommits).toHaveBeenCalledWith(expect.anything(), searchText); + it('when user submits after entering filters in search box, then it calls action "searchCommits"', () => { + const search = [ + 'abcd', + { + type: 'author', + value: { operator: '=', data: 'abhi' }, + }, + { + type: 'committed-before-date', + value: { operator: '=', data: '2022-10-31' }, + }, + { + type: 'committed-after-date', + value: { operator: '=', data: '2022-10-28' }, + }, + ]; + findSearch().vm.$emit('submit', search); + expect(searchCommits).toHaveBeenCalledWith(expect.anything(), { + searchText: 'abcd', + author: 'abhi', + committed_before: '2022-10-31', + 
committed_after: '2022-10-28', + }); }); it('disabled ok button when no row is selected', () => { diff --git a/spec/frontend/admin/abuse_reports/components/abuse_report_actions_spec.js b/spec/frontend/admin/abuse_reports/components/abuse_report_actions_spec.js new file mode 100644 index 00000000000..e72d0c24d5e --- /dev/null +++ b/spec/frontend/admin/abuse_reports/components/abuse_report_actions_spec.js @@ -0,0 +1,166 @@ +import { mount, shallowMount } from '@vue/test-utils'; +import { nextTick } from 'vue'; +import axios from 'axios'; +import MockAdapter from 'axios-mock-adapter'; +import { GlButton, GlModal } from '@gitlab/ui'; +import AbuseReportActions from '~/admin/abuse_reports/components/abuse_report_actions.vue'; +import { useMockLocationHelper } from 'helpers/mock_window_location_helper'; +import { HTTP_STATUS_OK } from '~/lib/utils/http_status'; +import { createAlert, VARIANT_SUCCESS } from '~/alert'; +import { sprintf } from '~/locale'; +import { ACTIONS_I18N } from '~/admin/abuse_reports/constants'; +import { mockAbuseReports } from '../mock_data'; + +jest.mock('~/alert'); + +describe('AbuseReportActions', () => { + let wrapper; + + const findRemoveUserAndReportButton = () => wrapper.findAllComponents(GlButton).at(0); + const findBlockUserButton = () => wrapper.findAllComponents(GlButton).at(1); + const findRemoveReportButton = () => wrapper.findAllComponents(GlButton).at(2); + const findConfirmationModal = () => wrapper.findComponent(GlModal); + + const report = mockAbuseReports[0]; + + const createComponent = ({ props, mountFn } = { props: {}, mountFn: mount }) => { + wrapper = mountFn(AbuseReportActions, { + propsData: { + report, + ...props, + }, + }); + }; + const createShallowComponent = (props) => createComponent({ props, mountFn: shallowMount }); + + describe('default', () => { + beforeEach(() => { + createShallowComponent(); + }); + + it('displays "Block user", "Remove user & report", and "Remove report" buttons', () => { + 
expect(findRemoveUserAndReportButton().text()).toBe(ACTIONS_I18N.removeUserAndReport); + + const blockButton = findBlockUserButton(); + expect(blockButton.text()).toBe(ACTIONS_I18N.blockUser); + expect(blockButton.attributes('disabled')).toBeUndefined(); + + expect(findRemoveReportButton().text()).toBe(ACTIONS_I18N.removeReport); + }); + + it('does not show the confirmation modal initially', () => { + expect(findConfirmationModal().props('visible')).toBe(false); + }); + }); + + describe('block button when user is already blocked', () => { + it('is disabled and has the correct text', () => { + createShallowComponent({ report: { ...report, userBlocked: true } }); + + const button = findBlockUserButton(); + expect(button.text()).toBe(ACTIONS_I18N.alreadyBlocked); + expect(button.attributes('disabled')).toBe('true'); + }); + }); + + describe('actions', () => { + let axiosMock; + + useMockLocationHelper(); + + beforeEach(() => { + axiosMock = new MockAdapter(axios); + + createComponent(); + }); + + afterEach(() => { + axiosMock.restore(); + createAlert.mockClear(); + }); + + describe('on remove user and report', () => { + it('shows confirmation modal and reloads the page on success', async () => { + findRemoveUserAndReportButton().trigger('click'); + await nextTick(); + + expect(findConfirmationModal().props()).toMatchObject({ + visible: true, + title: sprintf(ACTIONS_I18N.removeUserAndReportConfirm, { + user: report.reportedUser.name, + }), + }); + + axiosMock.onDelete(report.removeUserAndReportPath).reply(HTTP_STATUS_OK); + + findConfirmationModal().vm.$emit('primary'); + await axios.waitForAll(); + + expect(window.location.reload).toHaveBeenCalled(); + }); + }); + + describe('on block user', () => { + beforeEach(async () => { + findBlockUserButton().trigger('click'); + await nextTick(); + }); + + it('shows confirmation modal', () => { + expect(findConfirmationModal().props()).toMatchObject({ + visible: true, + title: ACTIONS_I18N.blockUserConfirm, + }); + }); + + 
describe.each([ + { + responseData: { notice: 'Notice' }, + createAlertArgs: { message: 'Notice', variant: VARIANT_SUCCESS }, + blockButtonText: ACTIONS_I18N.alreadyBlocked, + blockButtonDisabled: 'disabled', + }, + { + responseData: { error: 'Error' }, + createAlertArgs: { message: 'Error' }, + blockButtonText: ACTIONS_I18N.blockUser, + blockButtonDisabled: undefined, + }, + ])( + 'when reponse JSON is $responseData', + ({ responseData, createAlertArgs, blockButtonText, blockButtonDisabled }) => { + beforeEach(async () => { + axiosMock.onPut(report.blockUserPath).reply(HTTP_STATUS_OK, responseData); + + findConfirmationModal().vm.$emit('primary'); + await axios.waitForAll(); + }); + + it('updates the block button correctly', () => { + const button = findBlockUserButton(); + expect(button.text()).toBe(blockButtonText); + expect(button.attributes('disabled')).toBe(blockButtonDisabled); + }); + + it('displays the returned message', () => { + expect(createAlert).toHaveBeenCalledWith(createAlertArgs); + }); + }, + ); + }); + + describe('on remove report', () => { + it('reloads the page on success', async () => { + axiosMock.onDelete(report.removeReportPath).reply(HTTP_STATUS_OK); + + findRemoveReportButton().trigger('click'); + + expect(findConfirmationModal().props('visible')).toBe(false); + + await axios.waitForAll(); + + expect(window.location.reload).toHaveBeenCalled(); + }); + }); + }); +}); diff --git a/spec/frontend/admin/abuse_reports/components/abuse_report_details_spec.js b/spec/frontend/admin/abuse_reports/components/abuse_report_details_spec.js new file mode 100644 index 00000000000..b89bbac0196 --- /dev/null +++ b/spec/frontend/admin/abuse_reports/components/abuse_report_details_spec.js @@ -0,0 +1,53 @@ +import { GlButton, GlCollapse } from '@gitlab/ui'; +import { nextTick } from 'vue'; +import { shallowMount } from '@vue/test-utils'; +import AbuseReportDetails from '~/admin/abuse_reports/components/abuse_report_details.vue'; +import { getTimeago } from 
'~/lib/utils/datetime_utility'; +import { mockAbuseReports } from '../mock_data'; + +describe('AbuseReportDetails', () => { + let wrapper; + const report = mockAbuseReports[0]; + + const findToggleButton = () => wrapper.findComponent(GlButton); + const findCollapsible = () => wrapper.findComponent(GlCollapse); + + const createComponent = () => { + wrapper = shallowMount(AbuseReportDetails, { + propsData: { + report, + }, + }); + }; + + describe('default', () => { + beforeEach(() => { + createComponent(); + }); + + it('renders toggle button with the correct text', () => { + expect(findToggleButton().text()).toEqual('Show details'); + }); + + it('renders collapsed GlCollapse containing the report details', () => { + const collapsible = findCollapsible(); + expect(collapsible.attributes('visible')).toBeUndefined(); + + const userJoinedText = `User joined ${getTimeago().format(report.reportedUser.createdAt)}`; + expect(collapsible.text()).toMatch(userJoinedText); + expect(collapsible.text()).toMatch(report.message); + }); + }); + + describe('when toggled', () => { + it('expands GlCollapse and updates toggle text', async () => { + createComponent(); + + findToggleButton().vm.$emit('click'); + await nextTick(); + + expect(findToggleButton().text()).toEqual('Hide details'); + expect(findCollapsible().attributes('visible')).toBe('true'); + }); + }); +}); diff --git a/spec/frontend/admin/abuse_reports/components/abuse_report_row_spec.js b/spec/frontend/admin/abuse_reports/components/abuse_report_row_spec.js index d32fa25d238..9876ee70e5e 100644 --- a/spec/frontend/admin/abuse_reports/components/abuse_report_row_spec.js +++ b/spec/frontend/admin/abuse_reports/components/abuse_report_row_spec.js @@ -1,22 +1,31 @@ +import { GlSprintf, GlLink } from '@gitlab/ui'; import { shallowMountExtended } from 'helpers/vue_test_utils_helper'; +import setWindowLocation from 'helpers/set_window_location_helper'; +import AbuseReportDetails from 
'~/admin/abuse_reports/components/abuse_report_details.vue'; import AbuseReportRow from '~/admin/abuse_reports/components/abuse_report_row.vue'; +import AbuseReportActions from '~/admin/abuse_reports/components/abuse_report_actions.vue'; import ListItem from '~/vue_shared/components/registry/list_item.vue'; import { getTimeago } from '~/lib/utils/datetime_utility'; +import { SORT_UPDATED_AT } from '~/admin/abuse_reports/constants'; import { mockAbuseReports } from '../mock_data'; describe('AbuseReportRow', () => { let wrapper; const mockAbuseReport = mockAbuseReports[0]; + const findLinks = () => wrapper.findAllComponents(GlLink); + const findAbuseReportActions = () => wrapper.findComponent(AbuseReportActions); const findListItem = () => wrapper.findComponent(ListItem); const findTitle = () => wrapper.findByTestId('title'); - const findUpdatedAt = () => wrapper.findByTestId('updated-at'); + const findDisplayedDate = () => wrapper.findByTestId('abuse-report-date'); + const findAbuseReportDetails = () => wrapper.findComponent(AbuseReportDetails); const createComponent = () => { wrapper = shallowMountExtended(AbuseReportRow, { propsData: { report: mockAbuseReport, }, + stubs: { GlSprintf }, }); }; @@ -29,15 +38,49 @@ describe('AbuseReportRow', () => { }); it('displays correctly formatted title', () => { - const { reporter, reportedUser, category } = mockAbuseReport; + const { reporter, reportedUser, category, reportedUserPath, reporterPath } = mockAbuseReport; expect(findTitle().text()).toMatchInterpolatedText( `${reportedUser.name} reported for ${category} by ${reporter.name}`, ); + + const userLink = findLinks().at(0); + expect(userLink.text()).toEqual(reportedUser.name); + expect(userLink.attributes('href')).toEqual(reportedUserPath); + + const reporterLink = findLinks().at(1); + expect(reporterLink.text()).toEqual(reporter.name); + expect(reporterLink.attributes('href')).toEqual(reporterPath); }); - it('displays correctly formatted updated at', () => { - 
expect(findUpdatedAt().text()).toMatchInterpolatedText( - `Updated ${getTimeago().format(mockAbuseReport.updatedAt)}`, - ); + describe('displayed date', () => { + it('displays correctly formatted created at', () => { + expect(findDisplayedDate().text()).toMatchInterpolatedText( + `Created ${getTimeago().format(mockAbuseReport.createdAt)}`, + ); + }); + + describe('when sorted by updated_at', () => { + it('displays correctly formatted updated at', () => { + setWindowLocation(`?sort=${SORT_UPDATED_AT.sortDirection.ascending}`); + + createComponent(); + + expect(findDisplayedDate().text()).toMatchInterpolatedText( + `Updated ${getTimeago().format(mockAbuseReport.updatedAt)}`, + ); + }); + }); + }); + + it('renders AbuseReportDetails', () => { + expect(findAbuseReportDetails().exists()).toBe(true); + expect(findAbuseReportDetails().props('report')).toEqual(mockAbuseReport); + }); + + it('renders AbuseReportRowActions with the correct props', () => { + const actions = findAbuseReportActions(); + + expect(actions.exists()).toBe(true); + expect(actions.props('report')).toMatchObject(mockAbuseReport); }); }); diff --git a/spec/frontend/admin/abuse_reports/components/abuse_reports_filtered_search_bar_spec.js b/spec/frontend/admin/abuse_reports/components/abuse_reports_filtered_search_bar_spec.js index 9efab8403a0..990503c453d 100644 --- a/spec/frontend/admin/abuse_reports/components/abuse_reports_filtered_search_bar_spec.js +++ b/spec/frontend/admin/abuse_reports/components/abuse_reports_filtered_search_bar_spec.js @@ -58,21 +58,28 @@ describe('AbuseReportsFilteredSearchBar', () => { }); }); - it('sets status=open query when there is no initial status query', () => { - createComponent(); + it.each([undefined, 'invalid'])( + 'sets status=open query when initial status query is %s', + (status) => { + if (status) { + setWindowLocation(`?status=${status}`); + } - expect(updateHistory).toHaveBeenCalledWith({ - url: 'https://localhost/?status=open', - replace: true, - }); + 
createComponent(); - expect(findFilteredSearchBar().props('initialFilterValue')).toMatchObject([ - { - type: FILTERED_SEARCH_TOKEN_STATUS.type, - value: { data: 'open', operator: '=' }, - }, - ]); - }); + expect(updateHistory).toHaveBeenCalledWith({ + url: 'https://localhost/?status=open', + replace: true, + }); + + expect(findFilteredSearchBar().props('initialFilterValue')).toMatchObject([ + { + type: FILTERED_SEARCH_TOKEN_STATUS.type, + value: { data: 'open', operator: '=' }, + }, + ]); + }, + ); it('parses and passes search param to `FilteredSearchBar` component as `initialFilterValue` prop', () => { setWindowLocation('?status=closed&user=mr_abuser&reporter=ms_nitch'); diff --git a/spec/frontend/admin/abuse_reports/mock_data.js b/spec/frontend/admin/abuse_reports/mock_data.js index 778f055eb82..90289757a74 100644 --- a/spec/frontend/admin/abuse_reports/mock_data.js +++ b/spec/frontend/admin/abuse_reports/mock_data.js @@ -1,14 +1,30 @@ export const mockAbuseReports = [ { category: 'spam', + createdAt: '2018-10-03T05:46:38.977Z', updatedAt: '2022-12-07T06:45:39.977Z', reporter: { name: 'Ms. Admin' }, - reportedUser: { name: 'Mr. Abuser' }, + reportedUser: { name: 'Mr. Abuser', createdAt: '2017-09-01T05:46:38.977Z' }, + reportedUserPath: '/mr_abuser', + reporterPath: '/admin', + userBlocked: false, + blockUserPath: '/block/user/mr_abuser/path', + removeUserAndReportPath: '/remove/user/mr_abuser/and/report/path', + removeReportPath: '/remove/report/path', + message: 'message 1', }, { category: 'phishing', + createdAt: '2018-10-03T05:46:38.977Z', updatedAt: '2022-12-07T06:45:39.977Z', reporter: { name: 'Ms. Reporter' }, - reportedUser: { name: 'Mr. Phisher' }, + reportedUser: { name: 'Mr. 
Phisher', createdAt: '2016-09-01T05:46:38.977Z' }, + reportedUserPath: '/mr_phisher', + reporterPath: '/admin', + userBlocked: false, + blockUserPath: '/block/user/mr_phisher/path', + removeUserAndReportPath: '/remove/user/mr_phisher/and/report/path', + removeReportPath: '/remove/report/path', + message: 'message 2', }, ]; diff --git a/spec/frontend/admin/abuse_reports/utils_spec.js b/spec/frontend/admin/abuse_reports/utils_spec.js index 17f0b9acb26..3908edd3fdd 100644 --- a/spec/frontend/admin/abuse_reports/utils_spec.js +++ b/spec/frontend/admin/abuse_reports/utils_spec.js @@ -1,5 +1,8 @@ -import { FILTERED_SEARCH_TOKEN_CATEGORY } from '~/admin/abuse_reports/constants'; -import { buildFilteredSearchCategoryToken } from '~/admin/abuse_reports/utils'; +import { + FILTERED_SEARCH_TOKEN_CATEGORY, + FILTERED_SEARCH_TOKEN_STATUS, +} from '~/admin/abuse_reports/constants'; +import { buildFilteredSearchCategoryToken, isValidStatus } from '~/admin/abuse_reports/utils'; describe('buildFilteredSearchCategoryToken', () => { it('adds correctly formatted options to FILTERED_SEARCH_TOKEN_CATEGORY', () => { @@ -11,3 +14,18 @@ describe('buildFilteredSearchCategoryToken', () => { }); }); }); + +describe('isValidStatus', () => { + const validStatuses = FILTERED_SEARCH_TOKEN_STATUS.options.map((o) => o.value); + + it.each(validStatuses)( + 'returns true when status is an option value of FILTERED_SEARCH_TOKEN_STATUS', + (status) => { + expect(isValidStatus(status)).toBe(true); + }, + ); + + it('return false when status is not an option value of FILTERED_SEARCH_TOKEN_STATUS', () => { + expect(isValidStatus('invalid')).toBe(false); + }); +}); diff --git a/spec/frontend/admin/users/components/actions/actions_spec.js b/spec/frontend/admin/users/components/actions/actions_spec.js index 4aeaa5356b4..a5e7c6ebe21 100644 --- a/spec/frontend/admin/users/components/actions/actions_spec.js +++ b/spec/frontend/admin/users/components/actions/actions_spec.js @@ -1,4 +1,4 @@ -import { GlDropdownItem 
} from '@gitlab/ui'; +import { GlDisclosureDropdownItem } from '@gitlab/ui'; import { shallowMount } from '@vue/test-utils'; import Actions from '~/admin/users/components/actions'; import Delete from '~/admin/users/components/actions/delete.vue'; @@ -12,7 +12,7 @@ import { paths, userDeletionObstacles } from '../../mock_data'; describe('Action components', () => { let wrapper; - const findDropdownItem = () => wrapper.findComponent(GlDropdownItem); + const findDisclosureDropdownItem = () => wrapper.findComponent(GlDisclosureDropdownItem); const initComponent = ({ component, props } = {}) => { wrapper = shallowMount(component, { @@ -32,7 +32,7 @@ describe('Action components', () => { }, }); - expect(findDropdownItem().exists()).toBe(true); + expect(findDisclosureDropdownItem().exists()).toBe(true); }); }); @@ -52,7 +52,7 @@ describe('Action components', () => { }, }); - await findDropdownItem().vm.$emit('click'); + await findDisclosureDropdownItem().vm.$emit('action'); expect(eventHub.$emit).toHaveBeenCalledWith( EVENT_OPEN_DELETE_USER_MODAL, diff --git a/spec/frontend/admin/users/components/actions/delete_with_contributions_spec.js b/spec/frontend/admin/users/components/actions/delete_with_contributions_spec.js index 64a88aab2c2..606a5c779fb 100644 --- a/spec/frontend/admin/users/components/actions/delete_with_contributions_spec.js +++ b/spec/frontend/admin/users/components/actions/delete_with_contributions_spec.js @@ -1,5 +1,5 @@ import { GlLoadingIcon } from '@gitlab/ui'; -import { mountExtended } from 'helpers/vue_test_utils_helper'; +import { mount } from '@vue/test-utils'; import waitForPromises from 'helpers/wait_for_promises'; import DeleteWithContributions from '~/admin/users/components/actions/delete_with_contributions.vue'; import eventHub, { @@ -35,7 +35,7 @@ describe('DeleteWithContributions', () => { }; const createComponent = () => { - wrapper = mountExtended(DeleteWithContributions, { propsData: defaultPropsData }); + wrapper = 
mount(DeleteWithContributions, { propsData: defaultPropsData }); }; describe('when action is clicked', () => { @@ -47,10 +47,10 @@ describe('DeleteWithContributions', () => { }); it('displays loading icon and disables button', async () => { - await wrapper.trigger('click'); + await wrapper.find('button').trigger('click'); expect(wrapper.findComponent(GlLoadingIcon).exists()).toBe(true); - expect(wrapper.findByRole('menuitem').attributes()).toMatchObject({ + expect(wrapper.attributes()).toMatchObject({ disabled: 'disabled', 'aria-busy': 'true', }); @@ -67,7 +67,7 @@ describe('DeleteWithContributions', () => { }); it('emits event with association counts', async () => { - await wrapper.trigger('click'); + await wrapper.find('button').trigger('click'); await waitForPromises(); expect(associationsCount).toHaveBeenCalledWith(defaultPropsData.userId); @@ -92,7 +92,7 @@ describe('DeleteWithContributions', () => { }); it('emits event with error', async () => { - await wrapper.trigger('click'); + await wrapper.find('button').trigger('click'); await waitForPromises(); expect(eventHub.$emit).toHaveBeenCalledWith( diff --git a/spec/frontend/admin/users/components/user_actions_spec.js b/spec/frontend/admin/users/components/user_actions_spec.js index 1a2cc3e5c34..73d8c082bb9 100644 --- a/spec/frontend/admin/users/components/user_actions_spec.js +++ b/spec/frontend/admin/users/components/user_actions_spec.js @@ -1,4 +1,4 @@ -import { GlDropdownDivider } from '@gitlab/ui'; +import { GlDisclosureDropdownGroup } from '@gitlab/ui'; import { createMockDirective, getBinding } from 'helpers/vue_mock_directive'; import { shallowMountExtended } from 'helpers/vue_test_utils_helper'; import Actions from '~/admin/users/components/actions'; @@ -19,7 +19,7 @@ describe('AdminUserActions component', () => { const findEditButton = (id = user.id) => findUserActions(id).find('[data-testid="edit"]'); const findActionsDropdown = (id = user.id) => 
findUserActions(id).find('[data-testid="dropdown-toggle"]'); - const findDropdownDivider = () => wrapper.findComponent(GlDropdownDivider); + const findDisclosureGroup = () => wrapper.findComponent(GlDisclosureDropdownGroup); const initComponent = ({ actions = [], showButtonLabels } = {}) => { wrapper = shallowMountExtended(AdminUserActions, { @@ -104,8 +104,8 @@ describe('AdminUserActions component', () => { initComponent({ actions: [LDAP, ...DELETE_ACTIONS] }); }); - it('renders a dropdown divider', () => { - expect(findDropdownDivider().exists()).toBe(true); + it('renders a disclosure group', () => { + expect(findDisclosureGroup().exists()).toBe(true); }); it('only renders delete dropdown items for actions containing the word "delete"', () => { @@ -126,8 +126,8 @@ describe('AdminUserActions component', () => { }); describe('when there are no delete actions', () => { - it('does not render a dropdown divider', () => { - expect(findDropdownDivider().exists()).toBe(false); + it('does not render a disclosure group', () => { + expect(findDisclosureGroup().exists()).toBe(false); }); it('does not render a delete dropdown item', () => { diff --git a/spec/frontend/admin/users/new_spec.js b/spec/frontend/admin/users/new_spec.js index 5e5763822a8..eba5c87f470 100644 --- a/spec/frontend/admin/users/new_spec.js +++ b/spec/frontend/admin/users/new_spec.js @@ -1,20 +1,19 @@ +import newWithInternalUserRegex from 'test_fixtures/admin/users/new_with_internal_user_regex.html'; import { setupInternalUserRegexHandler, ID_USER_EMAIL, ID_USER_EXTERNAL, ID_WARNING, } from '~/admin/users/new'; -import { loadHTMLFixture, resetHTMLFixture } from 'helpers/fixtures'; +import { setHTMLFixture, resetHTMLFixture } from 'helpers/fixtures'; describe('admin/users/new', () => { - const FIXTURE = 'admin/users/new_with_internal_user_regex.html'; - let elExternal; let elUserEmail; let elWarningMessage; beforeEach(() => { - loadHTMLFixture(FIXTURE); + setHTMLFixture(newWithInternalUserRegex); 
setupInternalUserRegexHandler(); elExternal = document.getElementById(ID_USER_EXTERNAL); diff --git a/spec/frontend/alerts_settings/components/__snapshots__/alerts_form_spec.js.snap b/spec/frontend/alerts_settings/components/__snapshots__/alerts_form_spec.js.snap index 4a60d605cae..202a0a04192 100644 --- a/spec/frontend/alerts_settings/components/__snapshots__/alerts_form_spec.js.snap +++ b/spec/frontend/alerts_settings/components/__snapshots__/alerts_form_spec.js.snap @@ -61,6 +61,7 @@ exports[`Alert integration settings form default state should match the default items="[object Object]" noresultstext="No results found" placement="left" + popperoptions="[object Object]" resetbuttonlabel="" searchplaceholder="Search" selected="selecte_tmpl" diff --git a/spec/frontend/alerts_settings/components/alert_mapping_builder_spec.js b/spec/frontend/alerts_settings/components/alert_mapping_builder_spec.js index 1e125bdfd3a..2b8479eab6d 100644 --- a/spec/frontend/alerts_settings/components/alert_mapping_builder_spec.js +++ b/spec/frontend/alerts_settings/components/alert_mapping_builder_spec.js @@ -49,7 +49,7 @@ describe('AlertMappingBuilder', () => { const fallbackColumnIcon = findColumnInRow(0, 3).findComponent(GlIcon); expect(fallbackColumnIcon.exists()).toBe(true); - expect(fallbackColumnIcon.attributes('name')).toBe('question'); + expect(fallbackColumnIcon.attributes('name')).toBe('question-o'); expect(fallbackColumnIcon.attributes('title')).toBe(i18n.fallbackTooltip); }); diff --git a/spec/frontend/alerts_settings/components/alerts_settings_form_spec.js b/spec/frontend/alerts_settings/components/alerts_settings_form_spec.js index e0075aa71d9..b8575d8ab26 100644 --- a/spec/frontend/alerts_settings/components/alerts_settings_form_spec.js +++ b/spec/frontend/alerts_settings/components/alerts_settings_form_spec.js @@ -97,7 +97,7 @@ describe('AlertsSettingsForm', () => { expect(findFormFields().at(0).isVisible()).toBe(true); }); - it('disables the dropdown and shows help text 
when multi integrations are not supported', async () => { + it('disables the dropdown and shows help text when multi integrations are not supported', () => { createComponent({ props: { canAddIntegration: false } }); expect(findSelect().attributes('disabled')).toBe('disabled'); expect(findMultiSupportText().exists()).toBe(true); diff --git a/spec/frontend/alerts_settings/components/alerts_settings_wrapper_spec.js b/spec/frontend/alerts_settings/components/alerts_settings_wrapper_spec.js index 67d8619f157..8c5df06042c 100644 --- a/spec/frontend/alerts_settings/components/alerts_settings_wrapper_spec.js +++ b/spec/frontend/alerts_settings/components/alerts_settings_wrapper_spec.js @@ -429,7 +429,7 @@ describe('AlertsSettingsWrapper', () => { }); describe('Test alert', () => { - it('makes `updateTestAlert` service call', async () => { + it('makes `updateTestAlert` service call', () => { jest.spyOn(alertsUpdateService, 'updateTestAlert').mockResolvedValueOnce(); const testPayload = '{"title":"test"}'; findAlertsSettingsForm().vm.$emit('test-alert-payload', testPayload); diff --git a/spec/frontend/analytics/cycle_analytics/base_spec.js b/spec/frontend/analytics/cycle_analytics/base_spec.js index 033916eabcd..1a1d22626ea 100644 --- a/spec/frontend/analytics/cycle_analytics/base_spec.js +++ b/spec/frontend/analytics/cycle_analytics/base_spec.js @@ -137,6 +137,7 @@ describe('Value stream analytics component', () => { it('passes the paths to the filter bar', () => { expect(findFilters().props()).toEqual({ groupPath, + namespacePath: groupPath, endDate: createdBefore, hasDateRangeFilter: true, hasProjectFilter: false, diff --git a/spec/frontend/analytics/cycle_analytics/filter_bar_spec.js b/spec/frontend/analytics/cycle_analytics/filter_bar_spec.js index da7824adbf9..f1b3af39199 100644 --- a/spec/frontend/analytics/cycle_analytics/filter_bar_spec.js +++ b/spec/frontend/analytics/cycle_analytics/filter_bar_spec.js @@ -85,7 +85,7 @@ describe('Filter bar', () => { return 
shallowMount(FilterBar, { store: initialStore, propsData: { - groupPath: 'foo', + namespacePath: 'foo', }, stubs: { UrlSync, diff --git a/spec/frontend/analytics/cycle_analytics/mock_data.js b/spec/frontend/analytics/cycle_analytics/mock_data.js index 216e07844b8..f9587bf1967 100644 --- a/spec/frontend/analytics/cycle_analytics/mock_data.js +++ b/spec/frontend/analytics/cycle_analytics/mock_data.js @@ -214,7 +214,7 @@ export const group = { id: 1, name: 'foo', path: 'foo', - full_path: 'foo', + full_path: 'groups/foo', avatar_url: `${TEST_HOST}/images/home/nasa.svg`, }; diff --git a/spec/frontend/analytics/cycle_analytics/utils_spec.js b/spec/frontend/analytics/cycle_analytics/utils_spec.js index e6d17edcadc..ab5d78bde51 100644 --- a/spec/frontend/analytics/cycle_analytics/utils_spec.js +++ b/spec/frontend/analytics/cycle_analytics/utils_spec.js @@ -91,7 +91,7 @@ describe('Value stream analytics utils', () => { const projectId = '5'; const createdAfter = '2021-09-01'; const createdBefore = '2021-11-06'; - const groupPath = 'fake-group'; + const groupPath = 'groups/fake-group'; const namespaceName = 'Fake project'; const namespaceFullPath = 'fake-group/fake-project'; const labelsPath = '/fake-group/fake-project/-/labels.json'; @@ -130,7 +130,7 @@ describe('Value stream analytics utils', () => { }); it('sets the endpoints', () => { - expect(res.groupPath).toBe(`groups/${groupPath}`); + expect(res.groupPath).toBe(groupPath); }); it('returns null when there is no stage', () => { @@ -158,10 +158,13 @@ describe('Value stream analytics utils', () => { describe('with features set', () => { const fakeFeatures = { cycleAnalyticsForGroups: true }; + beforeEach(() => { + window.gon = { licensed_features: fakeFeatures }; + }); + it('sets the feature flags', () => { res = buildCycleAnalyticsInitialData({ ...rawData, - gon: { licensed_features: fakeFeatures }, }); expect(res.features).toMatchObject(fakeFeatures); }); diff --git 
a/spec/frontend/analytics/cycle_analytics/value_stream_filters_spec.js b/spec/frontend/analytics/cycle_analytics/value_stream_filters_spec.js index 160f6ce0563..c6915c9054c 100644 --- a/spec/frontend/analytics/cycle_analytics/value_stream_filters_spec.js +++ b/spec/frontend/analytics/cycle_analytics/value_stream_filters_spec.js @@ -10,12 +10,15 @@ import { selectedProjects, } from './mock_data'; +const { path } = currentGroup; +const groupPath = `groups/${path}`; + function createComponent(props = {}) { return shallowMount(ValueStreamFilters, { propsData: { selectedProjects, - groupId: currentGroup.id, - groupPath: currentGroup.fullPath, + groupPath, + namespacePath: currentGroup.fullPath, startDate, endDate, ...props, diff --git a/spec/frontend/analytics/shared/components/projects_dropdown_filter_spec.js b/spec/frontend/analytics/shared/components/projects_dropdown_filter_spec.js index d2cbe0d39e4..33801fb8552 100644 --- a/spec/frontend/analytics/shared/components/projects_dropdown_filter_spec.js +++ b/spec/frontend/analytics/shared/components/projects_dropdown_filter_spec.js @@ -1,4 +1,4 @@ -import { GlDropdown, GlDropdownItem, GlTruncate } from '@gitlab/ui'; +import { GlDropdown, GlDropdownItem, GlTruncate, GlSearchBoxByType } from '@gitlab/ui'; import { nextTick } from 'vue'; import { mountExtended } from 'helpers/vue_test_utils_helper'; import { stubComponent } from 'helpers/stub_component'; @@ -31,6 +31,7 @@ const projects = [ const MockGlDropdown = stubComponent(GlDropdown, { template: `
+
@@ -112,6 +113,8 @@ describe('ProjectsDropdownFilter component', () => { const selectedIds = () => wrapper.vm.selectedProjects.map(({ id }) => id); + const findSearchBoxByType = () => wrapper.findComponent(GlSearchBoxByType); + describe('queryParams are applied when fetching data', () => { beforeEach(() => { createComponent({ @@ -123,9 +126,7 @@ describe('ProjectsDropdownFilter component', () => { }); it('applies the correct queryParams when making an api call', async () => { - // setData usage is discouraged. See https://gitlab.com/groups/gitlab-org/-/epics/7330 for details - // eslint-disable-next-line no-restricted-syntax - wrapper.setData({ searchTerm: 'gitlab' }); + findSearchBoxByType().vm.$emit('input', 'gitlab'); expect(spyQuery).toHaveBeenCalledTimes(1); @@ -144,6 +145,7 @@ describe('ProjectsDropdownFilter component', () => { describe('highlighted items', () => { const blockDefaultProps = { multiSelect: true }; + beforeEach(() => { createComponent(blockDefaultProps); }); @@ -151,6 +153,7 @@ describe('ProjectsDropdownFilter component', () => { describe('with no project selected', () => { it('does not render the highlighted items', async () => { await createWithMockDropdown(blockDefaultProps); + expect(findSelectedDropdownItems().length).toBe(0); }); @@ -188,8 +191,7 @@ describe('ProjectsDropdownFilter component', () => { expect(findSelectedProjectsLabel().text()).toBe('2 projects selected'); - findClearAllButton().trigger('click'); - await nextTick(); + await findClearAllButton().trigger('click'); expect(findSelectedProjectsLabel().text()).toBe('Select projects'); }); @@ -201,16 +203,14 @@ describe('ProjectsDropdownFilter component', () => { await createWithMockDropdown({ multiSelect: true }); selectDropdownItemAtIndex(0); - // setData usage is discouraged. 
See https://gitlab.com/groups/gitlab-org/-/epics/7330 for details - // eslint-disable-next-line no-restricted-syntax - wrapper.setData({ searchTerm: 'this is a very long search string' }); + findSearchBoxByType().vm.$emit('input', 'this is a very long search string'); }); - it('renders the highlighted items', async () => { + it('renders the highlighted items', () => { expect(findUnhighlightedItems().findAll('li').length).toBe(1); }); - it('hides the unhighlighted items that do not match the string', async () => { + it('hides the unhighlighted items that do not match the string', () => { expect(findUnhighlightedItems().findAll('li').length).toBe(1); expect(findUnhighlightedItems().text()).toContain('No matching results'); }); @@ -351,17 +351,19 @@ describe('ProjectsDropdownFilter component', () => { it('should remove from selection when clicked again', () => { selectDropdownItemAtIndex(0); + expect(selectedIds()).toEqual([projects[0].id]); selectDropdownItemAtIndex(0); + expect(selectedIds()).toEqual([]); }); it('renders the correct placeholder text when multiple projects are selected', async () => { selectDropdownItemAtIndex(0); selectDropdownItemAtIndex(1); - await nextTick(); + expect(findDropdownButton().text()).toBe('2 projects selected'); }); }); diff --git a/spec/frontend/api/projects_api_spec.js b/spec/frontend/api/projects_api_spec.js index 2de56fae0c2..4ceed885e6e 100644 --- a/spec/frontend/api/projects_api_spec.js +++ b/spec/frontend/api/projects_api_spec.js @@ -9,14 +9,11 @@ describe('~/api/projects_api.js', () => { let mock; const projectId = 1; - const setfullPathProjectSearch = (value) => { - window.gon.features.fullPathProjectSearch = value; - }; beforeEach(() => { mock = new MockAdapter(axios); - window.gon = { api_version: 'v7', features: { fullPathProjectSearch: true } }; + window.gon = { api_version: 'v7' }; }); afterEach(() => { @@ -68,17 +65,18 @@ describe('~/api/projects_api.js', () => { expect(data.data).toEqual(expectedProjects); }); }); + 
}); - it('does not search namespaces if fullPathProjectSearch is disabled', () => { - setfullPathProjectSearch(false); - const expectedParams = { params: { per_page: 20, search: 'group/project1', simple: true } }; - const query = 'group/project1'; + describe('createProject', () => { + it('posts to the correct URL and returns the data', () => { + const body = { name: 'test project' }; + const expectedUrl = '/api/v7/projects.json'; + const expectedRes = { id: 999, name: 'test project' }; - mock.onGet(expectedUrl).reply(HTTP_STATUS_OK, { data: expectedProjects }); + mock.onPost(expectedUrl, body).replyOnce(HTTP_STATUS_OK, { data: expectedRes }); - return projectsApi.getProjects(query, options).then(({ data }) => { - expect(axios.get).toHaveBeenCalledWith(expectedUrl, expectedParams); - expect(data.data).toEqual(expectedProjects); + return projectsApi.createProject(body).then(({ data }) => { + expect(data).toStrictEqual(expectedRes); }); }); }); diff --git a/spec/frontend/api/user_api_spec.js b/spec/frontend/api/user_api_spec.js index 6636d77a09b..a879c229581 100644 --- a/spec/frontend/api/user_api_spec.js +++ b/spec/frontend/api/user_api_spec.js @@ -1,6 +1,13 @@ import MockAdapter from 'axios-mock-adapter'; -import { followUser, unfollowUser, associationsCount, updateUserStatus } from '~/api/user_api'; +import projects from 'test_fixtures/api/users/projects/get.json'; +import { + followUser, + unfollowUser, + associationsCount, + updateUserStatus, + getUserProjects, +} from '~/api/user_api'; import axios from '~/lib/utils/axios_utils'; import { HTTP_STATUS_OK } from '~/lib/utils/http_status'; import { @@ -91,4 +98,18 @@ describe('~/api/user_api', () => { expect(JSON.parse(axiosMock.history.patch[0].data)).toEqual(expectedData); }); }); + + describe('getUserProjects', () => { + it('calls correct URL and returns expected response', async () => { + const expectedUrl = '/api/v4/users/1/projects'; + const expectedResponse = { data: projects }; + + 
axiosMock.onGet(expectedUrl).replyOnce(HTTP_STATUS_OK, expectedResponse); + + await expect(getUserProjects(1)).resolves.toEqual( + expect.objectContaining({ data: expectedResponse }), + ); + expect(axiosMock.history.get[0].url).toBe(expectedUrl); + }); + }); }); diff --git a/spec/frontend/artifacts/components/app_spec.js b/spec/frontend/artifacts/components/app_spec.js deleted file mode 100644 index 931c4703e95..00000000000 --- a/spec/frontend/artifacts/components/app_spec.js +++ /dev/null @@ -1,109 +0,0 @@ -import { GlSkeletonLoader } from '@gitlab/ui'; -import VueApollo from 'vue-apollo'; -import Vue from 'vue'; -import { numberToHumanSize } from '~/lib/utils/number_utils'; -import ArtifactsApp from '~/artifacts/components/app.vue'; -import JobArtifactsTable from '~/artifacts/components/job_artifacts_table.vue'; -import getBuildArtifactsSizeQuery from '~/artifacts/graphql/queries/get_build_artifacts_size.query.graphql'; -import { shallowMountExtended } from 'helpers/vue_test_utils_helper'; -import createMockApollo from 'helpers/mock_apollo_helper'; -import waitForPromises from 'helpers/wait_for_promises'; -import { PAGE_TITLE, TOTAL_ARTIFACTS_SIZE, SIZE_UNKNOWN } from '~/artifacts/constants'; - -const TEST_BUILD_ARTIFACTS_SIZE = 1024; -const TEST_PROJECT_PATH = 'project/path'; -const TEST_PROJECT_ID = 'gid://gitlab/Project/22'; - -const createBuildArtifactsSizeResponse = (buildArtifactsSize) => ({ - data: { - project: { - __typename: 'Project', - id: TEST_PROJECT_ID, - statistics: { - __typename: 'ProjectStatistics', - buildArtifactsSize, - }, - }, - }, -}); - -Vue.use(VueApollo); - -describe('ArtifactsApp component', () => { - let wrapper; - let apolloProvider; - let getBuildArtifactsSizeSpy; - - const findTitle = () => wrapper.findByTestId('artifacts-page-title'); - const findBuildArtifactsSize = () => wrapper.findByTestId('build-artifacts-size'); - const findJobArtifactsTable = () => wrapper.findComponent(JobArtifactsTable); - const findSkeletonLoader = () => 
wrapper.findComponent(GlSkeletonLoader); - - const createComponent = () => { - wrapper = shallowMountExtended(ArtifactsApp, { - provide: { projectPath: 'project/path' }, - apolloProvider, - }); - }; - - beforeEach(() => { - getBuildArtifactsSizeSpy = jest.fn(); - - apolloProvider = createMockApollo([[getBuildArtifactsSizeQuery, getBuildArtifactsSizeSpy]]); - }); - - describe('when loading', () => { - beforeEach(() => { - // Promise that never resolves so it's always loading - getBuildArtifactsSizeSpy.mockReturnValue(new Promise(() => {})); - - createComponent(); - }); - - it('shows the page title', () => { - expect(findTitle().text()).toBe(PAGE_TITLE); - }); - - it('shows a skeleton while loading the artifacts size', () => { - expect(findSkeletonLoader().exists()).toBe(true); - }); - - it('shows the job artifacts table', () => { - expect(findJobArtifactsTable().exists()).toBe(true); - }); - - it('does not show message', () => { - expect(findBuildArtifactsSize().text()).toBe(''); - }); - - it('calls apollo query', () => { - expect(getBuildArtifactsSizeSpy).toHaveBeenCalledWith({ projectPath: TEST_PROJECT_PATH }); - }); - }); - - describe.each` - buildArtifactsSize | expectedText - ${TEST_BUILD_ARTIFACTS_SIZE} | ${numberToHumanSize(TEST_BUILD_ARTIFACTS_SIZE)} - ${null} | ${SIZE_UNKNOWN} - `('when buildArtifactsSize is $buildArtifactsSize', ({ buildArtifactsSize, expectedText }) => { - beforeEach(async () => { - getBuildArtifactsSizeSpy.mockResolvedValue( - createBuildArtifactsSizeResponse(buildArtifactsSize), - ); - - createComponent(); - - await waitForPromises(); - }); - - it('hides loader', () => { - expect(findSkeletonLoader().exists()).toBe(false); - }); - - it('shows the size', () => { - expect(findBuildArtifactsSize().text()).toMatchInterpolatedText( - `${TOTAL_ARTIFACTS_SIZE} ${expectedText}`, - ); - }); - }); -}); diff --git a/spec/frontend/artifacts/components/artifact_row_spec.js b/spec/frontend/artifacts/components/artifact_row_spec.js deleted file mode 
100644 index 268772ed4c0..00000000000 --- a/spec/frontend/artifacts/components/artifact_row_spec.js +++ /dev/null @@ -1,105 +0,0 @@ -import { GlBadge, GlButton, GlFriendlyWrap, GlFormCheckbox } from '@gitlab/ui'; -import mockGetJobArtifactsResponse from 'test_fixtures/graphql/artifacts/graphql/queries/get_job_artifacts.query.graphql.json'; -import { numberToHumanSize } from '~/lib/utils/number_utils'; -import { shallowMountExtended } from 'helpers/vue_test_utils_helper'; -import waitForPromises from 'helpers/wait_for_promises'; -import ArtifactRow from '~/artifacts/components/artifact_row.vue'; -import { BULK_DELETE_FEATURE_FLAG } from '~/artifacts/constants'; - -describe('ArtifactRow component', () => { - let wrapper; - - const artifact = mockGetJobArtifactsResponse.data.project.jobs.nodes[0].artifacts.nodes[0]; - - const findName = () => wrapper.findByTestId('job-artifact-row-name'); - const findBadge = () => wrapper.findComponent(GlBadge); - const findSize = () => wrapper.findByTestId('job-artifact-row-size'); - const findDownloadButton = () => wrapper.findByTestId('job-artifact-row-download-button'); - const findDeleteButton = () => wrapper.findByTestId('job-artifact-row-delete-button'); - const findCheckbox = () => wrapper.findComponent(GlFormCheckbox); - - const createComponent = ({ canDestroyArtifacts = true, glFeatures = {} } = {}) => { - wrapper = shallowMountExtended(ArtifactRow, { - propsData: { - artifact, - isSelected: false, - isLoading: false, - isLastRow: false, - }, - provide: { canDestroyArtifacts, glFeatures }, - stubs: { GlBadge, GlButton, GlFriendlyWrap }, - }); - }; - - describe('artifact details', () => { - beforeEach(async () => { - createComponent(); - - await waitForPromises(); - }); - - it('displays the artifact name and type', () => { - expect(findName().text()).toContain(artifact.name); - expect(findBadge().text()).toBe(artifact.fileType.toLowerCase()); - }); - - it('displays the artifact size', () => { - 
expect(findSize().text()).toBe(numberToHumanSize(artifact.size)); - }); - - it('displays the download button as a link to the download path', () => { - expect(findDownloadButton().attributes('href')).toBe(artifact.downloadPath); - }); - }); - - describe('delete button', () => { - it('does not show when user does not have permission', () => { - createComponent({ canDestroyArtifacts: false }); - - expect(findDeleteButton().exists()).toBe(false); - }); - - it('shows when user has permission', () => { - createComponent(); - - expect(findDeleteButton().exists()).toBe(true); - }); - - it('emits the delete event when clicked', async () => { - createComponent(); - - expect(wrapper.emitted('delete')).toBeUndefined(); - - findDeleteButton().trigger('click'); - await waitForPromises(); - - expect(wrapper.emitted('delete')).toBeDefined(); - }); - }); - - describe('bulk delete checkbox', () => { - describe('with permission and feature flag enabled', () => { - beforeEach(() => { - createComponent({ glFeatures: { [BULK_DELETE_FEATURE_FLAG]: true } }); - }); - - it('emits selectArtifact when toggled', () => { - findCheckbox().vm.$emit('input', true); - - expect(wrapper.emitted('selectArtifact')).toStrictEqual([[artifact, true]]); - }); - }); - - it('is not shown without permission', () => { - createComponent({ canDestroyArtifacts: false }); - - expect(findCheckbox().exists()).toBe(false); - }); - - it('is not shown with feature flag disabled', () => { - createComponent(); - - expect(findCheckbox().exists()).toBe(false); - }); - }); -}); diff --git a/spec/frontend/artifacts/components/artifacts_bulk_delete_spec.js b/spec/frontend/artifacts/components/artifacts_bulk_delete_spec.js deleted file mode 100644 index 876906b2c3c..00000000000 --- a/spec/frontend/artifacts/components/artifacts_bulk_delete_spec.js +++ /dev/null @@ -1,96 +0,0 @@ -import { GlSprintf, GlModal } from '@gitlab/ui'; -import Vue from 'vue'; -import VueApollo from 'vue-apollo'; -import mockGetJobArtifactsResponse 
from 'test_fixtures/graphql/artifacts/graphql/queries/get_job_artifacts.query.graphql.json'; -import createMockApollo from 'helpers/mock_apollo_helper'; -import { mountExtended } from 'helpers/vue_test_utils_helper'; -import waitForPromises from 'helpers/wait_for_promises'; -import ArtifactsBulkDelete from '~/artifacts/components/artifacts_bulk_delete.vue'; -import bulkDestroyArtifactsMutation from '~/artifacts/graphql/mutations/bulk_destroy_job_artifacts.mutation.graphql'; - -Vue.use(VueApollo); - -describe('ArtifactsBulkDelete component', () => { - let wrapper; - let requestHandlers; - - const projectId = '123'; - const selectedArtifacts = [ - mockGetJobArtifactsResponse.data.project.jobs.nodes[0].artifacts.nodes[0].id, - mockGetJobArtifactsResponse.data.project.jobs.nodes[0].artifacts.nodes[1].id, - ]; - - const findText = () => wrapper.findComponent(GlSprintf).text(); - const findDeleteButton = () => wrapper.findByTestId('bulk-delete-delete-button'); - const findClearButton = () => wrapper.findByTestId('bulk-delete-clear-button'); - const findModal = () => wrapper.findComponent(GlModal); - - const createComponent = ({ - handlers = { - bulkDestroyArtifactsMutation: jest.fn(), - }, - } = {}) => { - requestHandlers = handlers; - wrapper = mountExtended(ArtifactsBulkDelete, { - apolloProvider: createMockApollo([ - [bulkDestroyArtifactsMutation, requestHandlers.bulkDestroyArtifactsMutation], - ]), - propsData: { - selectedArtifacts, - queryVariables: {}, - isLoading: false, - isLastRow: false, - }, - provide: { projectId }, - }); - }; - - describe('selected artifacts box', () => { - beforeEach(async () => { - createComponent(); - await waitForPromises(); - }); - - it('displays selected artifacts count', () => { - expect(findText()).toContain(String(selectedArtifacts.length)); - }); - - it('opens the confirmation modal when the delete button is clicked', async () => { - expect(findModal().props('visible')).toBe(false); - - findDeleteButton().trigger('click'); - await 
waitForPromises(); - - expect(findModal().props('visible')).toBe(true); - }); - - it('emits clearSelectedArtifacts event when the clear button is clicked', () => { - findClearButton().trigger('click'); - - expect(wrapper.emitted('clearSelectedArtifacts')).toBeDefined(); - }); - }); - - describe('bulk delete confirmation modal', () => { - beforeEach(async () => { - createComponent(); - findDeleteButton().trigger('click'); - await waitForPromises(); - }); - - it('calls the bulk delete mutation with the selected artifacts on confirm', () => { - findModal().vm.$emit('primary'); - - expect(requestHandlers.bulkDestroyArtifactsMutation).toHaveBeenCalledWith({ - projectId: `gid://gitlab/Project/${projectId}`, - ids: selectedArtifacts, - }); - }); - - it('does not call the bulk delete mutation on cancel', () => { - findModal().vm.$emit('cancel'); - - expect(requestHandlers.bulkDestroyArtifactsMutation).not.toHaveBeenCalled(); - }); - }); -}); diff --git a/spec/frontend/artifacts/components/artifacts_table_row_details_spec.js b/spec/frontend/artifacts/components/artifacts_table_row_details_spec.js deleted file mode 100644 index 6bf3498f9b0..00000000000 --- a/spec/frontend/artifacts/components/artifacts_table_row_details_spec.js +++ /dev/null @@ -1,137 +0,0 @@ -import { GlModal } from '@gitlab/ui'; -import Vue from 'vue'; -import VueApollo from 'vue-apollo'; -import getJobArtifactsResponse from 'test_fixtures/graphql/artifacts/graphql/queries/get_job_artifacts.query.graphql.json'; -import waitForPromises from 'helpers/wait_for_promises'; -import ArtifactsTableRowDetails from '~/artifacts/components/artifacts_table_row_details.vue'; -import ArtifactRow from '~/artifacts/components/artifact_row.vue'; -import ArtifactDeleteModal from '~/artifacts/components/artifact_delete_modal.vue'; -import createMockApollo from 'helpers/mock_apollo_helper'; -import { mountExtended } from 'helpers/vue_test_utils_helper'; -import destroyArtifactMutation from 
'~/artifacts/graphql/mutations/destroy_artifact.mutation.graphql'; -import { I18N_DESTROY_ERROR, I18N_MODAL_TITLE } from '~/artifacts/constants'; -import { createAlert } from '~/alert'; - -jest.mock('~/alert'); - -const { artifacts } = getJobArtifactsResponse.data.project.jobs.nodes[0]; -const refetchArtifacts = jest.fn(); - -Vue.use(VueApollo); - -describe('ArtifactsTableRowDetails component', () => { - let wrapper; - let requestHandlers; - - const findModal = () => wrapper.findComponent(GlModal); - - const createComponent = ({ - handlers = { - destroyArtifactMutation: jest.fn(), - }, - selectedArtifacts = [], - } = {}) => { - requestHandlers = handlers; - wrapper = mountExtended(ArtifactsTableRowDetails, { - apolloProvider: createMockApollo([ - [destroyArtifactMutation, requestHandlers.destroyArtifactMutation], - ]), - propsData: { - artifacts, - selectedArtifacts, - refetchArtifacts, - queryVariables: {}, - }, - provide: { canDestroyArtifacts: true }, - data() { - return { deletingArtifactId: null }; - }, - }); - }; - - describe('passes correct props', () => { - beforeEach(() => { - createComponent(); - }); - - it('to the artifact rows', () => { - [0, 1, 2].forEach((index) => { - expect(wrapper.findAllComponents(ArtifactRow).at(index).props()).toMatchObject({ - artifact: artifacts.nodes[index], - }); - }); - }); - }); - - describe('when the artifact row emits the delete event', () => { - it('shows the artifact delete modal', async () => { - createComponent(); - await waitForPromises(); - - expect(findModal().props('visible')).toBe(false); - - await wrapper.findComponent(ArtifactRow).vm.$emit('delete'); - - expect(findModal().props('visible')).toBe(true); - expect(findModal().props('title')).toBe(I18N_MODAL_TITLE(artifacts.nodes[0].name)); - }); - }); - - describe('when the artifact delete modal emits its primary event', () => { - it('triggers the destroyArtifact GraphQL mutation', async () => { - createComponent(); - await waitForPromises(); - - 
wrapper.findComponent(ArtifactRow).vm.$emit('delete'); - wrapper.findComponent(ArtifactDeleteModal).vm.$emit('primary'); - - expect(requestHandlers.destroyArtifactMutation).toHaveBeenCalledWith({ - id: artifacts.nodes[0].id, - }); - }); - - it('displays an alert message and refetches artifacts when the mutation fails', async () => { - createComponent({ - destroyArtifactMutation: jest.fn().mockRejectedValue(new Error('Error!')), - }); - await waitForPromises(); - - expect(wrapper.emitted('refetch')).toBeUndefined(); - - wrapper.findComponent(ArtifactRow).vm.$emit('delete'); - wrapper.findComponent(ArtifactDeleteModal).vm.$emit('primary'); - await waitForPromises(); - - expect(createAlert).toHaveBeenCalledWith({ message: I18N_DESTROY_ERROR }); - expect(wrapper.emitted('refetch')).toBeDefined(); - }); - }); - - describe('when the artifact delete modal is cancelled', () => { - it('does not trigger the destroyArtifact GraphQL mutation', async () => { - createComponent(); - await waitForPromises(); - - wrapper.findComponent(ArtifactRow).vm.$emit('delete'); - wrapper.findComponent(ArtifactDeleteModal).vm.$emit('cancel'); - - expect(requestHandlers.destroyArtifactMutation).not.toHaveBeenCalled(); - }); - }); - - describe('bulk delete selection', () => { - it('is not selected for unselected artifact', async () => { - createComponent(); - await waitForPromises(); - - expect(wrapper.findAllComponents(ArtifactRow).at(0).props('isSelected')).toBe(false); - }); - - it('is selected for selected artifacts', async () => { - createComponent({ selectedArtifacts: [artifacts.nodes[0].id] }); - await waitForPromises(); - - expect(wrapper.findAllComponents(ArtifactRow).at(0).props('isSelected')).toBe(true); - }); - }); -}); diff --git a/spec/frontend/artifacts/components/feedback_banner_spec.js b/spec/frontend/artifacts/components/feedback_banner_spec.js deleted file mode 100644 index af9599daefa..00000000000 --- a/spec/frontend/artifacts/components/feedback_banner_spec.js +++ /dev/null 
@@ -1,59 +0,0 @@ -import { GlBanner } from '@gitlab/ui'; -import { shallowMount } from '@vue/test-utils'; -import FeedbackBanner from '~/artifacts/components/feedback_banner.vue'; -import { makeMockUserCalloutDismisser } from 'helpers/mock_user_callout_dismisser'; -import { - I18N_FEEDBACK_BANNER_TITLE, - I18N_FEEDBACK_BANNER_BUTTON, - FEEDBACK_URL, -} from '~/artifacts/constants'; - -const mockBannerImagePath = 'banner/image/path'; - -describe('Artifacts management feedback banner', () => { - let wrapper; - let userCalloutDismissSpy; - - const findBanner = () => wrapper.findComponent(GlBanner); - - const createComponent = ({ shouldShowCallout = true } = {}) => { - userCalloutDismissSpy = jest.fn(); - - wrapper = shallowMount(FeedbackBanner, { - provide: { - artifactsManagementFeedbackImagePath: mockBannerImagePath, - }, - stubs: { - UserCalloutDismisser: makeMockUserCalloutDismisser({ - dismiss: userCalloutDismissSpy, - shouldShowCallout, - }), - }, - }); - }; - - it('is displayed with the correct props', () => { - createComponent(); - - expect(findBanner().props()).toMatchObject({ - title: I18N_FEEDBACK_BANNER_TITLE, - buttonText: I18N_FEEDBACK_BANNER_BUTTON, - buttonLink: FEEDBACK_URL, - svgPath: mockBannerImagePath, - }); - }); - - it('dismisses the callout when closed', () => { - createComponent(); - - findBanner().vm.$emit('close'); - - expect(userCalloutDismissSpy).toHaveBeenCalled(); - }); - - it('is not displayed once it has been dismissed', () => { - createComponent({ shouldShowCallout: false }); - - expect(findBanner().exists()).toBe(false); - }); -}); diff --git a/spec/frontend/artifacts/components/job_artifacts_table_spec.js b/spec/frontend/artifacts/components/job_artifacts_table_spec.js deleted file mode 100644 index 40f3c9633ab..00000000000 --- a/spec/frontend/artifacts/components/job_artifacts_table_spec.js +++ /dev/null @@ -1,473 +0,0 @@ -import { - GlLoadingIcon, - GlTable, - GlLink, - GlBadge, - GlPagination, - GlModal, - GlFormCheckbox, -} from 
'@gitlab/ui'; -import Vue from 'vue'; -import VueApollo from 'vue-apollo'; -import getJobArtifactsResponse from 'test_fixtures/graphql/artifacts/graphql/queries/get_job_artifacts.query.graphql.json'; -import CiIcon from '~/vue_shared/components/ci_icon.vue'; -import waitForPromises from 'helpers/wait_for_promises'; -import JobArtifactsTable from '~/artifacts/components/job_artifacts_table.vue'; -import FeedbackBanner from '~/artifacts/components/feedback_banner.vue'; -import ArtifactsTableRowDetails from '~/artifacts/components/artifacts_table_row_details.vue'; -import ArtifactDeleteModal from '~/artifacts/components/artifact_delete_modal.vue'; -import ArtifactsBulkDelete from '~/artifacts/components/artifacts_bulk_delete.vue'; -import createMockApollo from 'helpers/mock_apollo_helper'; -import { mountExtended } from 'helpers/vue_test_utils_helper'; -import getJobArtifactsQuery from '~/artifacts/graphql/queries/get_job_artifacts.query.graphql'; -import { getIdFromGraphQLId } from '~/graphql_shared/utils'; -import { - ARCHIVE_FILE_TYPE, - JOBS_PER_PAGE, - I18N_FETCH_ERROR, - INITIAL_CURRENT_PAGE, - BULK_DELETE_FEATURE_FLAG, -} from '~/artifacts/constants'; -import { totalArtifactsSizeForJob } from '~/artifacts/utils'; -import { createAlert } from '~/alert'; - -jest.mock('~/alert'); - -Vue.use(VueApollo); - -describe('JobArtifactsTable component', () => { - let wrapper; - let requestHandlers; - - const mockToastShow = jest.fn(); - - const findBanner = () => wrapper.findComponent(FeedbackBanner); - - const findLoadingState = () => wrapper.findComponent(GlLoadingIcon); - const findTable = () => wrapper.findComponent(GlTable); - const findDetailsRows = () => wrapper.findAllComponents(ArtifactsTableRowDetails); - const findDetailsInRow = (i) => - findTable().findAll('tbody tr').at(i).findComponent(ArtifactsTableRowDetails); - - const findCount = () => wrapper.findByTestId('job-artifacts-count'); - const findCountAt = (i) => 
wrapper.findAllByTestId('job-artifacts-count').at(i); - - const findModal = () => wrapper.findComponent(GlModal); - - const findStatuses = () => wrapper.findAllByTestId('job-artifacts-job-status'); - const findSuccessfulJobStatus = () => findStatuses().at(0); - const findFailedJobStatus = () => findStatuses().at(1); - - const findLinks = () => wrapper.findAllComponents(GlLink); - const findJobLink = () => findLinks().at(0); - const findPipelineLink = () => findLinks().at(1); - const findRefLink = () => findLinks().at(2); - const findCommitLink = () => findLinks().at(3); - - const findSize = () => wrapper.findByTestId('job-artifacts-size'); - const findCreated = () => wrapper.findByTestId('job-artifacts-created'); - - const findDownloadButton = () => wrapper.findByTestId('job-artifacts-download-button'); - const findBrowseButton = () => wrapper.findByTestId('job-artifacts-browse-button'); - const findDeleteButton = () => wrapper.findByTestId('job-artifacts-delete-button'); - const findArtifactDeleteButton = () => wrapper.findByTestId('job-artifact-row-delete-button'); - - // first checkbox is a "select all", this finder should get the first job checkbox - const findJobCheckbox = () => wrapper.findAllComponents(GlFormCheckbox).at(1); - const findAnyCheckbox = () => wrapper.findComponent(GlFormCheckbox); - const findBulkDelete = () => wrapper.findComponent(ArtifactsBulkDelete); - - const findPagination = () => wrapper.findComponent(GlPagination); - const setPage = async (page) => { - findPagination().vm.$emit('input', page); - await waitForPromises(); - }; - - let enoughJobsToPaginate = [...getJobArtifactsResponse.data.project.jobs.nodes]; - while (enoughJobsToPaginate.length <= JOBS_PER_PAGE) { - enoughJobsToPaginate = [ - ...enoughJobsToPaginate, - ...getJobArtifactsResponse.data.project.jobs.nodes, - ]; - } - const getJobArtifactsResponseThatPaginates = { - data: { - project: { - jobs: { - nodes: enoughJobsToPaginate, - pageInfo: { 
...getJobArtifactsResponse.data.project.jobs.pageInfo, hasNextPage: true }, - }, - }, - }, - }; - - const job = getJobArtifactsResponse.data.project.jobs.nodes[0]; - const archiveArtifact = job.artifacts.nodes.find( - (artifact) => artifact.fileType === ARCHIVE_FILE_TYPE, - ); - - const createComponent = ({ - handlers = { - getJobArtifactsQuery: jest.fn().mockResolvedValue(getJobArtifactsResponse), - }, - data = {}, - canDestroyArtifacts = true, - glFeatures = {}, - } = {}) => { - requestHandlers = handlers; - wrapper = mountExtended(JobArtifactsTable, { - apolloProvider: createMockApollo([ - [getJobArtifactsQuery, requestHandlers.getJobArtifactsQuery], - ]), - provide: { - projectPath: 'project/path', - projectId: 'gid://projects/id', - canDestroyArtifacts, - artifactsManagementFeedbackImagePath: 'banner/image/path', - glFeatures, - }, - mocks: { - $toast: { - show: mockToastShow, - }, - }, - data() { - return data; - }, - }); - }; - - it('renders feedback banner', () => { - createComponent(); - - expect(findBanner().exists()).toBe(true); - }); - - it('when loading, shows a loading state', () => { - createComponent(); - - expect(findLoadingState().exists()).toBe(true); - }); - - it('on error, shows an alert', async () => { - createComponent({ - handlers: { - getJobArtifactsQuery: jest.fn().mockRejectedValue(new Error('Error!')), - }, - }); - - await waitForPromises(); - - expect(createAlert).toHaveBeenCalledWith({ message: I18N_FETCH_ERROR }); - }); - - it('with data, renders the table', async () => { - createComponent(); - - await waitForPromises(); - - expect(findTable().exists()).toBe(true); - }); - - describe('job details', () => { - beforeEach(async () => { - createComponent(); - - await waitForPromises(); - }); - - it('shows the artifact count', () => { - expect(findCount().text()).toBe(`${job.artifacts.nodes.length} files`); - }); - - it('shows the job status as an icon for a successful job', () => { - 
expect(findSuccessfulJobStatus().findComponent(CiIcon).exists()).toBe(true); - expect(findSuccessfulJobStatus().findComponent(GlBadge).exists()).toBe(false); - }); - - it('shows the job status as a badge for other job statuses', () => { - expect(findFailedJobStatus().findComponent(GlBadge).exists()).toBe(true); - expect(findFailedJobStatus().findComponent(CiIcon).exists()).toBe(false); - }); - - it('shows links to the job, pipeline, ref, and commit', () => { - expect(findJobLink().text()).toBe(job.name); - expect(findJobLink().attributes('href')).toBe(job.webPath); - - expect(findPipelineLink().text()).toBe(`#${getIdFromGraphQLId(job.pipeline.id)}`); - expect(findPipelineLink().attributes('href')).toBe(job.pipeline.path); - - expect(findRefLink().text()).toBe(job.refName); - expect(findRefLink().attributes('href')).toBe(job.refPath); - - expect(findCommitLink().text()).toBe(job.shortSha); - expect(findCommitLink().attributes('href')).toBe(job.commitPath); - }); - - it('shows the total size of artifacts', () => { - expect(findSize().text()).toBe(totalArtifactsSizeForJob(job)); - }); - - it('shows the created time', () => { - expect(findCreated().text()).toBe('5 years ago'); - }); - - describe('row expansion', () => { - it('toggles the visibility of the row details', async () => { - expect(findDetailsRows().length).toBe(0); - - findCount().trigger('click'); - await waitForPromises(); - - expect(findDetailsRows().length).toBe(1); - - findCount().trigger('click'); - await waitForPromises(); - - expect(findDetailsRows().length).toBe(0); - }); - - it('expands and collapses jobs', async () => { - // both jobs start collapsed - expect(findDetailsInRow(0).exists()).toBe(false); - expect(findDetailsInRow(1).exists()).toBe(false); - - findCountAt(0).trigger('click'); - await waitForPromises(); - - // first job is expanded, second row has its details - expect(findDetailsInRow(0).exists()).toBe(false); - expect(findDetailsInRow(1).exists()).toBe(true); - 
expect(findDetailsInRow(2).exists()).toBe(false); - - findCountAt(1).trigger('click'); - await waitForPromises(); - - // both jobs are expanded, each has details below it - expect(findDetailsInRow(0).exists()).toBe(false); - expect(findDetailsInRow(1).exists()).toBe(true); - expect(findDetailsInRow(2).exists()).toBe(false); - expect(findDetailsInRow(3).exists()).toBe(true); - - findCountAt(0).trigger('click'); - await waitForPromises(); - - // first job collapsed, second job expanded - expect(findDetailsInRow(0).exists()).toBe(false); - expect(findDetailsInRow(1).exists()).toBe(false); - expect(findDetailsInRow(2).exists()).toBe(true); - }); - - it('keeps the job expanded when an artifact is deleted', async () => { - findCount().trigger('click'); - await waitForPromises(); - - expect(findDetailsInRow(0).exists()).toBe(false); - expect(findDetailsInRow(1).exists()).toBe(true); - - findArtifactDeleteButton().trigger('click'); - await waitForPromises(); - - expect(findModal().props('visible')).toBe(true); - - wrapper.findComponent(ArtifactDeleteModal).vm.$emit('primary'); - await waitForPromises(); - - expect(findDetailsInRow(0).exists()).toBe(false); - expect(findDetailsInRow(1).exists()).toBe(true); - }); - }); - }); - - describe('download button', () => { - it('is a link to the download path for the archive artifact', async () => { - createComponent(); - - await waitForPromises(); - - expect(findDownloadButton().attributes('href')).toBe(archiveArtifact.downloadPath); - }); - - it('is disabled when there is no download path', async () => { - const jobWithoutDownloadPath = { - ...job, - archive: { downloadPath: null }, - }; - - createComponent({ - handlers: { getJobArtifactsQuery: jest.fn() }, - data: { jobArtifacts: [jobWithoutDownloadPath] }, - }); - - await waitForPromises(); - - expect(findDownloadButton().attributes('disabled')).toBe('disabled'); - }); - }); - - describe('browse button', () => { - it('is a link to the browse path for the job', async () => { - 
createComponent(); - - await waitForPromises(); - - expect(findBrowseButton().attributes('href')).toBe(job.browseArtifactsPath); - }); - - it('is disabled when there is no browse path', async () => { - const jobWithoutBrowsePath = { - ...job, - browseArtifactsPath: null, - }; - - createComponent({ - handlers: { getJobArtifactsQuery: jest.fn() }, - data: { jobArtifacts: [jobWithoutBrowsePath] }, - }); - - await waitForPromises(); - - expect(findBrowseButton().attributes('disabled')).toBe('disabled'); - }); - }); - - describe('delete button', () => { - it('does not show when user does not have permission', async () => { - createComponent({ canDestroyArtifacts: false }); - - await waitForPromises(); - - expect(findDeleteButton().exists()).toBe(false); - }); - - it('shows a disabled delete button for now (coming soon)', async () => { - createComponent(); - - await waitForPromises(); - - expect(findDeleteButton().attributes('disabled')).toBe('disabled'); - }); - }); - - describe('bulk delete', () => { - describe('with permission and feature flag enabled', () => { - beforeEach(async () => { - createComponent({ - canDestroyArtifacts: true, - glFeatures: { [BULK_DELETE_FEATURE_FLAG]: true }, - }); - - await waitForPromises(); - }); - - it('shows selected artifacts when a job is checked', async () => { - expect(findBulkDelete().exists()).toBe(false); - - await findJobCheckbox().vm.$emit('input', true); - - expect(findBulkDelete().exists()).toBe(true); - expect(findBulkDelete().props('selectedArtifacts')).toStrictEqual( - job.artifacts.nodes.map((node) => node.id), - ); - }); - - it('disappears when selected artifacts are cleared', async () => { - await findJobCheckbox().vm.$emit('input', true); - - expect(findBulkDelete().exists()).toBe(true); - - await findBulkDelete().vm.$emit('clearSelectedArtifacts'); - - expect(findBulkDelete().exists()).toBe(false); - }); - - it('shows a toast when artifacts are deleted', async () => { - const count = job.artifacts.nodes.length; - - 
await findJobCheckbox().vm.$emit('input', true); - findBulkDelete().vm.$emit('deleted', count); - - expect(mockToastShow).toHaveBeenCalledWith(`${count} selected artifacts deleted`); - }); - }); - - it('shows no checkboxes without permission', async () => { - createComponent({ - canDestroyArtifacts: false, - glFeatures: { [BULK_DELETE_FEATURE_FLAG]: true }, - }); - - await waitForPromises(); - - expect(findAnyCheckbox().exists()).toBe(false); - }); - - it('shows no checkboxes with feature flag disabled', async () => { - createComponent({ - canDestroyArtifacts: true, - glFeatures: { [BULK_DELETE_FEATURE_FLAG]: false }, - }); - - await waitForPromises(); - - expect(findAnyCheckbox().exists()).toBe(false); - }); - }); - - describe('pagination', () => { - const { pageInfo } = getJobArtifactsResponseThatPaginates.data.project.jobs; - const query = jest.fn().mockResolvedValue(getJobArtifactsResponseThatPaginates); - - beforeEach(async () => { - createComponent({ - handlers: { - getJobArtifactsQuery: query, - }, - data: { pageInfo }, - }); - - await waitForPromises(); - }); - - it('renders pagination and passes page props', () => { - expect(findPagination().props()).toMatchObject({ - value: INITIAL_CURRENT_PAGE, - prevPage: Number(pageInfo.hasPreviousPage), - nextPage: Number(pageInfo.hasNextPage), - }); - - expect(query).toHaveBeenCalledWith({ - projectPath: 'project/path', - firstPageSize: JOBS_PER_PAGE, - lastPageSize: null, - nextPageCursor: '', - prevPageCursor: '', - }); - }); - - it('updates query variables when going to previous page', async () => { - await setPage(1); - - expect(query).toHaveBeenLastCalledWith({ - projectPath: 'project/path', - firstPageSize: null, - lastPageSize: JOBS_PER_PAGE, - prevPageCursor: pageInfo.startCursor, - }); - expect(findPagination().props('value')).toEqual(1); - }); - - it('updates query variables when going to next page', async () => { - await setPage(2); - - expect(query).toHaveBeenLastCalledWith({ - projectPath: 
'project/path', - firstPageSize: JOBS_PER_PAGE, - lastPageSize: null, - prevPageCursor: '', - nextPageCursor: pageInfo.endCursor, - }); - expect(findPagination().props('value')).toEqual(2); - }); - }); -}); diff --git a/spec/frontend/artifacts/components/job_checkbox_spec.js b/spec/frontend/artifacts/components/job_checkbox_spec.js deleted file mode 100644 index 95cc548b8c8..00000000000 --- a/spec/frontend/artifacts/components/job_checkbox_spec.js +++ /dev/null @@ -1,71 +0,0 @@ -import { GlFormCheckbox } from '@gitlab/ui'; -import mockGetJobArtifactsResponse from 'test_fixtures/graphql/artifacts/graphql/queries/get_job_artifacts.query.graphql.json'; -import { shallowMountExtended } from 'helpers/vue_test_utils_helper'; -import JobCheckbox from '~/artifacts/components/job_checkbox.vue'; - -describe('JobCheckbox component', () => { - let wrapper; - - const mockArtifactNodes = mockGetJobArtifactsResponse.data.project.jobs.nodes[0].artifacts.nodes; - const mockSelectedArtifacts = [mockArtifactNodes[0], mockArtifactNodes[1]]; - const mockUnselectedArtifacts = [mockArtifactNodes[2]]; - - const findCheckbox = () => wrapper.findComponent(GlFormCheckbox); - - const createComponent = ({ - hasArtifacts = true, - selectedArtifacts = mockSelectedArtifacts, - unselectedArtifacts = mockUnselectedArtifacts, - } = {}) => { - wrapper = shallowMountExtended(JobCheckbox, { - propsData: { - hasArtifacts, - selectedArtifacts, - unselectedArtifacts, - }, - mocks: { GlFormCheckbox }, - }); - }; - - it('is disabled when the job has no artifacts', () => { - createComponent({ hasArtifacts: false }); - - expect(findCheckbox().attributes('disabled')).toBe('true'); - }); - - describe('when some artifacts are selected', () => { - beforeEach(() => { - createComponent(); - }); - - it('is indeterminate', () => { - expect(findCheckbox().attributes('indeterminate')).toBe('true'); - expect(findCheckbox().attributes('checked')).toBeUndefined(); - }); - - it('selects the unselected artifacts on click', 
() => { - findCheckbox().vm.$emit('input', true); - - expect(wrapper.emitted('selectArtifact')).toMatchObject([[mockUnselectedArtifacts[0], true]]); - }); - }); - - describe('when all artifacts are selected', () => { - beforeEach(() => { - createComponent({ unselectedArtifacts: [] }); - }); - - it('is checked', () => { - expect(findCheckbox().attributes('checked')).toBe('true'); - }); - - it('deselects the selected artifacts on click', () => { - findCheckbox().vm.$emit('input', false); - - expect(wrapper.emitted('selectArtifact')).toMatchObject([ - [mockSelectedArtifacts[0], false], - [mockSelectedArtifacts[1], false], - ]); - }); - }); -}); diff --git a/spec/frontend/artifacts/graphql/cache_update_spec.js b/spec/frontend/artifacts/graphql/cache_update_spec.js deleted file mode 100644 index 4d610328298..00000000000 --- a/spec/frontend/artifacts/graphql/cache_update_spec.js +++ /dev/null @@ -1,67 +0,0 @@ -import getJobArtifactsQuery from '~/artifacts/graphql/queries/get_job_artifacts.query.graphql'; -import { removeArtifactFromStore } from '~/artifacts/graphql/cache_update'; - -describe('Artifact table cache updates', () => { - let store; - - const cacheMock = { - project: { - jobs: { - nodes: [ - { artifacts: { nodes: [{ id: 'foo' }] } }, - { artifacts: { nodes: [{ id: 'bar' }] } }, - ], - }, - }, - }; - - const query = getJobArtifactsQuery; - const variables = { fullPath: 'path/to/project' }; - - beforeEach(() => { - store = { - readQuery: jest.fn().mockReturnValue(cacheMock), - writeQuery: jest.fn(), - }; - }); - - describe('removeArtifactFromStore', () => { - it('calls readQuery', () => { - removeArtifactFromStore(store, 'foo', query, variables); - expect(store.readQuery).toHaveBeenCalledWith({ query, variables }); - }); - - it('writes the correct result in the cache', () => { - removeArtifactFromStore(store, 'foo', query, variables); - expect(store.writeQuery).toHaveBeenCalledWith({ - query, - variables, - data: { - project: { - jobs: { - nodes: [{ artifacts: { 
nodes: [] } }, { artifacts: { nodes: [{ id: 'bar' }] } }], - }, - }, - }, - }); - }); - - it('does not remove an unknown artifact', () => { - removeArtifactFromStore(store, 'baz', query, variables); - expect(store.writeQuery).toHaveBeenCalledWith({ - query, - variables, - data: { - project: { - jobs: { - nodes: [ - { artifacts: { nodes: [{ id: 'foo' }] } }, - { artifacts: { nodes: [{ id: 'bar' }] } }, - ], - }, - }, - }, - }); - }); - }); -}); diff --git a/spec/frontend/artifacts_settings/components/keep_latest_artifact_checkbox_spec.js b/spec/frontend/artifacts_settings/components/keep_latest_artifact_checkbox_spec.js index efdebe5f3b0..50ac7be9ae3 100644 --- a/spec/frontend/artifacts_settings/components/keep_latest_artifact_checkbox_spec.js +++ b/spec/frontend/artifacts_settings/components/keep_latest_artifact_checkbox_spec.js @@ -1,7 +1,8 @@ import { GlFormCheckbox, GlLink } from '@gitlab/ui'; import { shallowMount } from '@vue/test-utils'; -import Vue, { nextTick } from 'vue'; +import Vue from 'vue'; import VueApollo from 'vue-apollo'; +import waitForPromises from 'helpers/wait_for_promises'; import createMockApollo from 'helpers/mock_apollo_helper'; import UpdateKeepLatestArtifactProjectSetting from '~/artifacts_settings/graphql/mutations/update_keep_latest_artifact_project_setting.mutation.graphql'; import GetKeepLatestArtifactApplicationSetting from '~/artifacts_settings/graphql/queries/get_keep_latest_artifact_application_setting.query.graphql'; @@ -102,19 +103,16 @@ describe('Keep latest artifact checkbox', () => { }); describe('when application keep latest artifact setting is enabled', () => { - beforeEach(() => { + beforeEach(async () => { createComponent(); + await waitForPromises(); }); - it('sets correct setting value in checkbox with query result', async () => { - await nextTick(); - + it('sets correct setting value in checkbox with query result', () => { expect(wrapper.element).toMatchSnapshot(); }); - it('checkbox is enabled when application 
setting is enabled', async () => { - await nextTick(); - + it('checkbox is enabled when application setting is enabled', () => { expect(findCheckbox().attributes('disabled')).toBeUndefined(); }); }); diff --git a/spec/frontend/authentication/password/components/password_input_spec.js b/spec/frontend/authentication/password/components/password_input_spec.js new file mode 100644 index 00000000000..623d986b36e --- /dev/null +++ b/spec/frontend/authentication/password/components/password_input_spec.js @@ -0,0 +1,49 @@ +import { GlFormInput, GlButton } from '@gitlab/ui'; +import { shallowMount } from '@vue/test-utils'; +import PasswordInput from '~/authentication/password/components/password_input.vue'; +import { SHOW_PASSWORD, HIDE_PASSWORD } from '~/authentication/password/constants'; + +describe('PasswordInput', () => { + let wrapper; + + const findPasswordInput = () => wrapper.findComponent(GlFormInput); + const findToggleButton = () => wrapper.findComponent(GlButton); + + const createComponent = () => { + return shallowMount(PasswordInput, { + propsData: { + resourceName: 'new_user', + minimumPasswordLength: '8', + qaSelector: 'new_user_password_field', + }, + }); + }; + + beforeEach(() => { + wrapper = createComponent(); + }); + + describe('when the show password button is clicked', () => { + beforeEach(() => { + findToggleButton().vm.$emit('click'); + }); + + it('displays hide password button', () => { + expect(findPasswordInput().attributes('type')).toBe('text'); + expect(findToggleButton().attributes('icon')).toBe('eye-slash'); + expect(findToggleButton().attributes('aria-label')).toBe(HIDE_PASSWORD); + }); + + describe('when the hide password button is clicked', () => { + beforeEach(() => { + findToggleButton().vm.$emit('click'); + }); + + it('displays show password button', () => { + expect(findPasswordInput().attributes('type')).toBe('password'); + expect(findToggleButton().attributes('icon')).toBe('eye'); + 
expect(findToggleButton().attributes('aria-label')).toBe(SHOW_PASSWORD); + }); + }); + }); +}); diff --git a/spec/frontend/authentication/two_factor_auth/components/manage_two_factor_form_spec.js b/spec/frontend/authentication/two_factor_auth/components/manage_two_factor_form_spec.js index 694c16a85c4..8ecef710e03 100644 --- a/spec/frontend/authentication/two_factor_auth/components/manage_two_factor_form_spec.js +++ b/spec/frontend/authentication/two_factor_auth/components/manage_two_factor_form_spec.js @@ -19,7 +19,6 @@ describe('ManageTwoFactorForm', () => { wrapper = mountExtended(ManageTwoFactorForm, { provide: { ...defaultProvide, - webauthnEnabled: options?.webauthnEnabled ?? false, isCurrentPasswordRequired: options?.currentPasswordRequired ?? true, }, stubs: { @@ -41,16 +40,6 @@ describe('ManageTwoFactorForm', () => { const findRegenerateCodesButton = () => wrapper.findByTestId('test-2fa-regenerate-codes-button'); const findConfirmationModal = () => wrapper.findComponent(GlModal); - const itShowsConfirmationModal = (confirmText) => { - it('shows confirmation modal', async () => { - await wrapper.findByLabelText('Current password').setValue('foo bar'); - await findDisableButton().trigger('click'); - - expect(findConfirmationModal().props('visible')).toBe(true); - expect(findConfirmationModal().html()).toContain(confirmText); - }); - }; - const itShowsValidationMessageIfCurrentPasswordFieldIsEmpty = (findButtonFunction) => { it('shows validation message if `Current password` is empty', async () => { await findButtonFunction().trigger('click'); @@ -91,16 +80,12 @@ describe('ManageTwoFactorForm', () => { describe('when clicked', () => { itShowsValidationMessageIfCurrentPasswordFieldIsEmpty(findDisableButton); - itShowsConfirmationModal(i18n.confirm); - - describe('when webauthnEnabled', () => { - beforeEach(() => { - createComponent({ - webauthnEnabled: true, - }); - }); + it('shows confirmation modal', async () => { + await wrapper.findByLabelText('Current 
password').setValue('foo bar'); + await findDisableButton().trigger('click'); - itShowsConfirmationModal(i18n.confirmWebAuthn); + expect(findConfirmationModal().props('visible')).toBe(true); + expect(findConfirmationModal().html()).toContain(i18n.confirmWebAuthn); }); it('modifies the form action and method when submitted through the button', async () => { diff --git a/spec/frontend/authentication/webauthn/components/registration_spec.js b/spec/frontend/authentication/webauthn/components/registration_spec.js index 1221626db7d..e4ca1ac8c38 100644 --- a/spec/frontend/authentication/webauthn/components/registration_spec.js +++ b/spec/frontend/authentication/webauthn/components/registration_spec.js @@ -211,7 +211,7 @@ describe('Registration', () => { }); describe(`when ${STATE_ERROR} state`, () => { - it('shows an initial error message and a retry button', async () => { + it('shows an initial error message and a retry button', () => { const myError = 'my error'; createComponent({ initialError: myError }); diff --git a/spec/frontend/batch_comments/components/review_bar_spec.js b/spec/frontend/batch_comments/components/review_bar_spec.js index 923e86a7e64..ea4b015ea39 100644 --- a/spec/frontend/batch_comments/components/review_bar_spec.js +++ b/spec/frontend/batch_comments/components/review_bar_spec.js @@ -20,7 +20,7 @@ describe('Batch comments review bar component', () => { document.body.className = ''; }); - it('adds review-bar-visible class to body when review bar is mounted', async () => { + it('adds review-bar-visible class to body when review bar is mounted', () => { expect(document.body.classList.contains(REVIEW_BAR_VISIBLE_CLASS_NAME)).toBe(false); createComponent(); @@ -28,7 +28,7 @@ describe('Batch comments review bar component', () => { expect(document.body.classList.contains(REVIEW_BAR_VISIBLE_CLASS_NAME)).toBe(true); }); - it('removes review-bar-visible class to body when review bar is destroyed', async () => { + it('removes review-bar-visible class to body 
when review bar is destroyed', () => { createComponent(); wrapper.destroy(); diff --git a/spec/frontend/behaviors/gl_emoji_spec.js b/spec/frontend/behaviors/gl_emoji_spec.js index 722327e94ba..995e4219ae3 100644 --- a/spec/frontend/behaviors/gl_emoji_spec.js +++ b/spec/frontend/behaviors/gl_emoji_spec.js @@ -51,13 +51,13 @@ describe('gl_emoji', () => { 'bomb emoji just with name attribute', '', '💣', - `:bomb:`, + `:bomb:`, ], [ 'bomb emoji with name attribute and unicode version', '💣', '💣', - `:bomb:`, + `:bomb:`, ], [ 'bomb emoji with sprite fallback', @@ -69,19 +69,19 @@ describe('gl_emoji', () => { 'bomb emoji with image fallback', '', '💣', - ':bomb:', + ':bomb:', ], [ 'invalid emoji', '', '', - `:grey_question:`, + `:grey_question:`, ], [ 'custom emoji with image fallback', '', - ':party-parrot:', - ':party-parrot:', + ':party-parrot:', + ':party-parrot:', ], ])('%s', (name, markup, withEmojiSupport, withoutEmojiSupport) => { it(`renders correctly with emoji support`, async () => { @@ -111,7 +111,7 @@ describe('gl_emoji', () => { await waitForPromises(); expect(glEmojiElement.outerHTML).toBe( - ':"x="y" onload="alert(document.location.href)":', + ':"x="y" onload="alert(document.location.href)":', ); }); diff --git a/spec/frontend/behaviors/quick_submit_spec.js b/spec/frontend/behaviors/quick_submit_spec.js index 317c671cd2b..81eeb3f153e 100644 --- a/spec/frontend/behaviors/quick_submit_spec.js +++ b/spec/frontend/behaviors/quick_submit_spec.js @@ -1,5 +1,6 @@ import $ from 'jquery'; -import { loadHTMLFixture, resetHTMLFixture } from 'helpers/fixtures'; +import htmlSnippetsShow from 'test_fixtures/snippets/show.html'; +import { setHTMLFixture, resetHTMLFixture } from 'helpers/fixtures'; import '~/behaviors/quick_submit'; describe('Quick Submit behavior', () => { @@ -8,7 +9,7 @@ describe('Quick Submit behavior', () => { const keydownEvent = (options = { keyCode: 13, metaKey: true }) => $.Event('keydown', options); beforeEach(() => { - 
loadHTMLFixture('snippets/show.html'); + setHTMLFixture(htmlSnippetsShow); testContext = {}; @@ -60,22 +61,15 @@ describe('Quick Submit behavior', () => { expect(testContext.spies.submit).not.toHaveBeenCalled(); }); - it('disables input of type submit', () => { - const submitButton = $('.js-quick-submit input[type=submit]'); - testContext.textarea.trigger(keydownEvent()); - - expect(submitButton).toBeDisabled(); - }); - - it('disables button of type submit', () => { - const submitButton = $('.js-quick-submit input[type=submit]'); + it('disables submit', () => { + const submitButton = $('.js-quick-submit [type=submit]'); testContext.textarea.trigger(keydownEvent()); expect(submitButton).toBeDisabled(); }); it('only clicks one submit', () => { - const existingSubmit = $('.js-quick-submit input[type=submit]'); + const existingSubmit = $('.js-quick-submit [type=submit]'); // Add an extra submit button const newSubmit = $(''); newSubmit.insertAfter(testContext.textarea); diff --git a/spec/frontend/behaviors/requires_input_spec.js b/spec/frontend/behaviors/requires_input_spec.js index f2f68f17d1c..68fa980216a 100644 --- a/spec/frontend/behaviors/requires_input_spec.js +++ b/spec/frontend/behaviors/requires_input_spec.js @@ -1,12 +1,13 @@ import $ from 'jquery'; -import { loadHTMLFixture, resetHTMLFixture } from 'helpers/fixtures'; +import htmlNewBranch from 'test_fixtures/branches/new_branch.html'; +import { setHTMLFixture, resetHTMLFixture } from 'helpers/fixtures'; import '~/behaviors/requires_input'; describe('requiresInput', () => { let submitButton; beforeEach(() => { - loadHTMLFixture('branches/new_branch.html'); + setHTMLFixture(htmlNewBranch); submitButton = $('button[type="submit"]'); }); diff --git a/spec/frontend/behaviors/shortcuts/shortcuts_issuable_spec.js b/spec/frontend/behaviors/shortcuts/shortcuts_issuable_spec.js index e6e587ff44b..ae7f5416c0c 100644 --- a/spec/frontend/behaviors/shortcuts/shortcuts_issuable_spec.js +++ 
b/spec/frontend/behaviors/shortcuts/shortcuts_issuable_spec.js @@ -1,5 +1,6 @@ import $ from 'jquery'; -import { loadHTMLFixture, resetHTMLFixture } from 'helpers/fixtures'; +import htmlSnippetsShow from 'test_fixtures/snippets/show.html'; +import { setHTMLFixture, resetHTMLFixture } from 'helpers/fixtures'; import waitForPromises from 'helpers/wait_for_promises'; import initCopyAsGFM, { CopyAsGFM } from '~/behaviors/markdown/copy_as_gfm'; import ShortcutsIssuable from '~/behaviors/shortcuts/shortcuts_issuable'; @@ -11,8 +12,6 @@ jest.mock('~/lib/utils/common_utils', () => ({ })); describe('ShortcutsIssuable', () => { - const snippetShowFixtureName = 'snippets/show.html'; - beforeAll(() => { initCopyAsGFM(); @@ -24,7 +23,7 @@ describe('ShortcutsIssuable', () => { const FORM_SELECTOR = '.js-main-target-form .js-vue-comment-form'; beforeEach(() => { - loadHTMLFixture(snippetShowFixtureName); + setHTMLFixture(htmlSnippetsShow); $('body').append( `
diff --git a/spec/frontend/blob/components/blob_edit_header_spec.js b/spec/frontend/blob/components/blob_edit_header_spec.js index 2b1bd1ac4ad..b0ce5f40d95 100644 --- a/spec/frontend/blob/components/blob_edit_header_spec.js +++ b/spec/frontend/blob/components/blob_edit_header_spec.js @@ -1,6 +1,5 @@ import { GlFormInput, GlButton } from '@gitlab/ui'; import { shallowMount } from '@vue/test-utils'; -import { nextTick } from 'vue'; import BlobEditHeader from '~/blob/components/blob_edit_header.vue'; describe('Blob Header Editing', () => { @@ -15,8 +14,10 @@ describe('Blob Header Editing', () => { }, }); }; + const findDeleteButton = () => wrapper.findAllComponents(GlButton).wrappers.find((x) => x.text() === 'Delete file'); + const findFormInput = () => wrapper.findComponent(GlFormInput); beforeEach(() => { createComponent(); @@ -28,7 +29,7 @@ describe('Blob Header Editing', () => { }); it('contains a form input field', () => { - expect(wrapper.findComponent(GlFormInput).exists()).toBe(true); + expect(findFormInput().exists()).toBe(true); }); it('does not show delete button', () => { @@ -37,19 +38,16 @@ describe('Blob Header Editing', () => { }); describe('functionality', () => { - it('emits input event when the blob name is changed', async () => { - const inputComponent = wrapper.findComponent(GlFormInput); + it('emits input event when the blob name is changed', () => { + const inputComponent = findFormInput(); const newValue = 'bar.txt'; - // setData usage is discouraged. 
See https://gitlab.com/groups/gitlab-org/-/epics/7330 for details - // eslint-disable-next-line no-restricted-syntax - wrapper.setData({ - name: newValue, - }); + // update `name` with `newValue` + inputComponent.vm.$emit('input', newValue); + // trigger change event which emits input event on wrapper inputComponent.vm.$emit('change'); - await nextTick(); - expect(wrapper.emitted().input[0]).toEqual([newValue]); + expect(wrapper.emitted().input).toEqual([[newValue]]); }); }); diff --git a/spec/frontend/blob/components/blob_header_default_actions_spec.js b/spec/frontend/blob/components/blob_header_default_actions_spec.js index e12021a48d2..6e001846bb6 100644 --- a/spec/frontend/blob/components/blob_header_default_actions_spec.js +++ b/spec/frontend/blob/components/blob_header_default_actions_spec.js @@ -45,7 +45,7 @@ describe('Blob Header Default Actions', () => { it('exactly 3 buttons with predefined actions', () => { expect(buttons.length).toBe(3); [BTN_COPY_CONTENTS_TITLE, BTN_RAW_TITLE, BTN_DOWNLOAD_TITLE].forEach((title, i) => { - expect(buttons.at(i).vm.$el.title).toBe(title); + expect(buttons.at(i).attributes('title')).toBe(title); }); }); @@ -87,10 +87,9 @@ describe('Blob Header Default Actions', () => { it('emits a copy event if overrideCopy is set to true', () => { createComponent({ overrideCopy: true }); - jest.spyOn(wrapper.vm, '$emit'); findCopyButton().vm.$emit('click'); - expect(wrapper.vm.$emit).toHaveBeenCalledWith('copy'); + expect(wrapper.emitted('copy')).toHaveLength(1); }); }); diff --git a/spec/frontend/blob/components/mock_data.js b/spec/frontend/blob/components/mock_data.js index b5803bf0cbc..6ecf5091591 100644 --- a/spec/frontend/blob/components/mock_data.js +++ b/spec/frontend/blob/components/mock_data.js @@ -47,11 +47,13 @@ export const BinaryBlob = { }; export const RichBlobContentMock = { + __typename: 'Blob', path: 'foo.md', richData: '

Rich

', }; export const SimpleBlobContentMock = { + __typename: 'Blob', path: 'foo.js', plainData: 'Plain', }; diff --git a/spec/frontend/blob/file_template_selector_spec.js b/spec/frontend/blob/file_template_selector_spec.js index 65444e86efd..123475f8d62 100644 --- a/spec/frontend/blob/file_template_selector_spec.js +++ b/spec/frontend/blob/file_template_selector_spec.js @@ -53,7 +53,7 @@ describe('FileTemplateSelector', () => { expect(subject.wrapper.classList.contains('hidden')).toBe(false); }); - it('sets the focus on the dropdown', async () => { + it('sets the focus on the dropdown', () => { subject.show(); jest.spyOn(subject.dropdown, 'focus'); jest.runAllTimers(); diff --git a/spec/frontend/blob/sketch/index_spec.js b/spec/frontend/blob/sketch/index_spec.js index 4b6cb79791c..64b6152a07d 100644 --- a/spec/frontend/blob/sketch/index_spec.js +++ b/spec/frontend/blob/sketch/index_spec.js @@ -1,10 +1,11 @@ import SketchLoader from '~/blob/sketch'; -import { loadHTMLFixture, resetHTMLFixture } from 'helpers/fixtures'; +import { setHTMLFixture, resetHTMLFixture } from 'helpers/fixtures'; import waitForPromises from 'helpers/wait_for_promises'; +import htmlSketchViewer from 'test_fixtures_static/sketch_viewer.html'; describe('Sketch viewer', () => { beforeEach(() => { - loadHTMLFixture('static/sketch_viewer.html'); + setHTMLFixture(htmlSketchViewer); }); afterEach(() => { diff --git a/spec/frontend/boards/board_card_inner_spec.js b/spec/frontend/boards/board_card_inner_spec.js index a612e863d46..a925f752f5e 100644 --- a/spec/frontend/boards/board_card_inner_spec.js +++ b/spec/frontend/boards/board_card_inner_spec.js @@ -168,7 +168,7 @@ describe('Board card component', () => { }); describe('blocked', () => { - it('renders blocked icon if issue is blocked', async () => { + it('renders blocked icon if issue is blocked', () => { createWrapper({ props: { item: { @@ -487,7 +487,7 @@ describe('Board card component', () => { }); describe('loading', () => { - it('renders 
loading icon', async () => { + it('renders loading icon', () => { createWrapper({ props: { item: { diff --git a/spec/frontend/boards/board_list_spec.js b/spec/frontend/boards/board_list_spec.js index 9ec43c6e892..e0a110678b1 100644 --- a/spec/frontend/boards/board_list_spec.js +++ b/spec/frontend/boards/board_list_spec.js @@ -1,3 +1,4 @@ +import { GlIntersectionObserver } from '@gitlab/ui'; import Draggable from 'vuedraggable'; import { nextTick } from 'vue'; import { DraggableItemTypes, ListType } from 'ee_else_ce/boards/constants'; @@ -8,15 +9,18 @@ import BoardCard from '~/boards/components/board_card.vue'; import eventHub from '~/boards/eventhub'; import BoardCardMoveToPosition from '~/boards/components/board_card_move_to_position.vue'; -import { mockIssues } from './mock_data'; +import { mockIssues, mockList, mockIssuesMore } from './mock_data'; describe('Board list component', () => { let wrapper; const findByTestId = (testId) => wrapper.find(`[data-testid="${testId}"]`); - const findIssueCountLoadingIcon = () => wrapper.find('[data-testid="count-loading-icon"]'); const findDraggable = () => wrapper.findComponent(Draggable); const findMoveToPositionComponent = () => wrapper.findComponent(BoardCardMoveToPosition); + const findIntersectionObserver = () => wrapper.findComponent(GlIntersectionObserver); + const findBoardListCount = () => wrapper.find('.board-list-count'); + + const triggerInfiniteScroll = () => findIntersectionObserver().vm.$emit('appear'); const startDrag = ( params = { @@ -61,41 +65,25 @@ describe('Board list component', () => { expect(wrapper.find('.board-card').attributes('data-item-id')).toBe('1'); }); - it('shows new issue form', async () => { - wrapper.vm.toggleForm(); - - await nextTick(); - expect(wrapper.find('.board-new-issue-form').exists()).toBe(true); - }); - it('shows new issue form after eventhub event', async () => { - eventHub.$emit(`toggle-issue-form-${wrapper.vm.list.id}`); + eventHub.$emit(`toggle-issue-form-${mockList.id}`); 
await nextTick(); expect(wrapper.find('.board-new-issue-form').exists()).toBe(true); }); - it('does not show new issue form for closed list', () => { - wrapper.setProps({ list: { type: 'closed' } }); - wrapper.vm.toggleForm(); - - expect(wrapper.find('.board-new-issue-form').exists()).toBe(false); - }); - - it('shows count list item', async () => { - wrapper.vm.showCount = true; - - await nextTick(); - expect(wrapper.find('.board-list-count').exists()).toBe(true); - - expect(wrapper.find('.board-list-count').text()).toBe('Showing all issues'); - }); + it('does not show new issue form for closed list', async () => { + wrapper = createComponent({ + listProps: { + listType: ListType.closed, + }, + }); + await waitForPromises(); - it('sets data attribute with invalid id', async () => { - wrapper.vm.showCount = true; + eventHub.$emit(`toggle-issue-form-${mockList.id}`); await nextTick(); - expect(wrapper.find('.board-list-count').attributes('data-issue-id')).toBe('-1'); + expect(wrapper.find('.board-new-issue-form').exists()).toBe(false); }); it('renders the move to position icon', () => { @@ -118,61 +106,41 @@ describe('Board list component', () => { }); describe('load more issues', () => { - const actions = { - fetchItemsForList: jest.fn(), - }; - - it('does not load issues if already loading', () => { - wrapper = createComponent({ - actions, - state: { listsFlags: { 'gid://gitlab/List/1': { isLoadingMore: true } } }, + describe('when loading is not in progress', () => { + beforeEach(() => { + wrapper = createComponent({ + listProps: { + id: 'gid://gitlab/List/1', + }, + componentProps: { + boardItems: mockIssuesMore, + }, + actions: { + fetchItemsForList: jest.fn(), + }, + state: { listsFlags: { 'gid://gitlab/List/1': { isLoadingMore: false } } }, + }); }); - wrapper.vm.listRef.dispatchEvent(new Event('scroll')); - expect(actions.fetchItemsForList).not.toHaveBeenCalled(); - }); - - it('shows loading more spinner', async () => { - wrapper = createComponent({ - state: 
{ listsFlags: { 'gid://gitlab/List/1': { isLoadingMore: true } } }, - data: { - showCount: true, - }, + it('has intersection observer when the number of board list items are more than 5', () => { + expect(findIntersectionObserver().exists()).toBe(true); }); - await nextTick(); - - expect(findIssueCountLoadingIcon().exists()).toBe(true); - }); - - it('shows how many more issues to load', async () => { - wrapper = createComponent({ - data: { - showCount: true, - }, + it('shows count when loaded more items and correct data attribute', async () => { + triggerInfiniteScroll(); + await waitForPromises(); + expect(findBoardListCount().exists()).toBe(true); + expect(findBoardListCount().attributes('data-issue-id')).toBe('-1'); }); - - await nextTick(); - await waitForPromises(); - await nextTick(); - await nextTick(); - - expect(wrapper.find('.board-list-count').text()).toBe('Showing 1 of 20 issues'); }); }); describe('max issue count warning', () => { - beforeEach(() => { - wrapper = createComponent({ - listProps: { issuesCount: 50 }, - }); - }); - describe('when issue count exceeds max issue count', () => { it('sets background to gl-bg-red-100', async () => { - wrapper.setProps({ list: { issuesCount: 4, maxIssueCount: 3 } }); + wrapper = createComponent({ listProps: { issuesCount: 4, maxIssueCount: 3 } }); - await nextTick(); + await waitForPromises(); const block = wrapper.find('.gl-bg-red-100'); expect(block.exists()).toBe(true); @@ -183,16 +151,18 @@ describe('Board list component', () => { }); describe('when list issue count does NOT exceed list max issue count', () => { - it('does not sets background to gl-bg-red-100', () => { - wrapper.setProps({ list: { issuesCount: 2, maxIssueCount: 3 } }); + it('does not sets background to gl-bg-red-100', async () => { + wrapper = createComponent({ list: { issuesCount: 2, maxIssueCount: 3 } }); + await waitForPromises(); expect(wrapper.find('.gl-bg-red-100').exists()).toBe(false); }); }); describe('when list max issue count is 
0', () => { - it('does not sets background to gl-bg-red-100', () => { - wrapper.setProps({ list: { maxIssueCount: 0 } }); + it('does not sets background to gl-bg-red-100', async () => { + wrapper = createComponent({ list: { maxIssueCount: 0 } }); + await waitForPromises(); expect(wrapper.find('.gl-bg-red-100').exists()).toBe(false); }); diff --git a/spec/frontend/boards/components/board_app_spec.js b/spec/frontend/boards/components/board_app_spec.js index 148e696b57b..77ba6cdc9c0 100644 --- a/spec/frontend/boards/components/board_app_spec.js +++ b/spec/frontend/boards/components/board_app_spec.js @@ -1,14 +1,20 @@ import { shallowMount } from '@vue/test-utils'; -import Vue from 'vue'; +import Vue, { nextTick } from 'vue'; +import VueApollo from 'vue-apollo'; import Vuex from 'vuex'; +import createMockApollo from 'helpers/mock_apollo_helper'; import BoardApp from '~/boards/components/board_app.vue'; +import activeBoardItemQuery from 'ee_else_ce/boards/graphql/client/active_board_item.query.graphql'; +import { rawIssue } from '../mock_data'; describe('BoardApp', () => { let wrapper; let store; + const mockApollo = createMockApollo(); Vue.use(Vuex); + Vue.use(VueApollo); const createStore = ({ mockGetters = {} } = {}) => { store = new Vuex.Store({ @@ -23,12 +29,22 @@ describe('BoardApp', () => { }); }; - const createComponent = () => { + const createComponent = ({ isApolloBoard = false, issue = rawIssue } = {}) => { + mockApollo.clients.defaultClient.cache.writeQuery({ + query: activeBoardItemQuery, + data: { + activeBoardItem: issue, + }, + }); + wrapper = shallowMount(BoardApp, { + apolloProvider: mockApollo, store, provide: { initialBoardId: 'gid://gitlab/Board/1', initialFilterParams: {}, + isIssueBoard: true, + isApolloBoard, }, }); }; @@ -50,4 +66,22 @@ describe('BoardApp', () => { expect(wrapper.classes()).not.toContain('is-compact'); }); + + describe('Apollo boards', () => { + beforeEach(async () => { + createComponent({ isApolloBoard: true }); + await 
nextTick(); + }); + + it('should have is-compact class when a card is selected', () => { + expect(wrapper.classes()).toContain('is-compact'); + }); + + it('should not have is-compact class when no card is selected', async () => { + createComponent({ isApolloBoard: true, issue: {} }); + await nextTick(); + + expect(wrapper.classes()).not.toContain('is-compact'); + }); + }); }); diff --git a/spec/frontend/boards/components/board_card_spec.js b/spec/frontend/boards/components/board_card_spec.js index 46116bed4cf..897219303b5 100644 --- a/spec/frontend/boards/components/board_card_spec.js +++ b/spec/frontend/boards/components/board_card_spec.js @@ -1,8 +1,10 @@ import { GlLabel } from '@gitlab/ui'; -import { shallowMount } from '@vue/test-utils'; import Vue, { nextTick } from 'vue'; import Vuex from 'vuex'; +import VueApollo from 'vue-apollo'; +import createMockApollo from 'helpers/mock_apollo_helper'; +import { shallowMountExtended } from 'helpers/vue_test_utils_helper'; import BoardCard from '~/boards/components/board_card.vue'; import BoardCardInner from '~/boards/components/board_card_inner.vue'; import { inactiveId } from '~/boards/constants'; @@ -14,6 +16,14 @@ describe('Board card', () => { let mockActions; Vue.use(Vuex); + Vue.use(VueApollo); + + const mockSetActiveBoardItemResolver = jest.fn(); + const mockApollo = createMockApollo([], { + Mutation: { + setActiveBoardItem: mockSetActiveBoardItemResolver, + }, + }); const createStore = ({ initialState = {} } = {}) => { mockActions = { @@ -36,11 +46,11 @@ describe('Board card', () => { const mountComponent = ({ propsData = {}, provide = {}, - mountFn = shallowMount, stubs = { BoardCardInner }, item = mockIssue, } = {}) => { - wrapper = mountFn(BoardCard, { + wrapper = shallowMountExtended(BoardCard, { + apolloProvider: mockApollo, stubs: { ...stubs, BoardCardInner, @@ -56,9 +66,9 @@ describe('Board card', () => { groupId: null, rootPath: '/', scopedLabelsAvailable: false, + isIssueBoard: true, isEpicBoard: 
false, issuableType: 'issue', - isProjectBoard: false, isGroupBoard: true, disabled: false, isApolloBoard: false, @@ -96,7 +106,7 @@ describe('Board card', () => { }); }); - it('should not highlight the card by default', async () => { + it('should not highlight the card by default', () => { createStore(); mountComponent(); @@ -104,7 +114,7 @@ describe('Board card', () => { expect(wrapper.classes()).not.toContain('multi-select'); }); - it('should highlight the card with a correct style when selected', async () => { + it('should highlight the card with a correct style when selected', () => { createStore({ initialState: { activeId: mockIssue.id, @@ -116,7 +126,7 @@ describe('Board card', () => { expect(wrapper.classes()).not.toContain('multi-select'); }); - it('should highlight the card with a correct style when multi-selected', async () => { + it('should highlight the card with a correct style when multi-selected', () => { createStore({ initialState: { activeId: inactiveId, @@ -218,4 +228,25 @@ describe('Board card', () => { expect(wrapper.attributes('style')).toBeUndefined(); }); }); + + describe('Apollo boards', () => { + beforeEach(async () => { + createStore(); + mountComponent({ provide: { isApolloBoard: true } }); + await nextTick(); + }); + + it('set active board item on client when clicking on card', async () => { + await selectCard(); + + expect(mockSetActiveBoardItemResolver).toHaveBeenCalledWith( + {}, + { + boardItem: mockIssue, + }, + expect.anything(), + expect.anything(), + ); + }); + }); }); diff --git a/spec/frontend/boards/components/board_column_spec.js b/spec/frontend/boards/components/board_column_spec.js index 011665eee68..5717031be20 100644 --- a/spec/frontend/boards/components/board_column_spec.js +++ b/spec/frontend/boards/components/board_column_spec.js @@ -81,7 +81,7 @@ describe('Board Column Component', () => { }); describe('on mount', () => { - beforeEach(async () => { + beforeEach(() => { initStore(); jest.spyOn(store, 
'dispatch').mockImplementation(); }); diff --git a/spec/frontend/boards/components/board_content_sidebar_spec.js b/spec/frontend/boards/components/board_content_sidebar_spec.js index 90376a4a553..9be2696de56 100644 --- a/spec/frontend/boards/components/board_content_sidebar_spec.js +++ b/spec/frontend/boards/components/board_content_sidebar_spec.js @@ -1,10 +1,15 @@ import { GlDrawer } from '@gitlab/ui'; -import { shallowMount } from '@vue/test-utils'; import { MountingPortal } from 'portal-vue'; -import Vue from 'vue'; +import Vue, { nextTick } from 'vue'; +import VueApollo from 'vue-apollo'; import Vuex from 'vuex'; import SidebarDropdownWidget from 'ee_else_ce/sidebar/components/sidebar_dropdown_widget.vue'; +import createMockApollo from 'helpers/mock_apollo_helper'; import { stubComponent } from 'helpers/stub_component'; +import { shallowMountExtended } from 'helpers/vue_test_utils_helper'; +import waitForPromises from 'helpers/wait_for_promises'; + +import activeBoardItemQuery from 'ee_else_ce/boards/graphql/client/active_board_item.query.graphql'; import BoardContentSidebar from '~/boards/components/board_content_sidebar.vue'; import BoardSidebarTitle from '~/boards/components/sidebar/board_sidebar_title.vue'; import { ISSUABLE } from '~/boards/constants'; @@ -14,13 +19,21 @@ import SidebarSeverityWidget from '~/sidebar/components/severity/sidebar_severit import SidebarSubscriptionsWidget from '~/sidebar/components/subscriptions/sidebar_subscriptions_widget.vue'; import SidebarTodoWidget from '~/sidebar/components/todo_toggle/sidebar_todo_widget.vue'; import SidebarLabelsWidget from '~/sidebar/components/labels/labels_select_widget/labels_select_root.vue'; -import { mockActiveIssue, mockIssue, mockIssueGroupPath, mockIssueProjectPath } from '../mock_data'; +import { mockActiveIssue, mockIssue, rawIssue } from '../mock_data'; Vue.use(Vuex); +Vue.use(VueApollo); describe('BoardContentSidebar', () => { let wrapper; let store; + const 
mockSetActiveBoardItemResolver = jest.fn(); + const mockApollo = createMockApollo([], { + Mutation: { + setActiveBoardItem: mockSetActiveBoardItemResolver, + }, + }); + const createStore = ({ mockGetters = {}, mockActions = {} } = {}) => { store = new Vuex.Store({ state: { @@ -32,30 +45,29 @@ describe('BoardContentSidebar', () => { activeBoardItem: () => { return { ...mockActiveIssue, epic: null }; }, - groupPathForActiveIssue: () => mockIssueGroupPath, - projectPathForActiveIssue: () => mockIssueProjectPath, - isSidebarOpen: () => true, ...mockGetters, }, actions: mockActions, }); }; - const createComponent = () => { - /* - Dynamically imported components (in our case ee imports) - aren't stubbed automatically in VTU v1: - https://github.com/vuejs/vue-test-utils/issues/1279. + const createComponent = ({ isApolloBoard = false } = {}) => { + mockApollo.clients.defaultClient.cache.writeQuery({ + query: activeBoardItemQuery, + data: { + activeBoardItem: rawIssue, + }, + }); - This requires us to additionally mock apollo or vuex stores. - */ - wrapper = shallowMount(BoardContentSidebar, { + wrapper = shallowMountExtended(BoardContentSidebar, { + apolloProvider: mockApollo, provide: { canUpdate: true, rootPath: '/', groupId: 1, issuableType: TYPE_ISSUE, isGroupBoard: false, + isApolloBoard, }, store, stubs: { @@ -63,24 +75,6 @@ describe('BoardContentSidebar', () => { template: '
', }), }, - mocks: { - $apollo: { - queries: { - participants: { - loading: false, - }, - currentIteration: { - loading: false, - }, - iterations: { - loading: false, - }, - attributesList: { - loading: false, - }, - }, - }, - }, }); }; @@ -101,10 +95,12 @@ describe('BoardContentSidebar', () => { }); }); - it('does not render GlDrawer when isSidebarOpen is false', () => { - createStore({ mockGetters: { isSidebarOpen: () => false } }); + it('does not render GlDrawer when no active item is set', async () => { + createStore({ mockGetters: { activeBoardItem: () => ({ id: '', iid: '' }) } }); createComponent(); + await nextTick(); + expect(wrapper.findComponent(GlDrawer).props('open')).toBe(false); }); @@ -166,7 +162,7 @@ describe('BoardContentSidebar', () => { createComponent(); }); - it('calls toggleBoardItem with correct parameters', async () => { + it('calls toggleBoardItem with correct parameters', () => { wrapper.findComponent(GlDrawer).vm.$emit('close'); expect(toggleBoardItem).toHaveBeenCalledTimes(1); @@ -189,4 +185,27 @@ describe('BoardContentSidebar', () => { expect(wrapper.findComponent(SidebarSeverityWidget).exists()).toBe(true); }); }); + + describe('Apollo boards', () => { + beforeEach(async () => { + createStore(); + createComponent({ isApolloBoard: true }); + await nextTick(); + }); + + it('calls setActiveBoardItemMutation on close', async () => { + wrapper.findComponent(GlDrawer).vm.$emit('close'); + + await waitForPromises(); + + expect(mockSetActiveBoardItemResolver).toHaveBeenCalledWith( + {}, + { + boardItem: null, + }, + expect.anything(), + expect.anything(), + ); + }); + }); }); diff --git a/spec/frontend/boards/components/board_content_spec.js b/spec/frontend/boards/components/board_content_spec.js index 33351bf8efd..ab51f477966 100644 --- a/spec/frontend/boards/components/board_content_spec.js +++ b/spec/frontend/boards/components/board_content_spec.js @@ -6,6 +6,7 @@ import Draggable from 'vuedraggable'; import Vuex from 'vuex'; import 
eventHub from '~/boards/eventhub'; +import { stubComponent } from 'helpers/stub_component'; import waitForPromises from 'helpers/wait_for_promises'; import createMockApollo from 'helpers/mock_apollo_helper'; import EpicsSwimlanes from 'ee_component/boards/components/epics_swimlanes.vue'; @@ -78,6 +79,11 @@ describe('BoardContent', () => { isApolloBoard, }, store, + stubs: { + BoardContentSidebar: stubComponent(BoardContentSidebar, { + template: '
', + }), + }, }); }; diff --git a/spec/frontend/boards/components/board_filtered_search_spec.js b/spec/frontend/boards/components/board_filtered_search_spec.js index d8bc7f95f18..64111cfb01a 100644 --- a/spec/frontend/boards/components/board_filtered_search_spec.js +++ b/spec/frontend/boards/components/board_filtered_search_spec.js @@ -123,7 +123,7 @@ describe('BoardFilteredSearch', () => { jest.spyOn(wrapper.vm, 'performSearch').mockImplementation(); }); - it('sets the url params to the correct results', async () => { + it('sets the url params to the correct results', () => { const mockFilters = [ { type: TOKEN_TYPE_AUTHOR, value: { data: 'root', operator: '=' } }, { type: TOKEN_TYPE_ASSIGNEE, value: { data: 'root', operator: '=' } }, diff --git a/spec/frontend/boards/components/board_form_spec.js b/spec/frontend/boards/components/board_form_spec.js index 62db59f8f57..f340dfab359 100644 --- a/spec/frontend/boards/components/board_form_spec.js +++ b/spec/frontend/boards/components/board_form_spec.js @@ -115,7 +115,7 @@ describe('BoardForm', () => { expect(findForm().exists()).toBe(true); }); - it('focuses an input field', async () => { + it('focuses an input field', () => { expect(document.activeElement).toBe(wrapper.vm.$refs.name); }); }); diff --git a/spec/frontend/boards/components/board_list_header_spec.js b/spec/frontend/boards/components/board_list_header_spec.js index 466321cf1cc..0f91d2315cf 100644 --- a/spec/frontend/boards/components/board_list_header_spec.js +++ b/spec/frontend/boards/components/board_list_header_spec.js @@ -147,7 +147,7 @@ describe('Board List Header Component', () => { }); describe('expanding / collapsing the column', () => { - it('should display collapse icon when column is expanded', async () => { + it('should display collapse icon when column is expanded', () => { createComponent(); const icon = findCaret(); @@ -155,7 +155,7 @@ describe('Board List Header Component', () => { expect(icon.props('icon')).toBe('chevron-lg-down'); }); - 
it('should display expand icon when column is collapsed', async () => { + it('should display expand icon when column is collapsed', () => { createComponent({ collapsed: true }); const icon = findCaret(); diff --git a/spec/frontend/boards/components/boards_selector_spec.js b/spec/frontend/boards/components/boards_selector_spec.js index aa146eb4609..13c017706ef 100644 --- a/spec/frontend/boards/components/boards_selector_spec.js +++ b/spec/frontend/boards/components/boards_selector_spec.js @@ -250,7 +250,7 @@ describe('BoardsSelector', () => { describe('dropdown visibility', () => { describe('when multipleIssueBoardsAvailable is enabled', () => { - it('show dropdown', async () => { + it('show dropdown', () => { createStore(); createComponent({ provide: { multipleIssueBoardsAvailable: true } }); expect(findDropdown().exists()).toBe(true); @@ -258,7 +258,7 @@ describe('BoardsSelector', () => { }); describe('when multipleIssueBoardsAvailable is disabled but it hasMissingBoards', () => { - it('show dropdown', async () => { + it('show dropdown', () => { createStore(); createComponent({ provide: { multipleIssueBoardsAvailable: false, hasMissingBoards: true }, @@ -268,7 +268,7 @@ describe('BoardsSelector', () => { }); describe("when multipleIssueBoardsAvailable is disabled and it dosn't hasMissingBoards", () => { - it('hide dropdown', async () => { + it('hide dropdown', () => { createStore(); createComponent({ provide: { multipleIssueBoardsAvailable: false, hasMissingBoards: false }, diff --git a/spec/frontend/boards/components/sidebar/board_sidebar_title_spec.js b/spec/frontend/boards/components/sidebar/board_sidebar_title_spec.js index a20884baf3b..fae3b0c5d1a 100644 --- a/spec/frontend/boards/components/sidebar/board_sidebar_title_spec.js +++ b/spec/frontend/boards/components/sidebar/board_sidebar_title_spec.js @@ -1,9 +1,17 @@ import { GlAlert, GlFormInput, GlForm, GlLink } from '@gitlab/ui'; import { shallowMount } from '@vue/test-utils'; -import { nextTick } from 
'vue'; +import Vue, { nextTick } from 'vue'; +import VueApollo from 'vue-apollo'; +import createMockApollo from 'helpers/mock_apollo_helper'; +import waitForPromises from 'helpers/wait_for_promises'; import BoardEditableItem from '~/boards/components/sidebar/board_editable_item.vue'; import BoardSidebarTitle from '~/boards/components/sidebar/board_sidebar_title.vue'; import { createStore } from '~/boards/stores'; +import issueSetTitleMutation from '~/boards/graphql/issue_set_title.mutation.graphql'; +import updateEpicTitleMutation from '~/sidebar/queries/update_epic_title.mutation.graphql'; +import { updateIssueTitleResponse, updateEpicTitleResponse } from '../../mock_data'; + +Vue.use(VueApollo); const TEST_TITLE = 'New item title'; const TEST_ISSUE_A = { @@ -21,24 +29,43 @@ const TEST_ISSUE_B = { webUrl: 'webUrl', }; -describe('~/boards/components/sidebar/board_sidebar_title.vue', () => { +describe('BoardSidebarTitle', () => { let wrapper; let store; + let mockApollo; + + const issueSetTitleMutationHandlerSuccess = jest.fn().mockResolvedValue(updateIssueTitleResponse); + const updateEpicTitleMutationHandlerSuccess = jest + .fn() + .mockResolvedValue(updateEpicTitleResponse); afterEach(() => { localStorage.clear(); store = null; }); - const createWrapper = (item = TEST_ISSUE_A) => { + const createWrapper = ({ item = TEST_ISSUE_A, provide = {} } = {}) => { store = createStore(); store.state.boardItems = { [item.id]: { ...item } }; store.dispatch('setActiveId', { id: item.id }); + mockApollo = createMockApollo([ + [issueSetTitleMutation, issueSetTitleMutationHandlerSuccess], + [updateEpicTitleMutation, updateEpicTitleMutationHandlerSuccess], + ]); wrapper = shallowMount(BoardSidebarTitle, { store, + apolloProvider: mockApollo, provide: { canUpdate: true, + fullPath: 'gitlab-org', + issuableType: 'issue', + isEpicBoard: false, + isApolloBoard: false, + ...provide, + }, + propsData: { + activeItem: item, }, stubs: { 'board-editable-item': BoardEditableItem, @@ -86,7 
+113,8 @@ describe('~/boards/components/sidebar/board_sidebar_title.vue', () => { await nextTick(); }); - it('collapses sidebar and renders new title', () => { + it('collapses sidebar and renders new title', async () => { + await waitForPromises(); expect(findCollapsed().isVisible()).toBe(true); expect(findTitle().text()).toContain(TEST_TITLE); }); @@ -140,7 +168,7 @@ describe('~/boards/components/sidebar/board_sidebar_title.vue', () => { createWrapper(); }); - it('sets title, expands item and shows alert', async () => { + it('sets title, expands item and shows alert', () => { expect(wrapper.vm.title).toBe(TEST_TITLE); expect(findCollapsed().isVisible()).toBe(false); expect(findAlert().exists()).toBe(true); @@ -149,7 +177,7 @@ describe('~/boards/components/sidebar/board_sidebar_title.vue', () => { describe('when cancel button is clicked', () => { beforeEach(async () => { - createWrapper(TEST_ISSUE_B); + createWrapper({ item: TEST_ISSUE_B }); jest.spyOn(wrapper.vm, 'setActiveItemTitle').mockImplementation(() => { store.state.boardItems[TEST_ISSUE_B.id].title = TEST_TITLE; @@ -168,7 +196,7 @@ describe('~/boards/components/sidebar/board_sidebar_title.vue', () => { describe('when the mutation fails', () => { beforeEach(async () => { - createWrapper(TEST_ISSUE_B); + createWrapper({ item: TEST_ISSUE_B }); jest.spyOn(wrapper.vm, 'setActiveItemTitle').mockImplementation(() => { throw new Error(['failed mutation']); @@ -185,4 +213,32 @@ describe('~/boards/components/sidebar/board_sidebar_title.vue', () => { expect(wrapper.vm.setError).toHaveBeenCalled(); }); }); + + describe('Apollo boards', () => { + it.each` + issuableType | isEpicBoard | queryHandler | notCalledHandler + ${'issue'} | ${false} | ${issueSetTitleMutationHandlerSuccess} | ${updateEpicTitleMutationHandlerSuccess} + ${'epic'} | ${true} | ${updateEpicTitleMutationHandlerSuccess} | ${issueSetTitleMutationHandlerSuccess} + `( + 'updates $issuableType title', + async ({ issuableType, isEpicBoard, queryHandler, 
notCalledHandler }) => { + createWrapper({ + provide: { + issuableType, + isEpicBoard, + isApolloBoard: true, + }, + }); + + await nextTick(); + + findFormInput().vm.$emit('input', TEST_TITLE); + findForm().vm.$emit('submit', { preventDefault: () => {} }); + await nextTick(); + + expect(queryHandler).toHaveBeenCalled(); + expect(notCalledHandler).not.toHaveBeenCalled(); + }, + ); + }); }); diff --git a/spec/frontend/boards/mock_data.js b/spec/frontend/boards/mock_data.js index e5167120542..74733d1fd95 100644 --- a/spec/frontend/boards/mock_data.js +++ b/spec/frontend/boards/mock_data.js @@ -277,6 +277,9 @@ export const labels = [ }, ]; +export const mockIssueFullPath = 'gitlab-org/test-subgroup/gitlab-test'; +export const mockEpicFullPath = 'gitlab-org/test-subgroup'; + export const rawIssue = { title: 'Issue 1', id: 'gid://gitlab/Issue/436', @@ -302,12 +305,24 @@ export const rawIssue = { epic: { id: 'gid://gitlab/Epic/41', }, + totalTimeSpent: 0, + humanTimeEstimate: null, + humanTotalTimeSpent: null, + emailsDisabled: false, + hidden: false, + webUrl: `${mockIssueFullPath}/-/issue/27`, + relativePosition: null, + severity: null, + milestone: null, + weight: null, + blocked: false, + blockedByCount: 0, + iteration: null, + healthStatus: null, type: 'ISSUE', + __typename: 'Issue', }; -export const mockIssueFullPath = 'gitlab-org/test-subgroup/gitlab-test'; -export const mockEpicFullPath = 'gitlab-org/test-subgroup'; - export const mockIssue = { id: 'gid://gitlab/Issue/436', iid: '27', @@ -329,7 +344,22 @@ export const mockIssue = { epic: { id: 'gid://gitlab/Epic/41', }, + totalTimeSpent: 0, + humanTimeEstimate: null, + humanTotalTimeSpent: null, + emailsDisabled: false, + hidden: false, + webUrl: `${mockIssueFullPath}/-/issue/27`, + relativePosition: null, + severity: null, + milestone: null, + weight: null, + blocked: false, + blockedByCount: 0, + iteration: null, + healthStatus: null, type: 'ISSUE', + __typename: 'Issue', }; export const mockEpic = { @@ -425,7 
+455,58 @@ export const mockIssue4 = { epic: null, }; +export const mockIssue5 = { + id: 'gid://gitlab/Issue/440', + iid: 40, + title: 'Issue 5', + referencePath: '#40', + dueDate: null, + timeEstimate: 0, + confidential: false, + path: '/gitlab-org/gitlab-test/-/issues/40', + assignees, + labels, + epic: null, +}; + +export const mockIssue6 = { + id: 'gid://gitlab/Issue/441', + iid: 41, + title: 'Issue 6', + referencePath: '#41', + dueDate: null, + timeEstimate: 0, + confidential: false, + path: '/gitlab-org/gitlab-test/-/issues/41', + assignees, + labels, + epic: null, +}; + +export const mockIssue7 = { + id: 'gid://gitlab/Issue/442', + iid: 42, + title: 'Issue 6', + referencePath: '#42', + dueDate: null, + timeEstimate: 0, + confidential: false, + path: '/gitlab-org/gitlab-test/-/issues/42', + assignees, + labels, + epic: null, +}; + export const mockIssues = [mockIssue, mockIssue2]; +export const mockIssuesMore = [ + mockIssue, + mockIssue2, + mockIssue3, + mockIssue4, + mockIssue5, + mockIssue6, + mockIssue7, +]; export const BoardsMockData = { GET: { @@ -925,4 +1006,26 @@ export const epicBoardListQueryResponse = (totalWeight = 5) => ({ }, }); +export const updateIssueTitleResponse = { + data: { + updateIssuableTitle: { + issue: { + id: 'gid://gitlab/Issue/436', + title: 'Issue 1 edit', + }, + }, + }, +}; + +export const updateEpicTitleResponse = { + data: { + updateIssuableTitle: { + epic: { + id: 'gid://gitlab/Epic/426', + title: 'Epic 1 edit', + }, + }, + }, +}; + export const DEFAULT_COLOR = '#1068bf'; diff --git a/spec/frontend/boards/stores/actions_spec.js b/spec/frontend/boards/stores/actions_spec.js index f430062bb73..b8d3be28ca6 100644 --- a/spec/frontend/boards/stores/actions_spec.js +++ b/spec/frontend/boards/stores/actions_spec.js @@ -401,7 +401,7 @@ describe('fetchMilestones', () => { }, ); - it('sets milestonesLoading to true', async () => { + it('sets milestonesLoading to true', () => { jest.spyOn(gqlClient, 
'query').mockResolvedValue(queryResponse); const store = createStore(); diff --git a/spec/frontend/branches/components/delete_branch_modal_spec.js b/spec/frontend/branches/components/delete_branch_modal_spec.js index dd5b7fca564..7851d86466f 100644 --- a/spec/frontend/branches/components/delete_branch_modal_spec.js +++ b/spec/frontend/branches/components/delete_branch_modal_spec.js @@ -7,6 +7,8 @@ import DeleteBranchModal from '~/branches/components/delete_branch_modal.vue'; import eventHub from '~/branches/event_hub'; let wrapper; +let showMock; +let hideMock; const branchName = 'test_modal'; const defaultBranchName = 'default'; @@ -14,23 +16,20 @@ const deletePath = '/path/to/branch'; const merged = false; const isProtectedBranch = false; -const createComponent = (data = {}) => { +const createComponent = () => { + showMock = jest.fn(); + hideMock = jest.fn(); + wrapper = extendedWrapper( shallowMount(DeleteBranchModal, { - data() { - return { - branchName, - deletePath, - defaultBranchName, - merged, - isProtectedBranch, - ...data, - }; - }, stubs: { GlModal: stubComponent(GlModal, { template: '
', + methods: { + show: showMock, + hide: hideMock, + }, }), GlButton, GlFormInput, @@ -46,21 +45,36 @@ const findDeleteButton = () => wrapper.findByTestId('delete-branch-confirmation- const findCancelButton = () => wrapper.findByTestId('delete-branch-cancel-button'); const findFormInput = () => wrapper.findComponent(GlFormInput); const findForm = () => wrapper.find('form'); -const submitFormSpy = () => jest.spyOn(wrapper.vm.$refs.form, 'submit'); +const createSubmitFormSpy = () => jest.spyOn(findForm().element, 'submit'); + +const emitOpenModal = (data = {}) => + eventHub.$emit('openModal', { + isProtectedBranch, + branchName, + defaultBranchName, + deletePath, + merged, + ...data, + }); describe('Delete branch modal', () => { const expectedUnmergedWarning = "This branch hasn't been merged into default. To avoid data loss, consider merging this branch before deleting it."; + beforeEach(() => { + createComponent(); + + emitOpenModal(); + + showMock.mockClear(); + hideMock.mockClear(); + }); + describe('Deleting a regular branch', () => { const expectedTitle = 'Delete branch. 
Are you ABSOLUTELY SURE?'; const expectedWarning = "You're about to permanently delete the branch test_modal."; const expectedMessage = `${expectedWarning} ${expectedUnmergedWarning}`; - beforeEach(() => { - createComponent(); - }); - it('renders the modal correctly', () => { expect(findModal().props('title')).toBe(expectedTitle); expect(findModalMessage().text()).toMatchInterpolatedText(expectedMessage); @@ -70,32 +84,30 @@ describe('Delete branch modal', () => { }); it('submits the form when the delete button is clicked', () => { + const submitSpy = createSubmitFormSpy(); + + expect(submitSpy).not.toHaveBeenCalled(); + findDeleteButton().trigger('click'); expect(findForm().attributes('action')).toBe(deletePath); - expect(submitFormSpy()).toHaveBeenCalled(); + expect(submitSpy).toHaveBeenCalled(); }); - it('calls show on the modal when a `openModal` event is received through the event hub', async () => { - const showSpy = jest.spyOn(wrapper.vm.$refs.modal, 'show'); + it('calls show on the modal when a `openModal` event is received through the event hub', () => { + expect(showMock).not.toHaveBeenCalled(); - eventHub.$emit('openModal', { - isProtectedBranch, - branchName, - defaultBranchName, - deletePath, - merged, - }); + emitOpenModal(); - expect(showSpy).toHaveBeenCalled(); + expect(showMock).toHaveBeenCalled(); }); it('calls hide on the modal when cancel button is clicked', () => { - const closeModalSpy = jest.spyOn(wrapper.vm.$refs.modal, 'hide'); + expect(hideMock).not.toHaveBeenCalled(); findCancelButton().trigger('click'); - expect(closeModalSpy).toHaveBeenCalled(); + expect(hideMock).toHaveBeenCalled(); }); }); @@ -108,7 +120,9 @@ describe('Delete branch modal', () => { 'After you confirm and select Yes, delete protected branch, you cannot recover this branch. 
Please type the following to confirm: test_modal'; beforeEach(() => { - createComponent({ isProtectedBranch: true }); + emitOpenModal({ + isProtectedBranch: true, + }); }); describe('rendering the modal correctly for a protected branch', () => { @@ -138,8 +152,11 @@ describe('Delete branch modal', () => { await waitForPromises(); + const submitSpy = createSubmitFormSpy(); + findDeleteButton().trigger('click'); - expect(submitFormSpy()).not.toHaveBeenCalled(); + + expect(submitSpy).not.toHaveBeenCalled(); }); it('opens with the delete button disabled and enables it when branch name is confirmed and fires submit', async () => { @@ -151,16 +168,23 @@ describe('Delete branch modal', () => { expect(findDeleteButton().props('disabled')).not.toBe(true); + const submitSpy = createSubmitFormSpy(); + + expect(submitSpy).not.toHaveBeenCalled(); + findDeleteButton().trigger('click'); - expect(submitFormSpy()).toHaveBeenCalled(); + + expect(submitSpy).toHaveBeenCalled(); }); }); describe('Deleting a merged branch', () => { - it('does not include the unmerged branch warning when merged is true', () => { - createComponent({ merged: true }); + beforeEach(() => { + emitOpenModal({ merged: true }); + }); - expect(findModalMessage().html()).not.toContain(expectedUnmergedWarning); + it('does not include the unmerged branch warning when merged is true', () => { + expect(findModalMessage().text()).not.toContain(expectedUnmergedWarning); }); }); }); diff --git a/spec/frontend/branches/components/delete_merged_branches_spec.js b/spec/frontend/branches/components/delete_merged_branches_spec.js index 75a669c78f2..1e4d7082ccd 100644 --- a/spec/frontend/branches/components/delete_merged_branches_spec.js +++ b/spec/frontend/branches/components/delete_merged_branches_spec.js @@ -1,4 +1,4 @@ -import { GlButton, GlModal, GlFormInput, GlSprintf } from '@gitlab/ui'; +import { GlButton, GlFormInput, GlModal, GlSprintf } from '@gitlab/ui'; import { mount } from '@vue/test-utils'; import { 
shallowMountExtended } from 'helpers/vue_test_utils_helper'; import { stubComponent } from 'helpers/stub_component'; @@ -10,11 +10,17 @@ import { formPath, propsDataMock } from '../mock_data'; jest.mock('~/lib/utils/csrf', () => ({ token: 'mock-csrf-token' })); let wrapper; +const modalShowSpy = jest.fn(); +const modalHideSpy = jest.fn(); const stubsData = { GlModal: stubComponent(GlModal, { template: '
', + methods: { + show: modalShowSpy, + hide: modalHideSpy, + }, }), GlButton, GlFormInput, @@ -65,11 +71,10 @@ describe('Delete merged branches component', () => { }); it('opens modal when clicked', () => { - createComponent(mount); - jest.spyOn(wrapper.vm.$refs.modal, 'show'); + createComponent(mount, stubsData); findDeleteButton().trigger('click'); - expect(wrapper.vm.$refs.modal.show).toHaveBeenCalled(); + expect(modalShowSpy).toHaveBeenCalled(); }); }); @@ -131,9 +136,8 @@ describe('Delete merged branches component', () => { }); it('calls hide on the modal when cancel button is clicked', () => { - const closeModalSpy = jest.spyOn(wrapper.vm.$refs.modal, 'hide'); findCancelButton().trigger('click'); - expect(closeModalSpy).toHaveBeenCalled(); + expect(modalHideSpy).toHaveBeenCalled(); }); }); }); diff --git a/spec/frontend/captcha/captcha_modal_spec.js b/spec/frontend/captcha/captcha_modal_spec.js index 6d6d8043797..4bbed8ab3bb 100644 --- a/spec/frontend/captcha/captcha_modal_spec.js +++ b/spec/frontend/captcha/captcha_modal_spec.js @@ -61,12 +61,12 @@ describe('Captcha Modal', () => { describe('functionality', () => { describe('when modal is shown', () => { describe('when initRecaptchaScript promise resolves successfully', () => { - beforeEach(async () => { + beforeEach(() => { createComponent({ props: { needsCaptchaResponse: true } }); findGlModal().vm.$emit('shown'); }); - it('shows modal', async () => { + it('shows modal', () => { expect(showSpy).toHaveBeenCalled(); }); @@ -90,7 +90,7 @@ describe('Captcha Modal', () => { expect(wrapper.emitted('receivedCaptchaResponse')).toEqual([[captchaResponse]]); }); - it('hides modal with null trigger', async () => { + it('hides modal with null trigger', () => { // Assert that hide is called with zero args, so that we don't trigger the logic // for hiding the modal via cancel, esc, headerclose, etc, without a captcha response expect(hideSpy).toHaveBeenCalledWith(); diff --git 
a/spec/frontend/captcha/init_recaptcha_script_spec.js b/spec/frontend/captcha/init_recaptcha_script_spec.js index 78480821d95..3e2d7ba00ee 100644 --- a/spec/frontend/captcha/init_recaptcha_script_spec.js +++ b/spec/frontend/captcha/init_recaptcha_script_spec.js @@ -50,7 +50,7 @@ describe('initRecaptchaScript', () => { await expect(result).resolves.toBe(window.grecaptcha); }); - it('sets window[RECAPTCHA_ONLOAD_CALLBACK_NAME] to undefined', async () => { + it('sets window[RECAPTCHA_ONLOAD_CALLBACK_NAME] to undefined', () => { expect(getScriptOnload()).toBeUndefined(); }); }); diff --git a/spec/frontend/ci/artifacts/components/app_spec.js b/spec/frontend/ci/artifacts/components/app_spec.js new file mode 100644 index 00000000000..435b03e82ab --- /dev/null +++ b/spec/frontend/ci/artifacts/components/app_spec.js @@ -0,0 +1,109 @@ +import { GlSkeletonLoader } from '@gitlab/ui'; +import VueApollo from 'vue-apollo'; +import Vue from 'vue'; +import { numberToHumanSize } from '~/lib/utils/number_utils'; +import ArtifactsApp from '~/ci/artifacts/components/app.vue'; +import JobArtifactsTable from '~/ci/artifacts/components/job_artifacts_table.vue'; +import getBuildArtifactsSizeQuery from '~/ci/artifacts/graphql/queries/get_build_artifacts_size.query.graphql'; +import { shallowMountExtended } from 'helpers/vue_test_utils_helper'; +import createMockApollo from 'helpers/mock_apollo_helper'; +import waitForPromises from 'helpers/wait_for_promises'; +import { PAGE_TITLE, TOTAL_ARTIFACTS_SIZE, SIZE_UNKNOWN } from '~/ci/artifacts/constants'; + +const TEST_BUILD_ARTIFACTS_SIZE = 1024; +const TEST_PROJECT_PATH = 'project/path'; +const TEST_PROJECT_ID = 'gid://gitlab/Project/22'; + +const createBuildArtifactsSizeResponse = (buildArtifactsSize) => ({ + data: { + project: { + __typename: 'Project', + id: TEST_PROJECT_ID, + statistics: { + __typename: 'ProjectStatistics', + buildArtifactsSize, + }, + }, + }, +}); + +Vue.use(VueApollo); + +describe('ArtifactsApp component', () => { + let 
wrapper; + let apolloProvider; + let getBuildArtifactsSizeSpy; + + const findTitle = () => wrapper.findByTestId('artifacts-page-title'); + const findBuildArtifactsSize = () => wrapper.findByTestId('build-artifacts-size'); + const findJobArtifactsTable = () => wrapper.findComponent(JobArtifactsTable); + const findSkeletonLoader = () => wrapper.findComponent(GlSkeletonLoader); + + const createComponent = () => { + wrapper = shallowMountExtended(ArtifactsApp, { + provide: { projectPath: 'project/path' }, + apolloProvider, + }); + }; + + beforeEach(() => { + getBuildArtifactsSizeSpy = jest.fn(); + + apolloProvider = createMockApollo([[getBuildArtifactsSizeQuery, getBuildArtifactsSizeSpy]]); + }); + + describe('when loading', () => { + beforeEach(() => { + // Promise that never resolves so it's always loading + getBuildArtifactsSizeSpy.mockReturnValue(new Promise(() => {})); + + createComponent(); + }); + + it('shows the page title', () => { + expect(findTitle().text()).toBe(PAGE_TITLE); + }); + + it('shows a skeleton while loading the artifacts size', () => { + expect(findSkeletonLoader().exists()).toBe(true); + }); + + it('shows the job artifacts table', () => { + expect(findJobArtifactsTable().exists()).toBe(true); + }); + + it('does not show message', () => { + expect(findBuildArtifactsSize().text()).toBe(''); + }); + + it('calls apollo query', () => { + expect(getBuildArtifactsSizeSpy).toHaveBeenCalledWith({ projectPath: TEST_PROJECT_PATH }); + }); + }); + + describe.each` + buildArtifactsSize | expectedText + ${TEST_BUILD_ARTIFACTS_SIZE} | ${numberToHumanSize(TEST_BUILD_ARTIFACTS_SIZE)} + ${null} | ${SIZE_UNKNOWN} + `('when buildArtifactsSize is $buildArtifactsSize', ({ buildArtifactsSize, expectedText }) => { + beforeEach(async () => { + getBuildArtifactsSizeSpy.mockResolvedValue( + createBuildArtifactsSizeResponse(buildArtifactsSize), + ); + + createComponent(); + + await waitForPromises(); + }); + + it('hides loader', () => { + 
expect(findSkeletonLoader().exists()).toBe(false); + }); + + it('shows the size', () => { + expect(findBuildArtifactsSize().text()).toMatchInterpolatedText( + `${TOTAL_ARTIFACTS_SIZE} ${expectedText}`, + ); + }); + }); +}); diff --git a/spec/frontend/ci/artifacts/components/artifact_row_spec.js b/spec/frontend/ci/artifacts/components/artifact_row_spec.js new file mode 100644 index 00000000000..f64d32410ed --- /dev/null +++ b/spec/frontend/ci/artifacts/components/artifact_row_spec.js @@ -0,0 +1,105 @@ +import { GlBadge, GlFriendlyWrap, GlFormCheckbox } from '@gitlab/ui'; +import mockGetJobArtifactsResponse from 'test_fixtures/graphql/ci/artifacts/graphql/queries/get_job_artifacts.query.graphql.json'; +import { numberToHumanSize } from '~/lib/utils/number_utils'; +import { shallowMountExtended } from 'helpers/vue_test_utils_helper'; +import waitForPromises from 'helpers/wait_for_promises'; +import ArtifactRow from '~/ci/artifacts/components/artifact_row.vue'; +import { BULK_DELETE_FEATURE_FLAG } from '~/ci/artifacts/constants'; + +describe('ArtifactRow component', () => { + let wrapper; + + const artifact = mockGetJobArtifactsResponse.data.project.jobs.nodes[0].artifacts.nodes[0]; + + const findName = () => wrapper.findByTestId('job-artifact-row-name'); + const findBadge = () => wrapper.findComponent(GlBadge); + const findSize = () => wrapper.findByTestId('job-artifact-row-size'); + const findDownloadButton = () => wrapper.findByTestId('job-artifact-row-download-button'); + const findDeleteButton = () => wrapper.findByTestId('job-artifact-row-delete-button'); + const findCheckbox = () => wrapper.findComponent(GlFormCheckbox); + + const createComponent = ({ canDestroyArtifacts = true, glFeatures = {} } = {}) => { + wrapper = shallowMountExtended(ArtifactRow, { + propsData: { + artifact, + isSelected: false, + isLoading: false, + isLastRow: false, + }, + provide: { canDestroyArtifacts, glFeatures }, + stubs: { GlBadge, GlFriendlyWrap }, + }); + }; + + 
describe('artifact details', () => { + beforeEach(async () => { + createComponent(); + + await waitForPromises(); + }); + + it('displays the artifact name and type', () => { + expect(findName().text()).toContain(artifact.name); + expect(findBadge().text()).toBe(artifact.fileType.toLowerCase()); + }); + + it('displays the artifact size', () => { + expect(findSize().text()).toBe(numberToHumanSize(artifact.size)); + }); + + it('displays the download button as a link to the download path', () => { + expect(findDownloadButton().attributes('href')).toBe(artifact.downloadPath); + }); + }); + + describe('delete button', () => { + it('does not show when user does not have permission', () => { + createComponent({ canDestroyArtifacts: false }); + + expect(findDeleteButton().exists()).toBe(false); + }); + + it('shows when user has permission', () => { + createComponent(); + + expect(findDeleteButton().exists()).toBe(true); + }); + + it('emits the delete event when clicked', async () => { + createComponent(); + + expect(wrapper.emitted('delete')).toBeUndefined(); + + findDeleteButton().vm.$emit('click'); + await waitForPromises(); + + expect(wrapper.emitted('delete')).toBeDefined(); + }); + }); + + describe('bulk delete checkbox', () => { + describe('with permission and feature flag enabled', () => { + beforeEach(() => { + createComponent({ glFeatures: { [BULK_DELETE_FEATURE_FLAG]: true } }); + }); + + it('emits selectArtifact when toggled', () => { + findCheckbox().vm.$emit('input', true); + + expect(wrapper.emitted('selectArtifact')).toStrictEqual([[artifact, true]]); + }); + }); + + it('is not shown without permission', () => { + createComponent({ canDestroyArtifacts: false }); + + expect(findCheckbox().exists()).toBe(false); + }); + + it('is not shown with feature flag disabled', () => { + createComponent(); + + expect(findCheckbox().exists()).toBe(false); + }); + }); +}); diff --git a/spec/frontend/ci/artifacts/components/artifacts_bulk_delete_spec.js 
b/spec/frontend/ci/artifacts/components/artifacts_bulk_delete_spec.js new file mode 100644 index 00000000000..9e4fa6b9c6f --- /dev/null +++ b/spec/frontend/ci/artifacts/components/artifacts_bulk_delete_spec.js @@ -0,0 +1,48 @@ +import { GlSprintf } from '@gitlab/ui'; +import mockGetJobArtifactsResponse from 'test_fixtures/graphql/ci/artifacts/graphql/queries/get_job_artifacts.query.graphql.json'; +import { shallowMountExtended } from 'helpers/vue_test_utils_helper'; +import ArtifactsBulkDelete from '~/ci/artifacts/components/artifacts_bulk_delete.vue'; + +describe('ArtifactsBulkDelete component', () => { + let wrapper; + + const selectedArtifacts = [ + mockGetJobArtifactsResponse.data.project.jobs.nodes[0].artifacts.nodes[0].id, + mockGetJobArtifactsResponse.data.project.jobs.nodes[0].artifacts.nodes[1].id, + ]; + + const findText = () => wrapper.findComponent(GlSprintf).text(); + const findDeleteButton = () => wrapper.findByTestId('bulk-delete-delete-button'); + const findClearButton = () => wrapper.findByTestId('bulk-delete-clear-button'); + + const createComponent = () => { + wrapper = shallowMountExtended(ArtifactsBulkDelete, { + propsData: { + selectedArtifacts, + }, + stubs: { GlSprintf }, + }); + }; + + describe('selected artifacts box', () => { + beforeEach(() => { + createComponent(); + }); + + it('displays selected artifacts count', () => { + expect(findText()).toContain(String(selectedArtifacts.length)); + }); + + it('emits showBulkDeleteModal event when the delete button is clicked', () => { + findDeleteButton().vm.$emit('click'); + + expect(wrapper.emitted('showBulkDeleteModal')).toBeDefined(); + }); + + it('emits clearSelectedArtifacts event when the clear button is clicked', () => { + findClearButton().vm.$emit('click'); + + expect(wrapper.emitted('clearSelectedArtifacts')).toBeDefined(); + }); + }); +}); diff --git a/spec/frontend/ci/artifacts/components/artifacts_table_row_details_spec.js 
b/spec/frontend/ci/artifacts/components/artifacts_table_row_details_spec.js new file mode 100644 index 00000000000..ebdb7e25c45 --- /dev/null +++ b/spec/frontend/ci/artifacts/components/artifacts_table_row_details_spec.js @@ -0,0 +1,137 @@ +import { GlModal } from '@gitlab/ui'; +import Vue from 'vue'; +import VueApollo from 'vue-apollo'; +import getJobArtifactsResponse from 'test_fixtures/graphql/ci/artifacts/graphql/queries/get_job_artifacts.query.graphql.json'; +import waitForPromises from 'helpers/wait_for_promises'; +import ArtifactsTableRowDetails from '~/ci/artifacts/components/artifacts_table_row_details.vue'; +import ArtifactRow from '~/ci/artifacts/components/artifact_row.vue'; +import ArtifactDeleteModal from '~/ci/artifacts/components/artifact_delete_modal.vue'; +import createMockApollo from 'helpers/mock_apollo_helper'; +import { mountExtended } from 'helpers/vue_test_utils_helper'; +import destroyArtifactMutation from '~/ci/artifacts/graphql/mutations/destroy_artifact.mutation.graphql'; +import { I18N_DESTROY_ERROR, I18N_MODAL_TITLE } from '~/ci/artifacts/constants'; +import { createAlert } from '~/alert'; + +jest.mock('~/alert'); + +const { artifacts } = getJobArtifactsResponse.data.project.jobs.nodes[0]; +const refetchArtifacts = jest.fn(); + +Vue.use(VueApollo); + +describe('ArtifactsTableRowDetails component', () => { + let wrapper; + let requestHandlers; + + const findModal = () => wrapper.findComponent(GlModal); + + const createComponent = ({ + handlers = { + destroyArtifactMutation: jest.fn(), + }, + selectedArtifacts = [], + } = {}) => { + requestHandlers = handlers; + wrapper = mountExtended(ArtifactsTableRowDetails, { + apolloProvider: createMockApollo([ + [destroyArtifactMutation, requestHandlers.destroyArtifactMutation], + ]), + propsData: { + artifacts, + selectedArtifacts, + refetchArtifacts, + queryVariables: {}, + }, + provide: { canDestroyArtifacts: true }, + data() { + return { deletingArtifactId: null }; + }, + }); + }; + + 
describe('passes correct props', () => { + beforeEach(() => { + createComponent(); + }); + + it('to the artifact rows', () => { + [0, 1, 2].forEach((index) => { + expect(wrapper.findAllComponents(ArtifactRow).at(index).props()).toMatchObject({ + artifact: artifacts.nodes[index], + }); + }); + }); + }); + + describe('when the artifact row emits the delete event', () => { + it('shows the artifact delete modal', async () => { + createComponent(); + await waitForPromises(); + + expect(findModal().props('visible')).toBe(false); + + await wrapper.findComponent(ArtifactRow).vm.$emit('delete'); + + expect(findModal().props('visible')).toBe(true); + expect(findModal().props('title')).toBe(I18N_MODAL_TITLE(artifacts.nodes[0].name)); + }); + }); + + describe('when the artifact delete modal emits its primary event', () => { + it('triggers the destroyArtifact GraphQL mutation', async () => { + createComponent(); + await waitForPromises(); + + wrapper.findComponent(ArtifactRow).vm.$emit('delete'); + wrapper.findComponent(ArtifactDeleteModal).vm.$emit('primary'); + + expect(requestHandlers.destroyArtifactMutation).toHaveBeenCalledWith({ + id: artifacts.nodes[0].id, + }); + }); + + it('displays an alert message and refetches artifacts when the mutation fails', async () => { + createComponent({ + destroyArtifactMutation: jest.fn().mockRejectedValue(new Error('Error!')), + }); + await waitForPromises(); + + expect(wrapper.emitted('refetch')).toBeUndefined(); + + wrapper.findComponent(ArtifactRow).vm.$emit('delete'); + wrapper.findComponent(ArtifactDeleteModal).vm.$emit('primary'); + await waitForPromises(); + + expect(createAlert).toHaveBeenCalledWith({ message: I18N_DESTROY_ERROR }); + expect(wrapper.emitted('refetch')).toBeDefined(); + }); + }); + + describe('when the artifact delete modal is cancelled', () => { + it('does not trigger the destroyArtifact GraphQL mutation', async () => { + createComponent(); + await waitForPromises(); + + 
wrapper.findComponent(ArtifactRow).vm.$emit('delete'); + wrapper.findComponent(ArtifactDeleteModal).vm.$emit('cancel'); + + expect(requestHandlers.destroyArtifactMutation).not.toHaveBeenCalled(); + }); + }); + + describe('bulk delete selection', () => { + it('is not selected for unselected artifact', async () => { + createComponent(); + await waitForPromises(); + + expect(wrapper.findAllComponents(ArtifactRow).at(0).props('isSelected')).toBe(false); + }); + + it('is selected for selected artifacts', async () => { + createComponent({ selectedArtifacts: [artifacts.nodes[0].id] }); + await waitForPromises(); + + expect(wrapper.findAllComponents(ArtifactRow).at(0).props('isSelected')).toBe(true); + }); + }); +}); diff --git a/spec/frontend/ci/artifacts/components/feedback_banner_spec.js b/spec/frontend/ci/artifacts/components/feedback_banner_spec.js new file mode 100644 index 00000000000..53e0fdac6f6 --- /dev/null +++ b/spec/frontend/ci/artifacts/components/feedback_banner_spec.js @@ -0,0 +1,59 @@ +import { GlBanner } from '@gitlab/ui'; +import { shallowMount } from '@vue/test-utils'; +import FeedbackBanner from '~/ci/artifacts/components/feedback_banner.vue'; +import { makeMockUserCalloutDismisser } from 'helpers/mock_user_callout_dismisser'; +import { + I18N_FEEDBACK_BANNER_TITLE, + I18N_FEEDBACK_BANNER_BUTTON, + FEEDBACK_URL, +} from '~/ci/artifacts/constants'; + +const mockBannerImagePath = 'banner/image/path'; + +describe('Artifacts management feedback banner', () => { + let wrapper; + let userCalloutDismissSpy; + + const findBanner = () => wrapper.findComponent(GlBanner); + + const createComponent = ({ shouldShowCallout = true } = {}) => { + userCalloutDismissSpy = jest.fn(); + + wrapper = shallowMount(FeedbackBanner, { + provide: { + artifactsManagementFeedbackImagePath: mockBannerImagePath, + }, + stubs: { + UserCalloutDismisser: makeMockUserCalloutDismisser({ + dismiss: userCalloutDismissSpy, + shouldShowCallout, + }), + }, + }); + }; + + it('is displayed with 
the correct props', () => { + createComponent(); + + expect(findBanner().props()).toMatchObject({ + title: I18N_FEEDBACK_BANNER_TITLE, + buttonText: I18N_FEEDBACK_BANNER_BUTTON, + buttonLink: FEEDBACK_URL, + svgPath: mockBannerImagePath, + }); + }); + + it('dismisses the callout when closed', () => { + createComponent(); + + findBanner().vm.$emit('close'); + + expect(userCalloutDismissSpy).toHaveBeenCalled(); + }); + + it('is not displayed once it has been dismissed', () => { + createComponent({ shouldShowCallout: false }); + + expect(findBanner().exists()).toBe(false); + }); +}); diff --git a/spec/frontend/ci/artifacts/components/job_artifacts_table_spec.js b/spec/frontend/ci/artifacts/components/job_artifacts_table_spec.js new file mode 100644 index 00000000000..74d0d683662 --- /dev/null +++ b/spec/frontend/ci/artifacts/components/job_artifacts_table_spec.js @@ -0,0 +1,651 @@ +import { + GlLoadingIcon, + GlTable, + GlLink, + GlBadge, + GlPagination, + GlModal, + GlFormCheckbox, +} from '@gitlab/ui'; +import Vue from 'vue'; +import VueApollo from 'vue-apollo'; +import getJobArtifactsResponse from 'test_fixtures/graphql/ci/artifacts/graphql/queries/get_job_artifacts.query.graphql.json'; +import CiIcon from '~/vue_shared/components/ci_icon.vue'; +import waitForPromises from 'helpers/wait_for_promises'; +import JobArtifactsTable from '~/ci/artifacts/components/job_artifacts_table.vue'; +import FeedbackBanner from '~/ci/artifacts/components/feedback_banner.vue'; +import ArtifactsTableRowDetails from '~/ci/artifacts/components/artifacts_table_row_details.vue'; +import ArtifactDeleteModal from '~/ci/artifacts/components/artifact_delete_modal.vue'; +import ArtifactsBulkDelete from '~/ci/artifacts/components/artifacts_bulk_delete.vue'; +import BulkDeleteModal from '~/ci/artifacts/components/bulk_delete_modal.vue'; +import createMockApollo from 'helpers/mock_apollo_helper'; +import { mountExtended } from 'helpers/vue_test_utils_helper'; +import getJobArtifactsQuery from 
'~/ci/artifacts/graphql/queries/get_job_artifacts.query.graphql'; +import bulkDestroyArtifactsMutation from '~/ci/artifacts/graphql/mutations/bulk_destroy_job_artifacts.mutation.graphql'; +import { getIdFromGraphQLId, convertToGraphQLId } from '~/graphql_shared/utils'; +import { TYPENAME_PROJECT } from '~/graphql_shared/constants'; +import { + ARCHIVE_FILE_TYPE, + JOBS_PER_PAGE, + I18N_FETCH_ERROR, + INITIAL_CURRENT_PAGE, + BULK_DELETE_FEATURE_FLAG, + I18N_BULK_DELETE_ERROR, +} from '~/ci/artifacts/constants'; +import { totalArtifactsSizeForJob } from '~/ci/artifacts/utils'; +import { createAlert } from '~/alert'; + +jest.mock('~/alert'); + +Vue.use(VueApollo); + +describe('JobArtifactsTable component', () => { + let wrapper; + let requestHandlers; + + const mockToastShow = jest.fn(); + + const findBanner = () => wrapper.findComponent(FeedbackBanner); + + const findLoadingState = () => wrapper.findComponent(GlLoadingIcon); + const findTable = () => wrapper.findComponent(GlTable); + const findDetailsRows = () => wrapper.findAllComponents(ArtifactsTableRowDetails); + const findDetailsInRow = (i) => + findTable().findAll('tbody tr').at(i).findComponent(ArtifactsTableRowDetails); + + const findCount = () => wrapper.findByTestId('job-artifacts-count'); + const findCountAt = (i) => wrapper.findAllByTestId('job-artifacts-count').at(i); + + const findDeleteModal = () => wrapper.findComponent(ArtifactDeleteModal); + const findBulkDeleteModal = () => wrapper.findComponent(BulkDeleteModal); + + const findStatuses = () => wrapper.findAllByTestId('job-artifacts-job-status'); + const findSuccessfulJobStatus = () => findStatuses().at(0); + const findFailedJobStatus = () => findStatuses().at(1); + + const findLinks = () => wrapper.findAllComponents(GlLink); + const findJobLink = () => findLinks().at(0); + const findPipelineLink = () => findLinks().at(1); + const findRefLink = () => findLinks().at(2); + const findCommitLink = () => findLinks().at(3); + + const findSize = () => 
wrapper.findByTestId('job-artifacts-size'); + const findCreated = () => wrapper.findByTestId('job-artifacts-created'); + + const findDownloadButton = () => wrapper.findByTestId('job-artifacts-download-button'); + const findBrowseButton = () => wrapper.findByTestId('job-artifacts-browse-button'); + const findDeleteButton = () => wrapper.findByTestId('job-artifacts-delete-button'); + const findArtifactDeleteButton = () => wrapper.findByTestId('job-artifact-row-delete-button'); + + // first checkbox is a "select all", this finder should get the first job checkbox + const findJobCheckbox = (i = 1) => wrapper.findAllComponents(GlFormCheckbox).at(i); + const findAnyCheckbox = () => wrapper.findComponent(GlFormCheckbox); + const findBulkDelete = () => wrapper.findComponent(ArtifactsBulkDelete); + const findBulkDeleteContainer = () => wrapper.findByTestId('bulk-delete-container'); + + const findPagination = () => wrapper.findComponent(GlPagination); + const setPage = async (page) => { + findPagination().vm.$emit('input', page); + await waitForPromises(); + }; + + const projectId = 'some/project/id'; + + let enoughJobsToPaginate = [...getJobArtifactsResponse.data.project.jobs.nodes]; + while (enoughJobsToPaginate.length <= JOBS_PER_PAGE) { + enoughJobsToPaginate = [ + ...enoughJobsToPaginate, + ...getJobArtifactsResponse.data.project.jobs.nodes, + ]; + } + const getJobArtifactsResponseThatPaginates = { + data: { + project: { + jobs: { + nodes: enoughJobsToPaginate, + pageInfo: { ...getJobArtifactsResponse.data.project.jobs.pageInfo, hasNextPage: true }, + }, + }, + }, + }; + + const job = getJobArtifactsResponse.data.project.jobs.nodes[0]; + const archiveArtifact = job.artifacts.nodes.find( + (artifact) => artifact.fileType === ARCHIVE_FILE_TYPE, + ); + const job2 = getJobArtifactsResponse.data.project.jobs.nodes[1]; + + const destroyedCount = job.artifacts.nodes.length; + const destroyedIds = job.artifacts.nodes.map((node) => node.id); + const bulkDestroyMutationHandler = 
jest.fn().mockResolvedValue({ + data: { + bulkDestroyJobArtifacts: { errors: [], destroyedCount, destroyedIds }, + }, + }); + + const createComponent = ({ + handlers = { + getJobArtifactsQuery: jest.fn().mockResolvedValue(getJobArtifactsResponse), + bulkDestroyArtifactsMutation: bulkDestroyMutationHandler, + }, + data = {}, + canDestroyArtifacts = true, + glFeatures = {}, + } = {}) => { + requestHandlers = handlers; + wrapper = mountExtended(JobArtifactsTable, { + apolloProvider: createMockApollo([ + [getJobArtifactsQuery, requestHandlers.getJobArtifactsQuery], + [bulkDestroyArtifactsMutation, requestHandlers.bulkDestroyArtifactsMutation], + ]), + provide: { + projectPath: 'project/path', + projectId, + canDestroyArtifacts, + artifactsManagementFeedbackImagePath: 'banner/image/path', + glFeatures, + }, + mocks: { + $toast: { + show: mockToastShow, + }, + }, + data() { + return data; + }, + }); + }; + + it('renders feedback banner', () => { + createComponent(); + + expect(findBanner().exists()).toBe(true); + }); + + it('when loading, shows a loading state', () => { + createComponent(); + + expect(findLoadingState().exists()).toBe(true); + }); + + it('on error, shows an alert', async () => { + createComponent({ + handlers: { + getJobArtifactsQuery: jest.fn().mockRejectedValue(new Error('Error!')), + }, + }); + + await waitForPromises(); + + expect(createAlert).toHaveBeenCalledWith({ message: I18N_FETCH_ERROR }); + }); + + it('with data, renders the table', async () => { + createComponent(); + + await waitForPromises(); + + expect(findTable().exists()).toBe(true); + }); + + describe('job details', () => { + beforeEach(async () => { + createComponent(); + + await waitForPromises(); + }); + + it('shows the artifact count', () => { + expect(findCount().text()).toBe(`${job.artifacts.nodes.length} files`); + }); + + it('shows the job status as an icon for a successful job', () => { + expect(findSuccessfulJobStatus().findComponent(CiIcon).exists()).toBe(true); + 
expect(findSuccessfulJobStatus().findComponent(GlBadge).exists()).toBe(false); + }); + + it('shows the job status as a badge for other job statuses', () => { + expect(findFailedJobStatus().findComponent(GlBadge).exists()).toBe(true); + expect(findFailedJobStatus().findComponent(CiIcon).exists()).toBe(false); + }); + + it('shows links to the job, pipeline, ref, and commit', () => { + expect(findJobLink().text()).toBe(job.name); + expect(findJobLink().attributes('href')).toBe(job.webPath); + + expect(findPipelineLink().text()).toBe(`#${getIdFromGraphQLId(job.pipeline.id)}`); + expect(findPipelineLink().attributes('href')).toBe(job.pipeline.path); + + expect(findRefLink().text()).toBe(job.refName); + expect(findRefLink().attributes('href')).toBe(job.refPath); + + expect(findCommitLink().text()).toBe(job.shortSha); + expect(findCommitLink().attributes('href')).toBe(job.commitPath); + }); + + it('shows the total size of artifacts', () => { + expect(findSize().text()).toBe(totalArtifactsSizeForJob(job)); + }); + + it('shows the created time', () => { + expect(findCreated().text()).toBe('5 years ago'); + }); + + describe('row expansion', () => { + it('toggles the visibility of the row details', async () => { + expect(findDetailsRows().length).toBe(0); + + findCount().trigger('click'); + await waitForPromises(); + + expect(findDetailsRows().length).toBe(1); + + findCount().trigger('click'); + await waitForPromises(); + + expect(findDetailsRows().length).toBe(0); + }); + + it('expands and collapses jobs', async () => { + // both jobs start collapsed + expect(findDetailsInRow(0).exists()).toBe(false); + expect(findDetailsInRow(1).exists()).toBe(false); + + findCountAt(0).trigger('click'); + await waitForPromises(); + + // first job is expanded, second row has its details + expect(findDetailsInRow(0).exists()).toBe(false); + expect(findDetailsInRow(1).exists()).toBe(true); + expect(findDetailsInRow(2).exists()).toBe(false); + + findCountAt(1).trigger('click'); + await 
waitForPromises(); + + // both jobs are expanded, each has details below it + expect(findDetailsInRow(0).exists()).toBe(false); + expect(findDetailsInRow(1).exists()).toBe(true); + expect(findDetailsInRow(2).exists()).toBe(false); + expect(findDetailsInRow(3).exists()).toBe(true); + + findCountAt(0).trigger('click'); + await waitForPromises(); + + // first job collapsed, second job expanded + expect(findDetailsInRow(0).exists()).toBe(false); + expect(findDetailsInRow(1).exists()).toBe(false); + expect(findDetailsInRow(2).exists()).toBe(true); + }); + + it('keeps the job expanded when an artifact is deleted', async () => { + findCount().trigger('click'); + await waitForPromises(); + + expect(findDetailsInRow(0).exists()).toBe(false); + expect(findDetailsInRow(1).exists()).toBe(true); + + findArtifactDeleteButton().vm.$emit('click'); + await waitForPromises(); + + expect(findDeleteModal().findComponent(GlModal).props('visible')).toBe(true); + + findDeleteModal().vm.$emit('primary'); + await waitForPromises(); + + expect(findDetailsInRow(0).exists()).toBe(false); + expect(findDetailsInRow(1).exists()).toBe(true); + }); + }); + }); + + describe('download button', () => { + it('is a link to the download path for the archive artifact', async () => { + createComponent(); + + await waitForPromises(); + + expect(findDownloadButton().attributes('href')).toBe(archiveArtifact.downloadPath); + }); + + it('is disabled when there is no download path', async () => { + const jobWithoutDownloadPath = { + ...job, + archive: { downloadPath: null }, + }; + + createComponent({ + handlers: { getJobArtifactsQuery: jest.fn() }, + data: { jobArtifacts: [jobWithoutDownloadPath] }, + }); + + await waitForPromises(); + + expect(findDownloadButton().attributes('disabled')).toBe('disabled'); + }); + }); + + describe('browse button', () => { + it('is a link to the browse path for the job', async () => { + createComponent(); + + await waitForPromises(); + + 
expect(findBrowseButton().attributes('href')).toBe(job.browseArtifactsPath); + }); + + it('is disabled when there is no browse path', async () => { + const jobWithoutBrowsePath = { + ...job, + browseArtifactsPath: null, + }; + + createComponent({ + handlers: { getJobArtifactsQuery: jest.fn() }, + data: { jobArtifacts: [jobWithoutBrowsePath] }, + }); + + await waitForPromises(); + + expect(findBrowseButton().attributes('disabled')).toBe('disabled'); + }); + }); + + describe('delete button', () => { + const artifactsFromJob = job.artifacts.nodes.map((node) => node.id); + + describe('with delete permission and bulk delete feature flag enabled', () => { + beforeEach(async () => { + createComponent({ + canDestroyArtifacts: true, + glFeatures: { [BULK_DELETE_FEATURE_FLAG]: true }, + }); + + await waitForPromises(); + }); + + it('opens the confirmation modal with the artifacts from the job', async () => { + await findDeleteButton().vm.$emit('click'); + + expect(findBulkDeleteModal().props()).toMatchObject({ + visible: true, + artifactsToDelete: artifactsFromJob, + }); + }); + + it('on confirm, deletes the artifacts from the job and shows a toast', async () => { + findDeleteButton().vm.$emit('click'); + findBulkDeleteModal().vm.$emit('primary'); + + expect(bulkDestroyMutationHandler).toHaveBeenCalledWith({ + projectId: convertToGraphQLId(TYPENAME_PROJECT, projectId), + ids: artifactsFromJob, + }); + + await waitForPromises(); + + expect(mockToastShow).toHaveBeenCalledWith( + `${artifactsFromJob.length} selected artifacts deleted`, + ); + }); + + it('does not clear selected artifacts on success', async () => { + // select job 2 via checkbox + findJobCheckbox(2).vm.$emit('input', true); + + // click delete button job 1 + findDeleteButton().vm.$emit('click'); + + // job 2's artifacts should still be selected + expect(findBulkDelete().props('selectedArtifacts')).toStrictEqual( + job2.artifacts.nodes.map((node) => node.id), + ); + + // confirm delete + 
findBulkDeleteModal().vm.$emit('primary'); + + // job 1's artifacts should be deleted + expect(bulkDestroyMutationHandler).toHaveBeenCalledWith({ + projectId: convertToGraphQLId(TYPENAME_PROJECT, projectId), + ids: artifactsFromJob, + }); + + await waitForPromises(); + + // job 2's artifacts should still be selected + expect(findBulkDelete().props('selectedArtifacts')).toStrictEqual( + job2.artifacts.nodes.map((node) => node.id), + ); + }); + }); + + it('shows an alert and does not clear selected artifacts on error', async () => { + createComponent({ + canDestroyArtifacts: true, + glFeatures: { [BULK_DELETE_FEATURE_FLAG]: true }, + handlers: { + getJobArtifactsQuery: jest.fn().mockResolvedValue(getJobArtifactsResponse), + bulkDestroyArtifactsMutation: jest.fn().mockRejectedValue(), + }, + }); + await waitForPromises(); + + // select job 2 via checkbox + findJobCheckbox(2).vm.$emit('input', true); + + // click delete button job 1 + findDeleteButton().vm.$emit('click'); + + // confirm delete + findBulkDeleteModal().vm.$emit('primary'); + + await waitForPromises(); + + // job 2's artifacts should still be selected + expect(findBulkDelete().props('selectedArtifacts')).toStrictEqual( + job2.artifacts.nodes.map((node) => node.id), + ); + expect(createAlert).toHaveBeenCalledWith({ + captureError: true, + error: expect.any(Error), + message: I18N_BULK_DELETE_ERROR, + }); + }); + + it('is disabled when bulk delete feature flag is disabled', async () => { + createComponent({ + canDestroyArtifacts: true, + glFeatures: { [BULK_DELETE_FEATURE_FLAG]: false }, + }); + + await waitForPromises(); + + expect(findDeleteButton().attributes('disabled')).toBe('disabled'); + }); + + it('is hidden when user does not have delete permission', async () => { + createComponent({ + canDestroyArtifacts: false, + glFeatures: { [BULK_DELETE_FEATURE_FLAG]: false }, + }); + + await waitForPromises(); + + expect(findDeleteButton().exists()).toBe(false); + }); + }); + + describe('bulk delete', () => { 
+ const selectedArtifacts = job.artifacts.nodes.map((node) => node.id); + + describe('with permission and feature flag enabled', () => { + beforeEach(async () => { + createComponent({ + canDestroyArtifacts: true, + glFeatures: { [BULK_DELETE_FEATURE_FLAG]: true }, + }); + + await waitForPromises(); + }); + + it('shows selected artifacts when a job is checked', async () => { + expect(findBulkDeleteContainer().exists()).toBe(false); + + await findJobCheckbox().vm.$emit('input', true); + + expect(findBulkDeleteContainer().exists()).toBe(true); + expect(findBulkDelete().props('selectedArtifacts')).toStrictEqual(selectedArtifacts); + }); + + it('disappears when selected artifacts are cleared', async () => { + await findJobCheckbox().vm.$emit('input', true); + + expect(findBulkDeleteContainer().exists()).toBe(true); + + await findBulkDelete().vm.$emit('clearSelectedArtifacts'); + + expect(findBulkDeleteContainer().exists()).toBe(false); + }); + + it('shows a modal to confirm bulk delete', async () => { + findJobCheckbox().vm.$emit('input', true); + findBulkDelete().vm.$emit('showBulkDeleteModal'); + + await waitForPromises(); + + expect(findBulkDeleteModal().props('visible')).toBe(true); + }); + + it('deletes the selected artifacts and shows a toast', async () => { + findJobCheckbox().vm.$emit('input', true); + findBulkDelete().vm.$emit('showBulkDeleteModal'); + findBulkDeleteModal().vm.$emit('primary'); + + expect(bulkDestroyMutationHandler).toHaveBeenCalledWith({ + projectId: convertToGraphQLId(TYPENAME_PROJECT, projectId), + ids: selectedArtifacts, + }); + + await waitForPromises(); + + expect(mockToastShow).toHaveBeenCalledWith( + `${selectedArtifacts.length} selected artifacts deleted`, + ); + }); + + it('clears selected artifacts on success', async () => { + findJobCheckbox().vm.$emit('input', true); + findBulkDelete().vm.$emit('showBulkDeleteModal'); + findBulkDeleteModal().vm.$emit('primary'); + + await waitForPromises(); + + 
expect(findBulkDelete().props('selectedArtifacts')).toStrictEqual([]); + }); + }); + + it('shows an alert and does not clear selected artifacts on error', async () => { + createComponent({ + canDestroyArtifacts: true, + glFeatures: { [BULK_DELETE_FEATURE_FLAG]: true }, + handlers: { + getJobArtifactsQuery: jest.fn().mockResolvedValue(getJobArtifactsResponse), + bulkDestroyArtifactsMutation: jest.fn().mockRejectedValue(), + }, + }); + + await waitForPromises(); + + findJobCheckbox().vm.$emit('input', true); + findBulkDelete().vm.$emit('showBulkDeleteModal'); + findBulkDeleteModal().vm.$emit('primary'); + + await waitForPromises(); + + expect(findBulkDelete().props('selectedArtifacts')).toStrictEqual(selectedArtifacts); + expect(createAlert).toHaveBeenCalledWith({ + captureError: true, + error: expect.any(Error), + message: I18N_BULK_DELETE_ERROR, + }); + }); + + it('shows no checkboxes without permission', async () => { + createComponent({ + canDestroyArtifacts: false, + glFeatures: { [BULK_DELETE_FEATURE_FLAG]: true }, + }); + + await waitForPromises(); + + expect(findAnyCheckbox().exists()).toBe(false); + }); + + it('shows no checkboxes with feature flag disabled', async () => { + createComponent({ + canDestroyArtifacts: true, + glFeatures: { [BULK_DELETE_FEATURE_FLAG]: false }, + }); + + await waitForPromises(); + + expect(findAnyCheckbox().exists()).toBe(false); + }); + }); + + describe('pagination', () => { + const { pageInfo } = getJobArtifactsResponseThatPaginates.data.project.jobs; + const query = jest.fn().mockResolvedValue(getJobArtifactsResponseThatPaginates); + + beforeEach(async () => { + createComponent({ + handlers: { + getJobArtifactsQuery: query, + }, + data: { pageInfo }, + }); + + await waitForPromises(); + }); + + it('renders pagination and passes page props', () => { + expect(findPagination().props()).toMatchObject({ + value: INITIAL_CURRENT_PAGE, + prevPage: Number(pageInfo.hasPreviousPage), + nextPage: Number(pageInfo.hasNextPage), + }); + + 
expect(query).toHaveBeenCalledWith({ + projectPath: 'project/path', + firstPageSize: JOBS_PER_PAGE, + lastPageSize: null, + nextPageCursor: '', + prevPageCursor: '', + }); + }); + + it('updates query variables when going to previous page', async () => { + await setPage(1); + + expect(query).toHaveBeenLastCalledWith({ + projectPath: 'project/path', + firstPageSize: null, + lastPageSize: JOBS_PER_PAGE, + prevPageCursor: pageInfo.startCursor, + }); + expect(findPagination().props('value')).toEqual(1); + }); + + it('updates query variables when going to next page', async () => { + await setPage(2); + + expect(query).toHaveBeenLastCalledWith({ + projectPath: 'project/path', + firstPageSize: JOBS_PER_PAGE, + lastPageSize: null, + prevPageCursor: '', + nextPageCursor: pageInfo.endCursor, + }); + expect(findPagination().props('value')).toEqual(2); + }); + }); +}); diff --git a/spec/frontend/ci/artifacts/components/job_checkbox_spec.js b/spec/frontend/ci/artifacts/components/job_checkbox_spec.js new file mode 100644 index 00000000000..ae70bb4b17b --- /dev/null +++ b/spec/frontend/ci/artifacts/components/job_checkbox_spec.js @@ -0,0 +1,71 @@ +import { GlFormCheckbox } from '@gitlab/ui'; +import mockGetJobArtifactsResponse from 'test_fixtures/graphql/ci/artifacts/graphql/queries/get_job_artifacts.query.graphql.json'; +import { shallowMountExtended } from 'helpers/vue_test_utils_helper'; +import JobCheckbox from '~/ci/artifacts/components/job_checkbox.vue'; + +describe('JobCheckbox component', () => { + let wrapper; + + const mockArtifactNodes = mockGetJobArtifactsResponse.data.project.jobs.nodes[0].artifacts.nodes; + const mockSelectedArtifacts = [mockArtifactNodes[0], mockArtifactNodes[1]]; + const mockUnselectedArtifacts = [mockArtifactNodes[2]]; + + const findCheckbox = () => wrapper.findComponent(GlFormCheckbox); + + const createComponent = ({ + hasArtifacts = true, + selectedArtifacts = mockSelectedArtifacts, + unselectedArtifacts = mockUnselectedArtifacts, + } = {}) => 
{ + wrapper = shallowMountExtended(JobCheckbox, { + propsData: { + hasArtifacts, + selectedArtifacts, + unselectedArtifacts, + }, + mocks: { GlFormCheckbox }, + }); + }; + + it('is disabled when the job has no artifacts', () => { + createComponent({ hasArtifacts: false }); + + expect(findCheckbox().attributes('disabled')).toBe('true'); + }); + + describe('when some artifacts are selected', () => { + beforeEach(() => { + createComponent(); + }); + + it('is indeterminate', () => { + expect(findCheckbox().attributes('indeterminate')).toBe('true'); + expect(findCheckbox().attributes('checked')).toBeUndefined(); + }); + + it('selects the unselected artifacts on click', () => { + findCheckbox().vm.$emit('input', true); + + expect(wrapper.emitted('selectArtifact')).toMatchObject([[mockUnselectedArtifacts[0], true]]); + }); + }); + + describe('when all artifacts are selected', () => { + beforeEach(() => { + createComponent({ unselectedArtifacts: [] }); + }); + + it('is checked', () => { + expect(findCheckbox().attributes('checked')).toBe('true'); + }); + + it('deselects the selected artifacts on click', () => { + findCheckbox().vm.$emit('input', false); + + expect(wrapper.emitted('selectArtifact')).toMatchObject([ + [mockSelectedArtifacts[0], false], + [mockSelectedArtifacts[1], false], + ]); + }); + }); +}); diff --git a/spec/frontend/ci/artifacts/graphql/cache_update_spec.js b/spec/frontend/ci/artifacts/graphql/cache_update_spec.js new file mode 100644 index 00000000000..3c415534c7c --- /dev/null +++ b/spec/frontend/ci/artifacts/graphql/cache_update_spec.js @@ -0,0 +1,67 @@ +import getJobArtifactsQuery from '~/ci/artifacts/graphql/queries/get_job_artifacts.query.graphql'; +import { removeArtifactFromStore } from '~/ci/artifacts/graphql/cache_update'; + +describe('Artifact table cache updates', () => { + let store; + + const cacheMock = { + project: { + jobs: { + nodes: [ + { artifacts: { nodes: [{ id: 'foo' }] } }, + { artifacts: { nodes: [{ id: 'bar' }] } }, + ], + }, + 
}, + }; + + const query = getJobArtifactsQuery; + const variables = { fullPath: 'path/to/project' }; + + beforeEach(() => { + store = { + readQuery: jest.fn().mockReturnValue(cacheMock), + writeQuery: jest.fn(), + }; + }); + + describe('removeArtifactFromStore', () => { + it('calls readQuery', () => { + removeArtifactFromStore(store, 'foo', query, variables); + expect(store.readQuery).toHaveBeenCalledWith({ query, variables }); + }); + + it('writes the correct result in the cache', () => { + removeArtifactFromStore(store, 'foo', query, variables); + expect(store.writeQuery).toHaveBeenCalledWith({ + query, + variables, + data: { + project: { + jobs: { + nodes: [{ artifacts: { nodes: [] } }, { artifacts: { nodes: [{ id: 'bar' }] } }], + }, + }, + }, + }); + }); + + it('does not remove an unknown artifact', () => { + removeArtifactFromStore(store, 'baz', query, variables); + expect(store.writeQuery).toHaveBeenCalledWith({ + query, + variables, + data: { + project: { + jobs: { + nodes: [ + { artifacts: { nodes: [{ id: 'foo' }] } }, + { artifacts: { nodes: [{ id: 'bar' }] } }, + ], + }, + }, + }, + }); + }); + }); +}); diff --git a/spec/frontend/ci/ci_variable_list/ci_variable_list/ci_variable_list_spec.js b/spec/frontend/ci/ci_variable_list/ci_variable_list/ci_variable_list_spec.js index e4abedb412f..8990a70d4ef 100644 --- a/spec/frontend/ci/ci_variable_list/ci_variable_list/ci_variable_list_spec.js +++ b/spec/frontend/ci/ci_variable_list/ci_variable_list/ci_variable_list_spec.js @@ -1,5 +1,7 @@ import $ from 'jquery'; -import { loadHTMLFixture, resetHTMLFixture } from 'helpers/fixtures'; +import htmlPipelineSchedulesEdit from 'test_fixtures/pipeline_schedules/edit.html'; +import htmlPipelineSchedulesEditWithVariables from 'test_fixtures/pipeline_schedules/edit_with_variables.html'; +import { setHTMLFixture, resetHTMLFixture } from 'helpers/fixtures'; import VariableList from '~/ci/ci_variable_list/ci_variable_list'; const HIDE_CLASS = 'hide'; @@ -11,7 +13,7 @@ 
describe('VariableList', () => { describe('with only key/value inputs', () => { describe('with no variables', () => { beforeEach(() => { - loadHTMLFixture('pipeline_schedules/edit.html'); + setHTMLFixture(htmlPipelineSchedulesEdit); $wrapper = $('.js-ci-variable-list-section'); variableList = new VariableList({ @@ -69,7 +71,7 @@ describe('VariableList', () => { describe('with persisted variables', () => { beforeEach(() => { - loadHTMLFixture('pipeline_schedules/edit_with_variables.html'); + setHTMLFixture(htmlPipelineSchedulesEditWithVariables); $wrapper = $('.js-ci-variable-list-section'); variableList = new VariableList({ @@ -106,7 +108,7 @@ describe('VariableList', () => { describe('toggleEnableRow method', () => { beforeEach(() => { - loadHTMLFixture('pipeline_schedules/edit_with_variables.html'); + setHTMLFixture(htmlPipelineSchedulesEditWithVariables); $wrapper = $('.js-ci-variable-list-section'); variableList = new VariableList({ diff --git a/spec/frontend/ci/ci_variable_list/components/ci_admin_variables_spec.js b/spec/frontend/ci/ci_variable_list/components/ci_admin_variables_spec.js index 8e012883f09..1d0dcf242a4 100644 --- a/spec/frontend/ci/ci_variable_list/components/ci_admin_variables_spec.js +++ b/spec/frontend/ci/ci_variable_list/components/ci_admin_variables_spec.js @@ -1,7 +1,16 @@ import { shallowMount } from '@vue/test-utils'; +import { + ADD_MUTATION_ACTION, + DELETE_MUTATION_ACTION, + UPDATE_MUTATION_ACTION, +} from '~/ci/ci_variable_list/constants'; import ciAdminVariables from '~/ci/ci_variable_list/components/ci_admin_variables.vue'; import ciVariableShared from '~/ci/ci_variable_list/components/ci_variable_shared.vue'; +import addAdminVariable from '~/ci/ci_variable_list/graphql/mutations/admin_add_variable.mutation.graphql'; +import deleteAdminVariable from '~/ci/ci_variable_list/graphql/mutations/admin_delete_variable.mutation.graphql'; +import updateAdminVariable from 
'~/ci/ci_variable_list/graphql/mutations/admin_update_variable.mutation.graphql'; +import getAdminVariables from '~/ci/ci_variable_list/graphql/queries/variables.query.graphql'; describe('Ci Project Variable wrapper', () => { let wrapper; @@ -22,8 +31,17 @@ describe('Ci Project Variable wrapper', () => { componentName: 'InstanceVariables', entity: '', hideEnvironmentScope: true, - mutationData: wrapper.vm.$options.mutationData, - queryData: wrapper.vm.$options.queryData, + mutationData: { + [ADD_MUTATION_ACTION]: addAdminVariable, + [UPDATE_MUTATION_ACTION]: updateAdminVariable, + [DELETE_MUTATION_ACTION]: deleteAdminVariable, + }, + queryData: { + ciVariables: { + lookup: expect.any(Function), + query: getAdminVariables, + }, + }, refetchAfterMutation: true, fullPath: null, id: null, diff --git a/spec/frontend/ci/ci_variable_list/components/ci_environments_dropdown_spec.js b/spec/frontend/ci/ci_variable_list/components/ci_environments_dropdown_spec.js index 7181398c2a6..1937e3b34b7 100644 --- a/spec/frontend/ci/ci_variable_list/components/ci_environments_dropdown_spec.js +++ b/spec/frontend/ci/ci_variable_list/components/ci_environments_dropdown_spec.js @@ -1,13 +1,17 @@ import { GlListboxItem, GlCollapsibleListbox, GlDropdownItem, GlIcon } from '@gitlab/ui'; -import { mount } from '@vue/test-utils'; -import { allEnvironments } from '~/ci/ci_variable_list/constants'; +import { mountExtended } from 'helpers/vue_test_utils_helper'; +import { allEnvironments, ENVIRONMENT_QUERY_LIMIT } from '~/ci/ci_variable_list/constants'; import CiEnvironmentsDropdown from '~/ci/ci_variable_list/components/ci_environments_dropdown.vue'; describe('Ci environments dropdown', () => { let wrapper; const envs = ['dev', 'prod', 'staging']; - const defaultProps = { environments: envs, selectedEnvironmentScope: '' }; + const defaultProps = { + areEnvironmentsLoading: false, + environments: envs, + selectedEnvironmentScope: '', + }; const findAllListboxItems = () => 
wrapper.findAllComponents(GlListboxItem); const findListboxItemByIndex = (index) => wrapper.findAllComponents(GlListboxItem).at(index); @@ -15,13 +19,19 @@ describe('Ci environments dropdown', () => { const findListbox = () => wrapper.findComponent(GlCollapsibleListbox); const findListboxText = () => findListbox().props('toggleText'); const findCreateWildcardButton = () => wrapper.findComponent(GlDropdownItem); + const findMaxEnvNote = () => wrapper.findByTestId('max-envs-notice'); - const createComponent = ({ props = {}, searchTerm = '' } = {}) => { - wrapper = mount(CiEnvironmentsDropdown, { + const createComponent = ({ props = {}, searchTerm = '', enableFeatureFlag = false } = {}) => { + wrapper = mountExtended(CiEnvironmentsDropdown, { propsData: { ...defaultProps, ...props, }, + provide: { + glFeatures: { + ciLimitEnvironmentScope: enableFeatureFlag, + }, + }, }); findListbox().vm.$emit('search', searchTerm); @@ -40,19 +50,32 @@ describe('Ci environments dropdown', () => { }); describe('Search term is empty', () => { - beforeEach(() => { - createComponent({ props: { environments: envs } }); - }); + describe.each` + featureFlag | flagStatus | defaultEnvStatus | firstItemValue | envIndices + ${true} | ${'enabled'} | ${'prepends'} | ${'*'} | ${[1, 2, 3]} + ${false} | ${'disabled'} | ${'does not prepend'} | ${envs[0]} | ${[0, 1, 2]} + `( + 'when ciLimitEnvironmentScope feature flag is $flagStatus', + ({ featureFlag, defaultEnvStatus, firstItemValue, envIndices }) => { + beforeEach(() => { + createComponent({ props: { environments: envs }, enableFeatureFlag: featureFlag }); + }); - it('renders all environments when search term is empty', () => { - expect(findListboxItemByIndex(0).text()).toBe(envs[0]); - expect(findListboxItemByIndex(1).text()).toBe(envs[1]); - expect(findListboxItemByIndex(2).text()).toBe(envs[2]); - }); + it(`${defaultEnvStatus} * in listbox`, () => { + expect(findListboxItemByIndex(0).text()).toBe(firstItemValue); + }); - it('does not display 
active checkmark on the inactive stage', () => { - expect(findActiveIconByIndex(0).classes('gl-visibility-hidden')).toBe(true); - }); + it('renders all environments', () => { + expect(findListboxItemByIndex(envIndices[0]).text()).toBe(envs[0]); + expect(findListboxItemByIndex(envIndices[1]).text()).toBe(envs[1]); + expect(findListboxItemByIndex(envIndices[2]).text()).toBe(envs[2]); + }); + + it('does not display active checkmark', () => { + expect(findActiveIconByIndex(0).classes('gl-visibility-hidden')).toBe(true); + }); + }, + ); }); describe('when `*` is the value of selectedEnvironmentScope props', () => { @@ -68,46 +91,92 @@ describe('Ci environments dropdown', () => { }); }); - describe('Environments found', () => { + describe('When ciLimitEnvironmentScope feature flag is disabled', () => { const currentEnv = envs[2]; beforeEach(() => { - createComponent({ searchTerm: currentEnv }); + createComponent(); }); - it('renders only the environment searched for', () => { + it('filters on the frontend and renders only the environment searched for', async () => { + await findListbox().vm.$emit('search', currentEnv); + expect(findAllListboxItems()).toHaveLength(1); expect(findListboxItemByIndex(0).text()).toBe(currentEnv); }); - it('does not display create button', () => { - expect(findCreateWildcardButton().exists()).toBe(false); + it('does not emit event when searching', async () => { + expect(wrapper.emitted('search-environment-scope')).toBeUndefined(); + + await findListbox().vm.$emit('search', currentEnv); + + expect(wrapper.emitted('search-environment-scope')).toBeUndefined(); }); - describe('Custom events', () => { - describe('when selecting an environment', () => { - const itemIndex = 0; + it('does not display note about max environments shown', () => { + expect(findMaxEnvNote().exists()).toBe(false); + }); + }); - beforeEach(() => { - createComponent(); - }); + describe('When ciLimitEnvironmentScope feature flag is enabled', () => { + const currentEnv = 
envs[2]; - it('emits `select-environment` when an environment is clicked', () => { - findListbox().vm.$emit('select', envs[itemIndex]); - expect(wrapper.emitted('select-environment')).toEqual([[envs[itemIndex]]]); - }); + beforeEach(() => { + createComponent({ enableFeatureFlag: true }); + }); + + it('renders environments passed down to it', async () => { + await findListbox().vm.$emit('search', currentEnv); + + expect(findAllListboxItems()).toHaveLength(envs.length); + }); + + it('emits event when searching', async () => { + expect(wrapper.emitted('search-environment-scope')).toHaveLength(1); + + await findListbox().vm.$emit('search', currentEnv); + + expect(wrapper.emitted('search-environment-scope')).toHaveLength(2); + expect(wrapper.emitted('search-environment-scope')[1]).toEqual([currentEnv]); + }); + + it('renders loading icon while search query is loading', () => { + createComponent({ enableFeatureFlag: true, props: { areEnvironmentsLoading: true } }); + + expect(findListbox().props('searching')).toBe(true); + }); + + it('displays note about max environments shown', () => { + expect(findMaxEnvNote().exists()).toBe(true); + expect(findMaxEnvNote().text()).toContain(String(ENVIRONMENT_QUERY_LIMIT)); + }); + }); + + describe('Custom events', () => { + describe('when selecting an environment', () => { + const itemIndex = 0; + + beforeEach(() => { + createComponent(); }); - describe('when creating a new environment from a search term', () => { - const search = 'new-env'; - beforeEach(() => { - createComponent({ searchTerm: search }); - }); + it('emits `select-environment` when an environment is clicked', () => { + findListbox().vm.$emit('select', envs[itemIndex]); - it('emits create-environment-scope', () => { - findCreateWildcardButton().vm.$emit('click'); - expect(wrapper.emitted('create-environment-scope')).toEqual([[search]]); - }); + expect(wrapper.emitted('select-environment')).toEqual([[envs[itemIndex]]]); + }); + }); + + describe('when creating a new 
environment from a search term', () => { + const search = 'new-env'; + beforeEach(() => { + createComponent({ searchTerm: search }); + }); + + it('emits create-environment-scope', () => { + findCreateWildcardButton().vm.$emit('click'); + + expect(wrapper.emitted('create-environment-scope')).toEqual([[search]]); }); }); }); diff --git a/spec/frontend/ci/ci_variable_list/components/ci_group_variables_spec.js b/spec/frontend/ci/ci_variable_list/components/ci_group_variables_spec.js index 77d90a7667d..7436210fe70 100644 --- a/spec/frontend/ci/ci_variable_list/components/ci_group_variables_spec.js +++ b/spec/frontend/ci/ci_variable_list/components/ci_group_variables_spec.js @@ -4,6 +4,15 @@ import { convertToGraphQLId } from '~/graphql_shared/utils'; import ciGroupVariables from '~/ci/ci_variable_list/components/ci_group_variables.vue'; import ciVariableShared from '~/ci/ci_variable_list/components/ci_variable_shared.vue'; +import { + ADD_MUTATION_ACTION, + DELETE_MUTATION_ACTION, + UPDATE_MUTATION_ACTION, +} from '~/ci/ci_variable_list/constants'; +import getGroupVariables from '~/ci/ci_variable_list/graphql/queries/group_variables.query.graphql'; +import addGroupVariable from '~/ci/ci_variable_list/graphql/mutations/group_add_variable.mutation.graphql'; +import deleteGroupVariable from '~/ci/ci_variable_list/graphql/mutations/group_delete_variable.mutation.graphql'; +import updateGroupVariable from '~/ci/ci_variable_list/graphql/mutations/group_update_variable.mutation.graphql'; const mockProvide = { glFeatures: { @@ -37,8 +46,17 @@ describe('Ci Group Variable wrapper', () => { entity: 'group', fullPath: mockProvide.groupPath, hideEnvironmentScope: false, - mutationData: wrapper.vm.$options.mutationData, - queryData: wrapper.vm.$options.queryData, + mutationData: { + [ADD_MUTATION_ACTION]: addGroupVariable, + [UPDATE_MUTATION_ACTION]: updateGroupVariable, + [DELETE_MUTATION_ACTION]: deleteGroupVariable, + }, + queryData: { + ciVariables: { + lookup: 
expect.any(Function), + query: getGroupVariables, + }, + }, refetchAfterMutation: false, }); }); diff --git a/spec/frontend/ci/ci_variable_list/components/ci_project_variables_spec.js b/spec/frontend/ci/ci_variable_list/components/ci_project_variables_spec.js index ce5237a84f7..69b0d4261b2 100644 --- a/spec/frontend/ci/ci_variable_list/components/ci_project_variables_spec.js +++ b/spec/frontend/ci/ci_variable_list/components/ci_project_variables_spec.js @@ -4,6 +4,16 @@ import { convertToGraphQLId } from '~/graphql_shared/utils'; import ciProjectVariables from '~/ci/ci_variable_list/components/ci_project_variables.vue'; import ciVariableShared from '~/ci/ci_variable_list/components/ci_variable_shared.vue'; +import { + ADD_MUTATION_ACTION, + DELETE_MUTATION_ACTION, + UPDATE_MUTATION_ACTION, +} from '~/ci/ci_variable_list/constants'; +import getProjectEnvironments from '~/ci/ci_variable_list/graphql/queries/project_environments.query.graphql'; +import getProjectVariables from '~/ci/ci_variable_list/graphql/queries/project_variables.query.graphql'; +import addProjectVariable from '~/ci/ci_variable_list/graphql/mutations/project_add_variable.mutation.graphql'; +import deleteProjectVariable from '~/ci/ci_variable_list/graphql/mutations/project_delete_variable.mutation.graphql'; +import updateProjectVariable from '~/ci/ci_variable_list/graphql/mutations/project_update_variable.mutation.graphql'; const mockProvide = { projectFullPath: '/namespace/project', @@ -33,8 +43,21 @@ describe('Ci Project Variable wrapper', () => { entity: 'project', fullPath: mockProvide.projectFullPath, hideEnvironmentScope: false, - mutationData: wrapper.vm.$options.mutationData, - queryData: wrapper.vm.$options.queryData, + mutationData: { + [ADD_MUTATION_ACTION]: addProjectVariable, + [UPDATE_MUTATION_ACTION]: updateProjectVariable, + [DELETE_MUTATION_ACTION]: deleteProjectVariable, + }, + queryData: { + ciVariables: { + lookup: expect.any(Function), + query: getProjectVariables, + }, + 
environments: { + lookup: expect.any(Function), + query: getProjectEnvironments, + }, + }, refetchAfterMutation: false, }); }); diff --git a/spec/frontend/ci/ci_variable_list/components/ci_variable_modal_spec.js b/spec/frontend/ci/ci_variable_list/components/ci_variable_modal_spec.js index 8f3fccc2804..e8bfb370fb4 100644 --- a/spec/frontend/ci/ci_variable_list/components/ci_variable_modal_spec.js +++ b/spec/frontend/ci/ci_variable_list/components/ci_variable_modal_spec.js @@ -10,10 +10,12 @@ import { EVENT_LABEL, EVENT_ACTION, ENVIRONMENT_SCOPE_LINK_TITLE, + groupString, instanceString, + projectString, variableOptions, } from '~/ci/ci_variable_list/constants'; -import { mockVariablesWithScopes } from '../mocks'; +import { mockEnvs, mockVariablesWithScopes, mockVariablesWithUniqueScopes } from '../mocks'; import ModalStub from '../stubs'; describe('Ci variable modal', () => { @@ -42,12 +44,13 @@ describe('Ci variable modal', () => { }; const defaultProps = { + areEnvironmentsLoading: false, areScopedVariablesAvailable: true, environments: [], hideEnvironmentScope: false, mode: ADD_VARIABLE_ACTION, selectedVariable: {}, - variable: [], + variables: [], }; const createComponent = ({ mountFn = shallowMountExtended, props = {}, provide = {} } = {}) => { @@ -111,7 +114,6 @@ describe('Ci variable modal', () => { beforeEach(() => { createComponent({ props: { selectedVariable: currentVariable } }); - jest.spyOn(wrapper.vm, '$emit'); }); it('Dispatches `add-variable` action on submit', () => { @@ -152,7 +154,7 @@ describe('Ci variable modal', () => { findModal().vm.$emit('shown'); }); - it('keeps the value as false', async () => { + it('keeps the value as false', () => { expect( findProtectedVariableCheckbox().attributes('data-is-protected-checked'), ).toBeUndefined(); @@ -237,7 +239,6 @@ describe('Ci variable modal', () => { it('defaults to expanded and raw:false when adding a variable', () => { createComponent({ props: { selectedVariable: variable } }); - 
jest.spyOn(wrapper.vm, '$emit'); findModal().vm.$emit('shown'); @@ -262,7 +263,6 @@ describe('Ci variable modal', () => { mode: EDIT_VARIABLE_ACTION, }, }); - jest.spyOn(wrapper.vm, '$emit'); findModal().vm.$emit('shown'); await findExpandedVariableCheckbox().vm.$emit('change'); @@ -301,7 +301,6 @@ describe('Ci variable modal', () => { beforeEach(() => { createComponent({ props: { selectedVariable: variable, mode: EDIT_VARIABLE_ACTION } }); - jest.spyOn(wrapper.vm, '$emit'); }); it('button text is Update variable when updating', () => { @@ -349,6 +348,42 @@ describe('Ci variable modal', () => { expect(link.attributes('title')).toBe(ENVIRONMENT_SCOPE_LINK_TITLE); expect(link.attributes('href')).toBe(defaultProvide.environmentScopeLink); }); + + describe('when feature flag is enabled', () => { + beforeEach(() => { + createComponent({ + props: { + environments: mockEnvs, + variables: mockVariablesWithUniqueScopes(projectString), + }, + provide: { glFeatures: { ciLimitEnvironmentScope: true } }, + }); + }); + + it('does not merge environment scope sources', () => { + const expectedLength = mockEnvs.length; + + expect(findCiEnvironmentsDropdown().props('environments')).toHaveLength(expectedLength); + }); + }); + + describe('when feature flag is disabled', () => { + const mockGroupVariables = mockVariablesWithUniqueScopes(groupString); + beforeEach(() => { + createComponent({ + props: { + environments: mockEnvs, + variables: mockGroupVariables, + }, + }); + }); + + it('merges environment scope sources', () => { + const expectedLength = mockGroupVariables.length + mockEnvs.length; + + expect(findCiEnvironmentsDropdown().props('environments')).toHaveLength(expectedLength); + }); + }); }); describe('and section is hidden', () => { diff --git a/spec/frontend/ci/ci_variable_list/components/ci_variable_settings_spec.js b/spec/frontend/ci/ci_variable_list/components/ci_variable_settings_spec.js index 0141232a299..12ca9a78369 100644 --- 
a/spec/frontend/ci/ci_variable_list/components/ci_variable_settings_spec.js +++ b/spec/frontend/ci/ci_variable_list/components/ci_variable_settings_spec.js @@ -1,4 +1,3 @@ -import { nextTick } from 'vue'; import { shallowMount } from '@vue/test-utils'; import CiVariableSettings from '~/ci/ci_variable_list/components/ci_variable_settings.vue'; import ciVariableModal from '~/ci/ci_variable_list/components/ci_variable_modal.vue'; @@ -16,6 +15,7 @@ describe('Ci variable table', () => { let wrapper; const defaultProps = { + areEnvironmentsLoading: false, areScopedVariablesAvailable: true, entity: 'project', environments: mapEnvironmentNames(mockEnvs), @@ -54,10 +54,10 @@ describe('Ci variable table', () => { it('passes props down correctly to the ci modal', async () => { createComponent(); - findCiVariableTable().vm.$emit('set-selected-variable'); - await nextTick(); + await findCiVariableTable().vm.$emit('set-selected-variable'); expect(findCiVariableModal().props()).toEqual({ + areEnvironmentsLoading: defaultProps.areEnvironmentsLoading, areScopedVariablesAvailable: defaultProps.areScopedVariablesAvailable, environments: defaultProps.environments, hideEnvironmentScope: defaultProps.hideEnvironmentScope, @@ -74,15 +74,13 @@ describe('Ci variable table', () => { }); it('passes down ADD mode when receiving an empty variable', async () => { - findCiVariableTable().vm.$emit('set-selected-variable'); - await nextTick(); + await findCiVariableTable().vm.$emit('set-selected-variable'); expect(findCiVariableModal().props('mode')).toBe(ADD_VARIABLE_ACTION); }); it('passes down EDIT mode when receiving a variable', async () => { - findCiVariableTable().vm.$emit('set-selected-variable', newVariable); - await nextTick(); + await findCiVariableTable().vm.$emit('set-selected-variable', newVariable); expect(findCiVariableModal().props('mode')).toBe(EDIT_VARIABLE_ACTION); }); @@ -98,25 +96,21 @@ describe('Ci variable table', () => { }); it('shows modal when adding a new variable', 
async () => { - findCiVariableTable().vm.$emit('set-selected-variable'); - await nextTick(); + await findCiVariableTable().vm.$emit('set-selected-variable'); expect(findCiVariableModal().exists()).toBe(true); }); it('shows modal when updating a variable', async () => { - findCiVariableTable().vm.$emit('set-selected-variable', newVariable); - await nextTick(); + await findCiVariableTable().vm.$emit('set-selected-variable', newVariable); expect(findCiVariableModal().exists()).toBe(true); }); it('hides modal when receiving the event from the modal', async () => { - findCiVariableTable().vm.$emit('set-selected-variable'); - await nextTick(); + await findCiVariableTable().vm.$emit('set-selected-variable'); - findCiVariableModal().vm.$emit('hideModal'); - await nextTick(); + await findCiVariableModal().vm.$emit('hideModal'); expect(findCiVariableModal().exists()).toBe(false); }); @@ -133,11 +127,9 @@ describe('Ci variable table', () => { ${'update-variable'} ${'delete-variable'} `('bubbles up the $eventName event', async ({ eventName }) => { - findCiVariableTable().vm.$emit('set-selected-variable'); - await nextTick(); + await findCiVariableTable().vm.$emit('set-selected-variable'); - findCiVariableModal().vm.$emit(eventName, newVariable); - await nextTick(); + await findCiVariableModal().vm.$emit(eventName, newVariable); expect(wrapper.emitted(eventName)).toEqual([[newVariable]]); }); @@ -154,10 +146,23 @@ describe('Ci variable table', () => { ${'handle-next-page'} | ${undefined} ${'sort-changed'} | ${{ sortDesc: true }} `('bubbles up the $eventName event', async ({ args, eventName }) => { - findCiVariableTable().vm.$emit(eventName, args); - await nextTick(); + await findCiVariableTable().vm.$emit(eventName, args); expect(wrapper.emitted(eventName)).toEqual([[args]]); }); }); + + describe('environment events', () => { + beforeEach(() => { + createComponent(); + }); + + it('bubbles up the search event', async () => { + await 
findCiVariableTable().vm.$emit('set-selected-variable'); + + await findCiVariableModal().vm.$emit('search-environment-scope', 'staging'); + + expect(wrapper.emitted('search-environment-scope')).toEqual([['staging']]); + }); + }); }); diff --git a/spec/frontend/ci/ci_variable_list/components/ci_variable_shared_spec.js b/spec/frontend/ci/ci_variable_list/components/ci_variable_shared_spec.js index 87192006efc..a25d325f7a1 100644 --- a/spec/frontend/ci/ci_variable_list/components/ci_variable_shared_spec.js +++ b/spec/frontend/ci/ci_variable_list/components/ci_variable_shared_spec.js @@ -1,13 +1,12 @@ -import Vue, { nextTick } from 'vue'; +import Vue from 'vue'; import VueApollo from 'vue-apollo'; import { GlLoadingIcon, GlTable } from '@gitlab/ui'; import { shallowMount } from '@vue/test-utils'; +import { assertProps } from 'helpers/assert_props'; import createMockApollo from 'helpers/mock_apollo_helper'; import waitForPromises from 'helpers/wait_for_promises'; import { createAlert } from '~/alert'; import { resolvers } from '~/ci/ci_variable_list/graphql/settings'; -import { TYPENAME_GROUP } from '~/graphql_shared/constants'; -import { convertToGraphQLId } from '~/graphql_shared/utils'; import ciVariableShared from '~/ci/ci_variable_list/components/ci_variable_shared.vue'; import ciVariableSettings from '~/ci/ci_variable_list/components/ci_variable_settings.vue'; @@ -18,12 +17,11 @@ import getGroupVariables from '~/ci/ci_variable_list/graphql/queries/group_varia import getProjectVariables from '~/ci/ci_variable_list/graphql/queries/project_variables.query.graphql'; import { - ADD_MUTATION_ACTION, - DELETE_MUTATION_ACTION, - UPDATE_MUTATION_ACTION, + ENVIRONMENT_QUERY_LIMIT, environmentFetchErrorText, genericMutationErrorText, variableFetchErrorText, + mapMutationActionToToast, } from '~/ci/ci_variable_list/constants'; import { @@ -63,15 +61,22 @@ describe('Ci Variable Shared Component', () => { let mockApollo; let mockEnvironments; + let mockMutation; + let 
mockAddMutation; + let mockUpdateMutation; + let mockDeleteMutation; let mockVariables; + const mockToastShow = jest.fn(); + const findLoadingIcon = () => wrapper.findComponent(GlLoadingIcon); const findCiTable = () => wrapper.findComponent(GlTable); const findCiSettings = () => wrapper.findComponent(ciVariableSettings); // eslint-disable-next-line consistent-return - async function createComponentWithApollo({ + function createComponentWithApollo({ customHandlers = null, + customResolvers = null, isLoading = false, props = { ...createProjectProps() }, provide = {}, @@ -81,7 +86,9 @@ describe('Ci Variable Shared Component', () => { [getProjectVariables, mockVariables], ]; - mockApollo = createMockApollo(handlers, resolvers); + const mutationResolvers = customResolvers || resolvers; + + mockApollo = createMockApollo(handlers, mutationResolvers); wrapper = shallowMount(ciVariableShared, { propsData: { @@ -94,6 +101,11 @@ describe('Ci Variable Shared Component', () => { }, apolloProvider: mockApollo, stubs: { ciVariableSettings, ciVariableTable }, + mocks: { + $toast: { + show: mockToastShow, + }, + }, }); if (!isLoading) { @@ -104,6 +116,10 @@ describe('Ci Variable Shared Component', () => { beforeEach(() => { mockEnvironments = jest.fn(); mockVariables = jest.fn(); + mockMutation = jest.fn(); + mockAddMutation = jest.fn(); + mockUpdateMutation = jest.fn(); + mockDeleteMutation = jest.fn(); }); describe.each` @@ -111,11 +127,11 @@ describe('Ci Variable Shared Component', () => { ${true} | ${'enabled'} ${false} | ${'disabled'} `('When Pages FF is $text', ({ isVariablePagesEnabled }) => { - const featureFlagProvide = isVariablePagesEnabled + const pagesFeatureFlagProvide = isVariablePagesEnabled ? 
{ glFeatures: { ciVariablesPages: true } } : {}; - describe('while queries are being fetch', () => { + describe('while queries are being fetched', () => { beforeEach(() => { createComponentWithApollo({ isLoading: true }); }); @@ -133,7 +149,7 @@ describe('Ci Variable Shared Component', () => { mockVariables.mockResolvedValue(mockProjectVariables); await createComponentWithApollo({ - provide: { ...createProjectProvide(), ...featureFlagProvide }, + provide: { ...createProjectProvide(), ...pagesFeatureFlagProvide }, }); }); @@ -163,7 +179,7 @@ describe('Ci Variable Shared Component', () => { mockEnvironments.mockResolvedValue(mockProjectEnvironments); mockVariables.mockRejectedValue(); - await createComponentWithApollo({ provide: featureFlagProvide }); + await createComponentWithApollo({ provide: pagesFeatureFlagProvide }); }); it('calls createAlert with the expected error message', () => { @@ -176,7 +192,7 @@ describe('Ci Variable Shared Component', () => { mockEnvironments.mockRejectedValue(); mockVariables.mockResolvedValue(mockProjectVariables); - await createComponentWithApollo({ provide: featureFlagProvide }); + await createComponentWithApollo({ provide: pagesFeatureFlagProvide }); }); it('calls createAlert with the expected error message', () => { @@ -187,134 +203,283 @@ describe('Ci Variable Shared Component', () => { describe('environment query', () => { describe('when there is an environment key in queryData', () => { - beforeEach(async () => { + beforeEach(() => { mockEnvironments.mockResolvedValue(mockProjectEnvironments); + mockVariables.mockResolvedValue(mockProjectVariables); + }); + it('environments are fetched', async () => { await createComponentWithApollo({ props: { ...createProjectProps() }, - provide: featureFlagProvide, + provide: pagesFeatureFlagProvide, }); + + expect(mockEnvironments).toHaveBeenCalled(); }); - it('is executed', () => { - expect(mockVariables).toHaveBeenCalled(); + describe('when Limit Environment Scope FF is enabled', () => { 
+ beforeEach(async () => { + await createComponentWithApollo({ + props: { ...createProjectProps() }, + provide: { + glFeatures: { + ciLimitEnvironmentScope: true, + ciVariablesPages: isVariablePagesEnabled, + }, + }, + }); + }); + + it('initial query is called with the correct variables', () => { + expect(mockEnvironments).toHaveBeenCalledWith({ + first: ENVIRONMENT_QUERY_LIMIT, + fullPath: '/namespace/project/', + search: '', + }); + }); + + it(`refetches environments when search term is present`, async () => { + expect(mockEnvironments).toHaveBeenCalledTimes(1); + expect(mockEnvironments).toHaveBeenCalledWith(expect.objectContaining({ search: '' })); + + await findCiSettings().vm.$emit('search-environment-scope', 'staging'); + + expect(mockEnvironments).toHaveBeenCalledTimes(2); + expect(mockEnvironments).toHaveBeenCalledWith( + expect.objectContaining({ search: 'staging' }), + ); + }); + }); + + describe('when Limit Environment Scope FF is disabled', () => { + beforeEach(async () => { + await createComponentWithApollo({ + props: { ...createProjectProps() }, + provide: pagesFeatureFlagProvide, + }); + }); + + it('initial query is called with the correct variables', () => { + expect(mockEnvironments).toHaveBeenCalledWith({ fullPath: '/namespace/project/' }); + }); + + it(`does not refetch environments when search term is present`, async () => { + expect(mockEnvironments).toHaveBeenCalledTimes(1); + + await findCiSettings().vm.$emit('search-environment-scope', 'staging'); + + expect(mockEnvironments).toHaveBeenCalledTimes(1); + }); }); }); - describe('when there isnt an environment key in queryData', () => { + describe("when there isn't an environment key in queryData", () => { beforeEach(async () => { mockVariables.mockResolvedValue(mockGroupVariables); await createComponentWithApollo({ props: { ...createGroupProps() }, - provide: featureFlagProvide, + provide: pagesFeatureFlagProvide, }); }); - it('is skipped', () => { - 
expect(mockVariables).not.toHaveBeenCalled(); + it('fetching environments is skipped', () => { + expect(mockEnvironments).not.toHaveBeenCalled(); }); }); }); describe('mutations', () => { const groupProps = createGroupProps(); + const instanceProps = createInstanceProps(); + const projectProps = createProjectProps(); - beforeEach(async () => { - mockVariables.mockResolvedValue(mockGroupVariables); + let mockMutationMap; - await createComponentWithApollo({ - customHandlers: [[getGroupVariables, mockVariables]], - props: groupProps, - provide: featureFlagProvide, - }); - }); - it.each` - actionName | mutation | event - ${'add'} | ${groupProps.mutationData[ADD_MUTATION_ACTION]} | ${'add-variable'} - ${'update'} | ${groupProps.mutationData[UPDATE_MUTATION_ACTION]} | ${'update-variable'} - ${'delete'} | ${groupProps.mutationData[DELETE_MUTATION_ACTION]} | ${'delete-variable'} - `( - 'calls the right mutation from propsData when user performs $actionName variable', - async ({ event, mutation }) => { - jest.spyOn(wrapper.vm.$apollo, 'mutate').mockResolvedValue(); - - await findCiSettings().vm.$emit(event, newVariable); + describe('error handling and feedback', () => { + beforeEach(async () => { + mockVariables.mockResolvedValue(mockGroupVariables); + mockMutation.mockResolvedValue({ ...mockGroupVariables.data, errors: [] }); - expect(wrapper.vm.$apollo.mutate).toHaveBeenCalledWith({ - mutation, - variables: { - endpoint: mockProvide.endpoint, - fullPath: groupProps.fullPath, - id: convertToGraphQLId(TYPENAME_GROUP, groupProps.id), - variable: newVariable, + await createComponentWithApollo({ + customHandlers: [[getGroupVariables, mockVariables]], + customResolvers: { + Mutation: { + ...resolvers.Mutation, + addGroupVariable: mockMutation, + updateGroupVariable: mockMutation, + deleteGroupVariable: mockMutation, + }, }, + props: groupProps, + provide: pagesFeatureFlagProvide, }); - }, - ); + }); - it.each` - actionName | event - ${'add'} | ${'add-variable'} - ${'update'} | 
${'update-variable'} - ${'delete'} | ${'delete-variable'} - `( - 'throws with the specific graphql error if present when user performs $actionName variable', - async ({ event }) => { - const graphQLErrorMessage = 'There is a problem with this graphQL action'; - jest - .spyOn(wrapper.vm.$apollo, 'mutate') - .mockResolvedValue({ data: { ciVariableMutation: { errors: [graphQLErrorMessage] } } }); - await findCiSettings().vm.$emit(event, newVariable); - await nextTick(); - - expect(wrapper.vm.$apollo.mutate).toHaveBeenCalled(); - expect(createAlert).toHaveBeenCalledWith({ message: graphQLErrorMessage }); - }, - ); + it.each` + actionName | event + ${'add'} | ${'add-variable'} + ${'update'} | ${'update-variable'} + ${'delete'} | ${'delete-variable'} + `( + 'throws the specific graphql error if present when user performs $actionName variable', + async ({ event }) => { + const graphQLErrorMessage = 'There is a problem with this graphQL action'; + mockMutation.mockResolvedValue({ + ...mockGroupVariables.data, + errors: [graphQLErrorMessage], + }); + + await findCiSettings().vm.$emit(event, newVariable); + await waitForPromises(); - it.each` - actionName | event - ${'add'} | ${'add-variable'} - ${'update'} | ${'update-variable'} - ${'delete'} | ${'delete-variable'} + expect(mockMutation).toHaveBeenCalled(); + expect(createAlert).toHaveBeenCalledWith({ message: graphQLErrorMessage }); + }, + ); + + it.each` + actionName | event + ${'add'} | ${'add-variable'} + ${'update'} | ${'update-variable'} + ${'delete'} | ${'delete-variable'} + `( + 'throws generic error on failure with no graphql errors and user performs $actionName variable', + async ({ event }) => { + mockMutation.mockRejectedValue(); + + await findCiSettings().vm.$emit(event, newVariable); + await waitForPromises(); + + expect(mockMutation).toHaveBeenCalled(); + expect(createAlert).toHaveBeenCalledWith({ message: genericMutationErrorText }); + }, + ); + + it.each` + actionName | event + ${'add'} | ${'add-variable'} 
+ ${'update'} | ${'update-variable'} + ${'delete'} | ${'delete-variable'} + `( + 'displays toast message after user performs $actionName variable', + async ({ actionName, event }) => { + await findCiSettings().vm.$emit(event, newVariable); + await waitForPromises(); + + expect(mockMutation).toHaveBeenCalled(); + expect(mockToastShow).toHaveBeenCalledWith( + mapMutationActionToToast[actionName](newVariable.key), + ); + }, + ); + }); + + const setupMockMutations = (mockResolvedMutation) => { + mockAddMutation.mockResolvedValue(mockResolvedMutation); + mockUpdateMutation.mockResolvedValue(mockResolvedMutation); + mockDeleteMutation.mockResolvedValue(mockResolvedMutation); + + return { + add: mockAddMutation, + update: mockUpdateMutation, + delete: mockDeleteMutation, + }; + }; + + describe.each` + scope | mockVariablesResolvedValue | getVariablesHandler | addMutationName | updateMutationName | deleteMutationName | props + ${'instance'} | ${mockVariables} | ${getAdminVariables} | ${'addAdminVariable'} | ${'updateAdminVariable'} | ${'deleteAdminVariable'} | ${instanceProps} + ${'group'} | ${mockGroupVariables} | ${getGroupVariables} | ${'addGroupVariable'} | ${'updateGroupVariable'} | ${'deleteGroupVariable'} | ${groupProps} + ${'project'} | ${mockProjectVariables} | ${getProjectVariables} | ${'addProjectVariable'} | ${'updateProjectVariable'} | ${'deleteProjectVariable'} | ${projectProps} `( - 'throws generic error on failure with no graphql errors and user performs $actionName variable', - async ({ event }) => { - jest.spyOn(wrapper.vm.$apollo, 'mutate').mockImplementationOnce(() => { - throw new Error(); + '$scope variable mutations', + ({ + addMutationName, + deleteMutationName, + getVariablesHandler, + mockVariablesResolvedValue, + updateMutationName, + props, + }) => { + beforeEach(async () => { + mockVariables.mockResolvedValue(mockVariablesResolvedValue); + mockMutationMap = setupMockMutations({ ...mockVariables.data, errors: [] }); + + await 
createComponentWithApollo({ + customHandlers: [[getVariablesHandler, mockVariables]], + customResolvers: { + Mutation: { + ...resolvers.Mutation, + [addMutationName]: mockAddMutation, + [updateMutationName]: mockUpdateMutation, + [deleteMutationName]: mockDeleteMutation, + }, + }, + props, + provide: pagesFeatureFlagProvide, + }); }); - await findCiSettings().vm.$emit(event, newVariable); - expect(wrapper.vm.$apollo.mutate).toHaveBeenCalled(); - expect(createAlert).toHaveBeenCalledWith({ message: genericMutationErrorText }); + it.each` + actionName | event + ${'add'} | ${'add-variable'} + ${'update'} | ${'update-variable'} + ${'delete'} | ${'delete-variable'} + `( + 'calls the right mutation when user performs $actionName variable', + async ({ event, actionName }) => { + await findCiSettings().vm.$emit(event, newVariable); + await waitForPromises(); + + expect(mockMutationMap[actionName]).toHaveBeenCalledWith( + expect.anything(), + { + endpoint: mockProvide.endpoint, + fullPath: props.fullPath, + id: props.id, + variable: newVariable, + }, + expect.anything(), + expect.anything(), + ); + }, + ); }, ); describe('without fullpath and ID props', () => { beforeEach(async () => { + mockMutation.mockResolvedValue({ ...mockAdminVariables.data, errors: [] }); mockVariables.mockResolvedValue(mockAdminVariables); await createComponentWithApollo({ customHandlers: [[getAdminVariables, mockVariables]], + customResolvers: { + Mutation: { + ...resolvers.Mutation, + addAdminVariable: mockMutation, + }, + }, props: createInstanceProps(), - provide: featureFlagProvide, + provide: pagesFeatureFlagProvide, }); }); it('does not pass fullPath and ID to the mutation', async () => { - jest.spyOn(wrapper.vm.$apollo, 'mutate').mockResolvedValue(); - await findCiSettings().vm.$emit('add-variable', newVariable); + await waitForPromises(); - expect(wrapper.vm.$apollo.mutate).toHaveBeenCalledWith({ - mutation: wrapper.props().mutationData[ADD_MUTATION_ACTION], - variables: { + 
expect(mockMutation).toHaveBeenCalledWith( + expect.anything(), + { endpoint: mockProvide.endpoint, variable: newVariable, }, - }); + expect.anything(), + expect.anything(), + ); }); }); }); @@ -359,10 +524,11 @@ describe('Ci Variable Shared Component', () => { await createComponentWithApollo({ customHandlers, props, - provide: { ...provide, ...featureFlagProvide }, + provide: { ...provide, ...pagesFeatureFlagProvide }, }); expect(findCiSettings().props()).toEqual({ + areEnvironmentsLoading: false, areScopedVariablesAvailable: wrapper.props().areScopedVariablesAvailable, hideEnvironmentScope: defaultProps.hideEnvironmentScope, pageInfo: defaultProps.pageInfo, @@ -379,29 +545,29 @@ describe('Ci Variable Shared Component', () => { describe('refetchAfterMutation', () => { it.each` - bool | text - ${true} | ${'refetches the variables'} - ${false} | ${'does not refetch the variables'} - `('when $bool it $text', async ({ bool }) => { + bool | text | timesQueryCalled + ${true} | ${'refetches the variables'} | ${2} + ${false} | ${'does not refetch the variables'} | ${1} + `('when $bool it $text', async ({ bool, timesQueryCalled }) => { + mockMutation.mockResolvedValue({ ...mockAdminVariables.data, errors: [] }); + mockVariables.mockResolvedValue(mockAdminVariables); + await createComponentWithApollo({ + customHandlers: [[getAdminVariables, mockVariables]], + customResolvers: { + Mutation: { + ...resolvers.Mutation, + addAdminVariable: mockMutation, + }, + }, props: { ...createInstanceProps(), refetchAfterMutation: bool }, - provide: featureFlagProvide, + provide: pagesFeatureFlagProvide, }); - jest.spyOn(wrapper.vm.$apollo, 'mutate').mockResolvedValue({ data: {} }); - jest - .spyOn(wrapper.vm.$apollo.queries.ciVariables, 'refetch') - .mockImplementation(jest.fn()); - await findCiSettings().vm.$emit('add-variable', newVariable); + await waitForPromises(); - await nextTick(); - - if (bool) { - expect(wrapper.vm.$apollo.queries.ciVariables.refetch).toHaveBeenCalled(); - } 
else { - expect(wrapper.vm.$apollo.queries.ciVariables.refetch).not.toHaveBeenCalled(); - } + expect(mockVariables).toHaveBeenCalledTimes(timesQueryCalled); }); }); @@ -409,7 +575,7 @@ describe('Ci Variable Shared Component', () => { describe('queryData', () => { let error; - beforeEach(async () => { + beforeEach(() => { mockVariables.mockResolvedValue(mockGroupVariables); }); @@ -418,7 +584,7 @@ describe('Ci Variable Shared Component', () => { await createComponentWithApollo({ customHandlers: [[getGroupVariables, mockVariables]], props: { ...createGroupProps() }, - provide: featureFlagProvide, + provide: pagesFeatureFlagProvide, }); } catch (e) { error = e; @@ -428,26 +594,21 @@ describe('Ci Variable Shared Component', () => { } }); - it('will not mount component with wrong data', async () => { - try { - await createComponentWithApollo({ - customHandlers: [[getGroupVariables, mockVariables]], - props: { ...createGroupProps(), queryData: { wrongKey: {} } }, - provide: featureFlagProvide, - }); - } catch (e) { - error = e; - } finally { - expect(wrapper.exists()).toBe(false); - expect(error.toString()).toContain('custom validator check failed for prop'); - } + it('report custom validator error on wrong data', () => { + expect(() => + assertProps( + ciVariableShared, + { ...defaultProps, ...createGroupProps(), queryData: { wrongKey: {} } }, + { provide: mockProvide }, + ), + ).toThrow('custom validator check failed for prop'); }); }); describe('mutationData', () => { let error; - beforeEach(async () => { + beforeEach(() => { mockVariables.mockResolvedValue(mockGroupVariables); }); @@ -455,7 +616,7 @@ describe('Ci Variable Shared Component', () => { try { await createComponentWithApollo({ props: { ...createGroupProps() }, - provide: featureFlagProvide, + provide: pagesFeatureFlagProvide, }); } catch (e) { error = e; @@ -465,18 +626,14 @@ describe('Ci Variable Shared Component', () => { } }); - it('will not mount component with wrong data', async () => { - try { - 
await createComponentWithApollo({ - props: { ...createGroupProps(), mutationData: { wrongKey: {} } }, - provide: featureFlagProvide, - }); - } catch (e) { - error = e; - } finally { - expect(wrapper.exists()).toBe(false); - expect(error.toString()).toContain('custom validator check failed for prop'); - } + it('report custom validator error on wrong data', () => { + expect(() => + assertProps( + ciVariableShared, + { ...defaultProps, ...createGroupProps(), mutationData: { wrongKey: {} } }, + { provide: { ...mockProvide, ...pagesFeatureFlagProvide } }, + ), + ).toThrow('custom validator check failed for prop'); }); }); }); diff --git a/spec/frontend/ci/ci_variable_list/components/ci_variable_table_spec.js b/spec/frontend/ci/ci_variable_list/components/ci_variable_table_spec.js index 2ef789e89c3..0b28cb06cec 100644 --- a/spec/frontend/ci/ci_variable_list/components/ci_variable_table_spec.js +++ b/spec/frontend/ci/ci_variable_list/components/ci_variable_table_spec.js @@ -82,11 +82,11 @@ describe('Ci variable table', () => { expect(findRevealButton().exists()).toBe(true); }); - it('displays the correct amount of variables', async () => { + it('displays the correct amount of variables', () => { expect(wrapper.findAll('.js-ci-variable-row')).toHaveLength(defaultProps.variables.length); }); - it('displays the correct variable options', async () => { + it('displays the correct variable options', () => { expect(findOptionsValues(0)).toBe('Protected, Expanded'); expect(findOptionsValues(1)).toBe('Masked'); }); diff --git a/spec/frontend/ci/ci_variable_list/mocks.js b/spec/frontend/ci/ci_variable_list/mocks.js index 4da4f53f69f..f9450803308 100644 --- a/spec/frontend/ci/ci_variable_list/mocks.js +++ b/spec/frontend/ci/ci_variable_list/mocks.js @@ -56,6 +56,11 @@ export const mockVariablesWithScopes = (kind) => return { ...variable, environmentScope: '*' }; }); +export const mockVariablesWithUniqueScopes = (kind) => + mockVariables(kind).map((variable) => { + return { 
...variable, environmentScope: variable.value }; + }); + const createDefaultVars = ({ withScope = true, kind } = {}) => { let base = mockVariables(kind); diff --git a/spec/frontend/ci/pipeline_editor/components/commit/commit_form_spec.js b/spec/frontend/ci/pipeline_editor/components/commit/commit_form_spec.js index b2dfa900b1d..03f346181e4 100644 --- a/spec/frontend/ci/pipeline_editor/components/commit/commit_form_spec.js +++ b/spec/frontend/ci/pipeline_editor/components/commit/commit_form_spec.js @@ -34,7 +34,7 @@ describe('Pipeline Editor | Commit Form', () => { const findCancelBtn = () => wrapper.find('[type="reset"]'); describe('when the form is displayed', () => { - beforeEach(async () => { + beforeEach(() => { createComponent(); }); @@ -57,7 +57,7 @@ describe('Pipeline Editor | Commit Form', () => { }); describe('when buttons are clicked', () => { - beforeEach(async () => { + beforeEach(() => { createComponent({}, mount); }); diff --git a/spec/frontend/ci/pipeline_editor/components/drawer/cards/pipeline_config_reference_card_spec.js b/spec/frontend/ci/pipeline_editor/components/drawer/cards/pipeline_config_reference_card_spec.js index 5399924b462..0296ab5a65c 100644 --- a/spec/frontend/ci/pipeline_editor/components/drawer/cards/pipeline_config_reference_card_spec.js +++ b/spec/frontend/ci/pipeline_editor/components/drawer/cards/pipeline_config_reference_card_spec.js @@ -68,7 +68,7 @@ describe('Pipeline config reference card', () => { }); }; - it('tracks help page links', async () => { + it('tracks help page links', () => { const { CI_EXAMPLES_LINK, CI_HELP_LINK, diff --git a/spec/frontend/ci/pipeline_editor/components/editor/ci_editor_header_spec.js b/spec/frontend/ci/pipeline_editor/components/editor/ci_editor_header_spec.js index 560e8840d57..2861fc35342 100644 --- a/spec/frontend/ci/pipeline_editor/components/editor/ci_editor_header_spec.js +++ b/spec/frontend/ci/pipeline_editor/components/editor/ci_editor_header_spec.js @@ -58,7 +58,7 @@ describe('CI 
Editor Header', () => { expect(findLinkBtn().props('icon')).toBe('external-link'); }); - it('tracks the click on the browse button', async () => { + it('tracks the click on the browse button', () => { const { browseTemplates } = pipelineEditorTrackingOptions.actions; testTracker(findLinkBtn(), browseTemplates); @@ -91,7 +91,7 @@ describe('CI Editor Header', () => { expect(wrapper.emitted('open-drawer')).toHaveLength(1); }); - it('tracks open help drawer action', async () => { + it('tracks open help drawer action', () => { const { actions } = pipelineEditorTrackingOptions; testTracker(findHelpBtn(), actions.openHelpDrawer); diff --git a/spec/frontend/ci/pipeline_editor/components/file-nav/branch_switcher_spec.js b/spec/frontend/ci/pipeline_editor/components/file-nav/branch_switcher_spec.js index bf14f4c4cd6..3a99949413b 100644 --- a/spec/frontend/ci/pipeline_editor/components/file-nav/branch_switcher_spec.js +++ b/spec/frontend/ci/pipeline_editor/components/file-nav/branch_switcher_spec.js @@ -288,7 +288,7 @@ describe('Pipeline editor branch switcher', () => { }); describe('with a search term', () => { - beforeEach(async () => { + beforeEach(() => { mockAvailableBranchQuery.mockResolvedValue(mockSearchBranches); }); diff --git a/spec/frontend/ci/pipeline_editor/components/file-tree/container_spec.js b/spec/frontend/ci/pipeline_editor/components/file-tree/container_spec.js index 306dd78d395..f2effcb2966 100644 --- a/spec/frontend/ci/pipeline_editor/components/file-tree/container_spec.js +++ b/spec/frontend/ci/pipeline_editor/components/file-tree/container_spec.js @@ -60,11 +60,11 @@ describe('Pipeline editor file nav', () => { expect(fileTreeItems().exists()).toBe(false); }); - it('renders alert tip', async () => { + it('renders alert tip', () => { expect(findTip().exists()).toBe(true); }); - it('renders learn more link', async () => { + it('renders learn more link', () => { expect(findTip().props('secondaryButtonLink')).toBe(mockIncludesHelpPagePath); }); @@ -87,7 
+87,7 @@ describe('Pipeline editor file nav', () => { }); }); - it('does not render alert tip', async () => { + it('does not render alert tip', () => { expect(findTip().exists()).toBe(false); }); }); diff --git a/spec/frontend/ci/pipeline_editor/components/header/pipeline_editor_mini_graph_spec.js b/spec/frontend/ci/pipeline_editor/components/header/pipeline_editor_mini_graph_spec.js index 7bf955012c7..b8526e569ec 100644 --- a/spec/frontend/ci/pipeline_editor/components/header/pipeline_editor_mini_graph_spec.js +++ b/spec/frontend/ci/pipeline_editor/components/header/pipeline_editor_mini_graph_spec.js @@ -96,7 +96,7 @@ describe('Pipeline Status', () => { await waitForPromises(); }); - it('should emit an error event when query fails', async () => { + it('should emit an error event when query fails', () => { expect(wrapper.emitted('showError')).toHaveLength(1); expect(wrapper.emitted('showError')[0]).toEqual([ { diff --git a/spec/frontend/ci/pipeline_editor/components/header/pipeline_status_spec.js b/spec/frontend/ci/pipeline_editor/components/header/pipeline_status_spec.js index 3faa2890254..8ca88472bf1 100644 --- a/spec/frontend/ci/pipeline_editor/components/header/pipeline_status_spec.js +++ b/spec/frontend/ci/pipeline_editor/components/header/pipeline_status_spec.js @@ -77,7 +77,7 @@ describe('Pipeline Status', () => { await waitForPromises(); }); - it('query is called with correct variables', async () => { + it('query is called with correct variables', () => { expect(mockPipelineQuery).toHaveBeenCalledTimes(1); expect(mockPipelineQuery).toHaveBeenCalledWith({ fullPath: mockProjectFullPath, diff --git a/spec/frontend/ci/pipeline_editor/components/job_assistant_drawer/accordion_items/artifacts_and_cache_item_spec.js b/spec/frontend/ci/pipeline_editor/components/job_assistant_drawer/accordion_items/artifacts_and_cache_item_spec.js new file mode 100644 index 00000000000..9046be4a45e --- /dev/null +++ 
b/spec/frontend/ci/pipeline_editor/components/job_assistant_drawer/accordion_items/artifacts_and_cache_item_spec.js @@ -0,0 +1,127 @@ +import ArtifactsAndCacheItem from '~/ci/pipeline_editor/components/job_assistant_drawer/accordion_items/artifacts_and_cache_item.vue'; +import { shallowMountExtended } from 'helpers/vue_test_utils_helper'; +import { JOB_TEMPLATE } from '~/ci/pipeline_editor/components/job_assistant_drawer/constants'; + +describe('Artifacts and cache item', () => { + let wrapper; + + const findArtifactsPathsInputByIndex = (index) => + wrapper.findByTestId(`artifacts-paths-input-${index}`); + const findArtifactsExcludeInputByIndex = (index) => + wrapper.findByTestId(`artifacts-exclude-input-${index}`); + const findCachePathsInputByIndex = (index) => wrapper.findByTestId(`cache-paths-input-${index}`); + const findCacheKeyInput = () => wrapper.findByTestId('cache-key-input'); + const findDeleteArtifactsPathsButtonByIndex = (index) => + wrapper.findByTestId(`delete-artifacts-paths-button-${index}`); + const findDeleteArtifactsExcludeButtonByIndex = (index) => + wrapper.findByTestId(`delete-artifacts-exclude-button-${index}`); + const findDeleteCachePathsButtonByIndex = (index) => + wrapper.findByTestId(`delete-cache-paths-button-${index}`); + const findAddArtifactsPathsButton = () => wrapper.findByTestId('add-artifacts-paths-button'); + const findAddArtifactsExcludeButton = () => wrapper.findByTestId('add-artifacts-exclude-button'); + const findAddCachePathsButton = () => wrapper.findByTestId('add-cache-paths-button'); + + const dummyArtifactsPath = 'dummyArtifactsPath'; + const dummyArtifactsExclude = 'dummyArtifactsExclude'; + const dummyCachePath = 'dummyCachePath'; + const dummyCacheKey = 'dummyCacheKey'; + + const createComponent = ({ job = JSON.parse(JSON.stringify(JOB_TEMPLATE)) } = {}) => { + wrapper = shallowMountExtended(ArtifactsAndCacheItem, { + propsData: { + job, + }, + }); + }; + + it('should emit update job event when filling inputs', () 
=> { + createComponent(); + + expect(wrapper.emitted('update-job')).toBeUndefined(); + + findArtifactsPathsInputByIndex(0).vm.$emit('input', dummyArtifactsPath); + + expect(wrapper.emitted('update-job')).toHaveLength(1); + expect(wrapper.emitted('update-job')[0]).toStrictEqual([ + 'artifacts.paths[0]', + dummyArtifactsPath, + ]); + + findArtifactsExcludeInputByIndex(0).vm.$emit('input', dummyArtifactsExclude); + + expect(wrapper.emitted('update-job')).toHaveLength(2); + expect(wrapper.emitted('update-job')[1]).toStrictEqual([ + 'artifacts.exclude[0]', + dummyArtifactsExclude, + ]); + + findCachePathsInputByIndex(0).vm.$emit('input', dummyCachePath); + + expect(wrapper.emitted('update-job')).toHaveLength(3); + expect(wrapper.emitted('update-job')[2]).toStrictEqual(['cache.paths[0]', dummyCachePath]); + + findCacheKeyInput().vm.$emit('input', dummyCacheKey); + + expect(wrapper.emitted('update-job')).toHaveLength(4); + expect(wrapper.emitted('update-job')[3]).toStrictEqual(['cache.key', dummyCacheKey]); + }); + + it('should emit update job event when click add item button', () => { + createComponent(); + + findAddArtifactsPathsButton().vm.$emit('click'); + + expect(wrapper.emitted('update-job')).toHaveLength(1); + expect(wrapper.emitted('update-job')[0]).toStrictEqual(['artifacts.paths[1]', '']); + + findAddArtifactsExcludeButton().vm.$emit('click'); + + expect(wrapper.emitted('update-job')).toHaveLength(2); + expect(wrapper.emitted('update-job')[1]).toStrictEqual(['artifacts.exclude[1]', '']); + + findAddCachePathsButton().vm.$emit('click'); + + expect(wrapper.emitted('update-job')).toHaveLength(3); + expect(wrapper.emitted('update-job')[2]).toStrictEqual(['cache.paths[1]', '']); + }); + + it('should emit update job event when click delete item button', () => { + createComponent({ + job: { + artifacts: { + paths: ['0', '1'], + exclude: ['0', '1'], + }, + cache: { + paths: ['0', '1'], + key: '', + }, + }, + }); + + 
findDeleteArtifactsPathsButtonByIndex(0).vm.$emit('click'); + + expect(wrapper.emitted('update-job')).toHaveLength(1); + expect(wrapper.emitted('update-job')[0]).toStrictEqual(['artifacts.paths[0]']); + + findDeleteArtifactsExcludeButtonByIndex(0).vm.$emit('click'); + + expect(wrapper.emitted('update-job')).toHaveLength(2); + expect(wrapper.emitted('update-job')[1]).toStrictEqual(['artifacts.exclude[0]']); + + findDeleteCachePathsButtonByIndex(0).vm.$emit('click'); + + expect(wrapper.emitted('update-job')).toHaveLength(3); + expect(wrapper.emitted('update-job')[2]).toStrictEqual(['cache.paths[0]']); + }); + + it('should not emit update job event when click the only one delete item button', () => { + createComponent(); + + findDeleteArtifactsPathsButtonByIndex(0).vm.$emit('click'); + findDeleteArtifactsExcludeButtonByIndex(0).vm.$emit('click'); + findDeleteCachePathsButtonByIndex(0).vm.$emit('click'); + + expect(wrapper.emitted('update-job')).toBeUndefined(); + }); +}); diff --git a/spec/frontend/ci/pipeline_editor/components/job_assistant_drawer/accordion_items/job_setup_item_spec.js b/spec/frontend/ci/pipeline_editor/components/job_assistant_drawer/accordion_items/job_setup_item_spec.js index eaad0dae90d..373fb1b70c7 100644 --- a/spec/frontend/ci/pipeline_editor/components/job_assistant_drawer/accordion_items/job_setup_item_spec.js +++ b/spec/frontend/ci/pipeline_editor/components/job_assistant_drawer/accordion_items/job_setup_item_spec.js @@ -1,4 +1,3 @@ -import createStore from '~/ci/pipeline_editor/store'; import JobSetupItem from '~/ci/pipeline_editor/components/job_assistant_drawer/accordion_items/job_setup_item.vue'; import { shallowMountExtended } from 'helpers/vue_test_utils_helper'; import { JOB_TEMPLATE } from '~/ci/pipeline_editor/components/job_assistant_drawer/constants'; @@ -18,8 +17,8 @@ describe('Job setup item', () => { const createComponent = () => { wrapper = shallowMountExtended(JobSetupItem, { - store: createStore(), propsData: { + 
availableStages: ['.pre', dummyJobStage, '.post'], tagOptions: [ { id: 'tag1', name: 'tag1' }, { id: 'tag2', name: 'tag2' }, diff --git a/spec/frontend/ci/pipeline_editor/components/job_assistant_drawer/accordion_items/rules_item_spec.js b/spec/frontend/ci/pipeline_editor/components/job_assistant_drawer/accordion_items/rules_item_spec.js new file mode 100644 index 00000000000..659ccb25996 --- /dev/null +++ b/spec/frontend/ci/pipeline_editor/components/job_assistant_drawer/accordion_items/rules_item_spec.js @@ -0,0 +1,70 @@ +import RulesItem from '~/ci/pipeline_editor/components/job_assistant_drawer/accordion_items/rules_item.vue'; +import { shallowMountExtended } from 'helpers/vue_test_utils_helper'; +import { + JOB_TEMPLATE, + JOB_RULES_WHEN, + JOB_RULES_START_IN, +} from '~/ci/pipeline_editor/components/job_assistant_drawer/constants'; + +describe('Rules item', () => { + let wrapper; + + const findRulesWhenSelect = () => wrapper.findByTestId('rules-when-select'); + const findRulesStartInNumberInput = () => wrapper.findByTestId('rules-start-in-number-input'); + const findRulesStartInUnitSelect = () => wrapper.findByTestId('rules-start-in-unit-select'); + const findRulesAllowFailureCheckBox = () => wrapper.findByTestId('rules-allow-failure-checkbox'); + + const dummyRulesWhen = JOB_RULES_WHEN.delayed.value; + const dummyRulesStartInNumber = 2; + const dummyRulesStartInUnit = JOB_RULES_START_IN.week.value; + const dummyRulesAllowFailure = true; + + const createComponent = () => { + wrapper = shallowMountExtended(RulesItem, { + propsData: { + isStartValid: true, + job: JSON.parse(JSON.stringify(JOB_TEMPLATE)), + }, + }); + }; + + beforeEach(() => { + createComponent(); + }); + + it('should emit update job event when filling inputs', () => { + expect(wrapper.emitted('update-job')).toBeUndefined(); + + findRulesWhenSelect().vm.$emit('input', dummyRulesWhen); + + expect(wrapper.emitted('update-job')).toHaveLength(1); + expect(wrapper.emitted('update-job')[0]).toEqual([ 
+ 'rules[0].when', + JOB_RULES_WHEN.delayed.value, + ]); + + findRulesStartInNumberInput().vm.$emit('input', dummyRulesStartInNumber); + + expect(wrapper.emitted('update-job')).toHaveLength(2); + expect(wrapper.emitted('update-job')[1]).toEqual([ + 'rules[0].start_in', + `2 ${JOB_RULES_START_IN.second.value}s`, + ]); + + findRulesStartInUnitSelect().vm.$emit('input', dummyRulesStartInUnit); + + expect(wrapper.emitted('update-job')).toHaveLength(3); + expect(wrapper.emitted('update-job')[2]).toEqual([ + 'rules[0].start_in', + `2 ${dummyRulesStartInUnit}s`, + ]); + + findRulesAllowFailureCheckBox().vm.$emit('input', dummyRulesAllowFailure); + + expect(wrapper.emitted('update-job')).toHaveLength(4); + expect(wrapper.emitted('update-job')[3]).toEqual([ + 'rules[0].allow_failure', + dummyRulesAllowFailure, + ]); + }); +}); diff --git a/spec/frontend/ci/pipeline_editor/components/job_assistant_drawer/job_assistant_drawer_spec.js b/spec/frontend/ci/pipeline_editor/components/job_assistant_drawer/job_assistant_drawer_spec.js index b293805d653..08aa7e3a11a 100644 --- a/spec/frontend/ci/pipeline_editor/components/job_assistant_drawer/job_assistant_drawer_spec.js +++ b/spec/frontend/ci/pipeline_editor/components/job_assistant_drawer/job_assistant_drawer_spec.js @@ -5,13 +5,16 @@ import { stringify } from 'yaml'; import JobAssistantDrawer from '~/ci/pipeline_editor/components/job_assistant_drawer/job_assistant_drawer.vue'; import JobSetupItem from '~/ci/pipeline_editor/components/job_assistant_drawer/accordion_items/job_setup_item.vue'; import ImageItem from '~/ci/pipeline_editor/components/job_assistant_drawer/accordion_items/image_item.vue'; -import getAllRunners from '~/ci/runner/graphql/list/all_runners.query.graphql'; +import ArtifactsAndCacheItem from '~/ci/pipeline_editor/components/job_assistant_drawer/accordion_items/artifacts_and_cache_item.vue'; +import RulesItem from '~/ci/pipeline_editor/components/job_assistant_drawer/accordion_items/rules_item.vue'; +import { 
JOB_RULES_WHEN } from '~/ci/pipeline_editor/components/job_assistant_drawer/constants'; +import getRunnerTags from '~/ci/pipeline_editor/graphql/queries/runner_tags.query.graphql'; import createMockApollo from 'helpers/mock_apollo_helper'; import waitForPromises from 'helpers/wait_for_promises'; -import createStore from '~/ci/pipeline_editor/store'; -import { mockAllRunnersQueryResponse } from 'jest/ci/pipeline_editor/mock_data'; + import { mountExtended } from 'helpers/vue_test_utils_helper'; import eventHub, { SCROLL_EDITOR_TO_BOTTOM } from '~/ci/pipeline_editor/event_hub'; +import { mockRunnersTagsQueryResponse, mockLintResponse, mockCiYml } from '../../mock_data'; Vue.use(VueApollo); @@ -23,22 +26,32 @@ describe('Job assistant drawer', () => { const dummyJobScript = 'b'; const dummyImageName = 'c'; const dummyImageEntrypoint = 'd'; + const dummyArtifactsPath = 'e'; + const dummyArtifactsExclude = 'f'; + const dummyCachePath = 'g'; + const dummyCacheKey = 'h'; + const dummyRulesWhen = JOB_RULES_WHEN.delayed.value; + const dummyRulesStartIn = '1 second'; + const dummyRulesAllowFailure = true; const findDrawer = () => wrapper.findComponent(GlDrawer); const findJobSetupItem = () => wrapper.findComponent(JobSetupItem); const findImageItem = () => wrapper.findComponent(ImageItem); + const findArtifactsAndCacheItem = () => wrapper.findComponent(ArtifactsAndCacheItem); + const findRulesItem = () => wrapper.findComponent(RulesItem); const findConfirmButton = () => wrapper.findByTestId('confirm-button'); const findCancelButton = () => wrapper.findByTestId('cancel-button'); const createComponent = () => { mockApollo = createMockApollo([ - [getAllRunners, jest.fn().mockResolvedValue(mockAllRunnersQueryResponse)], + [getRunnerTags, jest.fn().mockResolvedValue(mockRunnersTagsQueryResponse)], ]); wrapper = mountExtended(JobAssistantDrawer, { - store: createStore(), propsData: { + ciConfigData: mockLintResponse, + ciFileContent: mockCiYml, isVisible: true, }, apolloProvider: 
mockApollo, @@ -54,10 +67,27 @@ describe('Job assistant drawer', () => { expect(findJobSetupItem().exists()).toBe(true); }); + it('job setup item should have tag options', () => { + expect(findJobSetupItem().props('tagOptions')).toEqual([ + { id: 'tag1', name: 'tag1' }, + { id: 'tag2', name: 'tag2' }, + { id: 'tag3', name: 'tag3' }, + { id: 'tag4', name: 'tag4' }, + ]); + }); + it('should contain image accordion', () => { expect(findImageItem().exists()).toBe(true); }); + it('should contain artifacts and cache item accordion', () => { + expect(findArtifactsAndCacheItem().exists()).toBe(true); + }); + + it('should contain rules accordion', () => { + expect(findRulesItem().exists()).toBe(true); + }); + it('should emit close job assistant drawer event when closing the drawer', () => { expect(wrapper.emitted('close-job-assistant-drawer')).toBeUndefined(); @@ -74,8 +104,7 @@ describe('Job assistant drawer', () => { expect(wrapper.emitted('close-job-assistant-drawer')).toHaveLength(1); }); - it('trigger validate if job name is empty', async () => { - const updateCiConfigSpy = jest.spyOn(wrapper.vm, 'updateCiConfig'); + it('should block submit if job name is empty', async () => { findJobSetupItem().vm.$emit('update-job', 'script', 'b'); findConfirmButton().trigger('click'); @@ -83,7 +112,17 @@ describe('Job assistant drawer', () => { expect(findJobSetupItem().props('isNameValid')).toBe(false); expect(findJobSetupItem().props('isScriptValid')).toBe(true); - expect(updateCiConfigSpy).toHaveBeenCalledTimes(0); + expect(wrapper.emitted('updateCiConfig')).toBeUndefined(); + }); + + it('should block submit if rules when is delayed and start in is out of range', async () => { + findRulesItem().vm.$emit('update-job', 'rules[0].when', JOB_RULES_WHEN.delayed.value); + findRulesItem().vm.$emit('update-job', 'rules[0].start_in', '2 weeks'); + findConfirmButton().trigger('click'); + + await nextTick(); + + expect(wrapper.emitted('updateCiConfig')).toBeUndefined(); }); describe('when 
enter valid input', () => { @@ -92,10 +131,24 @@ describe('Job assistant drawer', () => { findJobSetupItem().vm.$emit('update-job', 'script', dummyJobScript); findImageItem().vm.$emit('update-job', 'image.name', dummyImageName); findImageItem().vm.$emit('update-job', 'image.entrypoint', [dummyImageEntrypoint]); + findArtifactsAndCacheItem().vm.$emit('update-job', 'artifacts.paths', [dummyArtifactsPath]); + findArtifactsAndCacheItem().vm.$emit('update-job', 'artifacts.exclude', [ + dummyArtifactsExclude, + ]); + findArtifactsAndCacheItem().vm.$emit('update-job', 'cache.paths', [dummyCachePath]); + findArtifactsAndCacheItem().vm.$emit('update-job', 'cache.key', dummyCacheKey); + findRulesItem().vm.$emit('update-job', 'rules[0].allow_failure', dummyRulesAllowFailure); + findRulesItem().vm.$emit('update-job', 'rules[0].when', dummyRulesWhen); + findRulesItem().vm.$emit('update-job', 'rules[0].start_in', dummyRulesStartIn); }); it('passes correct prop to accordions', () => { - const accordions = [findJobSetupItem(), findImageItem()]; + const accordions = [ + findJobSetupItem(), + findImageItem(), + findArtifactsAndCacheItem(), + findRulesItem(), + ]; accordions.forEach((accordion) => { expect(accordion.props('job')).toMatchObject({ name: dummyJobName, @@ -104,6 +157,21 @@ describe('Job assistant drawer', () => { name: dummyImageName, entrypoint: [dummyImageEntrypoint], }, + artifacts: { + paths: [dummyArtifactsPath], + exclude: [dummyArtifactsExclude], + }, + cache: { + paths: [dummyCachePath], + key: dummyCacheKey, + }, + rules: [ + { + allow_failure: dummyRulesAllowFailure, + when: dummyRulesWhen, + start_in: dummyRulesStartIn, + }, + ], }); }); }); @@ -129,19 +197,60 @@ describe('Job assistant drawer', () => { expect(findJobSetupItem().props('job')).toMatchObject({ name: '', script: '' }); }); - it('should update correct ci content when click add button', () => { - const updateCiConfigSpy = jest.spyOn(wrapper.vm, 'updateCiConfig'); + it('should omit keys with default 
value when click add button', () => { + findRulesItem().vm.$emit('update-job', 'rules[0].allow_failure', false); + findRulesItem().vm.$emit('update-job', 'rules[0].when', JOB_RULES_WHEN.onSuccess.value); + findRulesItem().vm.$emit('update-job', 'rules[0].start_in', dummyRulesStartIn); + findConfirmButton().trigger('click'); + + expect(wrapper.emitted('updateCiConfig')).toStrictEqual([ + [ + `${wrapper.props('ciFileContent')}\n${stringify({ + [dummyJobName]: { + script: dummyJobScript, + image: { name: dummyImageName, entrypoint: [dummyImageEntrypoint] }, + artifacts: { + paths: [dummyArtifactsPath], + exclude: [dummyArtifactsExclude], + }, + cache: { + paths: [dummyCachePath], + key: dummyCacheKey, + }, + }, + })}`, + ], + ]); + }); + it('should update correct ci content when click add button', () => { findConfirmButton().trigger('click'); - expect(updateCiConfigSpy).toHaveBeenCalledWith( - `\n${stringify({ - [dummyJobName]: { - script: dummyJobScript, - image: { name: dummyImageName, entrypoint: [dummyImageEntrypoint] }, - }, - })}`, - ); + expect(wrapper.emitted('updateCiConfig')).toStrictEqual([ + [ + `${wrapper.props('ciFileContent')}\n${stringify({ + [dummyJobName]: { + script: dummyJobScript, + image: { name: dummyImageName, entrypoint: [dummyImageEntrypoint] }, + artifacts: { + paths: [dummyArtifactsPath], + exclude: [dummyArtifactsExclude], + }, + cache: { + paths: [dummyCachePath], + key: dummyCacheKey, + }, + rules: [ + { + allow_failure: dummyRulesAllowFailure, + when: dummyRulesWhen, + start_in: dummyRulesStartIn, + }, + ], + }, + })}`, + ], + ]); }); it('should emit scroll editor to button event when click add button', () => { diff --git a/spec/frontend/ci/pipeline_editor/components/pipeline_editor_tabs_spec.js b/spec/frontend/ci/pipeline_editor/components/pipeline_editor_tabs_spec.js index 52a543c7686..cbdf01105c7 100644 --- a/spec/frontend/ci/pipeline_editor/components/pipeline_editor_tabs_spec.js +++ 
b/spec/frontend/ci/pipeline_editor/components/pipeline_editor_tabs_spec.js @@ -314,13 +314,13 @@ describe('Pipeline editor tabs component', () => { createComponent(); }); - it('shows walkthrough popover', async () => { + it('shows walkthrough popover', () => { expect(findWalkthroughPopover().exists()).toBe(true); }); }); describe('when isNewCiConfigFile prop is false', () => { - it('does not show walkthrough popover', async () => { + it('does not show walkthrough popover', () => { createComponent({ props: { isNewCiConfigFile: false } }); expect(findWalkthroughPopover().exists()).toBe(false); }); diff --git a/spec/frontend/ci/pipeline_editor/components/popovers/file_tree_popover_spec.js b/spec/frontend/ci/pipeline_editor/components/popovers/file_tree_popover_spec.js index a9aabb103f2..3d84f06967a 100644 --- a/spec/frontend/ci/pipeline_editor/components/popovers/file_tree_popover_spec.js +++ b/spec/frontend/ci/pipeline_editor/components/popovers/file_tree_popover_spec.js @@ -25,7 +25,7 @@ describe('FileTreePopover component', () => { }); describe('default', () => { - beforeEach(async () => { + beforeEach(() => { createComponent({ stubs: { GlSprintf } }); }); @@ -45,7 +45,7 @@ describe('FileTreePopover component', () => { }); describe('when popover has already been dismissed before', () => { - it('does not render popover', async () => { + it('does not render popover', () => { localStorage.setItem(FILE_TREE_POPOVER_DISMISSED_KEY, 'true'); createComponent(); diff --git a/spec/frontend/ci/pipeline_editor/components/popovers/validate_pipeline_popover_spec.js b/spec/frontend/ci/pipeline_editor/components/popovers/validate_pipeline_popover_spec.js index 23f9c7a87ee..18eec48ad83 100644 --- a/spec/frontend/ci/pipeline_editor/components/popovers/validate_pipeline_popover_spec.js +++ b/spec/frontend/ci/pipeline_editor/components/popovers/validate_pipeline_popover_spec.js @@ -20,7 +20,7 @@ describe('ValidatePopover component', () => { const findFeedbackLink = () => 
wrapper.findByTestId('feedback-link'); describe('template', () => { - beforeEach(async () => { + beforeEach(() => { createComponent({ stubs: { GlLink, GlSprintf }, }); diff --git a/spec/frontend/ci/pipeline_editor/components/popovers/walkthrough_popover_spec.js b/spec/frontend/ci/pipeline_editor/components/popovers/walkthrough_popover_spec.js index 186fd803d47..37339b1c422 100644 --- a/spec/frontend/ci/pipeline_editor/components/popovers/walkthrough_popover_spec.js +++ b/spec/frontend/ci/pipeline_editor/components/popovers/walkthrough_popover_spec.js @@ -18,7 +18,7 @@ describe('WalkthroughPopover component', () => { await wrapper.findByTestId('ctaBtn').trigger('click'); }); - it('emits "walkthrough-popover-cta-clicked" event', async () => { + it('emits "walkthrough-popover-cta-clicked" event', () => { expect(wrapper.emitted()['walkthrough-popover-cta-clicked']).toHaveLength(1); }); }); diff --git a/spec/frontend/ci/pipeline_editor/components/ui/editor_tab_spec.js b/spec/frontend/ci/pipeline_editor/components/ui/editor_tab_spec.js index a4e7abba7b0..f02b1f5efbc 100644 --- a/spec/frontend/ci/pipeline_editor/components/ui/editor_tab_spec.js +++ b/spec/frontend/ci/pipeline_editor/components/ui/editor_tab_spec.js @@ -64,7 +64,7 @@ describe('~/ci/pipeline_editor/components/ui/editor_tab.vue', () => { mockChildMounted = jest.fn(); }); - it('tabs are mounted lazily', async () => { + it('tabs are mounted lazily', () => { createMockedWrapper(); expect(mockChildMounted).toHaveBeenCalledTimes(0); @@ -192,7 +192,7 @@ describe('~/ci/pipeline_editor/components/ui/editor_tab.vue', () => { createMockedWrapper(); }); - it('renders correct number of badges', async () => { + it('renders correct number of badges', () => { expect(findBadges()).toHaveLength(1); expect(findBadges().at(0).text()).toBe('NEW'); }); diff --git a/spec/frontend/ci/pipeline_editor/graphql/resolvers_spec.js b/spec/frontend/ci/pipeline_editor/graphql/resolvers_spec.js index 6a6cc3a14de..893f6775ac5 100644 --- 
a/spec/frontend/ci/pipeline_editor/graphql/resolvers_spec.js +++ b/spec/frontend/ci/pipeline_editor/graphql/resolvers_spec.js @@ -34,7 +34,7 @@ describe('~/ci/pipeline_editor/graphql/resolvers', () => { }); /* eslint-disable no-underscore-dangle */ - it('lint data has correct type names', async () => { + it('lint data has correct type names', () => { expect(result.__typename).toBe('CiLintContent'); expect(result.jobs[0].__typename).toBe('CiLintJob'); diff --git a/spec/frontend/ci/pipeline_editor/mock_data.js b/spec/frontend/ci/pipeline_editor/mock_data.js index ecfc477184b..865dd34fbfe 100644 --- a/spec/frontend/ci/pipeline_editor/mock_data.js +++ b/spec/frontend/ci/pipeline_editor/mock_data.js @@ -583,86 +583,31 @@ export const mockCommitCreateResponse = { }, }; -export const mockAllRunnersQueryResponse = { +export const mockRunnersTagsQueryResponse = { data: { runners: { nodes: [ { id: 'gid://gitlab/Ci::Runner/1', - description: 'test', - runnerType: 'PROJECT_TYPE', - shortSha: 'DdTYMQGS', - version: '15.6.1', - ipAddress: '127.0.0.1', - active: true, - locked: true, - jobCount: 0, - jobExecutionStatus: 'IDLE', - tagList: ['tag1', 'tag2', 'tag3'], - createdAt: '2022-11-29T09:37:43Z', - contactedAt: null, - status: 'NEVER_CONTACTED', - userPermissions: { - updateRunner: true, - deleteRunner: true, - __typename: 'RunnerPermissions', - }, - groups: null, - ownerProject: { - id: 'gid://gitlab/Project/1', - name: '123', - nameWithNamespace: 'Administrator / 123', - webUrl: 'http://127.0.0.1:3000/root/test', - __typename: 'Project', - }, + tagList: ['tag1', 'tag2'], __typename: 'CiRunner', - upgradeStatus: 'NOT_AVAILABLE', - adminUrl: 'http://127.0.0.1:3000/admin/runners/1', - editAdminUrl: 'http://127.0.0.1:3000/admin/runners/1/edit', }, { id: 'gid://gitlab/Ci::Runner/2', - description: 'test', - runnerType: 'PROJECT_TYPE', - shortSha: 'DdTYMQGA', - version: '15.6.1', - ipAddress: '127.0.0.1', - active: true, - locked: true, - jobCount: 0, - jobExecutionStatus: 
'IDLE', - tagList: ['tag3', 'tag4'], - createdAt: '2022-11-29T09:37:43Z', - contactedAt: null, - status: 'NEVER_CONTACTED', - userPermissions: { - updateRunner: true, - deleteRunner: true, - __typename: 'RunnerPermissions', - }, - groups: null, - ownerProject: { - id: 'gid://gitlab/Project/1', - name: '123', - nameWithNamespace: 'Administrator / 123', - webUrl: 'http://127.0.0.1:3000/root/test', - __typename: 'Project', - }, + tagList: ['tag2', 'tag3'], + __typename: 'CiRunner', + }, + { + id: 'gid://gitlab/Ci::Runner/3', + tagList: ['tag2', 'tag4'], + __typename: 'CiRunner', + }, + { + id: 'gid://gitlab/Ci::Runner/4', + tagList: [], __typename: 'CiRunner', - upgradeStatus: 'NOT_AVAILABLE', - adminUrl: 'http://127.0.0.1:3000/admin/runners/2', - editAdminUrl: 'http://127.0.0.1:3000/admin/runners/2/edit', }, ], - pageInfo: { - hasNextPage: false, - hasPreviousPage: false, - startCursor: - 'eyJjcmVhdGVkX2F0IjoiMjAyMi0xMS0yOSAwOTozNzo0My40OTEwNTEwMDAgKzAwMDAiLCJpZCI6IjIifQ', - endCursor: - 'eyJjcmVhdGVkX2F0IjoiMjAyMi0xMS0yOSAwOTozNzo0My40OTEwNTEwMDAgKzAwMDAiLCJpZCI6IjIifQ', - __typename: 'PageInfo', - }, __typename: 'CiRunnerConnection', }, }, diff --git a/spec/frontend/ci/pipeline_editor/pipeline_editor_app_spec.js b/spec/frontend/ci/pipeline_editor/pipeline_editor_app_spec.js index 7a13bfbd1ab..8bac46a3e9c 100644 --- a/spec/frontend/ci/pipeline_editor/pipeline_editor_app_spec.js +++ b/spec/frontend/ci/pipeline_editor/pipeline_editor_app_spec.js @@ -8,7 +8,6 @@ import waitForPromises from 'helpers/wait_for_promises'; import { HTTP_STATUS_INTERNAL_SERVER_ERROR } from '~/lib/utils/http_status'; import { objectToQuery, redirectTo } from '~/lib/utils/url_utility'; import { resolvers } from '~/ci/pipeline_editor/graphql/resolvers'; -import createStore from '~/ci/pipeline_editor/store'; import PipelineEditorTabs from '~/ci/pipeline_editor/components/pipeline_editor_tabs.vue'; import PipelineEditorEmptyState from 
'~/ci/pipeline_editor/components/ui/pipeline_editor_empty_state.vue'; import PipelineEditorMessages from '~/ci/pipeline_editor/components/ui/pipeline_editor_messages.vue'; @@ -81,9 +80,7 @@ describe('Pipeline editor app component', () => { provide = {}, stubs = {}, } = {}) => { - const store = createStore(); wrapper = shallowMount(PipelineEditorApp, { - store, provide: { ...defaultProvide, ...provide }, stubs, mocks: { @@ -99,7 +96,7 @@ describe('Pipeline editor app component', () => { }); }; - const createComponentWithApollo = async ({ + const createComponentWithApollo = ({ provide = {}, stubs = {}, withUndefinedBranch = false, @@ -255,10 +252,6 @@ describe('Pipeline editor app component', () => { .mockImplementation(jest.fn()); }); - it('available stages is updated', () => { - expect(wrapper.vm.$store.state.availableStages).toStrictEqual(['test', 'build']); - }); - it('shows pipeline editor home component', () => { expect(findEditorHome().exists()).toBe(true); }); @@ -267,7 +260,7 @@ describe('Pipeline editor app component', () => { expect(findAlert().exists()).toBe(false); }); - it('ci config query is called with correct variables', async () => { + it('ci config query is called with correct variables', () => { expect(mockCiConfigData).toHaveBeenCalledWith({ content: mockCiYml, projectPath: mockProjectFullPath, @@ -294,7 +287,7 @@ describe('Pipeline editor app component', () => { .mockImplementation(jest.fn()); }); - it('shows an empty state and does not show editor home component', async () => { + it('shows an empty state and does not show editor home component', () => { expect(findEmptyState().exists()).toBe(true); expect(findAlert().exists()).toBe(false); expect(findEditorHome().exists()).toBe(false); diff --git a/spec/frontend/ci/pipeline_new/components/pipeline_new_form_spec.js b/spec/frontend/ci/pipeline_new/components/pipeline_new_form_spec.js index 1349461d8bc..9015031b6c8 100644 --- a/spec/frontend/ci/pipeline_new/components/pipeline_new_form_spec.js +++ 
b/spec/frontend/ci/pipeline_new/components/pipeline_new_form_spec.js @@ -142,7 +142,7 @@ describe('Pipeline New Form', () => { await waitForPromises(); }); - it('displays the correct values for the provided query params', async () => { + it('displays the correct values for the provided query params', () => { expect(findVariableTypes().at(0).props('text')).toBe('Variable'); expect(findVariableTypes().at(1).props('text')).toBe('File'); expect(findRefsDropdown().props('value')).toEqual({ shortName: 'tag-1' }); @@ -154,7 +154,7 @@ describe('Pipeline New Form', () => { expect(findValueInputs().at(0).element.value).toBe('test_var_val'); }); - it('displays an empty variable for the user to fill out', async () => { + it('displays an empty variable for the user to fill out', () => { expect(findKeyInputs().at(2).element.value).toBe(''); expect(findValueInputs().at(2).element.value).toBe(''); expect(findVariableTypes().at(2).props('text')).toBe('Variable'); @@ -186,12 +186,12 @@ describe('Pipeline New Form', () => { }); describe('Pipeline creation', () => { - beforeEach(async () => { + beforeEach(() => { mockCiConfigVariables.mockResolvedValue(mockEmptyCiConfigVariablesResponse); mock.onPost(pipelinesPath).reply(HTTP_STATUS_OK, newPipelinePostResponse); }); - it('does not submit the native HTML form', async () => { + it('does not submit the native HTML form', () => { createComponentWithApollo(); findForm().vm.$emit('submit', dummySubmitEvent); @@ -328,7 +328,7 @@ describe('Pipeline New Form', () => { }); const testBehaviorWhenCacheIsPopulated = (queryResponse) => { - beforeEach(async () => { + beforeEach(() => { mockCiConfigVariables.mockResolvedValue(queryResponse); createComponentWithApollo({ method: mountExtended }); }); @@ -406,7 +406,7 @@ describe('Pipeline New Form', () => { await waitForPromises(); }); - it('displays all the variables', async () => { + it('displays all the variables', () => { expect(findVariableRows()).toHaveLength(6); }); @@ -445,7 +445,7 @@ 
describe('Pipeline New Form', () => { await waitForPromises(); }); - it('displays variables with description only', async () => { + it('displays variables with description only', () => { expect(findVariableRows()).toHaveLength(2); // extra empty variable is added at the end }); }); diff --git a/spec/frontend/ci/pipeline_new/components/refs_dropdown_spec.js b/spec/frontend/ci/pipeline_new/components/refs_dropdown_spec.js index 60ace483712..82dac1358c5 100644 --- a/spec/frontend/ci/pipeline_new/components/refs_dropdown_spec.js +++ b/spec/frontend/ci/pipeline_new/components/refs_dropdown_spec.js @@ -54,7 +54,7 @@ describe('Pipeline New Form', () => { expect(findRefsDropdownItems()).toHaveLength(0); }); - it('does not make requests immediately', async () => { + it('does not make requests immediately', () => { expect(mock.history.get).toHaveLength(0); }); @@ -117,14 +117,14 @@ describe('Pipeline New Form', () => { await waitForPromises(); }); - it('requests filtered tags and branches', async () => { + it('requests filtered tags and branches', () => { expect(mock.history.get).toHaveLength(2); expect(mock.history.get[1].params).toEqual({ search: mockSearchTerm, }); }); - it('displays dropdown with branches and tags', async () => { + it('displays dropdown with branches and tags', () => { const filteredRefLength = mockFilteredRefs.Tags.length + mockFilteredRefs.Branches.length; expect(findRefsDropdownItems()).toHaveLength(filteredRefLength); diff --git a/spec/frontend/ci/pipeline_schedules/components/delete_pipeline_schedule_modal_spec.js b/spec/frontend/ci/pipeline_schedules/components/delete_pipeline_schedule_modal_spec.js index c45267e5a47..e48f556c246 100644 --- a/spec/frontend/ci/pipeline_schedules/components/delete_pipeline_schedule_modal_spec.js +++ b/spec/frontend/ci/pipeline_schedules/components/delete_pipeline_schedule_modal_spec.js @@ -20,13 +20,13 @@ describe('Delete pipeline schedule modal', () => { createComponent(); }); - it('emits the deleteSchedule event', 
async () => { + it('emits the deleteSchedule event', () => { findModal().vm.$emit('primary'); expect(wrapper.emitted()).toEqual({ deleteSchedule: [[]] }); }); - it('emits the hideModal event', async () => { + it('emits the hideModal event', () => { findModal().vm.$emit('hide'); expect(wrapper.emitted()).toEqual({ hideModal: [[]] }); diff --git a/spec/frontend/ci/pipeline_schedules/components/take_ownership_modal_spec.js b/spec/frontend/ci/pipeline_schedules/components/take_ownership_modal_spec.js index e3965d13c19..7cc254b7653 100644 --- a/spec/frontend/ci/pipeline_schedules/components/take_ownership_modal_spec.js +++ b/spec/frontend/ci/pipeline_schedules/components/take_ownership_modal_spec.js @@ -26,13 +26,13 @@ describe('Take ownership modal', () => { ); }); - it('emits the takeOwnership event', async () => { + it('emits the takeOwnership event', () => { findModal().vm.$emit('primary'); expect(wrapper.emitted()).toEqual({ takeOwnership: [[]] }); }); - it('emits the hideModal event', async () => { + it('emits the hideModal event', () => { findModal().vm.$emit('hide'); expect(wrapper.emitted()).toEqual({ hideModal: [[]] }); diff --git a/spec/frontend/ci/runner/admin_new_runner_app/admin_new_runner_app_spec.js b/spec/frontend/ci/runner/admin_new_runner_app/admin_new_runner_app_spec.js index 85b1d3b1b2f..58a1c0bc18d 100644 --- a/spec/frontend/ci/runner/admin_new_runner_app/admin_new_runner_app_spec.js +++ b/spec/frontend/ci/runner/admin_new_runner_app/admin_new_runner_app_spec.js @@ -1,5 +1,3 @@ -import Vue from 'vue'; -import VueApollo from 'vue-apollo'; import { GlSprintf } from '@gitlab/ui'; import { s__ } from '~/locale'; @@ -11,14 +9,15 @@ import AdminNewRunnerApp from '~/ci/runner/admin_new_runner/admin_new_runner_app import { saveAlertToLocalStorage } from '~/ci/runner/local_storage_alert/save_alert_to_local_storage'; import RunnerInstructionsModal from '~/vue_shared/components/runner_instructions/runner_instructions_modal.vue'; import 
RunnerPlatformsRadioGroup from '~/ci/runner/components/runner_platforms_radio_group.vue'; -import { PARAM_KEY_PLATFORM, DEFAULT_PLATFORM, WINDOWS_PLATFORM } from '~/ci/runner/constants'; +import { + PARAM_KEY_PLATFORM, + INSTANCE_TYPE, + DEFAULT_PLATFORM, + WINDOWS_PLATFORM, +} from '~/ci/runner/constants'; import RunnerCreateForm from '~/ci/runner/components/runner_create_form.vue'; import { redirectTo } from '~/lib/utils/url_utility'; -import { runnerCreateResult } from '../mock_data'; - -const mockLegacyRegistrationToken = 'LEGACY_REGISTRATION_TOKEN'; - -Vue.use(VueApollo); +import { runnerCreateResult, mockRegistrationToken } from '../mock_data'; jest.mock('~/ci/runner/local_storage_alert/save_alert_to_local_storage'); jest.mock('~/alert'); @@ -40,7 +39,7 @@ describe('AdminNewRunnerApp', () => { const createComponent = () => { wrapper = shallowMountExtended(AdminNewRunnerApp, { propsData: { - legacyRegistrationToken: mockLegacyRegistrationToken, + legacyRegistrationToken: mockRegistrationToken, }, directives: { GlModal: createMockDirective('gl-modal'), @@ -58,7 +57,7 @@ describe('AdminNewRunnerApp', () => { describe('Shows legacy modal', () => { it('passes legacy registration to modal', () => { expect(findRunnerInstructionsModal().props('registrationToken')).toEqual( - mockLegacyRegistrationToken, + mockRegistrationToken, ); }); @@ -76,8 +75,11 @@ describe('AdminNewRunnerApp', () => { }); describe('Runner form', () => { - it('shows the runner create form', () => { - expect(findRunnerCreateForm().exists()).toBe(true); + it('shows the runner create form for an instance runner', () => { + expect(findRunnerCreateForm().props()).toEqual({ + runnerType: INSTANCE_TYPE, + groupId: null, + }); }); describe('When a runner is saved', () => { @@ -93,7 +95,7 @@ describe('AdminNewRunnerApp', () => { }); it('redirects to the registration page', () => { - const url = `${mockCreatedRunner.registerAdminUrl}?${PARAM_KEY_PLATFORM}=${DEFAULT_PLATFORM}`; + const url = 
`${mockCreatedRunner.ephemeralRegisterUrl}?${PARAM_KEY_PLATFORM}=${DEFAULT_PLATFORM}`; expect(redirectTo).toHaveBeenCalledWith(url); }); @@ -106,7 +108,7 @@ describe('AdminNewRunnerApp', () => { }); it('redirects to the registration page with the platform', () => { - const url = `${mockCreatedRunner.registerAdminUrl}?${PARAM_KEY_PLATFORM}=${WINDOWS_PLATFORM}`; + const url = `${mockCreatedRunner.ephemeralRegisterUrl}?${PARAM_KEY_PLATFORM}=${WINDOWS_PLATFORM}`; expect(redirectTo).toHaveBeenCalledWith(url); }); diff --git a/spec/frontend/ci/runner/admin_register_runner/admin_register_runner_app_spec.js b/spec/frontend/ci/runner/admin_register_runner/admin_register_runner_app_spec.js index d04df85d58f..60244ba5bc2 100644 --- a/spec/frontend/ci/runner/admin_register_runner/admin_register_runner_app_spec.js +++ b/spec/frontend/ci/runner/admin_register_runner/admin_register_runner_app_spec.js @@ -37,7 +37,7 @@ describe('AdminRegisterRunnerApp', () => { }; describe('When showing runner details', () => { - beforeEach(async () => { + beforeEach(() => { createComponent(); }); @@ -64,7 +64,7 @@ describe('AdminRegisterRunnerApp', () => { }); describe('When another platform has been selected', () => { - beforeEach(async () => { + beforeEach(() => { setWindowLocation(`?${PARAM_KEY_PLATFORM}=${WINDOWS_PLATFORM}`); createComponent(); diff --git a/spec/frontend/ci/runner/admin_runner_show/admin_runner_show_app_spec.js b/spec/frontend/ci/runner/admin_runner_show/admin_runner_show_app_spec.js index 9d9142f2c68..d1f95aef349 100644 --- a/spec/frontend/ci/runner/admin_runner_show/admin_runner_show_app_spec.js +++ b/spec/frontend/ci/runner/admin_runner_show/admin_runner_show_app_spec.js @@ -81,7 +81,7 @@ describe('AdminRunnerShowApp', () => { await createComponent({ mountFn: mountExtended }); }); - it('expect GraphQL ID to be requested', async () => { + it('expect GraphQL ID to be requested', () => { expect(mockRunnerQuery).toHaveBeenCalledWith({ id: mockRunnerGraphqlId }); }); @@ -89,7 
+89,7 @@ describe('AdminRunnerShowApp', () => { expect(findRunnerHeader().text()).toContain(`Runner #${mockRunnerId}`); }); - it('displays the runner edit and pause buttons', async () => { + it('displays the runner edit and pause buttons', () => { expect(findRunnerEditButton().attributes('href')).toBe(mockRunner.editAdminUrl); expect(findRunnerPauseButton().exists()).toBe(true); expect(findRunnerDeleteButton().exists()).toBe(true); @@ -99,7 +99,7 @@ describe('AdminRunnerShowApp', () => { expect(findRunnerDetailsTabs().props('runner')).toEqual(mockRunner); }); - it('shows basic runner details', async () => { + it('shows basic runner details', () => { const expected = `Description My Runner Last contact Never contacted Version 1.0.0 diff --git a/spec/frontend/ci/runner/admin_runners/admin_runners_app_spec.js b/spec/frontend/ci/runner/admin_runners/admin_runners_app_spec.js index 0cf6241c24f..2cd1bc0b2f8 100644 --- a/spec/frontend/ci/runner/admin_runners/admin_runners_app_spec.js +++ b/spec/frontend/ci/runner/admin_runners/admin_runners_app_spec.js @@ -57,13 +57,13 @@ import { allRunnersDataPaginated, onlineContactTimeoutSecs, staleTimeoutSecs, + mockRegistrationToken, newRunnerPath, emptyPageInfo, emptyStateSvgPath, emptyStateFilteredSvgPath, } from '../mock_data'; -const mockRegistrationToken = 'MOCK_REGISTRATION_TOKEN'; const mockRunners = allRunnersData.data.runners.nodes; const mockRunnersCount = runnersCountData.data.runners.count; @@ -208,13 +208,13 @@ describe('AdminRunnersApp', () => { it('runner item links to the runner admin page', async () => { await createComponent({ mountFn: mountExtended }); - const { id, shortSha } = mockRunners[0]; + const { id, shortSha, adminUrl } = mockRunners[0]; const numericId = getIdFromGraphQLId(id); const runnerLink = wrapper.find('tr [data-testid="td-summary"]').findComponent(GlLink); expect(runnerLink.text()).toBe(`#${numericId} (${shortSha})`); - 
expect(runnerLink.attributes('href')).toBe(`http://localhost/admin/runners/${numericId}`); + expect(runnerLink.attributes('href')).toBe(adminUrl); }); it('renders runner actions for each runner', async () => { @@ -264,7 +264,7 @@ describe('AdminRunnersApp', () => { }); describe('Single runner row', () => { - const { id: graphqlId, shortSha } = mockRunners[0]; + const { id: graphqlId, shortSha, adminUrl } = mockRunners[0]; const id = getIdFromGraphQLId(graphqlId); beforeEach(async () => { @@ -273,11 +273,11 @@ describe('AdminRunnersApp', () => { await createComponent({ mountFn: mountExtended }); }); - it('Links to the runner page', async () => { + it('Links to the runner page', () => { const runnerLink = wrapper.find('tr [data-testid="td-summary"]').findComponent(GlLink); expect(runnerLink.text()).toBe(`#${id} (${shortSha})`); - expect(runnerLink.attributes('href')).toBe(`http://localhost/admin/runners/${id}`); + expect(runnerLink.attributes('href')).toBe(adminUrl); }); it('Shows job status and links to jobs', () => { @@ -286,13 +286,10 @@ describe('AdminRunnersApp', () => { .findComponent(RunnerJobStatusBadge); expect(badge.props('jobStatus')).toBe(mockRunners[0].jobExecutionStatus); - - const badgeHref = new URL(badge.attributes('href')); - expect(badgeHref.pathname).toBe(`/admin/runners/${id}`); - expect(badgeHref.hash).toBe(`#${JOBS_ROUTE_PATH}`); + expect(badge.attributes('href')).toBe(`${adminUrl}#${JOBS_ROUTE_PATH}`); }); - it('When runner is paused or unpaused, some data is refetched', async () => { + it('When runner is paused or unpaused, some data is refetched', () => { expect(mockRunnersCountHandler).toHaveBeenCalledTimes(COUNT_QUERIES); findRunnerActionsCell().vm.$emit('toggledPaused'); @@ -301,7 +298,7 @@ describe('AdminRunnersApp', () => { expect(showToast).toHaveBeenCalledTimes(0); }); - it('When runner is deleted, data is refetched and a toast message is shown', async () => { + it('When runner is deleted, data is refetched and a toast message is 
shown', () => { findRunnerActionsCell().vm.$emit('deleted', { message: 'Runner deleted' }); expect(showToast).toHaveBeenCalledTimes(1); @@ -324,7 +321,7 @@ describe('AdminRunnersApp', () => { { type: PARAM_KEY_STATUS, value: { data: STATUS_ONLINE, operator: '=' } }, { type: PARAM_KEY_PAUSED, value: { data: 'true', operator: '=' } }, ], - sort: 'CREATED_DESC', + sort: DEFAULT_SORT, pagination: {}, }); }); @@ -410,7 +407,7 @@ describe('AdminRunnersApp', () => { await createComponent({ mountFn: mountExtended }); }); - it('count data is refetched', async () => { + it('count data is refetched', () => { expect(mockRunnersCountHandler).toHaveBeenCalledTimes(COUNT_QUERIES); findRunnerList().vm.$emit('deleted', { message: 'Runners deleted' }); @@ -418,7 +415,7 @@ describe('AdminRunnersApp', () => { expect(mockRunnersCountHandler).toHaveBeenCalledTimes(COUNT_QUERIES * 2); }); - it('toast is shown', async () => { + it('toast is shown', () => { expect(showToast).toHaveBeenCalledTimes(0); findRunnerList().vm.$emit('deleted', { message: 'Runners deleted' }); @@ -480,11 +477,11 @@ describe('AdminRunnersApp', () => { await createComponent(); }); - it('error is shown to the user', async () => { + it('error is shown to the user', () => { expect(createAlert).toHaveBeenCalledTimes(1); }); - it('error is reported to sentry', async () => { + it('error is reported to sentry', () => { expect(captureException).toHaveBeenCalledWith({ error: new Error('Error!'), component: 'AdminRunnersApp', diff --git a/spec/frontend/ci/runner/components/cells/runner_status_cell_spec.js b/spec/frontend/ci/runner/components/cells/runner_status_cell_spec.js index ec23d8415e8..c435dd57de2 100644 --- a/spec/frontend/ci/runner/components/cells/runner_status_cell_spec.js +++ b/spec/frontend/ci/runner/components/cells/runner_status_cell_spec.js @@ -1,4 +1,4 @@ -import { mount } from '@vue/test-utils'; +import { shallowMount } from '@vue/test-utils'; import RunnerStatusCell from 
'~/ci/runner/components/cells/runner_status_cell.vue'; import RunnerStatusBadge from '~/ci/runner/components/runner_status_badge.vue'; @@ -20,7 +20,7 @@ describe('RunnerStatusCell', () => { const findPausedBadge = () => wrapper.findComponent(RunnerPausedBadge); const createComponent = ({ runner = {}, ...options } = {}) => { - wrapper = mount(RunnerStatusCell, { + wrapper = shallowMount(RunnerStatusCell, { propsData: { runner: { runnerType: INSTANCE_TYPE, @@ -30,6 +30,10 @@ describe('RunnerStatusCell', () => { ...runner, }, }, + stubs: { + RunnerStatusBadge, + RunnerPausedBadge, + }, ...options, }); }; diff --git a/spec/frontend/ci/runner/components/cells/runner_summary_cell_spec.js b/spec/frontend/ci/runner/components/cells/runner_summary_cell_spec.js index 585a03c0811..23ec170961a 100644 --- a/spec/frontend/ci/runner/components/cells/runner_summary_cell_spec.js +++ b/spec/frontend/ci/runner/components/cells/runner_summary_cell_spec.js @@ -1,5 +1,6 @@ -import { __ } from '~/locale'; +import { __, sprintf } from '~/locale'; import { mountExtended } from 'helpers/vue_test_utils_helper'; +import UserAvatarLink from '~/vue_shared/components/user_avatar/user_avatar_link.vue'; import RunnerSummaryCell from '~/ci/runner/components/cells/runner_summary_cell.vue'; import TimeAgo from '~/vue_shared/components/time_ago_tooltip.vue'; import RunnerTags from '~/ci/runner/components/runner_tags.vue'; @@ -11,11 +12,13 @@ import { I18N_INSTANCE_TYPE, PROJECT_TYPE, I18N_NO_DESCRIPTION, + I18N_CREATED_AT_LABEL, + I18N_CREATED_AT_BY_LABEL, } from '~/ci/runner/constants'; -import { allRunnersData } from '../../mock_data'; +import { allRunnersWithCreatorData } from '../../mock_data'; -const mockRunner = allRunnersData.data.runners.nodes[0]; +const mockRunner = allRunnersWithCreatorData.data.runners.nodes[0]; describe('RunnerTypeCell', () => { let wrapper; @@ -142,10 +145,42 @@ describe('RunnerTypeCell', () => { expect(findRunnerSummaryField('pipeline').text()).toContain('1,000+'); }); - 
it('Displays created at', () => { - expect(findRunnerSummaryField('calendar').findComponent(TimeAgo).props('time')).toBe( - mockRunner.createdAt, - ); + describe('Displays creation info', () => { + const findCreatedTime = () => findRunnerSummaryField('calendar').findComponent(TimeAgo); + + it('Displays created at ...', () => { + createComponent({ + createdBy: null, + }); + + expect(findRunnerSummaryField('calendar').text()).toMatchInterpolatedText( + sprintf(I18N_CREATED_AT_LABEL, { + timeAgo: findCreatedTime().text(), + }), + ); + expect(findCreatedTime().props('time')).toBe(mockRunner.createdAt); + }); + + it('Displays created at ... by ...', () => { + expect(findRunnerSummaryField('calendar').text()).toMatchInterpolatedText( + sprintf(I18N_CREATED_AT_BY_LABEL, { + timeAgo: findCreatedTime().text(), + avatar: mockRunner.createdBy.username, + }), + ); + expect(findCreatedTime().props('time')).toBe(mockRunner.createdAt); + }); + + it('Displays creator avatar', () => { + const { name, avatarUrl, webUrl, username } = mockRunner.createdBy; + + expect(wrapper.findComponent(UserAvatarLink).props()).toMatchObject({ + imgAlt: expect.stringContaining(name), + imgSrc: avatarUrl, + linkHref: webUrl, + tooltipText: username, + }); + }); }); it('Displays tag list', () => { diff --git a/spec/frontend/ci/runner/components/registration/__snapshots__/utils_spec.js.snap b/spec/frontend/ci/runner/components/registration/__snapshots__/utils_spec.js.snap index 09d032fd32d..5eb7ffaacd6 100644 --- a/spec/frontend/ci/runner/components/registration/__snapshots__/utils_spec.js.snap +++ b/spec/frontend/ci/runner/components/registration/__snapshots__/utils_spec.js.snap @@ -75,8 +75,7 @@ exports[`registration utils for "linux" platform registerCommand is correct 1`] Array [ "gitlab-runner register", " --url http://test.host", - " --registration-token REGISTRATION_TOKEN", - " --description 'RUNNER'", + " --token MOCK_AUTHENTICATION_TOKEN", ] `; @@ -130,8 +129,7 @@ exports[`registration utils 
for "osx" platform registerCommand is correct 1`] = Array [ "gitlab-runner register", " --url http://test.host", - " --registration-token REGISTRATION_TOKEN", - " --description 'RUNNER'", + " --token MOCK_AUTHENTICATION_TOKEN", ] `; @@ -189,8 +187,7 @@ exports[`registration utils for "windows" platform registerCommand is correct 1` Array [ ".\\\\gitlab-runner.exe register", " --url http://test.host", - " --registration-token REGISTRATION_TOKEN", - " --description 'RUNNER'", + " --token MOCK_AUTHENTICATION_TOKEN", ] `; diff --git a/spec/frontend/ci/runner/components/registration/registration_dropdown_spec.js b/spec/frontend/ci/runner/components/registration/registration_dropdown_spec.js index 9ed59b0a57d..d23723807b1 100644 --- a/spec/frontend/ci/runner/components/registration/registration_dropdown_spec.js +++ b/spec/frontend/ci/runner/components/registration/registration_dropdown_spec.js @@ -1,10 +1,10 @@ import { GlModal, GlDropdown, GlDropdownItem, GlDropdownForm } from '@gitlab/ui'; -import { mount, shallowMount, createWrapper } from '@vue/test-utils'; +import { createWrapper } from '@vue/test-utils'; import Vue, { nextTick } from 'vue'; import VueApollo from 'vue-apollo'; import { s__ } from '~/locale'; -import { extendedWrapper } from 'helpers/vue_test_utils_helper'; +import { shallowMountExtended, mountExtended } from 'helpers/vue_test_utils_helper'; import createMockApollo from 'helpers/mock_apollo_helper'; import waitForPromises from 'helpers/wait_for_promises'; @@ -21,9 +21,7 @@ import { mockRunnerPlatforms, mockInstructions, } from 'jest/vue_shared/components/runner_instructions/mock_data'; - -const mockToken = '0123456789'; -const maskToken = '**********'; +import { mockRegistrationToken } from '../../mock_data'; Vue.use(VueApollo); @@ -53,17 +51,15 @@ describe('RegistrationDropdown', () => { await waitForPromises(); }; - const createComponent = ({ props = {}, ...options } = {}, mountFn = shallowMount) => { - wrapper = extendedWrapper( - 
mountFn(RegistrationDropdown, { - propsData: { - registrationToken: mockToken, - type: INSTANCE_TYPE, - ...props, - }, - ...options, - }), - ); + const createComponent = ({ props = {}, ...options } = {}, mountFn = shallowMountExtended) => { + wrapper = mountFn(RegistrationDropdown, { + propsData: { + registrationToken: mockRegistrationToken, + type: INSTANCE_TYPE, + ...props, + }, + ...options, + }); }; const createComponentWithModal = () => { @@ -79,7 +75,7 @@ describe('RegistrationDropdown', () => { // Use `attachTo` to find the modal attachTo: document.body, }, - mount, + mountExtended, ); }; @@ -89,7 +85,7 @@ describe('RegistrationDropdown', () => { ${GROUP_TYPE} | ${s__('Runners|Register a group runner')} ${PROJECT_TYPE} | ${s__('Runners|Register a project runner')} `('Dropdown text for type $type is "$text"', () => { - createComponent({ props: { type: INSTANCE_TYPE } }, mount); + createComponent({ props: { type: INSTANCE_TYPE } }, mountExtended); expect(wrapper.text()).toContain('Register an instance runner'); }); @@ -111,7 +107,7 @@ describe('RegistrationDropdown', () => { describe('When the dropdown item is clicked', () => { beforeEach(async () => { - createComponentWithModal({}, mount); + createComponentWithModal({}, mountExtended); await openModal(); }); @@ -142,7 +138,15 @@ describe('RegistrationDropdown', () => { }); it('Displays masked value by default', () => { - createComponent({}, mount); + const mockToken = '0123456789'; + const maskToken = '**********'; + + createComponent( + { + props: { registrationToken: mockToken }, + }, + mountExtended, + ); expect(findRegistrationTokenInput().element.value).toBe(maskToken); }); @@ -171,7 +175,7 @@ describe('RegistrationDropdown', () => { }; it('Updates token input', async () => { - createComponent({}, mount); + createComponent({}, mountExtended); expect(findRegistrationToken().props('value')).not.toBe(newToken); @@ -181,11 +185,11 @@ describe('RegistrationDropdown', () => { }); it('Updates token in modal', 
async () => { - createComponentWithModal({}, mount); + createComponentWithModal({}, mountExtended); await openModal(); - expect(findModalContent()).toContain(mockToken); + expect(findModalContent()).toContain(mockRegistrationToken); await resetToken(); diff --git a/spec/frontend/ci/runner/components/registration/registration_instructions_spec.js b/spec/frontend/ci/runner/components/registration/registration_instructions_spec.js index eb4b659091d..8c196d7b5e3 100644 --- a/spec/frontend/ci/runner/components/registration/registration_instructions_spec.js +++ b/spec/frontend/ci/runner/components/registration/registration_instructions_spec.js @@ -22,16 +22,13 @@ import { RUNNER_REGISTRATION_POLLING_INTERVAL_MS, I18N_REGISTRATION_SUCCESS, } from '~/ci/runner/constants'; -import { runnerForRegistration } from '../../mock_data'; +import { runnerForRegistration, mockAuthenticationToken } from '../../mock_data'; Vue.use(VueApollo); -const MOCK_TOKEN = 'MOCK_TOKEN'; -const mockDescription = runnerForRegistration.data.runner.description; - const mockRunner = { ...runnerForRegistration.data.runner, - ephemeralAuthenticationToken: MOCK_TOKEN, + ephemeralAuthenticationToken: mockAuthenticationToken, }; const mockRunnerWithoutToken = { ...runnerForRegistration.data.runner, @@ -53,6 +50,18 @@ describe('RegistrationInstructions', () => { await waitForPromises(); }; + const mockBeforeunload = () => { + const event = new Event('beforeunload'); + const preventDefault = jest.spyOn(event, 'preventDefault'); + const returnValueSetter = jest.spyOn(event, 'returnValue', 'set'); + + return { + event, + preventDefault, + returnValueSetter, + }; + }; + const mockResolvedRunner = (runner = mockRunner) => { mockRunnerQuery.mockResolvedValue({ data: { @@ -84,7 +93,7 @@ describe('RegistrationInstructions', () => { window.gon.gitlab_url = TEST_HOST; }); - it('loads runner with id', async () => { + it('loads runner with id', () => { createComponent(); expect(mockRunnerQuery).toHaveBeenCalledWith({ 
id: mockRunner.id }); @@ -139,13 +148,12 @@ describe('RegistrationInstructions', () => { command: [ 'gitlab-runner register', ` --url ${TEST_HOST}`, - ` --registration-token ${MOCK_TOKEN}`, - ` --description '${mockDescription}'`, + ` --token ${mockAuthenticationToken}`, ], prompt: '$', }); - expect(step1.find('[data-testid="runner-token"]').text()).toBe(MOCK_TOKEN); - expect(step1.findComponent(ClipboardButton).props('text')).toBe(MOCK_TOKEN); + expect(step1.findByTestId('runner-token').text()).toBe(mockAuthenticationToken); + expect(step1.findComponent(ClipboardButton).props('text')).toBe(mockAuthenticationToken); }); it('renders step 1 in loading state', () => { @@ -169,9 +177,8 @@ describe('RegistrationInstructions', () => { expect(step1.findComponent(CliCommand).props('command')).toEqual([ 'gitlab-runner register', ` --url ${TEST_HOST}`, - ` --description '${mockDescription}'`, ]); - expect(step1.find('[data-testid="runner-token"]').exists()).toBe(false); + expect(step1.findByTestId('runner-token').exists()).toBe(false); expect(step1.findComponent(ClipboardButton).exists()).toBe(false); }); @@ -211,11 +218,10 @@ describe('RegistrationInstructions', () => { expect(step1.findComponent(CliCommand).props('command')).toEqual([ 'gitlab-runner register', ` --url ${TEST_HOST}`, - ` --registration-token ${MOCK_TOKEN}`, - ` --description '${mockDescription}'`, + ` --token ${mockAuthenticationToken}`, ]); - expect(step1.find('[data-testid="runner-token"]').text()).toBe(MOCK_TOKEN); - expect(step1.findComponent(ClipboardButton).props('text')).toBe(MOCK_TOKEN); + expect(step1.findByTestId('runner-token').text()).toBe(mockAuthenticationToken); + expect(step1.findComponent(ClipboardButton).props('text')).toBe(mockAuthenticationToken); }); it('when runner is not available (e.g. 
deleted), the UI does not update', async () => { @@ -226,11 +232,10 @@ describe('RegistrationInstructions', () => { expect(step1.findComponent(CliCommand).props('command')).toEqual([ 'gitlab-runner register', ` --url ${TEST_HOST}`, - ` --registration-token ${MOCK_TOKEN}`, - ` --description '${mockDescription}'`, + ` --token ${mockAuthenticationToken}`, ]); - expect(step1.find('[data-testid="runner-token"]').text()).toBe(MOCK_TOKEN); - expect(step1.findComponent(ClipboardButton).props('text')).toBe(MOCK_TOKEN); + expect(step1.findByTestId('runner-token').text()).toBe(mockAuthenticationToken); + expect(step1.findComponent(ClipboardButton).props('text')).toBe(mockAuthenticationToken); }); }); }); @@ -273,6 +278,20 @@ describe('RegistrationInstructions', () => { it('does not show success message', () => { expect(wrapper.text()).not.toContain(I18N_REGISTRATION_SUCCESS); }); + + describe('when the page is closing', () => { + it('warns the user against closing', () => { + const { event, preventDefault, returnValueSetter } = mockBeforeunload(); + + expect(preventDefault).not.toHaveBeenCalled(); + expect(returnValueSetter).not.toHaveBeenCalled(); + + window.dispatchEvent(event); + + expect(preventDefault).toHaveBeenCalledWith(); + expect(returnValueSetter).toHaveBeenCalledWith(expect.any(String)); + }); + }); }); describe('when the runner has been registered', () => { @@ -288,6 +307,20 @@ describe('RegistrationInstructions', () => { expect(wrapper.text()).toContain('🎉'); expect(wrapper.text()).toContain(I18N_REGISTRATION_SUCCESS); }); + + describe('when the page is closing', () => { + it('does not warn the user against closing', () => { + const { event, preventDefault, returnValueSetter } = mockBeforeunload(); + + expect(preventDefault).not.toHaveBeenCalled(); + expect(returnValueSetter).not.toHaveBeenCalled(); + + window.dispatchEvent(event); + + expect(preventDefault).not.toHaveBeenCalled(); + expect(returnValueSetter).not.toHaveBeenCalled(); + }); + }); }); }); }); diff 
--git a/spec/frontend/ci/runner/components/registration/registration_token_reset_dropdown_item_spec.js b/spec/frontend/ci/runner/components/registration/registration_token_reset_dropdown_item_spec.js index ff69fd6d3d6..bfdde922e17 100644 --- a/spec/frontend/ci/runner/components/registration/registration_token_reset_dropdown_item_spec.js +++ b/spec/frontend/ci/runner/components/registration/registration_token_reset_dropdown_item_spec.js @@ -18,7 +18,7 @@ jest.mock('~/ci/runner/sentry_utils'); Vue.use(VueApollo); Vue.use(GlToast); -const mockNewToken = 'NEW_TOKEN'; +const mockNewRegistrationToken = 'MOCK_NEW_REGISTRATION_TOKEN'; const modalID = 'token-reset-modal'; describe('RegistrationTokenResetDropdownItem', () => { @@ -54,7 +54,7 @@ describe('RegistrationTokenResetDropdownItem', () => { runnersRegistrationTokenResetMutationHandler = jest.fn().mockResolvedValue({ data: { runnersRegistrationTokenReset: { - token: mockNewToken, + token: mockNewRegistrationToken, errors: [], }, }, @@ -109,7 +109,7 @@ describe('RegistrationTokenResetDropdownItem', () => { it('emits result', () => { expect(wrapper.emitted('tokenReset')).toHaveLength(1); - expect(wrapper.emitted('tokenReset')[0]).toEqual([mockNewToken]); + expect(wrapper.emitted('tokenReset')[0]).toEqual([mockNewRegistrationToken]); }); it('does not show a loading state', () => { diff --git a/spec/frontend/ci/runner/components/registration/registration_token_spec.js b/spec/frontend/ci/runner/components/registration/registration_token_spec.js index 4f44e6e10b2..fc659f7974f 100644 --- a/spec/frontend/ci/runner/components/registration/registration_token_spec.js +++ b/spec/frontend/ci/runner/components/registration/registration_token_spec.js @@ -3,9 +3,7 @@ import Vue from 'vue'; import { mountExtended, shallowMountExtended } from 'helpers/vue_test_utils_helper'; import RegistrationToken from '~/ci/runner/components/registration/registration_token.vue'; import InputCopyToggleVisibility from 
'~/vue_shared/components/form/input_copy_toggle_visibility.vue'; - -const mockToken = '01234567890'; -const mockMasked = '***********'; +import { mockRegistrationToken } from '../../mock_data'; describe('RegistrationToken', () => { let wrapper; @@ -18,7 +16,7 @@ describe('RegistrationToken', () => { const createComponent = ({ props = {}, mountFn = shallowMountExtended } = {}) => { wrapper = mountFn(RegistrationToken, { propsData: { - value: mockToken, + value: mockRegistrationToken, inputId: 'token-value', ...props, }, @@ -30,7 +28,7 @@ describe('RegistrationToken', () => { it('Displays value and copy button', () => { createComponent(); - expect(findInputCopyToggleVisibility().props('value')).toBe(mockToken); + expect(findInputCopyToggleVisibility().props('value')).toBe(mockRegistrationToken); expect(findInputCopyToggleVisibility().props('copyButtonTitle')).toBe( 'Copy registration token', ); @@ -38,9 +36,17 @@ describe('RegistrationToken', () => { // Component integration test to ensure secure masking it('Displays masked value by default', () => { - createComponent({ mountFn: mountExtended }); + const mockToken = '0123456789'; + const maskToken = '**********'; + + createComponent({ + props: { + value: mockToken, + }, + mountFn: mountExtended, + }); - expect(wrapper.find('input').element.value).toBe(mockMasked); + expect(wrapper.find('input').element.value).toBe(maskToken); }); describe('When the copy to clipboard button is clicked', () => { diff --git a/spec/frontend/ci/runner/components/registration/utils_spec.js b/spec/frontend/ci/runner/components/registration/utils_spec.js index acf5993b15b..997cc5769ee 100644 --- a/spec/frontend/ci/runner/components/registration/utils_spec.js +++ b/spec/frontend/ci/runner/components/registration/utils_spec.js @@ -14,8 +14,7 @@ import { platformArchitectures, } from '~/ci/runner/components/registration/utils'; -const REGISTRATION_TOKEN = 'REGISTRATION_TOKEN'; -const DESCRIPTION = 'RUNNER'; +import { mockAuthenticationToken } 
from '../../mock_data'; describe('registration utils', () => { beforeEach(() => { @@ -33,8 +32,7 @@ describe('registration utils', () => { expect( registerCommand({ platform, - registrationToken: REGISTRATION_TOKEN, - description: DESCRIPTION, + token: mockAuthenticationToken, }), ).toMatchSnapshot(); @@ -47,26 +45,6 @@ describe('registration utils', () => { }, ); - describe.each([LINUX_PLATFORM, MACOS_PLATFORM])('for "%s" platform', (platform) => { - it.each` - description | parameter - ${'my runner'} | ${"'my runner'"} - ${"bob's runner"} | ${"'bob'\\''s runner'"} - `('registerCommand escapes description `$description`', ({ description, parameter }) => { - expect(registerCommand({ platform, description })[2]).toBe(` --description ${parameter}`); - }); - }); - - describe.each([WINDOWS_PLATFORM])('for "%s" platform', (platform) => { - it.each` - description | parameter - ${'my runner'} | ${"'my runner'"} - ${"bob's runner"} | ${"'bob''s runner'"} - `('registerCommand escapes description `$description`', ({ description, parameter }) => { - expect(registerCommand({ platform, description })[2]).toBe(` --description ${parameter}`); - }); - }); - describe('for missing platform', () => { it('commandPrompt uses the default', () => { const expected = commandPrompt({ platform: DEFAULT_PLATFORM }); @@ -78,15 +56,13 @@ describe('registration utils', () => { it('registerCommand uses the default', () => { const expected = registerCommand({ platform: DEFAULT_PLATFORM, - registrationToken: REGISTRATION_TOKEN, + token: mockAuthenticationToken, }); - expect(registerCommand({ platform: null, registrationToken: REGISTRATION_TOKEN })).toEqual( + expect(registerCommand({ platform: null, token: mockAuthenticationToken })).toEqual(expected); + expect(registerCommand({ platform: undefined, token: mockAuthenticationToken })).toEqual( expected, ); - expect( - registerCommand({ platform: undefined, registrationToken: REGISTRATION_TOKEN }), - ).toEqual(expected); }); it('runCommand uses the 
default', () => { diff --git a/spec/frontend/ci/runner/components/runner_create_form_spec.js b/spec/frontend/ci/runner/components/runner_create_form_spec.js index 1123a026a4d..a13a19db067 100644 --- a/spec/frontend/ci/runner/components/runner_create_form_spec.js +++ b/spec/frontend/ci/runner/components/runner_create_form_spec.js @@ -6,7 +6,7 @@ import { shallowMountExtended } from 'helpers/vue_test_utils_helper'; import waitForPromises from 'helpers/wait_for_promises'; import RunnerCreateForm from '~/ci/runner/components/runner_create_form.vue'; import RunnerFormFields from '~/ci/runner/components/runner_form_fields.vue'; -import { DEFAULT_ACCESS_LEVEL } from '~/ci/runner/constants'; +import { DEFAULT_ACCESS_LEVEL, INSTANCE_TYPE, GROUP_TYPE } from '~/ci/runner/constants'; import runnerCreateMutation from '~/ci/runner/graphql/new/runner_create.mutation.graphql'; import { captureException } from '~/ci/runner/sentry_utils'; import { runnerCreateResult } from '../mock_data'; @@ -35,30 +35,42 @@ describe('RunnerCreateForm', () => { const findRunnerFormFields = () => wrapper.findComponent(RunnerFormFields); const findSubmitBtn = () => wrapper.find('[type="submit"]'); - const createComponent = () => { + const createComponent = ({ props } = {}) => { wrapper = shallowMountExtended(RunnerCreateForm, { + propsData: { + runnerType: INSTANCE_TYPE, + ...props, + }, apolloProvider: createMockApollo([[runnerCreateMutation, runnerCreateHandler]]), }); }; beforeEach(() => { runnerCreateHandler = jest.fn().mockResolvedValue(runnerCreateResult); - - createComponent(); }); it('shows default runner values', () => { + createComponent(); + expect(findRunnerFormFields().props('value')).toEqual(defaultRunnerModel); }); it('shows a submit button', () => { + createComponent(); + expect(findSubmitBtn().exists()).toBe(true); }); - describe('when user submits', () => { + describe.each` + typeName | props | scopeData + ${'an instance runner'} | ${{ runnerType: INSTANCE_TYPE }} | ${{ runnerType: 
INSTANCE_TYPE }} + ${'a group runner'} | ${{ runnerType: GROUP_TYPE, groupId: 'gid://gitlab/Group/72' }} | ${{ runnerType: GROUP_TYPE, groupId: 'gid://gitlab/Group/72' }} + `('when user submits $typeName', ({ props, scopeData }) => { let preventDefault; beforeEach(() => { + createComponent({ props }); + preventDefault = jest.fn(); findRunnerFormFields().vm.$emit('input', { @@ -82,10 +94,11 @@ describe('RunnerCreateForm', () => { expect(findSubmitBtn().props('loading')).toBe(true); }); - it('saves runner', async () => { + it('saves runner', () => { expect(runnerCreateHandler).toHaveBeenCalledWith({ input: { ...defaultRunnerModel, + ...scopeData, description: 'My runner', maximumTimeout: 0, tagList: ['tag1', 'tag2'], @@ -100,7 +113,7 @@ describe('RunnerCreateForm', () => { await waitForPromises(); }); - it('emits "saved" result', async () => { + it('emits "saved" result', () => { expect(wrapper.emitted('saved')[0]).toEqual([mockCreatedRunner]); }); @@ -119,7 +132,7 @@ describe('RunnerCreateForm', () => { await waitForPromises(); }); - it('emits "error" result', async () => { + it('emits "error" result', () => { expect(wrapper.emitted('error')[0]).toEqual([error]); }); @@ -154,7 +167,7 @@ describe('RunnerCreateForm', () => { await waitForPromises(); }); - it('emits "error" results', async () => { + it('emits "error" results', () => { expect(wrapper.emitted('error')[0]).toEqual([new Error(`${errorMsg1} ${errorMsg2}`)]); }); diff --git a/spec/frontend/ci/runner/components/runner_delete_button_spec.js b/spec/frontend/ci/runner/components/runner_delete_button_spec.js index f9bea318d84..3123f2894fb 100644 --- a/spec/frontend/ci/runner/components/runner_delete_button_spec.js +++ b/spec/frontend/ci/runner/components/runner_delete_button_spec.js @@ -124,15 +124,15 @@ describe('RunnerDeleteButton', () => { }); describe('Immediately after the delete button is clicked', () => { - beforeEach(async () => { + beforeEach(() => { findModal().vm.$emit('primary'); }); - it('The button 
has a loading state', async () => { + it('The button has a loading state', () => { expect(findBtn().props('loading')).toBe(true); }); - it('The stale tooltip is removed', async () => { + it('The stale tooltip is removed', () => { expect(getTooltip()).toBe(''); }); }); @@ -255,15 +255,15 @@ describe('RunnerDeleteButton', () => { }); describe('Immediately after the button is clicked', () => { - beforeEach(async () => { + beforeEach(() => { findModal().vm.$emit('primary'); }); - it('The button has a loading state', async () => { + it('The button has a loading state', () => { expect(findBtn().props('loading')).toBe(true); }); - it('The stale tooltip is removed', async () => { + it('The stale tooltip is removed', () => { expect(getTooltip()).toBe(''); }); }); diff --git a/spec/frontend/ci/runner/components/runner_filtered_search_bar_spec.js b/spec/frontend/ci/runner/components/runner_filtered_search_bar_spec.js index ac84c7898bf..7572122a5f3 100644 --- a/spec/frontend/ci/runner/components/runner_filtered_search_bar_spec.js +++ b/spec/frontend/ci/runner/components/runner_filtered_search_bar_spec.js @@ -1,5 +1,6 @@ import { GlFilteredSearch, GlDropdown, GlDropdownItem } from '@gitlab/ui'; import { shallowMountExtended } from 'helpers/vue_test_utils_helper'; +import { assertProps } from 'helpers/assert_props'; import RunnerFilteredSearchBar from '~/ci/runner/components/runner_filtered_search_bar.vue'; import { statusTokenConfig } from '~/ci/runner/components/search_tokens/status_token_config'; import TagToken from '~/ci/runner/components/search_tokens/tag_token.vue'; @@ -43,12 +44,12 @@ describe('RunnerList', () => { expect(inputs[inputs.length - 1][0]).toEqual(value); }; + const defaultProps = { namespace: 'runners', tokens: [], value: mockSearch }; + const createComponent = ({ props = {}, options = {} } = {}) => { wrapper = shallowMountExtended(RunnerFilteredSearchBar, { propsData: { - namespace: 'runners', - tokens: [], - value: mockSearch, + ...defaultProps, ...props, 
}, stubs: { @@ -109,11 +110,14 @@ describe('RunnerList', () => { it('fails validation for v-model with the wrong shape', () => { expect(() => { - createComponent({ props: { value: { filters: 'wrong_filters', sort: 'sort' } } }); + assertProps(RunnerFilteredSearchBar, { + ...defaultProps, + value: { filters: 'wrong_filters', sort: 'sort' }, + }); }).toThrow('Invalid prop: custom validator check failed'); expect(() => { - createComponent({ props: { value: { sort: 'sort' } } }); + assertProps(RunnerFilteredSearchBar, { ...defaultProps, value: { sort: 'sort' } }); }).toThrow('Invalid prop: custom validator check failed'); }); diff --git a/spec/frontend/ci/runner/components/runner_list_empty_state_spec.js b/spec/frontend/ci/runner/components/runner_list_empty_state_spec.js index 3e813723b5b..e4ca84853c3 100644 --- a/spec/frontend/ci/runner/components/runner_list_empty_state_spec.js +++ b/spec/frontend/ci/runner/components/runner_list_empty_state_spec.js @@ -5,6 +5,7 @@ import { createMockDirective, getBinding } from 'helpers/vue_mock_directive'; import RunnerInstructionsModal from '~/vue_shared/components/runner_instructions/runner_instructions_modal.vue'; import { + mockRegistrationToken, newRunnerPath, emptyStateSvgPath, emptyStateFilteredSvgPath, @@ -12,8 +13,6 @@ import { import RunnerListEmptyState from '~/ci/runner/components/runner_list_empty_state.vue'; -const mockRegistrationToken = 'REGISTRATION_TOKEN'; - describe('RunnerListEmptyState', () => { let wrapper; diff --git a/spec/frontend/ci/runner/components/runner_list_spec.js b/spec/frontend/ci/runner/components/runner_list_spec.js index 6f4913dca3e..0f4ec717c3e 100644 --- a/spec/frontend/ci/runner/components/runner_list_spec.js +++ b/spec/frontend/ci/runner/components/runner_list_spec.js @@ -164,7 +164,7 @@ describe('RunnerList', () => { }); }); - it('Emits a deleted event', async () => { + it('Emits a deleted event', () => { const event = { message: 'Deleted!' 
}; findRunnerBulkDelete().vm.$emit('deleted', event); diff --git a/spec/frontend/ci/runner/components/runner_pause_button_spec.js b/spec/frontend/ci/runner/components/runner_pause_button_spec.js index 62e6cc902b7..350d029f3fc 100644 --- a/spec/frontend/ci/runner/components/runner_pause_button_spec.js +++ b/spec/frontend/ci/runner/components/runner_pause_button_spec.js @@ -134,7 +134,7 @@ describe('RunnerPauseButton', () => { await clickAndWait(); }); - it(`The mutation to that sets active to ${newActiveValue} is called`, async () => { + it(`The mutation to that sets active to ${newActiveValue} is called`, () => { expect(runnerToggleActiveHandler).toHaveBeenCalledTimes(1); expect(runnerToggleActiveHandler).toHaveBeenCalledWith({ input: { diff --git a/spec/frontend/ci/runner/components/runner_platforms_radio_group_spec.js b/spec/frontend/ci/runner/components/runner_platforms_radio_group_spec.js index db6fd2c369b..d419b34df1b 100644 --- a/spec/frontend/ci/runner/components/runner_platforms_radio_group_spec.js +++ b/spec/frontend/ci/runner/components/runner_platforms_radio_group_spec.js @@ -84,7 +84,7 @@ describe('RunnerPlatformsRadioGroup', () => { text | href ${'Docker'} | ${DOCKER_HELP_URL} ${'Kubernetes'} | ${KUBERNETES_HELP_URL} - `('provides link to "$text" docs', async ({ text, href }) => { + `('provides link to "$text" docs', ({ text, href }) => { const radio = findFormRadioByText(text); expect(radio.findComponent(GlLink).attributes()).toEqual({ diff --git a/spec/frontend/ci/runner/components/runner_platforms_radio_spec.js b/spec/frontend/ci/runner/components/runner_platforms_radio_spec.js index fb81edd1ae2..340b04637f8 100644 --- a/spec/frontend/ci/runner/components/runner_platforms_radio_spec.js +++ b/spec/frontend/ci/runner/components/runner_platforms_radio_spec.js @@ -41,7 +41,7 @@ describe('RunnerPlatformsRadio', () => { expect(findFormRadio().attributes('value')).toBe(mockValue); }); - it('emits when item is clicked', async () => { + it('emits when item 
is clicked', () => { findDiv().trigger('click'); expect(wrapper.emitted('input')).toEqual([[mockValue]]); @@ -94,7 +94,7 @@ describe('RunnerPlatformsRadio', () => { expect(wrapper.classes('gl-cursor-pointer')).toBe(false); }); - it('does not emit when item is clicked', async () => { + it('does not emit when item is clicked', () => { findDiv().trigger('click'); expect(wrapper.emitted('input')).toBe(undefined); diff --git a/spec/frontend/ci/runner/components/runner_projects_spec.js b/spec/frontend/ci/runner/components/runner_projects_spec.js index ccc1bc18675..afdc54d8ebc 100644 --- a/spec/frontend/ci/runner/components/runner_projects_spec.js +++ b/spec/frontend/ci/runner/components/runner_projects_spec.js @@ -89,7 +89,7 @@ describe('RunnerProjects', () => { await waitForPromises(); }); - it('Shows a heading', async () => { + it('Shows a heading', () => { const expected = sprintf(I18N_ASSIGNED_PROJECTS, { projectCount: mockProjects.length }); expect(findHeading().text()).toBe(expected); diff --git a/spec/frontend/ci/runner/components/runner_type_badge_spec.js b/spec/frontend/ci/runner/components/runner_type_badge_spec.js index 7a0fb6f69ea..f7ecd108967 100644 --- a/spec/frontend/ci/runner/components/runner_type_badge_spec.js +++ b/spec/frontend/ci/runner/components/runner_type_badge_spec.js @@ -2,6 +2,7 @@ import { GlBadge } from '@gitlab/ui'; import { shallowMount } from '@vue/test-utils'; import RunnerTypeBadge from '~/ci/runner/components/runner_type_badge.vue'; import { createMockDirective, getBinding } from 'helpers/vue_mock_directive'; +import { assertProps } from 'helpers/assert_props'; import { INSTANCE_TYPE, GROUP_TYPE, @@ -50,7 +51,7 @@ describe('RunnerTypeBadge', () => { it('validation fails for an incorrect type', () => { expect(() => { - createComponent({ props: { type: 'AN_UNKNOWN_VALUE' } }); + assertProps(RunnerTypeBadge, { type: 'AN_UNKNOWN_VALUE' }); }).toThrow(); }); diff --git a/spec/frontend/ci/runner/components/runner_type_tabs_spec.js 
b/spec/frontend/ci/runner/components/runner_type_tabs_spec.js index 6e15c84ad7e..71dcc5b4226 100644 --- a/spec/frontend/ci/runner/components/runner_type_tabs_spec.js +++ b/spec/frontend/ci/runner/components/runner_type_tabs_spec.js @@ -8,6 +8,7 @@ import { PROJECT_TYPE, DEFAULT_MEMBERSHIP, DEFAULT_SORT, + STATUS_ONLINE, } from '~/ci/runner/constants'; const mockSearch = { @@ -111,7 +112,7 @@ describe('RunnerTypeTabs', () => { it('Renders a count next to each tab', () => { const mockVariables = { paused: true, - status: 'ONLINE', + status: STATUS_ONLINE, }; createComponent({ diff --git a/spec/frontend/ci/runner/components/stat/runner_count_spec.js b/spec/frontend/ci/runner/components/stat/runner_count_spec.js index 42d8c9a1080..df774ba3e57 100644 --- a/spec/frontend/ci/runner/components/stat/runner_count_spec.js +++ b/spec/frontend/ci/runner/components/stat/runner_count_spec.js @@ -2,7 +2,7 @@ import Vue, { nextTick } from 'vue'; import VueApollo from 'vue-apollo'; import { shallowMount } from '@vue/test-utils'; import RunnerCount from '~/ci/runner/components/stat/runner_count.vue'; -import { INSTANCE_TYPE, GROUP_TYPE } from '~/ci/runner/constants'; +import { INSTANCE_TYPE, GROUP_TYPE, STATUS_ONLINE } from '~/ci/runner/constants'; import createMockApollo from 'helpers/mock_apollo_helper'; import waitForPromises from 'helpers/wait_for_promises'; import { captureException } from '~/ci/runner/sentry_utils'; @@ -47,7 +47,7 @@ describe('RunnerCount', () => { }); describe('in admin scope', () => { - const mockVariables = { status: 'ONLINE' }; + const mockVariables = { status: STATUS_ONLINE }; beforeEach(async () => { await createComponent({ props: { scope: INSTANCE_TYPE } }); @@ -67,7 +67,7 @@ describe('RunnerCount', () => { expect(wrapper.html()).toBe(`${runnersCountData.data.runners.count}`); }); - it('does not fetch from the group query', async () => { + it('does not fetch from the group query', () => { expect(mockGroupRunnersCountHandler).not.toHaveBeenCalled(); }); 
@@ -89,7 +89,7 @@ describe('RunnerCount', () => { await createComponent({ props: { scope: INSTANCE_TYPE, skip: true } }); }); - it('does not fetch data', async () => { + it('does not fetch data', () => { expect(mockRunnersCountHandler).not.toHaveBeenCalled(); expect(mockGroupRunnersCountHandler).not.toHaveBeenCalled(); @@ -106,7 +106,7 @@ describe('RunnerCount', () => { await createComponent({ props: { scope: INSTANCE_TYPE } }); }); - it('data is not shown and error is reported', async () => { + it('data is not shown and error is reported', () => { expect(wrapper.html()).toBe(''); expect(captureException).toHaveBeenCalledWith({ @@ -121,7 +121,7 @@ describe('RunnerCount', () => { await createComponent({ props: { scope: GROUP_TYPE } }); }); - it('fetches data from the group query', async () => { + it('fetches data from the group query', () => { expect(mockGroupRunnersCountHandler).toHaveBeenCalledTimes(1); expect(mockGroupRunnersCountHandler).toHaveBeenCalledWith({}); @@ -141,7 +141,7 @@ describe('RunnerCount', () => { wrapper.vm.refetch(); }); - it('data is not shown and error is reported', async () => { + it('data is not shown and error is reported', () => { expect(mockRunnersCountHandler).toHaveBeenCalledTimes(2); }); }); diff --git a/spec/frontend/ci/runner/group_new_runner_app/group_new_runner_app_spec.js b/spec/frontend/ci/runner/group_new_runner_app/group_new_runner_app_spec.js new file mode 100644 index 00000000000..027196ab004 --- /dev/null +++ b/spec/frontend/ci/runner/group_new_runner_app/group_new_runner_app_spec.js @@ -0,0 +1,132 @@ +import { GlSprintf } from '@gitlab/ui'; +import { s__ } from '~/locale'; + +import { shallowMountExtended } from 'helpers/vue_test_utils_helper'; +import { createMockDirective, getBinding } from 'helpers/vue_mock_directive'; +import { createAlert, VARIANT_SUCCESS } from '~/alert'; + +import GroupRunnerRunnerApp from '~/ci/runner/group_new_runner/group_new_runner_app.vue'; +import { saveAlertToLocalStorage } from 
'~/ci/runner/local_storage_alert/save_alert_to_local_storage'; +import RunnerInstructionsModal from '~/vue_shared/components/runner_instructions/runner_instructions_modal.vue'; +import RunnerPlatformsRadioGroup from '~/ci/runner/components/runner_platforms_radio_group.vue'; +import { + PARAM_KEY_PLATFORM, + GROUP_TYPE, + DEFAULT_PLATFORM, + WINDOWS_PLATFORM, +} from '~/ci/runner/constants'; +import RunnerCreateForm from '~/ci/runner/components/runner_create_form.vue'; +import { redirectTo } from '~/lib/utils/url_utility'; +import { runnerCreateResult, mockRegistrationToken } from '../mock_data'; + +const mockGroupId = 'gid://gitlab/Group/72'; + +jest.mock('~/ci/runner/local_storage_alert/save_alert_to_local_storage'); +jest.mock('~/alert'); +jest.mock('~/lib/utils/url_utility', () => ({ + ...jest.requireActual('~/lib/utils/url_utility'), + redirectTo: jest.fn(), +})); + +const mockCreatedRunner = runnerCreateResult.data.runnerCreate.runner; + +describe('GroupRunnerRunnerApp', () => { + let wrapper; + + const findLegacyInstructionsLink = () => wrapper.findByTestId('legacy-instructions-link'); + const findRunnerInstructionsModal = () => wrapper.findComponent(RunnerInstructionsModal); + const findRunnerPlatformsRadioGroup = () => wrapper.findComponent(RunnerPlatformsRadioGroup); + const findRunnerCreateForm = () => wrapper.findComponent(RunnerCreateForm); + + const createComponent = () => { + wrapper = shallowMountExtended(GroupRunnerRunnerApp, { + propsData: { + groupId: mockGroupId, + legacyRegistrationToken: mockRegistrationToken, + }, + directives: { + GlModal: createMockDirective('gl-modal'), + }, + stubs: { + GlSprintf, + }, + }); + }; + + beforeEach(() => { + createComponent(); + }); + + describe('Shows legacy modal', () => { + it('passes legacy registration to modal', () => { + expect(findRunnerInstructionsModal().props('registrationToken')).toEqual( + mockRegistrationToken, + ); + }); + + it('opens a modal with the legacy instructions', () => { + const 
modalId = getBinding(findLegacyInstructionsLink().element, 'gl-modal').value; + + expect(findRunnerInstructionsModal().props('modalId')).toBe(modalId); + }); + }); + + describe('Platform', () => { + it('shows the platforms radio group', () => { + expect(findRunnerPlatformsRadioGroup().props('value')).toBe(DEFAULT_PLATFORM); + }); + }); + + describe('Runner form', () => { + it('shows the runner create form for an instance runner', () => { + expect(findRunnerCreateForm().props()).toEqual({ + runnerType: GROUP_TYPE, + groupId: mockGroupId, + }); + }); + + describe('When a runner is saved', () => { + beforeEach(() => { + findRunnerCreateForm().vm.$emit('saved', mockCreatedRunner); + }); + + it('pushes an alert to be shown after redirection', () => { + expect(saveAlertToLocalStorage).toHaveBeenCalledWith({ + message: s__('Runners|Runner created.'), + variant: VARIANT_SUCCESS, + }); + }); + + it('redirects to the registration page', () => { + const url = `${mockCreatedRunner.ephemeralRegisterUrl}?${PARAM_KEY_PLATFORM}=${DEFAULT_PLATFORM}`; + + expect(redirectTo).toHaveBeenCalledWith(url); + }); + }); + + describe('When another platform is selected and a runner is saved', () => { + beforeEach(() => { + findRunnerPlatformsRadioGroup().vm.$emit('input', WINDOWS_PLATFORM); + findRunnerCreateForm().vm.$emit('saved', mockCreatedRunner); + }); + + it('redirects to the registration page with the platform', () => { + const url = `${mockCreatedRunner.ephemeralRegisterUrl}?${PARAM_KEY_PLATFORM}=${WINDOWS_PLATFORM}`; + + expect(redirectTo).toHaveBeenCalledWith(url); + }); + }); + + describe('When runner fails to save', () => { + const ERROR_MSG = 'Cannot save!'; + + beforeEach(() => { + findRunnerCreateForm().vm.$emit('error', new Error(ERROR_MSG)); + }); + + it('shows an error message', () => { + expect(createAlert).toHaveBeenCalledWith({ message: ERROR_MSG }); + }); + }); + }); +}); diff --git a/spec/frontend/ci/runner/group_register_runner_app/group_register_runner_app_spec.js 
b/spec/frontend/ci/runner/group_register_runner_app/group_register_runner_app_spec.js new file mode 100644 index 00000000000..2f0807c700c --- /dev/null +++ b/spec/frontend/ci/runner/group_register_runner_app/group_register_runner_app_spec.js @@ -0,0 +1,120 @@ +import { nextTick } from 'vue'; +import { GlButton } from '@gitlab/ui'; + +import { shallowMountExtended } from 'helpers/vue_test_utils_helper'; +import setWindowLocation from 'helpers/set_window_location_helper'; +import { TEST_HOST } from 'helpers/test_constants'; + +import { updateHistory } from '~/lib/utils/url_utility'; +import { PARAM_KEY_PLATFORM, DEFAULT_PLATFORM, WINDOWS_PLATFORM } from '~/ci/runner/constants'; +import GroupRegisterRunnerApp from '~/ci/runner/group_register_runner/group_register_runner_app.vue'; +import RegistrationInstructions from '~/ci/runner/components/registration/registration_instructions.vue'; +import PlatformsDrawer from '~/ci/runner/components/registration/platforms_drawer.vue'; +import { runnerForRegistration } from '../mock_data'; + +const mockRunnerId = runnerForRegistration.data.runner.id; +const mockRunnersPath = '/groups/group1/-/runners'; + +jest.mock('~/lib/utils/url_utility', () => ({ + ...jest.requireActual('~/lib/utils/url_utility'), + updateHistory: jest.fn(), +})); + +describe('GroupRegisterRunnerApp', () => { + let wrapper; + + const findRegistrationInstructions = () => wrapper.findComponent(RegistrationInstructions); + const findPlatformsDrawer = () => wrapper.findComponent(PlatformsDrawer); + const findBtn = () => wrapper.findComponent(GlButton); + + const createComponent = () => { + wrapper = shallowMountExtended(GroupRegisterRunnerApp, { + propsData: { + runnerId: mockRunnerId, + runnersPath: mockRunnersPath, + }, + }); + }; + + describe('When showing runner details', () => { + beforeEach(() => { + createComponent(); + }); + + describe('when runner token is available', () => { + it('shows registration instructions', () => { + 
expect(findRegistrationInstructions().props()).toEqual({ + platform: DEFAULT_PLATFORM, + runnerId: mockRunnerId, + }); + }); + + it('configures platform drawer', () => { + expect(findPlatformsDrawer().props()).toEqual({ + open: false, + platform: DEFAULT_PLATFORM, + }); + }); + + it('shows runner list button', () => { + expect(findBtn().attributes('href')).toBe(mockRunnersPath); + expect(findBtn().props('variant')).toBe('confirm'); + }); + }); + }); + + describe('When another platform has been selected', () => { + beforeEach(() => { + setWindowLocation(`?${PARAM_KEY_PLATFORM}=${WINDOWS_PLATFORM}`); + + createComponent(); + }); + + it('shows registration instructions for the platform', () => { + expect(findRegistrationInstructions().props('platform')).toBe(WINDOWS_PLATFORM); + }); + }); + + describe('When opening install instructions', () => { + beforeEach(() => { + createComponent(); + + findRegistrationInstructions().vm.$emit('toggleDrawer'); + }); + + it('opens platform drawer', () => { + expect(findPlatformsDrawer().props('open')).toBe(true); + }); + + it('closes platform drawer', async () => { + findRegistrationInstructions().vm.$emit('toggleDrawer'); + await nextTick(); + + expect(findPlatformsDrawer().props('open')).toBe(false); + }); + + it('closes platform drawer from drawer', async () => { + findPlatformsDrawer().vm.$emit('close'); + await nextTick(); + + expect(findPlatformsDrawer().props('open')).toBe(false); + }); + + describe('when selecting a platform', () => { + beforeEach(() => { + findPlatformsDrawer().vm.$emit('selectPlatform', WINDOWS_PLATFORM); + }); + + it('updates the url', () => { + expect(updateHistory).toHaveBeenCalledTimes(1); + expect(updateHistory).toHaveBeenCalledWith({ + url: `${TEST_HOST}/?${PARAM_KEY_PLATFORM}=${WINDOWS_PLATFORM}`, + }); + }); + + it('updates the registration instructions', () => { + expect(findRegistrationInstructions().props('platform')).toBe(WINDOWS_PLATFORM); + }); + }); + }); +}); diff --git 
a/spec/frontend/ci/runner/group_runner_show/group_runner_show_app_spec.js b/spec/frontend/ci/runner/group_runner_show/group_runner_show_app_spec.js index fadc6e5ebc5..60f51704c0e 100644 --- a/spec/frontend/ci/runner/group_runner_show/group_runner_show_app_spec.js +++ b/spec/frontend/ci/runner/group_runner_show/group_runner_show_app_spec.js @@ -83,7 +83,7 @@ describe('GroupRunnerShowApp', () => { await createComponent({ mountFn: mountExtended }); }); - it('expect GraphQL ID to be requested', async () => { + it('expect GraphQL ID to be requested', () => { expect(mockRunnerQuery).toHaveBeenCalledWith({ id: mockRunnerGraphqlId }); }); @@ -91,7 +91,7 @@ describe('GroupRunnerShowApp', () => { expect(findRunnerHeader().text()).toContain(`Runner #${mockRunnerId}`); }); - it('displays the runner edit and pause buttons', async () => { + it('displays the runner edit and pause buttons', () => { expect(findRunnerEditButton().attributes('href')).toBe(mockEditGroupRunnerPath); expect(findRunnerPauseButton().exists()).toBe(true); expect(findRunnerDeleteButton().exists()).toBe(true); diff --git a/spec/frontend/ci/runner/group_runners/group_runners_app_spec.js b/spec/frontend/ci/runner/group_runners/group_runners_app_spec.js index 00c7262e38b..6824242cba9 100644 --- a/spec/frontend/ci/runner/group_runners/group_runners_app_spec.js +++ b/spec/frontend/ci/runner/group_runners/group_runners_app_spec.js @@ -58,6 +58,8 @@ import { groupRunnersCountData, onlineContactTimeoutSecs, staleTimeoutSecs, + mockRegistrationToken, + newRunnerPath, emptyPageInfo, emptyStateSvgPath, emptyStateFilteredSvgPath, @@ -67,7 +69,6 @@ Vue.use(VueApollo); Vue.use(GlToast); const mockGroupFullPath = 'group1'; -const mockRegistrationToken = 'AABBCC'; const mockGroupRunnersEdges = groupRunnersData.data.group.runners.edges; const mockGroupRunnersCount = mockGroupRunnersEdges.length; @@ -87,6 +88,7 @@ describe('GroupRunnersApp', () => { const findRunnerStats = () => wrapper.findComponent(RunnerStats); const 
findRunnerActionsCell = () => wrapper.findComponent(RunnerActionsCell); const findRegistrationDropdown = () => wrapper.findComponent(RegistrationDropdown); + const findNewRunnerBtn = () => wrapper.findByText(s__('Runners|New group runner')); const findRunnerTypeTabs = () => wrapper.findComponent(RunnerTypeTabs); const findRunnerList = () => wrapper.findComponent(RunnerList); const findRunnerListEmptyState = () => wrapper.findComponent(RunnerListEmptyState); @@ -114,6 +116,7 @@ describe('GroupRunnersApp', () => { propsData: { registrationToken: mockRegistrationToken, groupFullPath: mockGroupFullPath, + newRunnerPath, ...props, }, provide: { @@ -287,7 +290,7 @@ describe('GroupRunnersApp', () => { }); }); - it('When runner is paused or unpaused, some data is refetched', async () => { + it('When runner is paused or unpaused, some data is refetched', () => { expect(mockGroupRunnersCountHandler).toHaveBeenCalledTimes(COUNT_QUERIES); findRunnerActionsCell().vm.$emit('toggledPaused'); @@ -299,7 +302,7 @@ describe('GroupRunnersApp', () => { expect(showToast).toHaveBeenCalledTimes(0); }); - it('When runner is deleted, data is refetched and a toast message is shown', async () => { + it('When runner is deleted, data is refetched and a toast message is shown', () => { findRunnerActionsCell().vm.$emit('deleted', { message: 'Runner deleted' }); expect(showToast).toHaveBeenCalledTimes(1); @@ -416,7 +419,7 @@ describe('GroupRunnersApp', () => { expect(createAlert).not.toHaveBeenCalled(); }); - it('shows an empty state', async () => { + it('shows an empty state', () => { expect(findRunnerListEmptyState().exists()).toBe(true); }); }); @@ -427,11 +430,11 @@ describe('GroupRunnersApp', () => { await createComponent(); }); - it('error is shown to the user', async () => { + it('error is shown to the user', () => { expect(createAlert).toHaveBeenCalledTimes(1); }); - it('error is reported to sentry', async () => { + it('error is reported to sentry', () => { 
expect(captureException).toHaveBeenCalledWith({ error: new Error('Error!'), component: 'GroupRunnersApp', @@ -468,32 +471,69 @@ describe('GroupRunnersApp', () => { }); describe('when user has permission to register group runner', () => { - beforeEach(() => { + it('shows the register group runner button', () => { createComponent({ - propsData: { + props: { registrationToken: mockRegistrationToken, - groupFullPath: mockGroupFullPath, }, }); + expect(findRegistrationDropdown().exists()).toBe(true); }); - it('shows the register group runner button', () => { - expect(findRegistrationDropdown().exists()).toBe(true); + it('when create_runner_workflow_for_namespace is enabled', () => { + createComponent({ + props: { + newRunnerPath, + }, + provide: { + glFeatures: { + createRunnerWorkflowForNamespace: true, + }, + }, + }); + + expect(findNewRunnerBtn().attributes('href')).toBe(newRunnerPath); + }); + + it('when create_runner_workflow_for_namespace is disabled', () => { + createComponent({ + props: { + newRunnerPath, + }, + provide: { + glFeatures: { + createRunnerWorkflowForNamespace: false, + }, + }, + }); + + expect(findNewRunnerBtn().exists()).toBe(false); }); }); describe('when user has no permission to register group runner', () => { - beforeEach(() => { + it('does not show the register group runner button', () => { createComponent({ - propsData: { + props: { registrationToken: null, - groupFullPath: mockGroupFullPath, }, }); + expect(findRegistrationDropdown().exists()).toBe(false); }); - it('does not show the register group runner button', () => { - expect(findRegistrationDropdown().exists()).toBe(false); + it('when create_runner_workflow_for_namespace is enabled', () => { + createComponent({ + props: { + newRunnerPath: null, + }, + provide: { + glFeatures: { + createRunnerWorkflowForNamespace: true, + }, + }, + }); + + expect(findNewRunnerBtn().exists()).toBe(false); }); }); }); diff --git a/spec/frontend/ci/runner/mock_data.js 
b/spec/frontend/ci/runner/mock_data.js index 092a419c1fe..196005c9882 100644 --- a/spec/frontend/ci/runner/mock_data.js +++ b/spec/frontend/ci/runner/mock_data.js @@ -1,5 +1,14 @@ // Fixtures generated by: spec/frontend/fixtures/runner.rb +// List queries +import allRunnersData from 'test_fixtures/graphql/ci/runner/list/all_runners.query.graphql.json'; +import allRunnersDataPaginated from 'test_fixtures/graphql/ci/runner/list/all_runners.query.graphql.paginated.json'; +import allRunnersWithCreatorData from 'test_fixtures/graphql/ci/runner/list/all_runners.query.graphql.with_creator.json'; +import runnersCountData from 'test_fixtures/graphql/ci/runner/list/all_runners_count.query.graphql.json'; +import groupRunnersData from 'test_fixtures/graphql/ci/runner/list/group_runners.query.graphql.json'; +import groupRunnersDataPaginated from 'test_fixtures/graphql/ci/runner/list/group_runners.query.graphql.paginated.json'; +import groupRunnersCountData from 'test_fixtures/graphql/ci/runner/list/group_runners_count.query.graphql.json'; + // Register runner queries import runnerForRegistration from 'test_fixtures/graphql/ci/runner/register/runner_for_registration.query.graphql.json'; @@ -14,16 +23,15 @@ import runnerJobsData from 'test_fixtures/graphql/ci/runner/show/runner_jobs.que import runnerFormData from 'test_fixtures/graphql/ci/runner/edit/runner_form.query.graphql.json'; // New runner queries - -// List queries -import allRunnersData from 'test_fixtures/graphql/ci/runner/list/all_runners.query.graphql.json'; -import allRunnersDataPaginated from 'test_fixtures/graphql/ci/runner/list/all_runners.query.graphql.paginated.json'; -import runnersCountData from 'test_fixtures/graphql/ci/runner/list/all_runners_count.query.graphql.json'; -import groupRunnersData from 'test_fixtures/graphql/ci/runner/list/group_runners.query.graphql.json'; -import groupRunnersDataPaginated from 'test_fixtures/graphql/ci/runner/list/group_runners.query.graphql.paginated.json'; -import 
groupRunnersCountData from 'test_fixtures/graphql/ci/runner/list/group_runners_count.query.graphql.json'; - -import { DEFAULT_MEMBERSHIP, RUNNER_PAGE_SIZE } from '~/ci/runner/constants'; +import { + DEFAULT_MEMBERSHIP, + INSTANCE_TYPE, + CREATED_DESC, + CREATED_ASC, + STATUS_ONLINE, + STATUS_STALE, + RUNNER_PAGE_SIZE, +} from '~/ci/runner/constants'; import { FILTERED_SEARCH_TERM } from '~/vue_shared/components/filtered_search_bar/constants'; const emptyPageInfo = { @@ -46,29 +54,29 @@ export const mockSearchExamples = [ membership: DEFAULT_MEMBERSHIP, filters: [], pagination: {}, - sort: 'CREATED_DESC', + sort: CREATED_DESC, }, graphqlVariables: { membership: DEFAULT_MEMBERSHIP, - sort: 'CREATED_DESC', + sort: CREATED_DESC, first: RUNNER_PAGE_SIZE, }, isDefault: true, }, { name: 'a single status', - urlQuery: '?status[]=ACTIVE', + urlQuery: '?status[]=ONLINE', search: { runnerType: null, membership: DEFAULT_MEMBERSHIP, - filters: [{ type: 'status', value: { data: 'ACTIVE', operator: '=' } }], + filters: [{ type: 'status', value: { data: STATUS_ONLINE, operator: '=' } }], pagination: {}, - sort: 'CREATED_DESC', + sort: CREATED_DESC, }, graphqlVariables: { membership: DEFAULT_MEMBERSHIP, - status: 'ACTIVE', - sort: 'CREATED_DESC', + status: STATUS_ONLINE, + sort: CREATED_DESC, first: RUNNER_PAGE_SIZE, }, }, @@ -85,12 +93,12 @@ export const mockSearchExamples = [ }, ], pagination: {}, - sort: 'CREATED_DESC', + sort: CREATED_DESC, }, graphqlVariables: { membership: DEFAULT_MEMBERSHIP, search: 'something', - sort: 'CREATED_DESC', + sort: CREATED_DESC, first: RUNNER_PAGE_SIZE, }, }, @@ -111,12 +119,12 @@ export const mockSearchExamples = [ }, ], pagination: {}, - sort: 'CREATED_DESC', + sort: CREATED_DESC, }, graphqlVariables: { membership: DEFAULT_MEMBERSHIP, search: 'something else', - sort: 'CREATED_DESC', + sort: CREATED_DESC, first: RUNNER_PAGE_SIZE, }, }, @@ -124,54 +132,54 @@ export const mockSearchExamples = [ name: 'single instance type', urlQuery: 
'?runner_type[]=INSTANCE_TYPE', search: { - runnerType: 'INSTANCE_TYPE', + runnerType: INSTANCE_TYPE, membership: DEFAULT_MEMBERSHIP, filters: [], pagination: {}, - sort: 'CREATED_DESC', + sort: CREATED_DESC, }, graphqlVariables: { - type: 'INSTANCE_TYPE', + type: INSTANCE_TYPE, membership: DEFAULT_MEMBERSHIP, - sort: 'CREATED_DESC', + sort: CREATED_DESC, first: RUNNER_PAGE_SIZE, }, }, { name: 'multiple runner status', - urlQuery: '?status[]=ACTIVE&status[]=PAUSED', + urlQuery: '?status[]=ONLINE&status[]=STALE', search: { runnerType: null, membership: DEFAULT_MEMBERSHIP, filters: [ - { type: 'status', value: { data: 'ACTIVE', operator: '=' } }, - { type: 'status', value: { data: 'PAUSED', operator: '=' } }, + { type: 'status', value: { data: STATUS_ONLINE, operator: '=' } }, + { type: 'status', value: { data: STATUS_STALE, operator: '=' } }, ], pagination: {}, - sort: 'CREATED_DESC', + sort: CREATED_DESC, }, graphqlVariables: { - status: 'ACTIVE', + status: STATUS_ONLINE, membership: DEFAULT_MEMBERSHIP, - sort: 'CREATED_DESC', + sort: CREATED_DESC, first: RUNNER_PAGE_SIZE, }, }, { name: 'multiple status, a single instance type and a non default sort', - urlQuery: '?status[]=ACTIVE&runner_type[]=INSTANCE_TYPE&sort=CREATED_ASC', + urlQuery: '?status[]=ONLINE&runner_type[]=INSTANCE_TYPE&sort=CREATED_ASC', search: { - runnerType: 'INSTANCE_TYPE', + runnerType: INSTANCE_TYPE, membership: DEFAULT_MEMBERSHIP, - filters: [{ type: 'status', value: { data: 'ACTIVE', operator: '=' } }], + filters: [{ type: 'status', value: { data: STATUS_ONLINE, operator: '=' } }], pagination: {}, - sort: 'CREATED_ASC', + sort: CREATED_ASC, }, graphqlVariables: { - status: 'ACTIVE', - type: 'INSTANCE_TYPE', + status: STATUS_ONLINE, + type: INSTANCE_TYPE, membership: DEFAULT_MEMBERSHIP, - sort: 'CREATED_ASC', + sort: CREATED_ASC, first: RUNNER_PAGE_SIZE, }, }, @@ -183,13 +191,13 @@ export const mockSearchExamples = [ membership: DEFAULT_MEMBERSHIP, filters: [{ type: 'tag', value: { data: 
'tag-1', operator: '=' } }], pagination: {}, - sort: 'CREATED_DESC', + sort: CREATED_DESC, }, graphqlVariables: { membership: DEFAULT_MEMBERSHIP, tagList: ['tag-1'], - first: 20, - sort: 'CREATED_DESC', + first: RUNNER_PAGE_SIZE, + sort: CREATED_DESC, }, }, { @@ -203,13 +211,13 @@ export const mockSearchExamples = [ { type: 'tag', value: { data: 'tag-2', operator: '=' } }, ], pagination: {}, - sort: 'CREATED_DESC', + sort: CREATED_DESC, }, graphqlVariables: { membership: DEFAULT_MEMBERSHIP, tagList: ['tag-1', 'tag-2'], - first: 20, - sort: 'CREATED_DESC', + first: RUNNER_PAGE_SIZE, + sort: CREATED_DESC, }, }, { @@ -220,11 +228,11 @@ export const mockSearchExamples = [ membership: DEFAULT_MEMBERSHIP, filters: [], pagination: { after: 'AFTER_CURSOR' }, - sort: 'CREATED_DESC', + sort: CREATED_DESC, }, graphqlVariables: { membership: DEFAULT_MEMBERSHIP, - sort: 'CREATED_DESC', + sort: CREATED_DESC, after: 'AFTER_CURSOR', first: RUNNER_PAGE_SIZE, }, @@ -237,11 +245,11 @@ export const mockSearchExamples = [ membership: DEFAULT_MEMBERSHIP, filters: [], pagination: { before: 'BEFORE_CURSOR' }, - sort: 'CREATED_DESC', + sort: CREATED_DESC, }, graphqlVariables: { membership: DEFAULT_MEMBERSHIP, - sort: 'CREATED_DESC', + sort: CREATED_DESC, before: 'BEFORE_CURSOR', last: RUNNER_PAGE_SIZE, }, @@ -249,24 +257,24 @@ export const mockSearchExamples = [ { name: 'the next page filtered by a status, an instance type, tags and a non default sort', urlQuery: - '?status[]=ACTIVE&runner_type[]=INSTANCE_TYPE&tag[]=tag-1&tag[]=tag-2&sort=CREATED_ASC&after=AFTER_CURSOR', + '?status[]=ONLINE&runner_type[]=INSTANCE_TYPE&tag[]=tag-1&tag[]=tag-2&sort=CREATED_ASC&after=AFTER_CURSOR', search: { - runnerType: 'INSTANCE_TYPE', + runnerType: INSTANCE_TYPE, membership: DEFAULT_MEMBERSHIP, filters: [ - { type: 'status', value: { data: 'ACTIVE', operator: '=' } }, + { type: 'status', value: { data: STATUS_ONLINE, operator: '=' } }, { type: 'tag', value: { data: 'tag-1', operator: '=' } }, { type: 
'tag', value: { data: 'tag-2', operator: '=' } }, ], pagination: { after: 'AFTER_CURSOR' }, - sort: 'CREATED_ASC', + sort: CREATED_ASC, }, graphqlVariables: { - status: 'ACTIVE', - type: 'INSTANCE_TYPE', + status: STATUS_ONLINE, + type: INSTANCE_TYPE, membership: DEFAULT_MEMBERSHIP, tagList: ['tag-1', 'tag-2'], - sort: 'CREATED_ASC', + sort: CREATED_ASC, after: 'AFTER_CURSOR', first: RUNNER_PAGE_SIZE, }, @@ -279,12 +287,12 @@ export const mockSearchExamples = [ membership: DEFAULT_MEMBERSHIP, filters: [{ type: 'paused', value: { data: 'true', operator: '=' } }], pagination: {}, - sort: 'CREATED_DESC', + sort: CREATED_DESC, }, graphqlVariables: { paused: true, membership: DEFAULT_MEMBERSHIP, - sort: 'CREATED_DESC', + sort: CREATED_DESC, first: RUNNER_PAGE_SIZE, }, }, @@ -296,12 +304,12 @@ export const mockSearchExamples = [ membership: DEFAULT_MEMBERSHIP, filters: [{ type: 'paused', value: { data: 'false', operator: '=' } }], pagination: {}, - sort: 'CREATED_DESC', + sort: CREATED_DESC, }, graphqlVariables: { paused: false, membership: DEFAULT_MEMBERSHIP, - sort: 'CREATED_DESC', + sort: CREATED_DESC, first: RUNNER_PAGE_SIZE, }, }, @@ -310,12 +318,16 @@ export const mockSearchExamples = [ export const onlineContactTimeoutSecs = 2 * 60 * 60; export const staleTimeoutSecs = 7889238; // Ruby's `3.months` +export const mockRegistrationToken = 'MOCK_REGISTRATION_TOKEN'; +export const mockAuthenticationToken = 'MOCK_AUTHENTICATION_TOKEN'; + export const newRunnerPath = '/runners/new'; export const emptyStateSvgPath = 'emptyStateSvgPath.svg'; export const emptyStateFilteredSvgPath = 'emptyStateFilteredSvgPath.svg'; export { allRunnersData, + allRunnersWithCreatorData, allRunnersDataPaginated, runnersCountData, groupRunnersData, diff --git a/spec/frontend/ci/runner/runner_search_utils_spec.js b/spec/frontend/ci/runner/runner_search_utils_spec.js index f64b89d47fd..9a4a6139198 100644 --- a/spec/frontend/ci/runner/runner_search_utils_spec.js +++ 
b/spec/frontend/ci/runner/runner_search_utils_spec.js @@ -7,6 +7,7 @@ import { isSearchFiltered, } from 'ee_else_ce/ci/runner/runner_search_utils'; import { FILTERED_SEARCH_TERM } from '~/vue_shared/components/filtered_search_bar/constants'; +import { DEFAULT_SORT } from '~/ci/runner/constants'; import { mockSearchExamples } from './mock_data'; describe('search_params.js', () => { @@ -68,7 +69,7 @@ describe('search_params.js', () => { 'http://test.host/?paused[]=true', 'http://test.host/?search=my_text', ])('When a filter is removed, it is removed from the URL', (initialUrl) => { - const search = { filters: [], sort: 'CREATED_DESC' }; + const search = { filters: [], sort: DEFAULT_SORT }; const expectedUrl = `http://test.host/`; expect(fromSearchToUrl(search, initialUrl)).toBe(expectedUrl); @@ -76,7 +77,7 @@ describe('search_params.js', () => { it('When unrelated search parameter is present, it does not get removed', () => { const initialUrl = `http://test.host/?unrelated=UNRELATED&status[]=ACTIVE`; - const search = { filters: [], sort: 'CREATED_DESC' }; + const search = { filters: [], sort: DEFAULT_SORT }; const expectedUrl = `http://test.host/?unrelated=UNRELATED`; expect(fromSearchToUrl(search, initialUrl)).toBe(expectedUrl); diff --git a/spec/frontend/ci/runner/sentry_utils_spec.js b/spec/frontend/ci/runner/sentry_utils_spec.js index f7b689272ce..2f17cc43ac5 100644 --- a/spec/frontend/ci/runner/sentry_utils_spec.js +++ b/spec/frontend/ci/runner/sentry_utils_spec.js @@ -6,7 +6,7 @@ jest.mock('@sentry/browser'); describe('~/ci/runner/sentry_utils', () => { let mockSetTag; - beforeEach(async () => { + beforeEach(() => { mockSetTag = jest.fn(); Sentry.withScope.mockImplementation((fn) => { diff --git a/spec/frontend/clusters/agents/components/create_token_modal_spec.js b/spec/frontend/clusters/agents/components/create_token_modal_spec.js index ff698952c6b..42e6a70ee26 100644 --- a/spec/frontend/clusters/agents/components/create_token_modal_spec.js +++ 
b/spec/frontend/clusters/agents/components/create_token_modal_spec.js @@ -213,7 +213,7 @@ describe('CreateTokenModal', () => { await mockCreatedResponse(createAgentTokenErrorResponse); }); - it('displays the error message', async () => { + it('displays the error message', () => { expect(findAlert().text()).toBe( createAgentTokenErrorResponse.data.clusterAgentTokenCreate.errors[0], ); diff --git a/spec/frontend/clusters/clusters_bundle_spec.js b/spec/frontend/clusters/clusters_bundle_spec.js index a2ec19c5b4a..d657566713f 100644 --- a/spec/frontend/clusters/clusters_bundle_spec.js +++ b/spec/frontend/clusters/clusters_bundle_spec.js @@ -1,5 +1,6 @@ import MockAdapter from 'axios-mock-adapter'; -import { loadHTMLFixture, resetHTMLFixture } from 'helpers/fixtures'; +import htmlShowCluster from 'test_fixtures/clusters/show_cluster.html'; +import { setHTMLFixture, resetHTMLFixture } from 'helpers/fixtures'; import { useMockLocationHelper } from 'helpers/mock_window_location_helper'; import Clusters from '~/clusters/clusters_bundle'; import axios from '~/lib/utils/axios_utils'; @@ -24,7 +25,7 @@ describe('Clusters', () => { }; beforeEach(() => { - loadHTMLFixture('clusters/show_cluster.html'); + setHTMLFixture(htmlShowCluster); mockGetClusterStatusRequest(); diff --git a/spec/frontend/clusters_list/components/agent_table_spec.js b/spec/frontend/clusters_list/components/agent_table_spec.js index 9cbb83eedd2..0f68a69458e 100644 --- a/spec/frontend/clusters_list/components/agent_table_spec.js +++ b/spec/frontend/clusters_list/components/agent_table_spec.js @@ -13,9 +13,9 @@ const defaultConfigHelpUrl = const provideData = { gitlabVersion: '14.8', - kasVersion: '14.8', + kasVersion: '14.8.0', }; -const propsData = { +const defaultProps = { agents: clusterAgents, }; @@ -26,9 +26,6 @@ const DeleteAgentButtonStub = stubComponent(DeleteAgentButton, { const outdatedTitle = I18N_AGENT_TABLE.versionOutdatedTitle; const mismatchTitle = I18N_AGENT_TABLE.versionMismatchTitle; const 
mismatchOutdatedTitle = I18N_AGENT_TABLE.versionMismatchOutdatedTitle; -const outdatedText = sprintf(I18N_AGENT_TABLE.versionOutdatedText, { - version: provideData.kasVersion, -}); const mismatchText = I18N_AGENT_TABLE.versionMismatchText; describe('AgentTable', () => { @@ -39,127 +36,150 @@ describe('AgentTable', () => { const findStatusIcon = (at) => findStatusText(at).findComponent(GlIcon); const findLastContactText = (at) => wrapper.findAllByTestId('cluster-agent-last-contact').at(at); const findVersionText = (at) => wrapper.findAllByTestId('cluster-agent-version').at(at); + const findAgentId = (at) => wrapper.findAllByTestId('cluster-agent-id').at(at); const findConfiguration = (at) => wrapper.findAllByTestId('cluster-agent-configuration-link').at(at); const findDeleteAgentButton = () => wrapper.findAllComponents(DeleteAgentButton); - beforeEach(() => { + const createWrapper = ({ provide = provideData, propsData = defaultProps } = {}) => { wrapper = mountExtended(AgentTable, { propsData, - provide: provideData, + provide, stubs: { DeleteAgentButton: DeleteAgentButtonStub, }, }); - }); - - afterEach(() => { - if (wrapper) { - wrapper.destroy(); - } - }); + }; describe('agent table', () => { - it.each` - agentName | link | lineNumber - ${'agent-1'} | ${'/agent-1'} | ${0} - ${'agent-2'} | ${'/agent-2'} | ${1} - `('displays agent link for $agentName', ({ agentName, link, lineNumber }) => { - expect(findAgentLink(lineNumber).text()).toBe(agentName); - expect(findAgentLink(lineNumber).attributes('href')).toBe(link); + describe('default', () => { + beforeEach(() => { + createWrapper(); + }); + + it.each` + agentName | link | lineNumber + ${'agent-1'} | ${'/agent-1'} | ${0} + ${'agent-2'} | ${'/agent-2'} | ${1} + `('displays agent link for $agentName', ({ agentName, link, lineNumber }) => { + expect(findAgentLink(lineNumber).text()).toBe(agentName); + expect(findAgentLink(lineNumber).attributes('href')).toBe(link); + }); + + it.each` + agentGraphQLId | agentId | 
lineNumber + ${'gid://gitlab/Clusters::Agent/1'} | ${'1'} | ${0} + ${'gid://gitlab/Clusters::Agent/2'} | ${'2'} | ${1} + `( + 'displays agent id as "$agentId" for "$agentGraphQLId" at line $lineNumber', + ({ agentId, lineNumber }) => { + expect(findAgentId(lineNumber).text()).toBe(agentId); + }, + ); + + it.each` + status | iconName | lineNumber + ${'Never connected'} | ${'status-neutral'} | ${0} + ${'Connected'} | ${'status-success'} | ${1} + ${'Not connected'} | ${'status-alert'} | ${2} + `( + 'displays agent connection status as "$status" at line $lineNumber', + ({ status, iconName, lineNumber }) => { + expect(findStatusText(lineNumber).text()).toBe(status); + expect(findStatusIcon(lineNumber).props('name')).toBe(iconName); + }, + ); + + it.each` + lastContact | lineNumber + ${'Never'} | ${0} + ${timeagoMixin.methods.timeFormatted(connectedTimeNow)} | ${1} + ${timeagoMixin.methods.timeFormatted(connectedTimeInactive)} | ${2} + `( + 'displays agent last contact time as "$lastContact" at line $lineNumber', + ({ lastContact, lineNumber }) => { + expect(findLastContactText(lineNumber).text()).toBe(lastContact); + }, + ); + + it.each` + agentConfig | link | lineNumber + ${'.gitlab/agents/agent-1'} | ${'/agent/full/path'} | ${0} + ${'Default configuration'} | ${defaultConfigHelpUrl} | ${1} + `( + 'displays config file path as "$agentPath" at line $lineNumber', + ({ agentConfig, link, lineNumber }) => { + const findLink = findConfiguration(lineNumber).findComponent(GlLink); + + expect(findLink.attributes('href')).toBe(link); + expect(findConfiguration(lineNumber).text()).toBe(agentConfig); + }, + ); + + it('displays actions menu for each agent', () => { + expect(findDeleteAgentButton()).toHaveLength(clusterAgents.length); + }); }); - it.each` - status | iconName | lineNumber - ${'Never connected'} | ${'status-neutral'} | ${0} - ${'Connected'} | ${'status-success'} | ${1} - ${'Not connected'} | ${'status-alert'} | ${2} + describe.each` + agentMockIdx | agentVersion | 
kasVersion | versionMismatch | versionOutdated | title + ${0} | ${''} | ${'14.8.0'} | ${false} | ${false} | ${''} + ${1} | ${'14.8.0'} | ${'14.8.0'} | ${false} | ${false} | ${''} + ${2} | ${'14.6.0'} | ${'14.8.0'} | ${false} | ${true} | ${outdatedTitle} + ${3} | ${'14.7.0'} | ${'14.8.0'} | ${true} | ${false} | ${mismatchTitle} + ${4} | ${'14.3.0'} | ${'14.8.0'} | ${true} | ${true} | ${mismatchOutdatedTitle} + ${5} | ${'14.6.0'} | ${'14.8.0-rc1'} | ${false} | ${false} | ${''} + ${6} | ${'14.8.0'} | ${'15.0.0'} | ${false} | ${true} | ${outdatedTitle} + ${7} | ${'14.8.0'} | ${'15.0.0-rc1'} | ${false} | ${true} | ${outdatedTitle} + ${8} | ${'14.8.0'} | ${'14.8.10'} | ${false} | ${false} | ${''} `( - 'displays agent connection status as "$status" at line $lineNumber', - ({ status, iconName, lineNumber }) => { - expect(findStatusText(lineNumber).text()).toBe(status); - expect(findStatusIcon(lineNumber).props('name')).toBe(iconName); - }, - ); + 'when agent version is "$agentVersion", KAS version is "$kasVersion" and version mismatch is "$versionMismatch"', + ({ agentMockIdx, agentVersion, kasVersion, versionMismatch, versionOutdated, title }) => { + const currentAgent = clusterAgents[agentMockIdx]; - it.each` - lastContact | lineNumber - ${'Never'} | ${0} - ${timeagoMixin.methods.timeFormatted(connectedTimeNow)} | ${1} - ${timeagoMixin.methods.timeFormatted(connectedTimeInactive)} | ${2} - `( - 'displays agent last contact time as "$lastContact" at line $lineNumber', - ({ lastContact, lineNumber }) => { - expect(findLastContactText(lineNumber).text()).toBe(lastContact); - }, - ); + const findIcon = () => findVersionText(0).findComponent(GlIcon); + const findPopover = () => wrapper.findByTestId(`popover-${currentAgent.name}`); - describe.each` - agent | version | podsNumber | versionMismatch | versionOutdated | title | texts | lineNumber - ${'agent-1'} | ${''} | ${1} | ${false} | ${false} | ${''} | ${''} | ${0} - ${'agent-2'} | ${'14.8'} | ${2} | ${false} | ${false} | 
${''} | ${''} | ${1} - ${'agent-3'} | ${'14.5'} | ${1} | ${false} | ${true} | ${outdatedTitle} | ${[outdatedText]} | ${2} - ${'agent-4'} | ${'14.7'} | ${2} | ${true} | ${false} | ${mismatchTitle} | ${[mismatchText]} | ${3} - ${'agent-5'} | ${'14.3'} | ${2} | ${true} | ${true} | ${mismatchOutdatedTitle} | ${[mismatchText, outdatedText]} | ${4} - `( - 'agent version column at line $lineNumber', - ({ - agent, - version, - podsNumber, - versionMismatch, - versionOutdated, - title, - texts, - lineNumber, - }) => { - const findIcon = () => findVersionText(lineNumber).findComponent(GlIcon); - const findPopover = () => wrapper.findByTestId(`popover-${agent}`); const versionWarning = versionMismatch || versionOutdated; + const outdatedText = sprintf(I18N_AGENT_TABLE.versionOutdatedText, { + version: kasVersion, + }); - it('shows the correct agent version', () => { - expect(findVersionText(lineNumber).text()).toBe(version); + beforeEach(() => { + createWrapper({ + provide: { gitlabVersion: '14.8', kasVersion }, + propsData: { agents: [currentAgent] }, + }); + }); + + it('shows the correct agent version text', () => { + expect(findVersionText(0).text()).toBe(agentVersion); }); if (versionWarning) { - it(`shows a warning icon when agent versions mismatch is ${versionMismatch} and outdated is ${versionOutdated} and the number of pods is ${podsNumber}`, () => { + it('shows a warning icon', () => { expect(findIcon().props('name')).toBe('warning'); }); - it(`renders correct title for the popover when agent versions mismatch is ${versionMismatch} and outdated is ${versionOutdated}`, () => { expect(findPopover().props('title')).toBe(title); }); - - it(`renders correct text for the popover when agent versions mismatch is ${versionMismatch} and outdated is ${versionOutdated}`, () => { - texts.forEach((text) => { - expect(findPopover().text()).toContain(text); + if (versionMismatch) { + it(`renders correct text for the popover when agent versions mismatch is ${versionMismatch}`, () => 
{ + expect(findPopover().text()).toContain(mismatchText); }); - }); + } + if (versionOutdated) { + it(`renders correct text for the popover when agent versions outdated is ${versionOutdated}`, () => { + expect(findPopover().text()).toContain(outdatedText); + }); + } } else { - it(`doesn't show a warning icon with a popover when agent versions mismatch is ${versionMismatch} and outdated is ${versionOutdated} and the number of pods is ${podsNumber}`, () => { + it(`doesn't show a warning icon with a popover when agent versions mismatch is ${versionMismatch} and outdated is ${versionOutdated}`, () => { expect(findIcon().exists()).toBe(false); expect(findPopover().exists()).toBe(false); }); } }, ); - - it.each` - agentConfig | link | lineNumber - ${'.gitlab/agents/agent-1'} | ${'/agent/full/path'} | ${0} - ${'Default configuration'} | ${defaultConfigHelpUrl} | ${1} - `( - 'displays config file path as "$agentPath" at line $lineNumber', - ({ agentConfig, link, lineNumber }) => { - const findLink = findConfiguration(lineNumber).findComponent(GlLink); - - expect(findLink.attributes('href')).toBe(link); - expect(findConfiguration(lineNumber).text()).toBe(agentConfig); - }, - ); - - it('displays actions menu for each agent', () => { - expect(findDeleteAgentButton()).toHaveLength(5); - }); }); }); diff --git a/spec/frontend/clusters_list/components/install_agent_modal_spec.js b/spec/frontend/clusters_list/components/install_agent_modal_spec.js index 3156eaaecfc..f9009696c7b 100644 --- a/spec/frontend/clusters_list/components/install_agent_modal_spec.js +++ b/spec/frontend/clusters_list/components/install_agent_modal_spec.js @@ -256,7 +256,7 @@ describe('InstallAgentModal', () => { return mockSelectedAgentResponse(); }); - it('displays the error message', async () => { + it('displays the error message', () => { expect(findAlert().text()).toBe( createAgentTokenErrorResponse.data.clusterAgentTokenCreate.errors[0], ); diff --git 
a/spec/frontend/clusters_list/components/mock_data.js b/spec/frontend/clusters_list/components/mock_data.js index 3d18b22d727..af1fb496118 100644 --- a/spec/frontend/clusters_list/components/mock_data.js +++ b/spec/frontend/clusters_list/components/mock_data.js @@ -19,7 +19,7 @@ export const connectedTimeInactive = new Date(connectedTimeNow.getTime() - ACTIV export const clusterAgents = [ { name: 'agent-1', - id: 'agent-1-id', + id: 'gid://gitlab/Clusters::Agent/1', configFolder: { webPath: '/agent/full/path', }, @@ -30,17 +30,17 @@ export const clusterAgents = [ }, { name: 'agent-2', - id: 'agent-2-id', + id: 'gid://gitlab/Clusters::Agent/2', webPath: '/agent-2', status: 'active', lastContact: connectedTimeNow.getTime(), connections: { nodes: [ { - metadata: { version: 'v14.8' }, + metadata: { version: 'v14.8.0' }, }, { - metadata: { version: 'v14.8' }, + metadata: { version: 'v14.8.0' }, }, ], }, @@ -54,14 +54,14 @@ export const clusterAgents = [ }, { name: 'agent-3', - id: 'agent-3-id', + id: 'gid://gitlab/Clusters::Agent/3', webPath: '/agent-3', status: 'inactive', lastContact: connectedTimeInactive.getTime(), connections: { nodes: [ { - metadata: { version: 'v14.5' }, + metadata: { version: 'v14.6.0' }, }, ], }, @@ -75,17 +75,17 @@ export const clusterAgents = [ }, { name: 'agent-4', - id: 'agent-4-id', + id: 'gid://gitlab/Clusters::Agent/4', webPath: '/agent-4', status: 'inactive', lastContact: connectedTimeInactive.getTime(), connections: { nodes: [ { - metadata: { version: 'v14.7' }, + metadata: { version: 'v14.7.0' }, }, { - metadata: { version: 'v14.8' }, + metadata: { version: 'v14.8.0' }, }, ], }, @@ -99,17 +99,101 @@ export const clusterAgents = [ }, { name: 'agent-5', - id: 'agent-5-id', + id: 'gid://gitlab/Clusters::Agent/5', webPath: '/agent-5', status: 'inactive', lastContact: connectedTimeInactive.getTime(), connections: { nodes: [ { - metadata: { version: 'v14.5' }, + metadata: { version: 'v14.5.0' }, }, { - metadata: { version: 'v14.3' }, + 
metadata: { version: 'v14.3.0' }, + }, + ], + }, + tokens: { + nodes: [ + { + lastUsedAt: connectedTimeInactive, + }, + ], + }, + }, + { + name: 'agent-6', + id: 'gid://gitlab/Clusters::Agent/6', + webPath: '/agent-6', + status: 'inactive', + lastContact: connectedTimeInactive.getTime(), + connections: { + nodes: [ + { + metadata: { version: 'v14.6.0' }, + }, + ], + }, + tokens: { + nodes: [ + { + lastUsedAt: connectedTimeInactive, + }, + ], + }, + }, + { + name: 'agent-7', + id: 'gid://gitlab/Clusters::Agent/7', + webPath: '/agent-7', + status: 'inactive', + lastContact: connectedTimeInactive.getTime(), + connections: { + nodes: [ + { + metadata: { version: 'v14.8.0' }, + }, + ], + }, + tokens: { + nodes: [ + { + lastUsedAt: connectedTimeInactive, + }, + ], + }, + }, + { + name: 'agent-8', + id: 'gid://gitlab/Clusters::Agent/8', + webPath: '/agent-8', + status: 'inactive', + lastContact: connectedTimeInactive.getTime(), + connections: { + nodes: [ + { + metadata: { version: 'v14.8.0' }, + }, + ], + }, + tokens: { + nodes: [ + { + lastUsedAt: connectedTimeInactive, + }, + ], + }, + }, + { + name: 'agent-9', + id: 'gid://gitlab/Clusters::Agent/9', + webPath: '/agent-9', + status: 'inactive', + lastContact: connectedTimeInactive.getTime(), + connections: { + nodes: [ + { + metadata: { version: 'v14.8.0' }, }, ], }, diff --git a/spec/frontend/comment_templates/components/__snapshots__/list_item_spec.js.snap b/spec/frontend/comment_templates/components/__snapshots__/list_item_spec.js.snap new file mode 100644 index 00000000000..c979ee5a1d2 --- /dev/null +++ b/spec/frontend/comment_templates/components/__snapshots__/list_item_spec.js.snap @@ -0,0 +1,140 @@ +// Jest Snapshot v1, https://goo.gl/fbAQLP + +exports[`Comment templates list item component renders list item 1`] = ` +
  • +
    +
    + test +
    + +
    +
    + + +
    +
    + +
      +
    • + +
    • +
    • + +
    • +
    + +
    +
    +
    + +
    + + Comment template actions + +
    +
    +
    + +
    + /assign_reviewer +
    + + +
  • +`; diff --git a/spec/frontend/comment_templates/components/form_spec.js b/spec/frontend/comment_templates/components/form_spec.js new file mode 100644 index 00000000000..053a5099c37 --- /dev/null +++ b/spec/frontend/comment_templates/components/form_spec.js @@ -0,0 +1,145 @@ +import Vue, { nextTick } from 'vue'; +import { mount } from '@vue/test-utils'; +import { GlAlert } from '@gitlab/ui'; +import VueApollo from 'vue-apollo'; +import createdSavedReplyResponse from 'test_fixtures/graphql/comment_templates/create_saved_reply.mutation.graphql.json'; +import createdSavedReplyErrorResponse from 'test_fixtures/graphql/comment_templates/create_saved_reply_with_errors.mutation.graphql.json'; +import createMockApollo from 'helpers/mock_apollo_helper'; +import waitForPromises from 'helpers/wait_for_promises'; +import Form from '~/comment_templates/components/form.vue'; +import createSavedReplyMutation from '~/comment_templates/queries/create_saved_reply.mutation.graphql'; +import updateSavedReplyMutation from '~/comment_templates/queries/update_saved_reply.mutation.graphql'; + +let wrapper; +let createSavedReplyResponseSpy; +let updateSavedReplyResponseSpy; + +function createMockApolloProvider(response) { + Vue.use(VueApollo); + + createSavedReplyResponseSpy = jest.fn().mockResolvedValue(response); + updateSavedReplyResponseSpy = jest.fn().mockResolvedValue(response); + + const requestHandlers = [ + [createSavedReplyMutation, createSavedReplyResponseSpy], + [updateSavedReplyMutation, updateSavedReplyResponseSpy], + ]; + + return createMockApollo(requestHandlers); +} + +function createComponent(id = null, response = createdSavedReplyResponse) { + const mockApollo = createMockApolloProvider(response); + + return mount(Form, { + propsData: { + id, + }, + apolloProvider: mockApollo, + }); +} + +const findSavedReplyNameInput = () => wrapper.find('[data-testid="comment-template-name-input"]'); +const findSavedReplyNameFormGroup = () => + 
wrapper.find('[data-testid="comment-template-name-form-group"]'); +const findSavedReplyContentInput = () => + wrapper.find('[data-testid="comment-template-content-input"]'); +const findSavedReplyContentFormGroup = () => + wrapper.find('[data-testid="comment-template-content-form-group"]'); +const findSavedReplyFrom = () => wrapper.find('[data-testid="comment-template-form"]'); +const findAlerts = () => wrapper.findAllComponents(GlAlert); +const findSubmitBtn = () => wrapper.find('[data-testid="comment-template-form-submit-btn"]'); + +describe('Comment templates form component', () => { + describe('creates comment template', () => { + it('calls apollo mutation', async () => { + wrapper = createComponent(); + + findSavedReplyNameInput().setValue('Test'); + findSavedReplyContentInput().setValue('Test content'); + findSavedReplyFrom().trigger('submit'); + + await waitForPromises(); + + expect(createSavedReplyResponseSpy).toHaveBeenCalledWith({ + id: null, + content: 'Test content', + name: 'Test', + }); + }); + + it('does not submit when form validation fails', async () => { + wrapper = createComponent(); + + findSavedReplyFrom().trigger('submit'); + + await waitForPromises(); + + expect(createSavedReplyResponseSpy).not.toHaveBeenCalled(); + }); + + it.each` + findFormGroup | findInput | fieldName + ${findSavedReplyNameFormGroup} | ${findSavedReplyContentInput} | ${'name'} + ${findSavedReplyContentFormGroup} | ${findSavedReplyNameInput} | ${'content'} + `('shows errors for empty $fieldName input', async ({ findFormGroup, findInput }) => { + wrapper = createComponent(null, createdSavedReplyErrorResponse); + + findInput().setValue('Test'); + findSavedReplyFrom().trigger('submit'); + + await waitForPromises(); + + expect(findFormGroup().classes('is-invalid')).toBe(true); + }); + + it('displays errors when mutation fails', async () => { + wrapper = createComponent(null, createdSavedReplyErrorResponse); + + findSavedReplyNameInput().setValue('Test'); + 
findSavedReplyContentInput().setValue('Test content'); + findSavedReplyFrom().trigger('submit'); + + await waitForPromises(); + + const { errors } = createdSavedReplyErrorResponse; + const alertMessages = findAlerts().wrappers.map((x) => x.text()); + + expect(alertMessages).toEqual(errors.map((x) => x.message)); + }); + + it('shows loading state when saving', async () => { + wrapper = createComponent(); + + findSavedReplyNameInput().setValue('Test'); + findSavedReplyContentInput().setValue('Test content'); + findSavedReplyFrom().trigger('submit'); + + await nextTick(); + + expect(findSubmitBtn().props('loading')).toBe(true); + + await waitForPromises(); + + expect(findSubmitBtn().props('loading')).toBe(false); + }); + }); + + describe('updates saved reply', () => { + it('calls apollo mutation', async () => { + wrapper = createComponent('1'); + + findSavedReplyNameInput().setValue('Test'); + findSavedReplyContentInput().setValue('Test content'); + findSavedReplyFrom().trigger('submit'); + + await waitForPromises(); + + expect(updateSavedReplyResponseSpy).toHaveBeenCalledWith({ + id: '1', + content: 'Test content', + name: 'Test', + }); + }); + }); +}); diff --git a/spec/frontend/comment_templates/components/list_item_spec.js b/spec/frontend/comment_templates/components/list_item_spec.js new file mode 100644 index 00000000000..925d78da4ad --- /dev/null +++ b/spec/frontend/comment_templates/components/list_item_spec.js @@ -0,0 +1,154 @@ +import Vue from 'vue'; +import VueApollo from 'vue-apollo'; +import { mount } from '@vue/test-utils'; +import { GlDisclosureDropdown, GlDisclosureDropdownItem, GlModal } from '@gitlab/ui'; +import { __ } from '~/locale'; +import createMockApollo from 'helpers/mock_apollo_helper'; +import { createMockDirective } from 'helpers/vue_mock_directive'; +import ListItem from '~/comment_templates/components/list_item.vue'; +import deleteSavedReplyMutation from '~/comment_templates/queries/delete_saved_reply.mutation.graphql'; + +function 
createMockApolloProvider(requestHandlers = [deleteSavedReplyMutation]) { + Vue.use(VueApollo); + + return createMockApollo([requestHandlers]); +} + +describe('Comment templates list item component', () => { + let wrapper; + let $router; + + function createComponent(propsData = {}, apolloProvider = createMockApolloProvider) { + $router = { + push: jest.fn(), + }; + + return mount(ListItem, { + propsData, + directives: { + GlModal: createMockDirective('gl-modal'), + }, + apolloProvider, + mocks: { + $router, + }, + }); + } + + const findDropdown = () => wrapper.findComponent(GlDisclosureDropdown); + const findDropdownItems = () => wrapper.findAllComponents(GlDisclosureDropdownItem); + const findModal = () => wrapper.findComponent(GlModal); + + it('renders list item', () => { + wrapper = createComponent({ template: { name: 'test', content: '/assign_reviewer' } }); + + expect(wrapper.element).toMatchSnapshot(); + }); + + describe('comment template actions dropdown', () => { + beforeEach(() => { + wrapper = createComponent({ template: { name: 'test', content: '/assign_reviewer' } }); + }); + + it('exists', () => { + expect(findDropdown().exists()).toBe(true); + }); + + it('has correct toggle text', () => { + expect(findDropdown().props('toggleText')).toBe(__('Comment template actions')); + }); + + it('has correct amount of dropdown items', () => { + const items = findDropdownItems(); + + expect(items.exists()).toBe(true); + expect(items).toHaveLength(2); + }); + + describe('edit option', () => { + it('exists', () => { + const items = findDropdownItems(); + + const editItem = items.filter((item) => item.text() === __('Edit')); + + expect(editItem.exists()).toBe(true); + }); + + it('shows as first dropdown item', () => { + const items = findDropdownItems(); + + expect(items.at(0).text()).toBe(__('Edit')); + }); + }); + + describe('delete option', () => { + it('exists', () => { + const items = findDropdownItems(); + + const deleteItem = items.filter((item) => item.text() 
=== __('Delete')); + + expect(deleteItem.exists()).toBe(true); + }); + + it('shows as first dropdown item', () => { + const items = findDropdownItems(); + + expect(items.at(1).text()).toBe(__('Delete')); + }); + }); + }); + + describe('Delete modal', () => { + let deleteSavedReplyMutationResponse; + + beforeEach(() => { + deleteSavedReplyMutationResponse = jest + .fn() + .mockResolvedValue({ data: { savedReplyDestroy: { errors: [] } } }); + + const apolloProvider = createMockApolloProvider([ + deleteSavedReplyMutation, + deleteSavedReplyMutationResponse, + ]); + + wrapper = createComponent( + { template: { name: 'test', content: '/assign_reviewer', id: 1 } }, + apolloProvider, + ); + }); + + it('exists', () => { + expect(findModal().exists()).toBe(true); + }); + + it('has correct title', () => { + expect(findModal().props('title')).toBe(__('Delete comment template')); + }); + + it('delete button calls Apollo mutate', async () => { + await findModal().vm.$emit('primary'); + + expect(deleteSavedReplyMutationResponse).toHaveBeenCalledWith({ id: 1 }); + }); + + it('cancel button does not trigger Apollo mutation', async () => { + await findModal().vm.$emit('secondary'); + + expect(deleteSavedReplyMutationResponse).not.toHaveBeenCalled(); + }); + }); + + describe('Dropdown Edit', () => { + beforeEach(() => { + wrapper = createComponent({ template: { name: 'test', content: '/assign_reviewer' } }); + }); + + it('click triggers router push', async () => { + const editComponent = findDropdownItems().at(0); + + await editComponent.find('button').trigger('click'); + + expect($router.push).toHaveBeenCalled(); + }); + }); +}); diff --git a/spec/frontend/comment_templates/components/list_spec.js b/spec/frontend/comment_templates/components/list_spec.js new file mode 100644 index 00000000000..8b0daf2fe2f --- /dev/null +++ b/spec/frontend/comment_templates/components/list_spec.js @@ -0,0 +1,46 @@ +import { mount } from '@vue/test-utils'; +import noSavedRepliesResponse from 
'test_fixtures/graphql/comment_templates/saved_replies_empty.query.graphql.json'; +import savedRepliesResponse from 'test_fixtures/graphql/comment_templates/saved_replies.query.graphql.json'; +import List from '~/comment_templates/components/list.vue'; +import ListItem from '~/comment_templates/components/list_item.vue'; + +let wrapper; + +function createComponent(res = {}) { + const { savedReplies } = res.data.currentUser; + + return mount(List, { + propsData: { + savedReplies: savedReplies.nodes, + pageInfo: savedReplies.pageInfo, + count: savedReplies.count, + }, + }); +} + +describe('Comment templates list component', () => { + it('does not render any list items when response is empty', () => { + wrapper = createComponent(noSavedRepliesResponse); + + expect(wrapper.findAllComponents(ListItem).length).toBe(0); + }); + + it('render comment templates count', () => { + wrapper = createComponent(savedRepliesResponse); + + expect(wrapper.find('[data-testid="title"]').text()).toEqual('My comment templates (2)'); + }); + + it('renders list of comment templates', () => { + const savedReplies = savedRepliesResponse.data.currentUser.savedReplies.nodes; + wrapper = createComponent(savedRepliesResponse); + + expect(wrapper.findAllComponents(ListItem).length).toBe(2); + expect(wrapper.findAllComponents(ListItem).at(0).props('template')).toEqual( + expect.objectContaining(savedReplies[0]), + ); + expect(wrapper.findAllComponents(ListItem).at(1).props('template')).toEqual( + expect.objectContaining(savedReplies[1]), + ); + }); +}); diff --git a/spec/frontend/comment_templates/pages/index_spec.js b/spec/frontend/comment_templates/pages/index_spec.js new file mode 100644 index 00000000000..6dbec3ef4a4 --- /dev/null +++ b/spec/frontend/comment_templates/pages/index_spec.js @@ -0,0 +1,45 @@ +import Vue from 'vue'; +import { mount } from '@vue/test-utils'; +import VueApollo from 'vue-apollo'; +import savedRepliesResponse from 
'test_fixtures/graphql/comment_templates/saved_replies.query.graphql.json'; +import createMockApollo from 'helpers/mock_apollo_helper'; +import waitForPromises from 'helpers/wait_for_promises'; +import IndexPage from '~/comment_templates/pages/index.vue'; +import ListItem from '~/comment_templates/components/list_item.vue'; +import savedRepliesQuery from '~/comment_templates/queries/saved_replies.query.graphql'; + +let wrapper; + +function createMockApolloProvider(response) { + Vue.use(VueApollo); + + const requestHandlers = [[savedRepliesQuery, jest.fn().mockResolvedValue(response)]]; + + return createMockApollo(requestHandlers); +} + +function createComponent(options = {}) { + const { mockApollo } = options; + + return mount(IndexPage, { + apolloProvider: mockApollo, + }); +} + +describe('Comment templates index page component', () => { + it('renders list of comment templates', async () => { + const mockApollo = createMockApolloProvider(savedRepliesResponse); + const savedReplies = savedRepliesResponse.data.currentUser.savedReplies.nodes; + wrapper = createComponent({ mockApollo }); + + await waitForPromises(); + + expect(wrapper.findAllComponents(ListItem).length).toBe(2); + expect(wrapper.findAllComponents(ListItem).at(0).props('template')).toEqual( + expect.objectContaining(savedReplies[0]), + ); + expect(wrapper.findAllComponents(ListItem).at(1).props('template')).toEqual( + expect.objectContaining(savedReplies[1]), + ); + }); +}); diff --git a/spec/frontend/commit/commit_box_pipeline_mini_graph_spec.js b/spec/frontend/commit/commit_box_pipeline_mini_graph_spec.js index 64623968aa0..cc251104811 100644 --- a/spec/frontend/commit/commit_box_pipeline_mini_graph_spec.js +++ b/spec/frontend/commit/commit_box_pipeline_mini_graph_spec.js @@ -83,7 +83,7 @@ describe('Commit box pipeline mini graph', () => { await createComponent(); }); - it('should not display loading state after the query is resolved', async () => { + it('should not display loading state after the 
query is resolved', () => { expect(findLoadingIcon().exists()).toBe(false); expect(findPipelineMiniGraph().exists()).toBe(true); }); diff --git a/spec/frontend/commit/components/commit_box_pipeline_status_spec.js b/spec/frontend/commit/components/commit_box_pipeline_status_spec.js index 9c7a41b3506..5df35cc6dda 100644 --- a/spec/frontend/commit/components/commit_box_pipeline_status_spec.js +++ b/spec/frontend/commit/components/commit_box_pipeline_status_spec.js @@ -70,7 +70,7 @@ describe('Commit box pipeline status', () => { await waitForPromises(); }); - it('should display pipeline status after the query is resolved successfully', async () => { + it('should display pipeline status after the query is resolved successfully', () => { expect(findStatusIcon().exists()).toBe(true); expect(findLoadingIcon().exists()).toBe(false); diff --git a/spec/frontend/content_editor/components/__snapshots__/toolbar_link_button_spec.js.snap b/spec/frontend/content_editor/components/__snapshots__/toolbar_link_button_spec.js.snap deleted file mode 100644 index 331a0a474a3..00000000000 --- a/spec/frontend/content_editor/components/__snapshots__/toolbar_link_button_spec.js.snap +++ /dev/null @@ -1,33 +0,0 @@ -// Jest Snapshot v1, https://goo.gl/fbAQLP - -exports[`content_editor/components/toolbar_link_button renders dropdown component 1`] = ` -"
    -
  • -
    -
    - - -
    - -
    -
    -
  • -
  • -
    -
  • -
  • -
    " -`; diff --git a/spec/frontend/content_editor/components/bubble_menus/code_block_bubble_menu_spec.js b/spec/frontend/content_editor/components/bubble_menus/code_block_bubble_menu_spec.js index 085a6d3a28d..2a6ab75227c 100644 --- a/spec/frontend/content_editor/components/bubble_menus/code_block_bubble_menu_spec.js +++ b/spec/frontend/content_editor/components/bubble_menus/code_block_bubble_menu_spec.js @@ -59,7 +59,7 @@ describe('content_editor/components/bubble_menus/code_block_bubble_menu', () => checked: x.props('isChecked'), })); - beforeEach(async () => { + beforeEach(() => { buildEditor(); buildWrapper(); }); @@ -133,7 +133,7 @@ describe('content_editor/components/bubble_menus/code_block_bubble_menu', () => }); describe('preview button', () => { - it('does not appear for a regular code block', async () => { + it('does not appear for a regular code block', () => { tiptapEditor.commands.insertContent('
    var a = 2;
    '); expect(wrapper.findByTestId('preview-diagram').exists()).toBe(false); @@ -269,7 +269,7 @@ describe('content_editor/components/bubble_menus/code_block_bubble_menu', () => await emitEditorEvent({ event: 'transaction', tiptapEditor }); }); - it('hides the custom language input form and shows dropdown items', async () => { + it('hides the custom language input form and shows dropdown items', () => { expect(wrapper.findComponent(GlDropdownItem).exists()).toBe(true); expect(wrapper.findComponent(GlSearchBoxByType).exists()).toBe(true); expect(wrapper.findComponent(GlDropdownForm).exists()).toBe(false); diff --git a/spec/frontend/content_editor/components/bubble_menus/formatting_bubble_menu_spec.js b/spec/frontend/content_editor/components/bubble_menus/formatting_bubble_menu_spec.js index 7bab473529f..c4bc29adb52 100644 --- a/spec/frontend/content_editor/components/bubble_menus/formatting_bubble_menu_spec.js +++ b/spec/frontend/content_editor/components/bubble_menus/formatting_bubble_menu_spec.js @@ -53,7 +53,7 @@ describe('content_editor/components/bubble_menus/formatting_bubble_menu', () => ${'superscript'} | ${{ contentType: 'superscript', iconName: 'superscript', label: 'Superscript', editorCommand: 'toggleSuperscript' }} ${'subscript'} | ${{ contentType: 'subscript', iconName: 'subscript', label: 'Subscript', editorCommand: 'toggleSubscript' }} ${'highlight'} | ${{ contentType: 'highlight', iconName: 'highlight', label: 'Highlight', editorCommand: 'toggleHighlight' }} - ${'link'} | ${{ contentType: 'link', iconName: 'link', label: 'Insert link', editorCommand: 'toggleLink', editorCommandParams: { href: '' } }} + ${'link'} | ${{ contentType: 'link', iconName: 'link', label: 'Insert link', editorCommand: 'editLink' }} `('given a $testId toolbar control', ({ testId, controlProps }) => { beforeEach(() => { buildWrapper(); diff --git a/spec/frontend/content_editor/components/bubble_menus/link_bubble_menu_spec.js 
b/spec/frontend/content_editor/components/bubble_menus/link_bubble_menu_spec.js index eb5a3b61591..2a8a1b00692 100644 --- a/spec/frontend/content_editor/components/bubble_menus/link_bubble_menu_spec.js +++ b/spec/frontend/content_editor/components/bubble_menus/link_bubble_menu_spec.js @@ -59,13 +59,13 @@ describe('content_editor/components/bubble_menus/link_bubble_menu', () => { expect(wrapper.findByTestId('remove-link').exists()).toBe(exist); }; - beforeEach(async () => { + beforeEach(() => { buildEditor(); tiptapEditor .chain() .insertContent( - 'Download PDF File', + 'Download PDF File', ) .setTextSelection(14) // put cursor in the middle of the link .run(); @@ -84,7 +84,6 @@ describe('content_editor/components/bubble_menus/link_bubble_menu', () => { expect.objectContaining({ href: '/path/to/project/-/wikis/uploads/my_file.pdf', 'aria-label': 'uploads/my_file.pdf', - title: 'uploads/my_file.pdf', target: '_blank', }), ); @@ -181,52 +180,17 @@ describe('content_editor/components/bubble_menus/link_bubble_menu', () => { }); }); - describe('for a placeholder link', () => { - beforeEach(async () => { - tiptapEditor - .chain() - .clearContent() - .insertContent('Dummy link') - .selectAll() - .setLink({ href: '' }) - .setTextSelection(4) - .run(); - - await buildWrapperAndDisplayMenu(); - }); - - it('directly opens the edit form for a placeholder link', async () => { - expectLinkButtonsToExist(false); - - expect(wrapper.findComponent(GlForm).exists()).toBe(true); - }); - - it('removes the link on clicking apply (if no change)', async () => { - await wrapper.findComponent(GlForm).vm.$emit('submit', createFakeEvent()); - - expect(tiptapEditor.getHTML()).toBe('

    Dummy link

    '); - }); - - it('removes the link on clicking cancel', async () => { - await wrapper.findByTestId('cancel-link').vm.$emit('click'); - - expect(tiptapEditor.getHTML()).toBe('

    Dummy link

    '); - }); - }); - describe('edit button', () => { let linkHrefInput; - let linkTitleInput; beforeEach(async () => { await buildWrapperAndDisplayMenu(); await wrapper.findByTestId('edit-link').vm.$emit('click'); linkHrefInput = wrapper.findByTestId('link-href'); - linkTitleInput = wrapper.findByTestId('link-title'); }); - it('hides the link and copy/edit/remove link buttons', async () => { + it('hides the link and copy/edit/remove link buttons', () => { expectLinkButtonsToExist(false); }); @@ -234,7 +198,6 @@ describe('content_editor/components/bubble_menus/link_bubble_menu', () => { expect(wrapper.findComponent(GlForm).exists()).toBe(true); expect(linkHrefInput.element.value).toBe('uploads/my_file.pdf'); - expect(linkTitleInput.element.value).toBe('Click here to download'); }); it('extends selection to select the entire link', () => { @@ -247,26 +210,18 @@ describe('content_editor/components/bubble_menus/link_bubble_menu', () => { describe('after making changes in the form and clicking apply', () => { beforeEach(async () => { linkHrefInput.setValue('https://google.com'); - linkTitleInput.setValue('Search Google'); contentEditor.resolveUrl.mockResolvedValue('https://google.com'); await wrapper.findComponent(GlForm).vm.$emit('submit', createFakeEvent()); }); - it('updates prosemirror doc with new link', async () => { - expect(tiptapEditor.getHTML()).toBe( - '

    Download PDF File

    ', - ); - }); - it('updates the link in the bubble menu', () => { const link = wrapper.findComponent(GlLink); expect(link.attributes()).toEqual( expect.objectContaining({ href: 'https://google.com', 'aria-label': 'https://google.com', - title: 'https://google.com', target: '_blank', }), ); @@ -277,7 +232,6 @@ describe('content_editor/components/bubble_menus/link_bubble_menu', () => { describe('after making changes in the form and clicking cancel', () => { beforeEach(async () => { linkHrefInput.setValue('https://google.com'); - linkTitleInput.setValue('Search Google'); await wrapper.findByTestId('cancel-link').vm.$emit('click'); }); @@ -285,17 +239,6 @@ describe('content_editor/components/bubble_menus/link_bubble_menu', () => { it('hides the form and shows the copy/edit/remove link buttons', () => { expectLinkButtonsToExist(); }); - - it('resets the form with old values of the link from prosemirror', async () => { - // click edit once again to show the form back - await wrapper.findByTestId('edit-link').vm.$emit('click'); - - linkHrefInput = wrapper.findByTestId('link-href'); - linkTitleInput = wrapper.findByTestId('link-title'); - - expect(linkHrefInput.element.value).toBe('uploads/my_file.pdf'); - expect(linkTitleInput.element.value).toBe('Click here to download'); - }); }); }); }); diff --git a/spec/frontend/content_editor/components/bubble_menus/media_bubble_menu_spec.js b/spec/frontend/content_editor/components/bubble_menus/media_bubble_menu_spec.js index c918f068c07..e02b36fb8e9 100644 --- a/spec/frontend/content_editor/components/bubble_menus/media_bubble_menu_spec.js +++ b/spec/frontend/content_editor/components/bubble_menus/media_bubble_menu_spec.js @@ -100,11 +100,11 @@ describe.each` bubbleMenu = wrapper.findComponent(BubbleMenu); }); - it('renders bubble menu component', async () => { + it('renders bubble menu component', () => { expect(bubbleMenu.classes()).toEqual(['gl-shadow', 'gl-rounded-base', 'gl-bg-white']); }); - it('shows a clickable link to 
the image', async () => { + it('shows a clickable link to the image', () => { const link = wrapper.findComponent(GlLink); expect(link.attributes()).toEqual( expect.objectContaining({ @@ -202,7 +202,7 @@ describe.each` mediaAltInput = wrapper.findByTestId('media-alt'); }); - it('hides the link and copy/edit/remove link buttons', async () => { + it('hides the link and copy/edit/remove link buttons', () => { expectLinkButtonsToExist(false); }); @@ -225,7 +225,7 @@ describe.each` await wrapper.findComponent(GlForm).vm.$emit('submit', createFakeEvent()); }); - it(`updates prosemirror doc with new src to the ${mediaType}`, async () => { + it(`updates prosemirror doc with new src to the ${mediaType}`, () => { expect(tiptapEditor.getHTML()).toBe(mediaOutputHTML); }); diff --git a/spec/frontend/content_editor/components/content_editor_alert_spec.js b/spec/frontend/content_editor/components/content_editor_alert_spec.js index e62e2331d25..e6873e2cf96 100644 --- a/spec/frontend/content_editor/components/content_editor_alert_spec.js +++ b/spec/frontend/content_editor/components/content_editor_alert_spec.js @@ -14,7 +14,7 @@ describe('content_editor/components/content_editor_alert', () => { const findErrorAlert = () => wrapper.findComponent(GlAlert); - const createWrapper = async () => { + const createWrapper = () => { tiptapEditor = createTestEditor(); eventHub = eventHubFactory(); diff --git a/spec/frontend/content_editor/components/formatting_toolbar_spec.js b/spec/frontend/content_editor/components/formatting_toolbar_spec.js index 4a7b7cedf19..5d2a9e493e5 100644 --- a/spec/frontend/content_editor/components/formatting_toolbar_spec.js +++ b/spec/frontend/content_editor/components/formatting_toolbar_spec.js @@ -35,7 +35,7 @@ describe('content_editor/components/formatting_toolbar', () => { ${'bullet-list'} | ${{ contentType: 'bulletList', iconName: 'list-bulleted', label: 'Add a bullet list', editorCommand: 'toggleBulletList' }} ${'ordered-list'} | ${{ contentType: 
'orderedList', iconName: 'list-numbered', label: 'Add a numbered list', editorCommand: 'toggleOrderedList' }} ${'task-list'} | ${{ contentType: 'taskList', iconName: 'list-task', label: 'Add a checklist', editorCommand: 'toggleTaskList' }} - ${'image'} | ${{}} + ${'attachment'} | ${{}} ${'table'} | ${{}} ${'more'} | ${{}} `('given a $testId toolbar control', ({ testId, controlProps }) => { diff --git a/spec/frontend/content_editor/components/suggestions_dropdown_spec.js b/spec/frontend/content_editor/components/suggestions_dropdown_spec.js index e72eb892e74..9d34d9d0e9e 100644 --- a/spec/frontend/content_editor/components/suggestions_dropdown_spec.js +++ b/spec/frontend/content_editor/components/suggestions_dropdown_spec.js @@ -1,4 +1,4 @@ -import { GlAvatarLabeled, GlDropdownItem } from '@gitlab/ui'; +import { GlDropdownItem, GlAvatarLabeled, GlLoadingIcon } from '@gitlab/ui'; import { shallowMount } from '@vue/test-utils'; import { extendedWrapper } from 'helpers/vue_test_utils_helper'; import SuggestionsDropdown from '~/content_editor/components/suggestions_dropdown.vue'; @@ -75,6 +75,26 @@ describe('~/content_editor/components/suggestions_dropdown', () => { unicodeVersion: '6.0', }; + it.each` + loading | description + ${false} | ${'does not show a loading indicator'} + ${true} | ${'shows a loading indicator'} + `('$description if loading=$loading', ({ loading }) => { + buildWrapper({ + propsData: { + loading, + char: '@', + nodeType: 'reference', + nodeProps: { + referenceType: 'member', + }, + items: [exampleUser], + }, + }); + + expect(wrapper.findComponent(GlLoadingIcon).exists()).toBe(loading); + }); + describe('on item select', () => { it.each` nodeType | referenceType | char | reference | insertedText | insertedProps diff --git a/spec/frontend/content_editor/components/toolbar_attachment_button_spec.js b/spec/frontend/content_editor/components/toolbar_attachment_button_spec.js new file mode 100644 index 00000000000..06ea863dbfa --- /dev/null +++ 
b/spec/frontend/content_editor/components/toolbar_attachment_button_spec.js @@ -0,0 +1,57 @@ +import { mountExtended } from 'helpers/vue_test_utils_helper'; +import ToolbarAttachmentButton from '~/content_editor/components/toolbar_attachment_button.vue'; +import Attachment from '~/content_editor/extensions/attachment'; +import Link from '~/content_editor/extensions/link'; +import { createTestEditor, mockChainedCommands } from '../test_utils'; + +describe('content_editor/components/toolbar_attachment_button', () => { + let wrapper; + let editor; + + const buildWrapper = () => { + wrapper = mountExtended(ToolbarAttachmentButton, { + provide: { + tiptapEditor: editor, + }, + }); + }; + + const selectFile = async (file) => { + const input = wrapper.findComponent({ ref: 'fileSelector' }); + + // override the property definition because `input.files` isn't directly modifyable + Object.defineProperty(input.element, 'files', { value: [file], writable: true }); + await input.trigger('change'); + }; + + beforeEach(() => { + editor = createTestEditor({ + extensions: [ + Link, + Attachment.configure({ + renderMarkdown: jest.fn(), + uploadsPath: '/uploads/', + }), + ], + }); + + buildWrapper(); + }); + + afterEach(() => { + editor.destroy(); + }); + + it('uploads the selected attachment when file input changes', async () => { + const commands = mockChainedCommands(editor, ['focus', 'uploadAttachment', 'run']); + const file = new File(['foo'], 'foo.png', { type: 'image/png' }); + + await selectFile(file); + + expect(commands.focus).toHaveBeenCalled(); + expect(commands.uploadAttachment).toHaveBeenCalledWith({ file }); + expect(commands.run).toHaveBeenCalled(); + + expect(wrapper.emitted().execute[0]).toEqual([{ contentType: 'link', value: 'upload' }]); + }); +}); diff --git a/spec/frontend/content_editor/components/toolbar_image_button_spec.js b/spec/frontend/content_editor/components/toolbar_image_button_spec.js deleted file mode 100644 index 0ec950137fc..00000000000 --- 
a/spec/frontend/content_editor/components/toolbar_image_button_spec.js +++ /dev/null @@ -1,96 +0,0 @@ -import { GlButton, GlFormInputGroup, GlDropdown } from '@gitlab/ui'; -import { mountExtended } from 'helpers/vue_test_utils_helper'; -import ToolbarImageButton from '~/content_editor/components/toolbar_image_button.vue'; -import Attachment from '~/content_editor/extensions/attachment'; -import Image from '~/content_editor/extensions/image'; -import { stubComponent } from 'helpers/stub_component'; -import { createTestEditor, mockChainedCommands } from '../test_utils'; - -describe('content_editor/components/toolbar_image_button', () => { - let wrapper; - let editor; - - const buildWrapper = () => { - wrapper = mountExtended(ToolbarImageButton, { - provide: { - tiptapEditor: editor, - }, - stubs: { - GlDropdown: stubComponent(GlDropdown), - }, - }); - }; - - const findImageURLInput = () => - wrapper.findComponent(GlFormInputGroup).find('input[type="text"]'); - const findApplyImageButton = () => wrapper.findComponent(GlButton); - const findDropdown = () => wrapper.findComponent(GlDropdown); - - const selectFile = async (file) => { - const input = wrapper.findComponent({ ref: 'fileSelector' }); - - // override the property definition because `input.files` isn't directly modifyable - Object.defineProperty(input.element, 'files', { value: [file], writable: true }); - await input.trigger('change'); - }; - - beforeEach(() => { - editor = createTestEditor({ - extensions: [ - Image, - Attachment.configure({ - renderMarkdown: jest.fn(), - uploadsPath: '/uploads/', - }), - ], - }); - - buildWrapper(); - }); - - afterEach(() => { - editor.destroy(); - }); - - it('sets the image to the value in the URL input when "Insert" button is clicked', async () => { - const commands = mockChainedCommands(editor, ['focus', 'setImage', 'run']); - - await findImageURLInput().setValue('https://example.com/img.jpg'); - await findApplyImageButton().trigger('click'); - - 
expect(commands.focus).toHaveBeenCalled(); - expect(commands.setImage).toHaveBeenCalledWith({ - alt: 'img', - src: 'https://example.com/img.jpg', - canonicalSrc: 'https://example.com/img.jpg', - }); - expect(commands.run).toHaveBeenCalled(); - - expect(wrapper.emitted().execute[0]).toEqual([{ contentType: 'image', value: 'url' }]); - }); - - it('uploads the selected image when file input changes', async () => { - const commands = mockChainedCommands(editor, ['focus', 'uploadAttachment', 'run']); - const file = new File(['foo'], 'foo.png', { type: 'image/png' }); - - await selectFile(file); - - expect(commands.focus).toHaveBeenCalled(); - expect(commands.uploadAttachment).toHaveBeenCalledWith({ file }); - expect(commands.run).toHaveBeenCalled(); - - expect(wrapper.emitted().execute[0]).toEqual([{ contentType: 'image', value: 'upload' }]); - }); - - describe('a11y tests', () => { - it('sets text, title, and text-sr-only properties to the table button dropdown', () => { - buildWrapper(); - - expect(findDropdown().props()).toMatchObject({ - text: 'Insert image', - textSrOnly: true, - }); - expect(findDropdown().attributes('title')).toBe('Insert image'); - }); - }); -}); diff --git a/spec/frontend/content_editor/components/toolbar_link_button_spec.js b/spec/frontend/content_editor/components/toolbar_link_button_spec.js deleted file mode 100644 index 80090c0278f..00000000000 --- a/spec/frontend/content_editor/components/toolbar_link_button_spec.js +++ /dev/null @@ -1,223 +0,0 @@ -import { GlDropdown, GlButton, GlFormInputGroup } from '@gitlab/ui'; -import { mountExtended } from 'helpers/vue_test_utils_helper'; -import ToolbarLinkButton from '~/content_editor/components/toolbar_link_button.vue'; -import eventHubFactory from '~/helpers/event_hub_factory'; -import Link from '~/content_editor/extensions/link'; -import { hasSelection } from '~/content_editor/services/utils'; -import { stubComponent } from 'helpers/stub_component'; -import { createTestEditor, 
mockChainedCommands, emitEditorEvent } from '../test_utils'; - -jest.mock('~/content_editor/services/utils'); - -describe('content_editor/components/toolbar_link_button', () => { - let wrapper; - let editor; - - const buildWrapper = () => { - wrapper = mountExtended(ToolbarLinkButton, { - provide: { - tiptapEditor: editor, - eventHub: eventHubFactory(), - }, - stubs: { - GlDropdown: stubComponent(GlDropdown), - }, - }); - }; - const findDropdown = () => wrapper.findComponent(GlDropdown); - const findLinkURLInput = () => wrapper.findComponent(GlFormInputGroup).find('input[type="text"]'); - const findApplyLinkButton = () => wrapper.findComponent(GlButton); - const findRemoveLinkButton = () => wrapper.findByText('Remove link'); - - const selectFile = async (file) => { - const input = wrapper.findComponent({ ref: 'fileSelector' }); - - // override the property definition because `input.files` isn't directly modifyable - Object.defineProperty(input.element, 'files', { value: [file], writable: true }); - await input.trigger('change'); - }; - - beforeEach(() => { - editor = createTestEditor(); - }); - - afterEach(() => { - editor.destroy(); - }); - - it('renders dropdown component', () => { - buildWrapper(); - - expect(findDropdown().html()).toMatchSnapshot(); - }); - - describe('when there is an active link', () => { - beforeEach(async () => { - jest.spyOn(editor, 'isActive').mockReturnValueOnce(true); - buildWrapper(); - - await emitEditorEvent({ event: 'transaction', tiptapEditor: editor }); - }); - - it('sets dropdown as active when link extension is active', () => { - expect(findDropdown().props('toggleClass')).toEqual({ active: true }); - }); - - it('does not display the upload file option', () => { - expect(wrapper.findByText('Upload file').exists()).toBe(false); - }); - - it('displays a remove link dropdown option', () => { - expect(wrapper.findByText('Remove link').exists()).toBe(true); - }); - - it('executes removeLink command when the remove link option is 
clicked', async () => { - const commands = mockChainedCommands(editor, ['focus', 'unsetLink', 'run']); - - await findRemoveLinkButton().trigger('click'); - - expect(commands.unsetLink).toHaveBeenCalled(); - expect(commands.focus).toHaveBeenCalled(); - expect(commands.run).toHaveBeenCalled(); - }); - - it('updates the link with a new link when "Apply" button is clicked', async () => { - const commands = mockChainedCommands(editor, ['focus', 'unsetLink', 'setLink', 'run']); - - await findLinkURLInput().setValue('https://example'); - await findApplyLinkButton().trigger('click'); - - expect(commands.focus).toHaveBeenCalled(); - expect(commands.unsetLink).toHaveBeenCalled(); - expect(commands.setLink).toHaveBeenCalledWith({ - href: 'https://example', - canonicalSrc: 'https://example', - }); - expect(commands.run).toHaveBeenCalled(); - - expect(wrapper.emitted().execute[0]).toEqual([{ contentType: 'link' }]); - }); - - describe('on selection update', () => { - it('updates link input box with canonical-src if present', async () => { - jest.spyOn(editor, 'getAttributes').mockReturnValueOnce({ - canonicalSrc: 'uploads/my-file.zip', - href: '/username/my-project/uploads/abcdefgh133535/my-file.zip', - }); - - await emitEditorEvent({ event: 'transaction', tiptapEditor: editor }); - - expect(findLinkURLInput().element.value).toEqual('uploads/my-file.zip'); - }); - - it('updates link input box with link href otherwise', async () => { - jest.spyOn(editor, 'getAttributes').mockReturnValueOnce({ - href: 'https://gitlab.com', - }); - - await emitEditorEvent({ event: 'transaction', tiptapEditor: editor }); - - expect(findLinkURLInput().element.value).toEqual('https://gitlab.com'); - }); - }); - }); - - describe('when there is no active link', () => { - beforeEach(() => { - jest.spyOn(editor, 'isActive'); - editor.isActive.mockReturnValueOnce(false); - buildWrapper(); - }); - - it('does not set dropdown as active', () => { - expect(findDropdown().props('toggleClass')).toEqual({ 
active: false }); - }); - - it('displays the upload file option', () => { - expect(wrapper.findByText('Upload file').exists()).toBe(true); - }); - - it('does not display a remove link dropdown option', () => { - expect(wrapper.findByText('Remove link').exists()).toBe(false); - }); - - it('sets the link to the value in the URL input when "Apply" button is clicked', async () => { - const commands = mockChainedCommands(editor, ['focus', 'unsetLink', 'setLink', 'run']); - - await findLinkURLInput().setValue('https://example'); - await findApplyLinkButton().trigger('click'); - - expect(commands.focus).toHaveBeenCalled(); - expect(commands.setLink).toHaveBeenCalledWith({ - href: 'https://example', - canonicalSrc: 'https://example', - }); - expect(commands.run).toHaveBeenCalled(); - - expect(wrapper.emitted().execute[0]).toEqual([{ contentType: 'link' }]); - }); - - it('uploads the selected image when file input changes', async () => { - const commands = mockChainedCommands(editor, ['focus', 'uploadAttachment', 'run']); - const file = new File(['foo'], 'foo.png', { type: 'image/png' }); - - await selectFile(file); - - expect(commands.focus).toHaveBeenCalled(); - expect(commands.uploadAttachment).toHaveBeenCalledWith({ file }); - expect(commands.run).toHaveBeenCalled(); - - expect(wrapper.emitted().execute[0]).toEqual([{ contentType: 'link' }]); - }); - }); - - describe('when the user displays the dropdown', () => { - let commands; - - beforeEach(() => { - commands = mockChainedCommands(editor, ['focus', 'extendMarkRange', 'run']); - }); - - describe('given the user has not selected text', () => { - beforeEach(() => { - hasSelection.mockReturnValueOnce(false); - }); - - it('the editor selection is extended to the current mark extent', () => { - buildWrapper(); - - findDropdown().vm.$emit('show'); - expect(commands.extendMarkRange).toHaveBeenCalledWith(Link.name); - expect(commands.focus).toHaveBeenCalled(); - expect(commands.run).toHaveBeenCalled(); - }); - }); - - 
describe('given the user has selected text', () => { - beforeEach(() => { - hasSelection.mockReturnValueOnce(true); - }); - - it('the editor does not modify the current selection', () => { - buildWrapper(); - - findDropdown().vm.$emit('show'); - expect(commands.extendMarkRange).not.toHaveBeenCalled(); - expect(commands.focus).not.toHaveBeenCalled(); - expect(commands.run).not.toHaveBeenCalled(); - }); - }); - }); - - describe('a11y tests', () => { - it('sets text, title, and text-sr-only properties to the table button dropdown', () => { - buildWrapper(); - - expect(findDropdown().props()).toMatchObject({ - text: 'Insert link', - textSrOnly: true, - }); - expect(findDropdown().attributes('title')).toBe('Insert link'); - }); - }); -}); diff --git a/spec/frontend/content_editor/components/toolbar_more_dropdown_spec.js b/spec/frontend/content_editor/components/toolbar_more_dropdown_spec.js index 5af4784f358..78b02744d51 100644 --- a/spec/frontend/content_editor/components/toolbar_more_dropdown_spec.js +++ b/spec/frontend/content_editor/components/toolbar_more_dropdown_spec.js @@ -53,7 +53,7 @@ describe('content_editor/components/toolbar_more_dropdown', () => { let commands; let btn; - beforeEach(async () => { + beforeEach(() => { buildWrapper(); commands = mockChainedCommands(tiptapEditor, [command, 'focus', 'run']); diff --git a/spec/frontend/content_editor/components/wrappers/code_block_spec.js b/spec/frontend/content_editor/components/wrappers/code_block_spec.js index 057e50cd0e2..cbeea90dcb4 100644 --- a/spec/frontend/content_editor/components/wrappers/code_block_spec.js +++ b/spec/frontend/content_editor/components/wrappers/code_block_spec.js @@ -26,7 +26,7 @@ describe('content/components/wrappers/code_block', () => { eventHub = eventHubFactory(); }; - const createWrapper = async (nodeAttrs = { language }) => { + const createWrapper = (nodeAttrs = { language }) => { updateAttributesFn = jest.fn(); wrapper = mountExtended(CodeBlockWrapper, { @@ -97,7 +97,7 @@ 
describe('content/components/wrappers/code_block', () => { jest.spyOn(tiptapEditor, 'isActive').mockReturnValue(true); }); - it('does not render a preview if showPreview: false', async () => { + it('does not render a preview if showPreview: false', () => { createWrapper({ language: 'plantuml', isDiagram: true, showPreview: false }); expect(wrapper.findComponent({ ref: 'diagramContainer' }).exists()).toBe(false); diff --git a/spec/frontend/content_editor/components/wrappers/details_spec.js b/spec/frontend/content_editor/components/wrappers/details_spec.js index 232c1e9aede..e35b04636f7 100644 --- a/spec/frontend/content_editor/components/wrappers/details_spec.js +++ b/spec/frontend/content_editor/components/wrappers/details_spec.js @@ -5,7 +5,7 @@ import DetailsWrapper from '~/content_editor/components/wrappers/details.vue'; describe('content/components/wrappers/details', () => { let wrapper; - const createWrapper = async () => { + const createWrapper = () => { wrapper = shallowMountExtended(DetailsWrapper, { propsData: { node: {}, diff --git a/spec/frontend/content_editor/components/wrappers/footnote_definition_spec.js b/spec/frontend/content_editor/components/wrappers/footnote_definition_spec.js index 91c6799478e..b5b118a2d9a 100644 --- a/spec/frontend/content_editor/components/wrappers/footnote_definition_spec.js +++ b/spec/frontend/content_editor/components/wrappers/footnote_definition_spec.js @@ -4,7 +4,7 @@ import FootnoteDefinitionWrapper from '~/content_editor/components/wrappers/foot describe('content/components/wrappers/footnote_definition', () => { let wrapper; - const createWrapper = async (node = {}) => { + const createWrapper = (node = {}) => { wrapper = shallowMountExtended(FootnoteDefinitionWrapper, { propsData: { node, diff --git a/spec/frontend/content_editor/components/wrappers/label_spec.js b/spec/frontend/content_editor/components/wrappers/label_spec.js deleted file mode 100644 index fa32b746142..00000000000 --- 
a/spec/frontend/content_editor/components/wrappers/label_spec.js +++ /dev/null @@ -1,32 +0,0 @@ -import { GlLabel } from '@gitlab/ui'; -import { shallowMountExtended } from 'helpers/vue_test_utils_helper'; -import LabelWrapper from '~/content_editor/components/wrappers/label.vue'; - -describe('content/components/wrappers/label', () => { - let wrapper; - - const createWrapper = async (node = {}) => { - wrapper = shallowMountExtended(LabelWrapper, { - propsData: { node }, - }); - }; - - it("renders a GlLabel with the node's text and color", () => { - createWrapper({ attrs: { color: '#ff0000', text: 'foo bar', originalText: '~"foo bar"' } }); - - const glLabel = wrapper.findComponent(GlLabel); - - expect(glLabel.props()).toMatchObject( - expect.objectContaining({ - title: 'foo bar', - backgroundColor: '#ff0000', - }), - ); - }); - - it('renders a scoped label if there is a "::" in the label', () => { - createWrapper({ attrs: { color: '#ff0000', text: 'foo::bar', originalText: '~"foo::bar"' } }); - - expect(wrapper.findComponent(GlLabel).props().scoped).toBe(true); - }); -}); diff --git a/spec/frontend/content_editor/components/wrappers/reference_label_spec.js b/spec/frontend/content_editor/components/wrappers/reference_label_spec.js new file mode 100644 index 00000000000..f57caee911b --- /dev/null +++ b/spec/frontend/content_editor/components/wrappers/reference_label_spec.js @@ -0,0 +1,32 @@ +import { GlLabel } from '@gitlab/ui'; +import { shallowMountExtended } from 'helpers/vue_test_utils_helper'; +import ReferenceLabelWrapper from '~/content_editor/components/wrappers/reference_label.vue'; + +describe('content/components/wrappers/reference_label', () => { + let wrapper; + + const createWrapper = (node = {}) => { + wrapper = shallowMountExtended(ReferenceLabelWrapper, { + propsData: { node }, + }); + }; + + it("renders a GlLabel with the node's text and color", () => { + createWrapper({ attrs: { color: '#ff0000', text: 'foo bar', originalText: '~"foo bar"' } }); + + 
const glLabel = wrapper.findComponent(GlLabel); + + expect(glLabel.props()).toMatchObject( + expect.objectContaining({ + title: 'foo bar', + backgroundColor: '#ff0000', + }), + ); + }); + + it('renders a scoped label if there is a "::" in the label', () => { + createWrapper({ attrs: { color: '#ff0000', text: 'foo::bar', originalText: '~"foo::bar"' } }); + + expect(wrapper.findComponent(GlLabel).props().scoped).toBe(true); + }); +}); diff --git a/spec/frontend/content_editor/components/wrappers/reference_spec.js b/spec/frontend/content_editor/components/wrappers/reference_spec.js new file mode 100644 index 00000000000..828b92a6b1e --- /dev/null +++ b/spec/frontend/content_editor/components/wrappers/reference_spec.js @@ -0,0 +1,46 @@ +import { GlLink } from '@gitlab/ui'; +import { shallowMountExtended } from 'helpers/vue_test_utils_helper'; +import ReferenceWrapper from '~/content_editor/components/wrappers/reference.vue'; + +describe('content/components/wrappers/reference', () => { + let wrapper; + + const createWrapper = (node = {}) => { + wrapper = shallowMountExtended(ReferenceWrapper, { + propsData: { node }, + }); + }; + + it('renders a span for commands', () => { + createWrapper({ attrs: { referenceType: 'command', text: '/assign' } }); + + const span = wrapper.find('span'); + expect(span.text()).toBe('/assign'); + }); + + it('renders an anchor for everything else', () => { + createWrapper({ attrs: { referenceType: 'issue', text: '#252522' } }); + + const link = wrapper.findComponent(GlLink); + expect(link.text()).toBe('#252522'); + }); + + it('adds gfm-project_member class for project members', () => { + createWrapper({ attrs: { referenceType: 'user', text: '@root' } }); + + const link = wrapper.findComponent(GlLink); + expect(link.text()).toBe('@root'); + expect(link.classes('gfm-project_member')).toBe(true); + expect(link.classes('current-user')).toBe(false); + }); + + it('adds a current-user class if the project member is current user', () => { + 
window.gon = { current_username: 'root' }; + + createWrapper({ attrs: { referenceType: 'user', text: '@root' } }); + + const link = wrapper.findComponent(GlLink); + expect(link.text()).toBe('@root'); + expect(link.classes('current-user')).toBe(true); + }); +}); diff --git a/spec/frontend/content_editor/components/wrappers/table_cell_base_spec.js b/spec/frontend/content_editor/components/wrappers/table_cell_base_spec.js index d8f34565705..71ffbd3f93c 100644 --- a/spec/frontend/content_editor/components/wrappers/table_cell_base_spec.js +++ b/spec/frontend/content_editor/components/wrappers/table_cell_base_spec.js @@ -13,7 +13,7 @@ describe('content/components/wrappers/table_cell_base', () => { let editor; let node; - const createWrapper = async (propsData = { cellType: 'td' }) => { + const createWrapper = (propsData = { cellType: 'td' }) => { wrapper = shallowMountExtended(TableCellBaseWrapper, { propsData: { editor, @@ -118,7 +118,7 @@ describe('content/components/wrappers/table_cell_base', () => { }, ); - it('does not allow deleting rows and columns', async () => { + it('does not allow deleting rows and columns', () => { expect(findDropdownItemWithLabelExists('Delete row')).toBe(false); expect(findDropdownItemWithLabelExists('Delete column')).toBe(false); }); @@ -173,7 +173,7 @@ describe('content/components/wrappers/table_cell_base', () => { await nextTick(); }); - it('does not allow adding a row before the header', async () => { + it('does not allow adding a row before the header', () => { expect(findDropdownItemWithLabelExists('Insert row before')).toBe(false); }); diff --git a/spec/frontend/content_editor/components/wrappers/table_cell_body_spec.js b/spec/frontend/content_editor/components/wrappers/table_cell_body_spec.js index 506f442bcc7..4c91573e0c7 100644 --- a/spec/frontend/content_editor/components/wrappers/table_cell_body_spec.js +++ b/spec/frontend/content_editor/components/wrappers/table_cell_body_spec.js @@ -8,7 +8,7 @@ 
describe('content/components/wrappers/table_cell_body', () => { let editor; let node; - const createWrapper = async () => { + const createWrapper = () => { wrapper = shallowMount(TableCellBodyWrapper, { propsData: { editor, diff --git a/spec/frontend/content_editor/components/wrappers/table_cell_header_spec.js b/spec/frontend/content_editor/components/wrappers/table_cell_header_spec.js index bebe7fb4124..689a8bc32bb 100644 --- a/spec/frontend/content_editor/components/wrappers/table_cell_header_spec.js +++ b/spec/frontend/content_editor/components/wrappers/table_cell_header_spec.js @@ -8,7 +8,7 @@ describe('content/components/wrappers/table_cell_header', () => { let editor; let node; - const createWrapper = async () => { + const createWrapper = () => { wrapper = shallowMount(TableCellHeaderWrapper, { propsData: { editor, diff --git a/spec/frontend/content_editor/components/wrappers/table_of_contents_spec.js b/spec/frontend/content_editor/components/wrappers/table_of_contents_spec.js index 4d5911dda0c..037da7678bb 100644 --- a/spec/frontend/content_editor/components/wrappers/table_of_contents_spec.js +++ b/spec/frontend/content_editor/components/wrappers/table_of_contents_spec.js @@ -20,7 +20,7 @@ describe('content/components/wrappers/table_of_contents', () => { eventHub = eventHubFactory(); }; - const createWrapper = async () => { + const createWrapper = () => { wrapper = mountExtended(TableOfContentsWrapper, { propsData: { editor: tiptapEditor, diff --git a/spec/frontend/content_editor/extensions/paste_markdown_spec.js b/spec/frontend/content_editor/extensions/paste_markdown_spec.js index 8f3a4934e77..c9997e3c58f 100644 --- a/spec/frontend/content_editor/extensions/paste_markdown_spec.js +++ b/spec/frontend/content_editor/extensions/paste_markdown_spec.js @@ -2,6 +2,7 @@ import PasteMarkdown from '~/content_editor/extensions/paste_markdown'; import CodeBlockHighlight from '~/content_editor/extensions/code_block_highlight'; import Diagram from 
'~/content_editor/extensions/diagram'; import Frontmatter from '~/content_editor/extensions/frontmatter'; +import Heading from '~/content_editor/extensions/heading'; import Bold from '~/content_editor/extensions/bold'; import { VARIANT_DANGER } from '~/alert'; import eventHubFactory from '~/helpers/event_hub_factory'; @@ -20,6 +21,7 @@ describe('content_editor/extensions/paste_markdown', () => { let doc; let p; let bold; + let heading; let renderMarkdown; let eventHub; const defaultData = { 'text/plain': '**bold text**' }; @@ -36,16 +38,18 @@ describe('content_editor/extensions/paste_markdown', () => { CodeBlockHighlight, Diagram, Frontmatter, + Heading, PasteMarkdown.configure({ renderMarkdown, eventHub }), ], }); ({ - builders: { doc, p, bold }, + builders: { doc, p, bold, heading }, } = createDocBuilder({ tiptapEditor, names: { bold: { markType: Bold.name }, + heading: { nodeType: Heading.name }, }, })); }); @@ -110,6 +114,52 @@ describe('content_editor/extensions/paste_markdown', () => { expect(tiptapEditor.state.doc.toJSON()).toEqual(expectedDoc.toJSON()); }); + + describe('when pasting inline content in an existing paragraph', () => { + it('inserts the inline content next to the existing paragraph content', async () => { + const expectedDoc = doc(p('Initial text and', bold('bold text'))); + + tiptapEditor.commands.setContent('Initial text and '); + + await triggerPasteEventHandlerAndWaitForTransaction(buildClipboardEvent()); + + expect(tiptapEditor.state.doc.toJSON()).toEqual(expectedDoc.toJSON()); + }); + }); + + describe('when pasting inline content and there is text selected', () => { + it('inserts the block content after the existing paragraph', async () => { + const expectedDoc = doc(p('Initial text', bold('bold text'))); + + tiptapEditor.commands.setContent('Initial text and '); + tiptapEditor.commands.setTextSelection({ from: 13, to: 17 }); + + await triggerPasteEventHandlerAndWaitForTransaction(buildClipboardEvent()); + + 
expect(tiptapEditor.state.doc.toJSON()).toEqual(expectedDoc.toJSON()); + }); + }); + + describe('when pasting block content in an existing paragraph', () => { + beforeEach(() => { + renderMarkdown.mockReset(); + renderMarkdown.mockResolvedValueOnce('

    Heading

    bold text

    '); + }); + + it('inserts the block content after the existing paragraph', async () => { + const expectedDoc = doc( + p('Initial text and'), + heading({ level: 1 }, 'Heading'), + p(bold('bold text')), + ); + + tiptapEditor.commands.setContent('Initial text and '); + + await triggerPasteEventHandlerAndWaitForTransaction(buildClipboardEvent()); + + expect(tiptapEditor.state.doc.toJSON()).toEqual(expectedDoc.toJSON()); + }); + }); }); describe('when rendering markdown fails', () => { diff --git a/spec/frontend/content_editor/markdown_snapshot_spec.js b/spec/frontend/content_editor/markdown_snapshot_spec.js index fd64003420e..49b466fd7f5 100644 --- a/spec/frontend/content_editor/markdown_snapshot_spec.js +++ b/spec/frontend/content_editor/markdown_snapshot_spec.js @@ -42,7 +42,7 @@ describe('markdown example snapshots in ContentEditor', () => { const expectedProseMirrorJsonExamples = loadExamples(prosemirrorJsonYml); const exampleNames = Object.keys(markdownExamples); - beforeAll(async () => { + beforeAll(() => { return renderHtmlAndJsonForAllExamples(markdownExamples).then((examples) => { actualHtmlAndJsonExamples = examples; }); @@ -60,7 +60,7 @@ describe('markdown example snapshots in ContentEditor', () => { if (skipRunningSnapshotWysiwygHtmlTests) { it.todo(`${exampleNamePrefix} HTML: ${skipRunningSnapshotWysiwygHtmlTests}`); } else { - it(`${exampleNamePrefix} HTML`, async () => { + it(`${exampleNamePrefix} HTML`, () => { const expectedHtml = expectedHtmlExamples[name].wysiwyg; const { html: actualHtml } = actualHtmlAndJsonExamples[name]; @@ -78,7 +78,7 @@ describe('markdown example snapshots in ContentEditor', () => { if (skipRunningSnapshotProsemirrorJsonTests) { it.todo(`${exampleNamePrefix} ProseMirror JSON: ${skipRunningSnapshotProsemirrorJsonTests}`); } else { - it(`${exampleNamePrefix} ProseMirror JSON`, async () => { + it(`${exampleNamePrefix} ProseMirror JSON`, () => { const expectedJson = expectedProseMirrorJsonExamples[name]; const { json: 
actualJson } = actualHtmlAndJsonExamples[name]; diff --git a/spec/frontend/content_editor/services/content_editor_spec.js b/spec/frontend/content_editor/services/content_editor_spec.js index 6175cbdd3d4..5dfe9c06923 100644 --- a/spec/frontend/content_editor/services/content_editor_spec.js +++ b/spec/frontend/content_editor/services/content_editor_spec.js @@ -64,13 +64,13 @@ describe('content_editor/services/content_editor', () => { }); describe('editable', () => { - it('returns true when tiptapEditor is editable', async () => { + it('returns true when tiptapEditor is editable', () => { contentEditor.setEditable(true); expect(contentEditor.editable).toBe(true); }); - it('returns false when tiptapEditor is readonly', async () => { + it('returns false when tiptapEditor is readonly', () => { contentEditor.setEditable(false); expect(contentEditor.editable).toBe(false); diff --git a/spec/frontend/content_editor/services/create_content_editor_spec.js b/spec/frontend/content_editor/services/create_content_editor_spec.js index 00cc628ca72..53cd51b8c5f 100644 --- a/spec/frontend/content_editor/services/create_content_editor_spec.js +++ b/spec/frontend/content_editor/services/create_content_editor_spec.js @@ -53,7 +53,7 @@ describe('content_editor/services/create_content_editor', () => { }); }); - it('allows providing external content editor extensions', async () => { + it('allows providing external content editor extensions', () => { const labelReference = 'this is a ~group::editor'; const { tiptapExtension, serializer } = createTestContentEditorExtension(); diff --git a/spec/frontend/content_editor/services/gl_api_markdown_deserializer_spec.js b/spec/frontend/content_editor/services/gl_api_markdown_deserializer_spec.js index 8ee37282ee9..a9960918e62 100644 --- a/spec/frontend/content_editor/services/gl_api_markdown_deserializer_spec.js +++ b/spec/frontend/content_editor/services/gl_api_markdown_deserializer_spec.js @@ -43,7 +43,7 @@ 
describe('content_editor/services/gl_api_markdown_deserializer', () => { }); }); - it('transforms HTML returned by render function to a ProseMirror document', async () => { + it('transforms HTML returned by render function to a ProseMirror document', () => { const document = doc(p(bold(text)), comment(' some comment ')); expect(result.document.toJSON()).toEqual(document.toJSON()); diff --git a/spec/frontend/content_editor/services/markdown_serializer_spec.js b/spec/frontend/content_editor/services/markdown_serializer_spec.js index c4d302547a5..a28d5a278e6 100644 --- a/spec/frontend/content_editor/services/markdown_serializer_spec.js +++ b/spec/frontend/content_editor/services/markdown_serializer_spec.js @@ -268,6 +268,19 @@ comment --> ).toBe('![GitLab][gitlab-url]'); }); + it('omits image data urls when serializing', () => { + expect( + serialize( + paragraph( + image({ + src: 'data:image/png;base64,iVBORw0KGgoAAAAN', + alt: 'image', + }), + ), + ), + ).toBe('![image]()'); + }); + it('correctly serializes strikethrough', () => { expect(serialize(paragraph(strike('deleted content')))).toBe('~~deleted content~~'); }); @@ -885,6 +898,59 @@ _An elephant at sunset_ ); }); + it('correctly renders a table with checkboxes', () => { + expect( + serialize( + table( + // each table cell must contain at least one paragraph + tableRow( + tableHeader(paragraph('')), + tableHeader(paragraph('Item')), + tableHeader(paragraph('Description')), + ), + tableRow( + tableCell(taskList(taskItem(paragraph('')))), + tableCell(paragraph('Item 1')), + tableCell(paragraph('Description 1')), + ), + tableRow( + tableCell(taskList(taskItem(paragraph('some text')))), + tableCell(paragraph('Item 2')), + tableCell(paragraph('Description 2')), + ), + ), + ).trim(), + ).toBe( + ` + + + + + + + + + + + + + + + + +
    + +ItemDescription
    + +* [ ]   +Item 1Description 1
    + +* [ ] some text +Item 2Description 2
    + `.trim(), + ); + }); + it('correctly serializes a table with line breaks', () => { expect( serialize( @@ -1309,6 +1375,25 @@ paragraph .run(); }; + const editNonInclusiveMarkAction = (initialContent) => { + tiptapEditor.commands.setContent(initialContent.toJSON()); + tiptapEditor.commands.selectTextblockEnd(); + + let { from } = tiptapEditor.state.selection; + tiptapEditor.commands.setTextSelection({ + from: from - 1, + to: from - 1, + }); + + const sel = tiptapEditor.state.doc.textBetween(from - 1, from, ' '); + tiptapEditor.commands.insertContent(`${sel} modified`); + + tiptapEditor.commands.selectTextblockEnd(); + from = tiptapEditor.state.selection.from; + + tiptapEditor.commands.deleteRange({ from: from - 1, to: from }); + }; + it.each` mark | markdown | modifiedMarkdown | editAction ${'bold'} | ${'**bold**'} | ${'**bold modified**'} | ${defaultEditAction} @@ -1319,8 +1404,8 @@ paragraph ${'italic'} | ${'*italic*'} | ${'*italic modified*'} | ${defaultEditAction} ${'italic'} | ${'italic'} | ${'italic modified'} | ${defaultEditAction} ${'italic'} | ${'italic'} | ${'italic modified'} | ${defaultEditAction} - ${'link'} | ${'[gitlab](https://gitlab.com)'} | ${'[gitlab modified](https://gitlab.com)'} | ${defaultEditAction} - ${'link'} | ${'link'} | ${'link modified'} | ${defaultEditAction} + ${'link'} | ${'[gitlab](https://gitlab.com)'} | ${'[gitlab modified](https://gitlab.com)'} | ${editNonInclusiveMarkAction} + ${'link'} | ${'link'} | ${'link modified'} | ${editNonInclusiveMarkAction} ${'link'} | ${'link www.gitlab.com'} | ${'modified link www.gitlab.com'} | ${prependContentEditAction} ${'link'} | ${'link https://www.gitlab.com'} | ${'modified link https://www.gitlab.com'} | ${prependContentEditAction} ${'link'} | ${'link(https://www.gitlab.com)'} | ${'modified link(https://www.gitlab.com)'} | ${prependContentEditAction} diff --git a/spec/frontend/content_editor/services/track_input_rules_and_shortcuts_spec.js 
b/spec/frontend/content_editor/services/track_input_rules_and_shortcuts_spec.js index 8c1a3831a74..1459988cf8f 100644 --- a/spec/frontend/content_editor/services/track_input_rules_and_shortcuts_spec.js +++ b/spec/frontend/content_editor/services/track_input_rules_and_shortcuts_spec.js @@ -43,7 +43,7 @@ describe('content_editor/services/track_input_rules_and_shortcuts', () => { }); describe('when creating a heading using an keyboard shortcut', () => { - it('sends a tracking event indicating that a heading was created using an input rule', async () => { + it('sends a tracking event indicating that a heading was created using an input rule', () => { const shortcuts = Heading.parent.config.addKeyboardShortcuts.call(Heading); const [firstShortcut] = Object.keys(shortcuts); const nodeName = Heading.name; @@ -68,7 +68,7 @@ describe('content_editor/services/track_input_rules_and_shortcuts', () => { }); describe('when creating a heading using an input rule', () => { - it('sends a tracking event indicating that a heading was created using an input rule', async () => { + it('sends a tracking event indicating that a heading was created using an input rule', () => { const nodeName = Heading.name; triggerNodeInputRule({ tiptapEditor: editor, inputRuleText: '## ' }); expect(trackingSpy).toHaveBeenCalledWith(undefined, INPUT_RULE_TRACKING_ACTION, { diff --git a/spec/frontend/contributors/component/__snapshots__/contributors_spec.js.snap b/spec/frontend/contributors/component/__snapshots__/contributors_spec.js.snap index 4b7439f6fd2..5cfb4702be7 100644 --- a/spec/frontend/contributors/component/__snapshots__/contributors_spec.js.snap +++ b/spec/frontend/contributors/component/__snapshots__/contributors_spec.js.snap @@ -64,6 +64,7 @@ exports[`Contributors charts should render charts and a RefSelector when loading legendlayout="inline" legendmaxtext="Max" legendmintext="Min" + legendseriesinfo="" option="[object Object]" responsive="" thresholds="" @@ -100,6 +101,7 @@ 
exports[`Contributors charts should render charts and a RefSelector when loading legendlayout="inline" legendmaxtext="Max" legendmintext="Min" + legendseriesinfo="" option="[object Object]" responsive="" thresholds="" diff --git a/spec/frontend/custom_metrics/components/custom_metrics_form_fields_spec.js b/spec/frontend/custom_metrics/components/custom_metrics_form_fields_spec.js index 12fef9d5ddf..d3cdd0d16ef 100644 --- a/spec/frontend/custom_metrics/components/custom_metrics_form_fields_spec.js +++ b/spec/frontend/custom_metrics/components/custom_metrics_form_fields_spec.js @@ -173,7 +173,7 @@ describe('custom metrics form fields component', () => { return axios.waitForAll(); }); - it('shows invalid query message', async () => { + it('shows invalid query message', () => { expect(wrapper.text()).toContain(errorMessage); }); }); diff --git a/spec/frontend/deploy_keys/components/key_spec.js b/spec/frontend/deploy_keys/components/key_spec.js index 5f20d4ad542..3c4fa2a6de6 100644 --- a/spec/frontend/deploy_keys/components/key_spec.js +++ b/spec/frontend/deploy_keys/components/key_spec.js @@ -1,9 +1,10 @@ import { mount } from '@vue/test-utils'; import { nextTick } from 'vue'; import data from 'test_fixtures/deploy_keys/keys.json'; +import { createMockDirective, getBinding } from 'helpers/vue_mock_directive'; import key from '~/deploy_keys/components/key.vue'; import DeployKeysStore from '~/deploy_keys/store'; -import { getTimeago } from '~/lib/utils/datetime_utility'; +import { getTimeago, formatDate } from '~/lib/utils/datetime_utility'; describe('Deploy keys key', () => { let wrapper; @@ -18,6 +19,9 @@ describe('Deploy keys key', () => { endpoint: 'https://test.host/dummy/endpoint', ...propsData, }, + directives: { + GlTooltip: createMockDirective('gl-tooltip'), + }, }); }; @@ -43,6 +47,33 @@ describe('Deploy keys key', () => { ); }); + it('renders human friendly expiration date', () => { + const expiresAt = new Date(); + createComponent({ + deployKey: { 
...deployKey, expires_at: expiresAt }, + }); + + expect(findTextAndTrim('.key-expires-at')).toBe(`${getTimeago().format(expiresAt)}`); + }); + it('shows tooltip for expiration date', () => { + const expiresAt = new Date(); + createComponent({ + deployKey: { ...deployKey, expires_at: expiresAt }, + }); + + const expiryComponent = wrapper.find('[data-testid="expires-at-tooltip"]'); + const tooltip = getBinding(expiryComponent.element, 'gl-tooltip'); + expect(tooltip).toBeDefined(); + expect(expiryComponent.attributes('title')).toBe(`${formatDate(expiresAt)}`); + }); + it('renders never when no expiration date', () => { + createComponent({ + deployKey: { ...deployKey, expires_at: null }, + }); + + expect(wrapper.find('[data-testid="expires-never"]').exists()).toBe(true); + }); + it('shows pencil button for editing', () => { createComponent({ deployKey }); diff --git a/spec/frontend/design_management/components/design_notes/design_discussion_spec.js b/spec/frontend/design_management/components/design_notes/design_discussion_spec.js index 56bf0fa60a7..a6ab147884f 100644 --- a/spec/frontend/design_management/components/design_notes/design_discussion_spec.js +++ b/spec/frontend/design_management/components/design_notes/design_discussion_spec.js @@ -381,7 +381,7 @@ describe('Design discussions component', () => { }); }); - it('should open confirmation modal when the note emits `delete-note` event', async () => { + it('should open confirmation modal when the note emits `delete-note` event', () => { createComponent(); findDesignNotes().at(0).vm.$emit('delete-note', { id: '1' }); diff --git a/spec/frontend/design_management/components/design_notes/design_note_spec.js b/spec/frontend/design_management/components/design_notes/design_note_spec.js index 82848bd1a19..6f5b282fa3b 100644 --- a/spec/frontend/design_management/components/design_notes/design_note_spec.js +++ b/spec/frontend/design_management/components/design_notes/design_note_spec.js @@ -189,7 +189,7 @@ 
describe('Design note component', () => { }); }); - it('should emit `delete-note` event with proper payload when delete note button is clicked', async () => { + it('should emit `delete-note` event with proper payload when delete note button is clicked', () => { const payload = { ...note, userPermissions: { diff --git a/spec/frontend/design_management/components/design_notes/design_reply_form_spec.js b/spec/frontend/design_management/components/design_notes/design_reply_form_spec.js index db1cfb4f504..f08efc0c685 100644 --- a/spec/frontend/design_management/components/design_notes/design_reply_form_spec.js +++ b/spec/frontend/design_management/components/design_notes/design_reply_form_spec.js @@ -1,7 +1,9 @@ import { GlAlert } from '@gitlab/ui'; import { mount } from '@vue/test-utils'; -import { nextTick } from 'vue'; +import Vue from 'vue'; +import VueApollo from 'vue-apollo'; import Autosave from '~/autosave'; +import createMockApollo from 'helpers/mock_apollo_helper'; import waitForPromises from 'helpers/wait_for_promises'; import { confirmAction } from '~/lib/utils/confirm_via_gl_modal/confirm_via_gl_modal'; import createNoteMutation from '~/design_management/graphql/mutations/create_note.mutation.graphql'; @@ -17,11 +19,14 @@ import { mockNoteSubmitFailureMutationResponse, } from '../../mock_data/apollo_mock'; +Vue.use(VueApollo); + jest.mock('~/lib/utils/confirm_via_gl_modal/confirm_via_gl_modal'); jest.mock('~/autosave'); describe('Design reply form component', () => { let wrapper; + let mockApollo; const findTextarea = () => wrapper.find('textarea'); const findSubmitButton = () => wrapper.findComponent({ ref: 'submitButton' }); @@ -32,14 +37,10 @@ describe('Design reply form component', () => { const mockComment = 'New comment'; const mockDiscussionId = 'gid://gitlab/Discussion/6466a72f35b163f3c3e52d7976a09387f2c573e8'; const createNoteMutationData = { - mutation: createNoteMutation, - update: expect.anything(), - variables: { - input: { - noteableId: 
mockNoteableId, - discussionId: mockDiscussionId, - body: mockComment, - }, + input: { + noteableId: mockNoteableId, + discussionId: mockDiscussionId, + body: mockComment, }, }; @@ -49,14 +50,15 @@ describe('Design reply form component', () => { const metaKey = { metaKey: true, }; - const mutationHandler = jest.fn().mockResolvedValue(); + const mockMutationHandler = jest.fn().mockResolvedValue(mockNoteSubmitSuccessMutationResponse); function createComponent({ props = {}, mountOptions = {}, data = {}, - mutation = mutationHandler, + mutationHandler = mockMutationHandler, } = {}) { + mockApollo = createMockApollo([[createNoteMutation, mutationHandler]]); wrapper = mount(DesignReplyForm, { propsData: { designNoteMutation: createNoteMutation, @@ -67,11 +69,7 @@ describe('Design reply form component', () => { ...props, }, ...mountOptions, - mocks: { - $apollo: { - mutate: mutation, - }, - }, + apolloProvider: mockApollo, data() { return { ...data, @@ -85,6 +83,7 @@ describe('Design reply form component', () => { }); afterEach(() => { + mockApollo = null; confirmAction.mockReset(); }); @@ -125,9 +124,8 @@ describe('Design reply form component', () => { ${'gid://gitlab/DiffDiscussion/123'} | ${123} `( 'initializes autosave support on discussion with proper key', - async ({ discussionId, shortDiscussionId }) => { + ({ discussionId, shortDiscussionId }) => { createComponent({ props: { discussionId } }); - await nextTick(); expect(Autosave).toHaveBeenCalledWith(expect.any(Element), [ 'Discussion', @@ -138,9 +136,8 @@ describe('Design reply form component', () => { ); describe('when form has no text', () => { - beforeEach(async () => { + beforeEach(() => { createComponent(); - await nextTick(); }); it('submit button is disabled', () => { @@ -151,11 +148,10 @@ describe('Design reply form component', () => { key | keyData ${'ctrl'} | ${ctrlKey} ${'meta'} | ${metaKey} - `('does not perform mutation on textarea $key+enter keydown', async ({ keyData }) => { + `('does not perform 
mutation on textarea $key+enter keydown', ({ keyData }) => { findTextarea().trigger('keydown.enter', keyData); - await nextTick(); - expect(mutationHandler).not.toHaveBeenCalled(); + expect(mockMutationHandler).not.toHaveBeenCalled(); }); it('emits cancelForm event on pressing escape button on textarea', () => { @@ -182,22 +178,20 @@ describe('Design reply form component', () => { noteableId: mockNoteableId, discussionId: mockDiscussionId, }; - const successfulMutation = jest.fn().mockResolvedValue(mockNoteSubmitSuccessMutationResponse); + createComponent({ props: { - designNoteMutation: createNoteMutation, mutationVariables: mockMutationVariables, value: mockComment, }, - mutation: successfulMutation, }); findSubmitButton().vm.$emit('click'); - await nextTick(); - expect(successfulMutation).toHaveBeenCalledWith(createNoteMutationData); + expect(mockMutationHandler).toHaveBeenCalledWith(createNoteMutationData); await waitForPromises(); + expect(wrapper.emitted('note-submit-complete')).toEqual([ [mockNoteSubmitSuccessMutationResponse], ]); @@ -212,20 +206,17 @@ describe('Design reply form component', () => { noteableId: mockNoteableId, discussionId: mockDiscussionId, }; - const successfulMutation = jest.fn().mockResolvedValue(mockNoteSubmitSuccessMutationResponse); + createComponent({ props: { - designNoteMutation: createNoteMutation, mutationVariables: mockMutationVariables, value: mockComment, }, - mutation: successfulMutation, }); findTextarea().trigger('keydown.enter', keyData); - await nextTick(); - expect(successfulMutation).toHaveBeenCalledWith(createNoteMutationData); + expect(mockMutationHandler).toHaveBeenCalledWith(createNoteMutationData); await waitForPromises(); expect(wrapper.emitted('note-submit-complete')).toEqual([ @@ -240,7 +231,7 @@ describe('Design reply form component', () => { designNoteMutation: createNoteMutation, value: mockComment, }, - mutation: failedMutation, + mutationHandler: failedMutation, data: { errorMessage: 'error', }, @@ -260,7 
+251,7 @@ describe('Design reply form component', () => { ${false} | ${false} | ${UPDATE_NOTE_ERROR} `( 'return proper error message on error in case of isDiscussion is $isDiscussion and isNewComment is $isNewComment', - async ({ isDiscussion, isNewComment, errorMessage }) => { + ({ isDiscussion, isNewComment, errorMessage }) => { createComponent({ props: { isDiscussion, isNewComment } }); expect(wrapper.vm.getErrorMessage()).toBe(errorMessage); @@ -275,12 +266,11 @@ describe('Design reply form component', () => { expect(wrapper.emitted('cancel-form')).toHaveLength(1); }); - it('opens confirmation modal on Escape key when text has changed', async () => { + it('opens confirmation modal on Escape key when text has changed', () => { createComponent(); findTextarea().setValue(mockComment); - await nextTick(); findTextarea().trigger('keyup.esc'); expect(confirmAction).toHaveBeenCalled(); @@ -292,7 +282,6 @@ describe('Design reply form component', () => { createComponent({ props: { value: mockComment } }); findTextarea().setValue('Comment changed'); - await nextTick(); findTextarea().trigger('keyup.esc'); expect(confirmAction).toHaveBeenCalled(); @@ -306,10 +295,8 @@ describe('Design reply form component', () => { createComponent({ props: { value: mockComment } }); findTextarea().setValue('Comment changed'); - await nextTick(); findTextarea().trigger('keyup.esc'); - await nextTick(); expect(confirmAction).toHaveBeenCalled(); await waitForPromises(); diff --git a/spec/frontend/design_management/components/design_overlay_spec.js b/spec/frontend/design_management/components/design_overlay_spec.js index 2807fe7727f..3eb47fdb97e 100644 --- a/spec/frontend/design_management/components/design_overlay_spec.js +++ b/spec/frontend/design_management/components/design_overlay_spec.js @@ -1,6 +1,6 @@ -import { shallowMount } from '@vue/test-utils'; import Vue, { nextTick } from 'vue'; import VueApollo from 'vue-apollo'; +import { shallowMountExtended } from 
'helpers/vue_test_utils_helper'; import createMockApollo from 'helpers/mock_apollo_helper'; import waitForPromises from 'helpers/wait_for_promises'; import DesignOverlay from '~/design_management/components/design_overlay.vue'; @@ -16,22 +16,20 @@ describe('Design overlay component', () => { const mockDimensions = { width: 100, height: 100 }; - const findOverlay = () => wrapper.find('[data-testid="design-overlay"]'); - const findAllNotes = () => wrapper.findAll('[data-testid="note-pin"]'); - const findCommentBadge = () => wrapper.find('[data-testid="comment-badge"]'); + const findOverlay = () => wrapper.findByTestId('design-overlay'); + const findAllNotes = () => wrapper.findAllByTestId('note-pin'); + const findCommentBadge = () => wrapper.findByTestId('comment-badge'); const findBadgeAtIndex = (noteIndex) => findAllNotes().at(noteIndex); const findFirstBadge = () => findBadgeAtIndex(0); const findSecondBadge = () => findBadgeAtIndex(1); - const clickAndDragBadge = async (elem, fromPoint, toPoint) => { + const clickAndDragBadge = (elem, fromPoint, toPoint) => { elem.vm.$emit( 'mousedown', new MouseEvent('click', { clientX: fromPoint.x, clientY: fromPoint.y }), ); findOverlay().trigger('mousemove', { clientX: toPoint.x, clientY: toPoint.y }); - await nextTick(); elem.vm.$emit('mouseup', new MouseEvent('click', { clientX: toPoint.x, clientY: toPoint.y })); - await nextTick(); }; function createComponent(props = {}, data = {}) { @@ -47,7 +45,7 @@ describe('Design overlay component', () => { }, }); - wrapper = shallowMount(DesignOverlay, { + wrapper = shallowMountExtended(DesignOverlay, { apolloProvider, propsData: { dimensions: mockDimensions, @@ -80,7 +78,7 @@ describe('Design overlay component', () => { expect(wrapper.attributes().style).toBe('width: 100px; height: 100px; top: 0px; left: 0px;'); }); - it('should emit `openCommentForm` when clicking on overlay', async () => { + it('should emit `openCommentForm` when clicking on overlay', () => { createComponent(); 
const newCoordinates = { x: 10, @@ -90,7 +88,7 @@ describe('Design overlay component', () => { wrapper .find('[data-qa-selector="design_image_button"]') .trigger('mouseup', { offsetX: newCoordinates.x, offsetY: newCoordinates.y }); - await nextTick(); + expect(wrapper.emitted('openCommentForm')).toEqual([ [{ x: newCoordinates.x, y: newCoordinates.y }], ]); @@ -175,25 +173,15 @@ describe('Design overlay component', () => { }); }); - it('should recalculate badges positions on window resize', async () => { + it('should calculate badges positions based on dimensions', () => { createComponent({ notes, - dimensions: { - width: 400, - height: 400, - }, - }); - - expect(findFirstBadge().props('position')).toEqual({ left: '40px', top: '60px' }); - - wrapper.setProps({ dimensions: { width: 200, height: 200, }, }); - await nextTick(); expect(findFirstBadge().props('position')).toEqual({ left: '20px', top: '30px' }); }); @@ -216,7 +204,6 @@ describe('Design overlay component', () => { new MouseEvent('click', { clientX: position.x, clientY: position.y }), ); - await nextTick(); findFirstBadge().vm.$emit( 'mouseup', new MouseEvent('click', { clientX: position.x, clientY: position.y }), @@ -290,7 +277,7 @@ describe('Design overlay component', () => { }); describe('when moving the comment badge', () => { - it('should update badge style when note-moving action ends', async () => { + it('should update badge style when note-moving action ends', () => { const { position } = notes[0]; createComponent({ currentCommentForm: { @@ -298,19 +285,15 @@ describe('Design overlay component', () => { }, }); - const commentBadge = findCommentBadge(); + expect(findCommentBadge().props('position')).toEqual({ left: '10px', top: '15px' }); + const toPoint = { x: 20, y: 20 }; - await clickAndDragBadge(commentBadge, { x: position.x, y: position.y }, toPoint); - commentBadge.vm.$emit('mouseup', new MouseEvent('click')); - // simulates the currentCommentForm being updated in index.vue component, and - // 
propagated back down to this prop - wrapper.setProps({ + createComponent({ currentCommentForm: { height: position.height, width: position.width, ...toPoint }, }); - await nextTick(); - expect(commentBadge.props('position')).toEqual({ left: '20px', top: '20px' }); + expect(findCommentBadge().props('position')).toEqual({ left: '20px', top: '20px' }); }); it('should emit `openCommentForm` event when mouseleave fired on overlay element', async () => { @@ -330,8 +313,7 @@ describe('Design overlay component', () => { newCoordinates, ); - wrapper.trigger('mouseleave'); - await nextTick(); + findOverlay().vm.$emit('mouseleave'); expect(wrapper.emitted('openCommentForm')).toEqual([[newCoordinates]]); }); diff --git a/spec/frontend/design_management/components/design_scaler_spec.js b/spec/frontend/design_management/components/design_scaler_spec.js index 62a26a8f5dd..b29448b4471 100644 --- a/spec/frontend/design_management/components/design_scaler_spec.js +++ b/spec/frontend/design_management/components/design_scaler_spec.js @@ -36,7 +36,7 @@ describe('Design management design scaler component', () => { expect(wrapper.emitted('scale')[1]).toEqual([1]); }); - it('emits @scale event when "decrement" button clicked', async () => { + it('emits @scale event when "decrement" button clicked', () => { getDecreaseScaleButton().vm.$emit('click'); expect(wrapper.emitted('scale')[1]).toEqual([1.4]); }); diff --git a/spec/frontend/design_management/components/design_todo_button_spec.js b/spec/frontend/design_management/components/design_todo_button_spec.js index f713203c0ee..698535d8937 100644 --- a/spec/frontend/design_management/components/design_todo_button_spec.js +++ b/spec/frontend/design_management/components/design_todo_button_spec.js @@ -81,7 +81,7 @@ describe('Design management design todo button', () => { await nextTick(); }); - it('calls `$apollo.mutate` with the `todoMarkDone` mutation and variables containing `id`', async () => { + it('calls `$apollo.mutate` with the 
`todoMarkDone` mutation and variables containing `id`', () => { const todoMarkDoneMutationVariables = { mutation: todoMarkDoneMutation, update: expect.anything(), @@ -127,7 +127,7 @@ describe('Design management design todo button', () => { await nextTick(); }); - it('calls `$apollo.mutate` with the `createDesignTodoMutation` mutation and variables containing `issuable_id`, `issue_id`, & `projectPath`', async () => { + it('calls `$apollo.mutate` with the `createDesignTodoMutation` mutation and variables containing `issuable_id`, `issue_id`, & `projectPath`', () => { const createDesignTodoMutationVariables = { mutation: createDesignTodoMutation, update: expect.anything(), diff --git a/spec/frontend/design_management/components/toolbar/__snapshots__/design_navigation_spec.js.snap b/spec/frontend/design_management/components/toolbar/__snapshots__/design_navigation_spec.js.snap index b5a69b28a88..934bda570d4 100644 --- a/spec/frontend/design_management/components/toolbar/__snapshots__/design_navigation_spec.js.snap +++ b/spec/frontend/design_management/components/toolbar/__snapshots__/design_navigation_spec.js.snap @@ -7,7 +7,7 @@ exports[`Design management pagination component renders navigation buttons 1`] = class="gl-display-flex gl-align-items-center" > - 0 of 2 + 0 of 3 { let wrapper; - function createComponent() { + const buildMockHandler = (nodes = designListQueryResponseNodes) => { + return jest.fn().mockResolvedValue(getDesignListQueryResponse({ designs: nodes })); + }; + + const createMockApolloProvider = (handler) => { + Vue.use(VueApollo); + + return createMockApollo([[getDesignListQuery, handler]]); + }; + + function createComponent({ propsData = {}, handler = buildMockHandler() } = {}) { wrapper = shallowMount(DesignNavigation, { propsData: { id: '2', + ...propsData, }, + apolloProvider: createMockApolloProvider(handler), mocks: { $router, $route, @@ -30,48 +51,45 @@ describe('Design management pagination component', () => { }); } - beforeEach(() => { - 
createComponent(); - }); + const findGlButtonGroup = () => wrapper.findComponent(GlButtonGroup); + + it('hides components when designs are empty', async () => { + createComponent({ handler: buildMockHandler([]) }); + await waitForPromises(); - it('hides components when designs are empty', () => { + expect(findGlButtonGroup().exists()).toBe(false); expect(wrapper.element).toMatchSnapshot(); }); it('renders navigation buttons', async () => { - // setData usage is discouraged. See https://gitlab.com/groups/gitlab-org/-/epics/7330 for details - // eslint-disable-next-line no-restricted-syntax - wrapper.setData({ - designCollection: { designs: [{ id: '1' }, { id: '2' }] }, - }); + createComponent({ handler: buildMockHandler() }); + await waitForPromises(); - await nextTick(); + expect(findGlButtonGroup().exists()).toBe(true); expect(wrapper.element).toMatchSnapshot(); }); describe('keyboard buttons navigation', () => { - beforeEach(() => { - // setData usage is discouraged. See https://gitlab.com/groups/gitlab-org/-/epics/7330 for details - // eslint-disable-next-line no-restricted-syntax - wrapper.setData({ - designCollection: { designs: [{ filename: '1' }, { filename: '2' }, { filename: '3' }] }, - }); - }); + it('routes to previous design on Left button', async () => { + createComponent({ propsData: { id: designListQueryResponseNodes[1].filename } }); + await waitForPromises(); - it('routes to previous design on Left button', () => { Mousetrap.trigger('left'); expect(push).toHaveBeenCalledWith({ name: DESIGN_ROUTE_NAME, - params: { id: '1' }, + params: { id: designListQueryResponseNodes[0].filename }, query: {}, }); }); - it('routes to next design on Right button', () => { + it('routes to next design on Right button', async () => { + createComponent({ propsData: { id: designListQueryResponseNodes[1].filename } }); + await waitForPromises(); + Mousetrap.trigger('right'); expect(push).toHaveBeenCalledWith({ name: DESIGN_ROUTE_NAME, - params: { id: '3' }, + params: { 
id: designListQueryResponseNodes[2].filename }, query: {}, }); }); diff --git a/spec/frontend/design_management/components/upload/design_version_dropdown_spec.js b/spec/frontend/design_management/components/upload/design_version_dropdown_spec.js index cdfff61ba4f..3ee68f80538 100644 --- a/spec/frontend/design_management/components/upload/design_version_dropdown_spec.js +++ b/spec/frontend/design_management/components/upload/design_version_dropdown_spec.js @@ -1,9 +1,14 @@ +import Vue from 'vue'; +import VueApollo from 'vue-apollo'; import { GlAvatar, GlCollapsibleListbox, GlListboxItem } from '@gitlab/ui'; import { shallowMount } from '@vue/test-utils'; -import { nextTick } from 'vue'; +import getDesignListQuery from 'shared_queries/design_management/get_design_list.query.graphql'; +import createMockApollo from 'helpers/mock_apollo_helper'; +import waitForPromises from 'helpers/wait_for_promises'; import DesignVersionDropdown from '~/design_management/components/upload/design_version_dropdown.vue'; import TimeAgo from '~/vue_shared/components/time_ago_tooltip.vue'; -import mockAllVersions from './mock_data/all_versions'; +import mockAllVersions from '../../mock_data/all_versions'; +import { getDesignListQueryResponse } from '../../mock_data/apollo_mock'; const LATEST_VERSION_ID = 1; const PREVIOUS_VERSION_ID = 2; @@ -20,11 +25,20 @@ const MOCK_ROUTE = { query: {}, }; +Vue.use(VueApollo); + describe('Design management design version dropdown component', () => { let wrapper; function createComponent({ maxVersions = -1, $route = MOCK_ROUTE } = {}) { + const designVersions = + maxVersions > -1 ? 
mockAllVersions.slice(0, maxVersions) : mockAllVersions; + const designListHandler = jest + .fn() + .mockResolvedValue(getDesignListQueryResponse({ versions: designVersions })); + wrapper = shallowMount(DesignVersionDropdown, { + apolloProvider: createMockApollo([[getDesignListQuery, designListHandler]]), propsData: { projectPath: '', issueIid: '', @@ -34,12 +48,6 @@ describe('Design management design version dropdown component', () => { }, stubs: { GlAvatar: true, GlCollapsibleListbox }, }); - - // setData usage is discouraged. See https://gitlab.com/groups/gitlab-org/-/epics/7330 for details - // eslint-disable-next-line no-restricted-syntax - wrapper.setData({ - allVersions: maxVersions > -1 ? mockAllVersions.slice(0, maxVersions) : mockAllVersions, - }); } const findListbox = () => wrapper.findComponent(GlCollapsibleListbox); @@ -52,7 +60,7 @@ describe('Design management design version dropdown component', () => { beforeEach(async () => { createComponent(); - await nextTick(); + await waitForPromises(); listItem = findAllListboxItems().at(0); }); @@ -74,7 +82,8 @@ describe('Design management design version dropdown component', () => { it('has "latest" on most recent version item', async () => { createComponent(); - await nextTick(); + await waitForPromises(); + expect(findVersionLink(0).text()).toContain('latest'); }); }); @@ -83,7 +92,7 @@ describe('Design management design version dropdown component', () => { it('displays latest version text by default', async () => { createComponent(); - await nextTick(); + await waitForPromises(); expect(findListbox().props('toggleText')).toBe('Showing latest version'); }); @@ -91,35 +100,39 @@ describe('Design management design version dropdown component', () => { it('displays latest version text when only 1 version is present', async () => { createComponent({ maxVersions: 1 }); - await nextTick(); + await waitForPromises(); + expect(findListbox().props('toggleText')).toBe('Showing latest version'); }); it('displays 
version text when the current version is not the latest', async () => { createComponent({ $route: designRouteFactory(PREVIOUS_VERSION_ID) }); - await nextTick(); + await waitForPromises(); + expect(findListbox().props('toggleText')).toBe(`Showing version #1`); }); it('displays latest version text when the current version is the latest', async () => { createComponent({ $route: designRouteFactory(LATEST_VERSION_ID) }); - await nextTick(); + await waitForPromises(); + expect(findListbox().props('toggleText')).toBe('Showing latest version'); }); it('should have the same length as apollo query', async () => { createComponent(); - await nextTick(); + await waitForPromises(); + expect(findAllListboxItems()).toHaveLength(wrapper.vm.allVersions.length); }); it('should render TimeAgo', async () => { createComponent(); - await nextTick(); + await waitForPromises(); expect(wrapper.findAllComponents(TimeAgo)).toHaveLength(wrapper.vm.allVersions.length); }); diff --git a/spec/frontend/design_management/components/upload/mock_data/all_versions.js b/spec/frontend/design_management/components/upload/mock_data/all_versions.js deleted file mode 100644 index 24c59ce1a75..00000000000 --- a/spec/frontend/design_management/components/upload/mock_data/all_versions.js +++ /dev/null @@ -1,20 +0,0 @@ -export default [ - { - id: 'gid://gitlab/DesignManagement::Version/1', - sha: 'b389071a06c153509e11da1f582005b316667001', - createdAt: '2021-08-09T06:05:00Z', - author: { - id: 'gid://gitlab/User/1', - name: 'Adminstrator', - }, - }, - { - id: 'gid://gitlab/DesignManagement::Version/2', - sha: 'b389071a06c153509e11da1f582005b316667021', - createdAt: '2021-08-09T06:05:00Z', - author: { - id: 'gid://gitlab/User/1', - name: 'Adminstrator', - }, - }, -]; diff --git a/spec/frontend/design_management/mock_data/all_versions.js b/spec/frontend/design_management/mock_data/all_versions.js index f4026da7dfd..36f611247a9 100644 --- a/spec/frontend/design_management/mock_data/all_versions.js +++ 
b/spec/frontend/design_management/mock_data/all_versions.js @@ -1,20 +1,26 @@ export default [ { + __typename: 'DesignVersion', id: 'gid://gitlab/DesignManagement::Version/1', sha: 'b389071a06c153509e11da1f582005b316667001', createdAt: '2021-08-09T06:05:00Z', author: { + __typename: 'UserCore', id: 'gid://gitlab/User/1', name: 'Adminstrator', + avatarUrl: 'avatar.png', }, }, { - id: 'gid://gitlab/DesignManagement::Version/1', + __typename: 'DesignVersion', + id: 'gid://gitlab/DesignManagement::Version/2', sha: 'b389071a06c153509e11da1f582005b316667021', createdAt: '2021-08-09T06:05:00Z', author: { + __typename: 'UserCore', id: 'gid://gitlab/User/1', name: 'Adminstrator', + avatarUrl: 'avatar.png', }, }, ]; diff --git a/spec/frontend/design_management/mock_data/apollo_mock.js b/spec/frontend/design_management/mock_data/apollo_mock.js index 2b99dcf14da..18e08ecd729 100644 --- a/spec/frontend/design_management/mock_data/apollo_mock.js +++ b/spec/frontend/design_management/mock_data/apollo_mock.js @@ -1,4 +1,49 @@ -export const designListQueryResponse = { +export const designListQueryResponseNodes = [ + { + __typename: 'Design', + id: '1', + event: 'NONE', + filename: 'fox_1.jpg', + notesCount: 3, + image: 'image-1', + imageV432x230: 'image-1', + currentUserTodos: { + __typename: 'ToDo', + nodes: [], + }, + }, + { + __typename: 'Design', + id: '2', + event: 'NONE', + filename: 'fox_2.jpg', + notesCount: 2, + image: 'image-2', + imageV432x230: 'image-2', + currentUserTodos: { + __typename: 'ToDo', + nodes: [], + }, + }, + { + __typename: 'Design', + id: '3', + event: 'NONE', + filename: 'fox_3.jpg', + notesCount: 1, + image: 'image-3', + imageV432x230: 'image-3', + currentUserTodos: { + __typename: 'ToDo', + nodes: [], + }, + }, +]; + +export const getDesignListQueryResponse = ({ + versions = [], + designs = designListQueryResponseNodes, +} = {}) => ({ data: { project: { __typename: 'Project', @@ -11,57 +56,17 @@ export const designListQueryResponse = { copyState: 
'READY', designs: { __typename: 'DesignConnection', - nodes: [ - { - __typename: 'Design', - id: '1', - event: 'NONE', - filename: 'fox_1.jpg', - notesCount: 3, - image: 'image-1', - imageV432x230: 'image-1', - currentUserTodos: { - __typename: 'ToDo', - nodes: [], - }, - }, - { - __typename: 'Design', - id: '2', - event: 'NONE', - filename: 'fox_2.jpg', - notesCount: 2, - image: 'image-2', - imageV432x230: 'image-2', - currentUserTodos: { - __typename: 'ToDo', - nodes: [], - }, - }, - { - __typename: 'Design', - id: '3', - event: 'NONE', - filename: 'fox_3.jpg', - notesCount: 1, - image: 'image-3', - imageV432x230: 'image-3', - currentUserTodos: { - __typename: 'ToDo', - nodes: [], - }, - }, - ], + nodes: designs, }, versions: { - __typename: 'DesignVersion', - nodes: [], + __typename: 'DesignVersionConnection', + nodes: versions, }, }, }, }, }, -}; +}); export const designUploadMutationCreatedResponse = { data: { @@ -212,64 +217,62 @@ export const getDesignQueryResponse = { }, }; -export const mockNoteSubmitSuccessMutationResponse = [ - { - data: { - createNote: { - note: { - id: 'gid://gitlab/DiffNote/468', - author: { - id: 'gid://gitlab/User/1', - avatarUrl: - 'https://www.gravatar.com/avatar/e64c7d89f26bd1972efa854d13d7dd61?s=80&d=identicon', - name: 'Administrator', - username: 'root', - webUrl: 'http://127.0.0.1:3000/root', - __typename: 'UserCore', - }, - body: 'New comment', - bodyHtml: "

    asdd

    ", - createdAt: '2023-02-24T06:49:20Z', - resolved: false, - position: { - diffRefs: { - baseSha: 'f63ae53ed82d8765477c191383e1e6a000c10375', - startSha: 'f63ae53ed82d8765477c191383e1e6a000c10375', - headSha: 'f348c652f1a737151fc79047895e695fbe81464c', - __typename: 'DiffRefs', - }, - x: 441, - y: 128, - height: 152, - width: 695, - __typename: 'DiffPosition', - }, - userPermissions: { - adminNote: true, - repositionNote: true, - __typename: 'NotePermissions', +export const mockNoteSubmitSuccessMutationResponse = { + data: { + createNote: { + note: { + id: 'gid://gitlab/DiffNote/468', + author: { + id: 'gid://gitlab/User/1', + avatarUrl: + 'https://www.gravatar.com/avatar/e64c7d89f26bd1972efa854d13d7dd61?s=80&d=identicon', + name: 'Administrator', + username: 'root', + webUrl: 'http://127.0.0.1:3000/root', + __typename: 'UserCore', + }, + body: 'New comment', + bodyHtml: "

    asdd

    ", + createdAt: '2023-02-24T06:49:20Z', + resolved: false, + position: { + diffRefs: { + baseSha: 'f63ae53ed82d8765477c191383e1e6a000c10375', + startSha: 'f63ae53ed82d8765477c191383e1e6a000c10375', + headSha: 'f348c652f1a737151fc79047895e695fbe81464c', + __typename: 'DiffRefs', }, - discussion: { - id: 'gid://gitlab/Discussion/6466a72f35b163f3c3e52d7976a09387f2c573e8', - notes: { - nodes: [ - { - id: 'gid://gitlab/DiffNote/459', - __typename: 'Note', - }, - ], - __typename: 'NoteConnection', - }, - __typename: 'Discussion', + x: 441, + y: 128, + height: 152, + width: 695, + __typename: 'DiffPosition', + }, + userPermissions: { + adminNote: true, + repositionNote: true, + __typename: 'NotePermissions', + }, + discussion: { + id: 'gid://gitlab/Discussion/6466a72f35b163f3c3e52d7976a09387f2c573e8', + notes: { + nodes: [ + { + id: 'gid://gitlab/DiffNote/459', + __typename: 'Note', + }, + ], + __typename: 'NoteConnection', }, - __typename: 'Note', + __typename: 'Discussion', }, - errors: [], - __typename: 'CreateNotePayload', + __typename: 'Note', }, + errors: [], + __typename: 'CreateNotePayload', }, }, -]; +}; export const mockNoteSubmitFailureMutationResponse = [ { diff --git a/spec/frontend/design_management/pages/design/__snapshots__/index_spec.js.snap b/spec/frontend/design_management/pages/design/__snapshots__/index_spec.js.snap index d86fbf81d20..18b63082e4a 100644 --- a/spec/frontend/design_management/pages/design/__snapshots__/index_spec.js.snap +++ b/spec/frontend/design_management/pages/design/__snapshots__/index_spec.js.snap @@ -2,7 +2,7 @@ exports[`Design management design index page renders design index 1`] = `
    { }); describe('when navigating away from component', () => { - it('removes fullscreen layout class', async () => { + it('removes fullscreen layout class', () => { jest.spyOn(utils, 'getPageLayoutElement').mockReturnValue(mockPageLayoutElement); createComponent({ loading: true }); diff --git a/spec/frontend/design_management/pages/index_spec.js b/spec/frontend/design_management/pages/index_spec.js index 1ddf757eb19..1a6403d3b87 100644 --- a/spec/frontend/design_management/pages/index_spec.js +++ b/spec/frontend/design_management/pages/index_spec.js @@ -32,7 +32,7 @@ import { import { createAlert } from '~/alert'; import DesignDropzone from '~/vue_shared/components/upload_dropzone/upload_dropzone.vue'; import { - designListQueryResponse, + getDesignListQueryResponse, designUploadMutationCreatedResponse, designUploadMutationUpdatedResponse, getPermissionsQueryResponse, @@ -100,6 +100,7 @@ describe('Design management index page', () => { let wrapper; let fakeApollo; let moveDesignHandler; + let permissionsQueryHandler; const findDesignCheckboxes = () => wrapper.findAll('.design-checkbox'); const findSelectAllButton = () => wrapper.findByTestId('select-all-designs-button'); @@ -174,14 +175,16 @@ describe('Design management index page', () => { } function createComponentWithApollo({ + permissionsHandler = jest.fn().mockResolvedValue(getPermissionsQueryResponse()), moveHandler = jest.fn().mockResolvedValue(moveDesignMutationResponse), }) { Vue.use(VueApollo); + permissionsQueryHandler = permissionsHandler; moveDesignHandler = moveHandler; const requestHandlers = [ - [getDesignListQuery, jest.fn().mockResolvedValue(designListQueryResponse)], - [permissionsQuery, jest.fn().mockResolvedValue(getPermissionsQueryResponse())], + [getDesignListQuery, jest.fn().mockResolvedValue(getDesignListQueryResponse())], + [permissionsQuery, permissionsQueryHandler], [moveDesignMutation, moveDesignHandler], ]; @@ -230,13 +233,6 @@ describe('Design management index page', () => { 
expect(findDesignUploadButton().exists()).toBe(true); }); - it('does not render toolbar when there is no permission', () => { - createComponent({ designs: mockDesigns, allVersions: [mockVersion], createDesign: false }); - - expect(findDesignToolbarWrapper().exists()).toBe(false); - expect(findDesignUploadButton().exists()).toBe(false); - }); - it('has correct classes applied to design dropzone', () => { createComponent({ designs: mockDesigns, allVersions: [mockVersion] }); expect(dropzoneClasses()).toContain('design-list-item'); @@ -723,7 +719,7 @@ describe('Design management index page', () => { expect(mockMutate).not.toHaveBeenCalled(); }); - it('removes onPaste listener after mouseleave event', async () => { + it('removes onPaste listener after mouseleave event', () => { findDesignsWrapper().trigger('mouseleave'); document.dispatchEvent(event); @@ -744,6 +740,17 @@ describe('Design management index page', () => { }); }); + describe('when there is no permission to create a design', () => { + beforeEach(() => { + createComponent({ designs: mockDesigns, allVersions: [mockVersion], createDesign: false }); + }); + + it("doesn't render the design toolbar and dropzone", () => { + expect(findToolbar().exists()).toBe(false); + expect(findDropzoneWrapper().exists()).toBe(false); + }); + }); + describe('with mocked Apollo client', () => { it('has a design with id 1 as a first one', async () => { createComponentWithApollo({}); @@ -819,5 +826,17 @@ describe('Design management index page', () => { 'Something went wrong when reordering designs. 
Please try again', ); }); + + it("doesn't render the design toolbar and dropzone if the user can't edit", async () => { + createComponentWithApollo({ + permissionsHandler: jest.fn().mockResolvedValue(getPermissionsQueryResponse(false)), + }); + + await waitForPromises(); + + expect(permissionsQueryHandler).toHaveBeenCalled(); + expect(findToolbar().exists()).toBe(false); + expect(findDropzoneWrapper().exists()).toBe(false); + }); }); }); diff --git a/spec/frontend/diffs/components/app_spec.js b/spec/frontend/diffs/components/app_spec.js index 06995706a2b..f24ce8ba4ce 100644 --- a/spec/frontend/diffs/components/app_spec.js +++ b/spec/frontend/diffs/components/app_spec.js @@ -11,6 +11,7 @@ import CommitWidget from '~/diffs/components/commit_widget.vue'; import CompareVersions from '~/diffs/components/compare_versions.vue'; import DiffFile from '~/diffs/components/diff_file.vue'; import NoChanges from '~/diffs/components/no_changes.vue'; +import findingsDrawer from '~/diffs/components/shared/findings_drawer.vue'; import TreeList from '~/diffs/components/tree_list.vue'; import CollapsedFilesWarning from '~/diffs/components/collapsed_files_warning.vue'; @@ -174,7 +175,7 @@ describe('diffs/components/app', () => { }); describe('codequality diff', () => { - it('does not fetch code quality data on FOSS', async () => { + it('does not fetch code quality data on FOSS', () => { createComponent(); jest.spyOn(wrapper.vm, 'fetchCodequality'); wrapper.vm.fetchData(false); @@ -714,19 +715,27 @@ describe('diffs/components/app', () => { }); it.each` - currentDiffFileId | targetFile - ${'123'} | ${2} - ${'312'} | ${1} + currentDiffFileId | targetFile | newFileByFile + ${'123'} | ${2} | ${false} + ${'312'} | ${1} | ${true} `( 'calls navigateToDiffFileIndex with $index when $link is clicked', - async ({ currentDiffFileId, targetFile }) => { - createComponent({ fileByFileUserPreference: true }, ({ state }) => { - state.diffs.treeEntries = { - 123: { type: 'blob', fileHash: '123' }, - 
312: { type: 'blob', fileHash: '312' }, - }; - state.diffs.currentDiffFileId = currentDiffFileId; - }); + async ({ currentDiffFileId, targetFile, newFileByFile }) => { + createComponent( + { fileByFileUserPreference: true }, + ({ state }) => { + state.diffs.treeEntries = { + 123: { type: 'blob', fileHash: '123', filePaths: { old: '1234', new: '123' } }, + 312: { type: 'blob', fileHash: '312', filePaths: { old: '3124', new: '312' } }, + }; + state.diffs.currentDiffFileId = currentDiffFileId; + }, + { + glFeatures: { + singleFileFileByFile: newFileByFile, + }, + }, + ); await nextTick(); @@ -736,9 +745,28 @@ describe('diffs/components/app', () => { await nextTick(); - expect(wrapper.vm.navigateToDiffFileIndex).toHaveBeenCalledWith(targetFile - 1); + expect(wrapper.vm.navigateToDiffFileIndex).toHaveBeenCalledWith({ + index: targetFile - 1, + singleFile: newFileByFile, + }); }, ); }); }); + + describe('findings-drawer', () => { + it('does not render findings-drawer when codeQualityInlineDrawer flag is off', () => { + createComponent(); + expect(wrapper.findComponent(findingsDrawer).exists()).toBe(false); + }); + + it('does render findings-drawer when codeQualityInlineDrawer flag is on', () => { + createComponent({}, () => {}, { + glFeatures: { + codeQualityInlineDrawer: true, + }, + }); + expect(wrapper.findComponent(findingsDrawer).exists()).toBe(true); + }); + }); }); diff --git a/spec/frontend/diffs/components/commit_item_spec.js b/spec/frontend/diffs/components/commit_item_spec.js index 4b4b6351d3f..3c092296130 100644 --- a/spec/frontend/diffs/components/commit_item_spec.js +++ b/spec/frontend/diffs/components/commit_item_spec.js @@ -1,4 +1,5 @@ import { mount } from '@vue/test-utils'; +import { GlFormCheckbox } from '@gitlab/ui'; import getDiffWithCommit from 'test_fixtures/merge_request_diffs/with_commit.json'; import { TEST_HOST } from 'helpers/test_constants'; import { trimText } from 'helpers/text_helper'; @@ -28,6 +29,7 @@ 
describe('diffs/components/commit_item', () => { const getCommitterElement = () => wrapper.find('.committer'); const getCommitActionsElement = () => wrapper.find('.commit-actions'); const getCommitPipelineStatus = () => wrapper.findComponent(CommitPipelineStatus); + const getCommitCheckbox = () => wrapper.findComponent(GlFormCheckbox); const mountComponent = (propsData) => { wrapper = mount(Component, { @@ -168,4 +170,24 @@ describe('diffs/components/commit_item', () => { expect(getCommitPipelineStatus().exists()).toBe(true); }); }); + + describe('when commit is selectable', () => { + beforeEach(() => { + mountComponent({ + commit: { ...commit }, + isSelectable: true, + }); + }); + + it('renders checkbox', () => { + expect(getCommitCheckbox().exists()).toBe(true); + }); + + it('emits "handleCheckboxChange" event on change', () => { + expect(wrapper.emitted('handleCheckboxChange')).toBeUndefined(); + getCommitCheckbox().vm.$emit('change'); + + expect(wrapper.emitted('handleCheckboxChange')[0]).toEqual([true]); + }); + }); }); diff --git a/spec/frontend/diffs/components/diff_code_quality_item_spec.js b/spec/frontend/diffs/components/diff_code_quality_item_spec.js new file mode 100644 index 00000000000..be9fb61a77d --- /dev/null +++ b/spec/frontend/diffs/components/diff_code_quality_item_spec.js @@ -0,0 +1,66 @@ +import { GlIcon, GlLink } from '@gitlab/ui'; +import { shallowMountExtended } from 'helpers/vue_test_utils_helper'; +import DiffCodeQualityItem from '~/diffs/components/diff_code_quality_item.vue'; +import { SEVERITY_CLASSES, SEVERITY_ICONS } from '~/ci/reports/codequality_report/constants'; +import { multipleFindingsArr } from '../mock_data/diff_code_quality'; + +let wrapper; + +const findIcon = () => wrapper.findComponent(GlIcon); +const findButton = () => wrapper.findComponent(GlLink); +const findDescriptionPlainText = () => wrapper.findByTestId('description-plain-text'); +const findDescriptionLinkSection = () => 
wrapper.findByTestId('description-button-section'); + +describe('DiffCodeQuality', () => { + const createWrapper = ({ glFeatures = {} } = {}) => { + return shallowMountExtended(DiffCodeQualityItem, { + propsData: { + finding: multipleFindingsArr[0], + }, + provide: { + glFeatures, + }, + }); + }; + + it('shows icon for given degradation', () => { + wrapper = createWrapper(); + expect(findIcon().exists()).toBe(true); + + expect(findIcon().attributes()).toMatchObject({ + class: `codequality-severity-icon ${SEVERITY_CLASSES[multipleFindingsArr[0].severity]}`, + name: SEVERITY_ICONS[multipleFindingsArr[0].severity], + size: '12', + }); + }); + + describe('with codeQualityInlineDrawer flag false', () => { + it('should render severity + description in plain text', () => { + wrapper = createWrapper({ + glFeatures: { + codeQualityInlineDrawer: false, + }, + }); + expect(findDescriptionPlainText().text()).toContain(multipleFindingsArr[0].severity); + expect(findDescriptionPlainText().text()).toContain(multipleFindingsArr[0].description); + }); + }); + + describe('with codeQualityInlineDrawer flag true', () => { + beforeEach(() => { + wrapper = createWrapper({ + glFeatures: { + codeQualityInlineDrawer: true, + }, + }); + }); + + it('should render severity as plain text', () => { + expect(findDescriptionLinkSection().text()).toContain(multipleFindingsArr[0].severity); + }); + + it('should render button with description text', () => { + expect(findButton().text()).toContain(multipleFindingsArr[0].description); + }); + }); +}); diff --git a/spec/frontend/diffs/components/diff_code_quality_spec.js b/spec/frontend/diffs/components/diff_code_quality_spec.js index e5ca90eb7c8..9ecfb62e1c5 100644 --- a/spec/frontend/diffs/components/diff_code_quality_spec.js +++ b/spec/frontend/diffs/components/diff_code_quality_spec.js @@ -1,13 +1,12 @@ -import { GlIcon } from '@gitlab/ui'; import { mountExtended, shallowMountExtended } from 'helpers/vue_test_utils_helper'; import DiffCodeQuality 
from '~/diffs/components/diff_code_quality.vue'; -import { SEVERITY_CLASSES, SEVERITY_ICONS } from '~/ci/reports/codequality_report/constants'; +import DiffCodeQualityItem from '~/diffs/components/diff_code_quality_item.vue'; import { NEW_CODE_QUALITY_FINDINGS } from '~/diffs/i18n'; import { multipleFindingsArr } from '../mock_data/diff_code_quality'; let wrapper; -const findIcon = () => wrapper.findComponent(GlIcon); +const diffItems = () => wrapper.findAllComponents(DiffCodeQualityItem); const findHeading = () => wrapper.findByTestId(`diff-codequality-findings-heading`); describe('DiffCodeQuality', () => { @@ -28,37 +27,12 @@ describe('DiffCodeQuality', () => { expect(wrapper.emitted('hideCodeQualityFindings').length).toBe(1); }); - it('renders heading and correct amount of list items for codequality array and their description', async () => { - wrapper = createWrapper(multipleFindingsArr); - expect(findHeading().text()).toEqual(NEW_CODE_QUALITY_FINDINGS); - - const listItems = wrapper.findAll('li'); - expect(wrapper.findAll('li').length).toBe(5); + it('renders heading and correct amount of list items for codequality array and their description', () => { + wrapper = createWrapper(multipleFindingsArr, shallowMountExtended); - listItems.wrappers.map((e, i) => { - return expect(e.text()).toContain( - `${multipleFindingsArr[i].severity} - ${multipleFindingsArr[i].description}`, - ); - }); - }); - - it.each` - severity - ${'info'} - ${'minor'} - ${'major'} - ${'critical'} - ${'blocker'} - ${'unknown'} - `('shows icon for $severity degradation', ({ severity }) => { - wrapper = createWrapper([{ severity }], shallowMountExtended); - - expect(findIcon().exists()).toBe(true); + expect(findHeading().text()).toEqual(NEW_CODE_QUALITY_FINDINGS); - expect(findIcon().attributes()).toMatchObject({ - class: `codequality-severity-icon ${SEVERITY_CLASSES[severity]}`, - name: SEVERITY_ICONS[severity], - size: '12', - }); + expect(diffItems()).toHaveLength(multipleFindingsArr.length); 
+ expect(diffItems().at(0).props().finding).toEqual(multipleFindingsArr[0]); }); }); diff --git a/spec/frontend/diffs/components/diff_file_header_spec.js b/spec/frontend/diffs/components/diff_file_header_spec.js index 4515a8e8926..900aa8d1469 100644 --- a/spec/frontend/diffs/components/diff_file_header_spec.js +++ b/spec/frontend/diffs/components/diff_file_header_spec.js @@ -85,7 +85,7 @@ describe('DiffFileHeader component', () => { const findExternalLink = () => wrapper.findComponent({ ref: 'externalLink' }); const findReplacedFileButton = () => wrapper.findComponent({ ref: 'replacedFileButton' }); const findViewFileButton = () => wrapper.findComponent({ ref: 'viewButton' }); - const findCollapseIcon = () => wrapper.findComponent({ ref: 'collapseIcon' }); + const findCollapseButton = () => wrapper.findComponent({ ref: 'collapseButton' }); const findEditButton = () => wrapper.findComponent({ ref: 'editButton' }); const findReviewFileCheckbox = () => wrapper.find("[data-testid='fileReviewCheckbox']"); @@ -111,7 +111,7 @@ describe('DiffFileHeader component', () => { ${'hidden'} | ${false} `('collapse toggle is $visibility if collapsible is $collapsible', ({ collapsible }) => { createComponent({ props: { collapsible } }); - expect(findCollapseIcon().exists()).toBe(collapsible); + expect(findCollapseButton().exists()).toBe(collapsible); }); it.each` @@ -120,7 +120,7 @@ describe('DiffFileHeader component', () => { ${false} | ${'chevron-right'} `('collapse icon is $icon if expanded is $expanded', ({ icon, expanded }) => { createComponent({ props: { expanded, collapsible: true } }); - expect(findCollapseIcon().props('name')).toBe(icon); + expect(findCollapseButton().props('icon')).toBe(icon); }); it('when header is clicked emits toggleFile', async () => { @@ -133,7 +133,7 @@ describe('DiffFileHeader component', () => { it('when collapseIcon is clicked emits toggleFile', async () => { createComponent({ props: { collapsible: true } }); - findCollapseIcon().vm.$emit('click', 
new Event('click')); + findCollapseButton().vm.$emit('click', new Event('click')); await nextTick(); expect(wrapper.emitted().toggleFile).toBeDefined(); }); diff --git a/spec/frontend/diffs/components/diff_file_spec.js b/spec/frontend/diffs/components/diff_file_spec.js index 93698396450..79cd2508757 100644 --- a/spec/frontend/diffs/components/diff_file_spec.js +++ b/spec/frontend/diffs/components/diff_file_spec.js @@ -306,7 +306,7 @@ describe('DiffFile', () => { markFileToBeRendered(store); }); - it('should have the file content', async () => { + it('should have the file content', () => { expect(wrapper.findComponent(DiffContentComponent).exists()).toBe(true); }); @@ -316,7 +316,7 @@ describe('DiffFile', () => { }); describe('toggle', () => { - it('should update store state', async () => { + it('should update store state', () => { jest.spyOn(wrapper.vm.$store, 'dispatch').mockImplementation(() => {}); toggleFile(wrapper); diff --git a/spec/frontend/diffs/components/diff_line_note_form_spec.js b/spec/frontend/diffs/components/diff_line_note_form_spec.js index bd0e3455872..eb895bd9057 100644 --- a/spec/frontend/diffs/components/diff_line_note_form_spec.js +++ b/spec/frontend/diffs/components/diff_line_note_form_spec.js @@ -1,7 +1,6 @@ import { shallowMount } from '@vue/test-utils'; import { nextTick } from 'vue'; import Vuex from 'vuex'; -import Autosave from '~/autosave'; import DiffLineNoteForm from '~/diffs/components/diff_line_note_form.vue'; import { createModules } from '~/mr_notes/stores'; import NoteForm from '~/notes/components/note_form.vue'; @@ -11,7 +10,6 @@ import { noteableDataMock } from 'jest/notes/mock_data'; import { getDiffFileMock } from '../mock_data/diff_file'; jest.mock('~/lib/utils/confirm_via_gl_modal/confirm_via_gl_modal'); -jest.mock('~/autosave'); describe('DiffLineNoteForm', () => { let wrapper; @@ -77,7 +75,6 @@ describe('DiffLineNoteForm', () => { const findCommentForm = () => wrapper.findComponent(MultilineCommentForm); beforeEach(() 
=> { - Autosave.mockClear(); createComponent(); }); @@ -100,19 +97,6 @@ describe('DiffLineNoteForm', () => { }); }); - it('should init autosave', () => { - // we're using shallow mount here so there's no element to pass to Autosave - expect(Autosave).toHaveBeenCalledWith(undefined, [ - 'Note', - 'Issue', - 98, - undefined, - 'DiffNote', - undefined, - '1c497fbb3a46b78edf04cc2a2fa33f67e3ffbe2a_1_2', - ]); - }); - describe('when cancelling form', () => { afterEach(() => { confirmAction.mockReset(); @@ -146,7 +130,6 @@ describe('DiffLineNoteForm', () => { await nextTick(); expect(getSelectedLine().hasForm).toBe(false); - expect(Autosave.mock.instances[0].reset).toHaveBeenCalled(); }); }); diff --git a/spec/frontend/diffs/components/diff_view_spec.js b/spec/frontend/diffs/components/diff_view_spec.js index 9bff6bd14f1..cfc80e61b30 100644 --- a/spec/frontend/diffs/components/diff_view_spec.js +++ b/spec/frontend/diffs/components/diff_view_spec.js @@ -14,7 +14,7 @@ describe('DiffView', () => { const setSelectedCommentPosition = jest.fn(); const getDiffRow = (wrapper) => wrapper.findComponent(DiffRow).vm; - const createWrapper = (props, provide = {}) => { + const createWrapper = (props) => { Vue.use(Vuex); const batchComments = { @@ -48,7 +48,7 @@ describe('DiffView', () => { ...props, }; const stubs = { DiffExpansionCell, DiffRow, DiffCommentCell, DraftNote }; - return shallowMount(DiffView, { propsData, store, stubs, provide }); + return shallowMount(DiffView, { propsData, store, stubs }); }; it('does not render a diff-line component when there is no finding', () => { @@ -56,24 +56,13 @@ describe('DiffView', () => { expect(wrapper.findComponent(DiffLine).exists()).toBe(false); }); - it('does render a diff-line component with the correct props when there is a finding & refactorCodeQualityInlineFindings flag is true', async () => { - const wrapper = createWrapper(diffCodeQuality, { - glFeatures: { refactorCodeQualityInlineFindings: true }, - }); + it('does render a 
diff-line component with the correct props when there is a finding', async () => { + const wrapper = createWrapper(diffCodeQuality); wrapper.findComponent(DiffRow).vm.$emit('toggleCodeQualityFindings', 2); await nextTick(); expect(wrapper.findComponent(DiffLine).props('line')).toBe(diffCodeQuality.diffLines[2]); }); - it('does not render a diff-line component when there is a finding & refactorCodeQualityInlineFindings flag is false', async () => { - const wrapper = createWrapper(diffCodeQuality, { - glFeatures: { refactorCodeQualityInlineFindings: false }, - }); - wrapper.findComponent(DiffRow).vm.$emit('toggleCodeQualityFindings', 2); - await nextTick(); - expect(wrapper.findComponent(DiffLine).exists()).toBe(false); - }); - it.each` type | side | container | sides | total ${'parallel'} | ${'left'} | ${'.old'} | ${{ left: { lineDrafts: [], renderDiscussion: true }, right: { lineDrafts: [], renderDiscussion: true } }} | ${2} diff --git a/spec/frontend/diffs/components/hidden_files_warning_spec.js b/spec/frontend/diffs/components/hidden_files_warning_spec.js index d9359fb3c7b..9b748a3ed6f 100644 --- a/spec/frontend/diffs/components/hidden_files_warning_spec.js +++ b/spec/frontend/diffs/components/hidden_files_warning_spec.js @@ -37,7 +37,9 @@ describe('HiddenFilesWarning', () => { it('has a correct visible/total files text', () => { expect(wrapper.text()).toContain( - __('To preserve performance only 5 of 10 files are displayed.'), + __( + 'For a faster browsing experience, only 5 of 10 files are shown. 
Download one of the files below to see all changes', + ), ); }); }); diff --git a/spec/frontend/diffs/components/shared/__snapshots__/findings_drawer_spec.js.snap b/spec/frontend/diffs/components/shared/__snapshots__/findings_drawer_spec.js.snap new file mode 100644 index 00000000000..ab330ffbb38 --- /dev/null +++ b/spec/frontend/diffs/components/shared/__snapshots__/findings_drawer_spec.js.snap @@ -0,0 +1,126 @@ +// Jest Snapshot v1, https://goo.gl/fbAQLP + +exports[`FindingsDrawer matches the snapshot 1`] = ` + +

    + + Unused method argument - \`c\`. If it's necessary, use \`_\` or \`_c\` as an argument name to indicate that it won't be used. + +

    +
      +
    • + + Severity: + + + + + + minor + +
    • + +
    • + + Engine: + + + testengine name + +
    • + +
    • + + Category: + + + testcategory 1 + +
    • + +
    • + + Other locations: + + +
        +
      • + + testpath + +
      • +
      • + + testpath 1 + +
      • +
      • + + testpath2 + +
      • +
      +
    • +
    + + + Duplicated Code Duplicated code + +
    +`; diff --git a/spec/frontend/diffs/components/shared/findings_drawer_spec.js b/spec/frontend/diffs/components/shared/findings_drawer_spec.js new file mode 100644 index 00000000000..0af6e0f0e96 --- /dev/null +++ b/spec/frontend/diffs/components/shared/findings_drawer_spec.js @@ -0,0 +1,19 @@ +import FindingsDrawer from '~/diffs/components/shared/findings_drawer.vue'; +import { shallowMountExtended } from 'helpers/vue_test_utils_helper'; +import mockFinding from '../../mock_data/findings_drawer'; + +let wrapper; +describe('FindingsDrawer', () => { + const createWrapper = () => { + return shallowMountExtended(FindingsDrawer, { + propsData: { + drawer: mockFinding, + }, + }); + }; + + it('matches the snapshot', () => { + wrapper = createWrapper(); + expect(wrapper.element).toMatchSnapshot(); + }); +}); diff --git a/spec/frontend/diffs/create_diffs_store.js b/spec/frontend/diffs/create_diffs_store.js index 307ebdaa4ac..92f38858ca5 100644 --- a/spec/frontend/diffs/create_diffs_store.js +++ b/spec/frontend/diffs/create_diffs_store.js @@ -3,6 +3,7 @@ import Vuex from 'vuex'; import batchCommentsModule from '~/batch_comments/stores/modules/batch_comments'; import diffsModule from '~/diffs/store/modules'; import notesModule from '~/notes/stores/modules'; +import findingsDrawer from '~/mr_notes/stores/drawer'; Vue.use(Vuex); @@ -18,6 +19,7 @@ export default function createDiffsStore() { diffs: diffsModule(), notes: notesModule(), batchComments: batchCommentsModule(), + findingsDrawer: findingsDrawer(), }, }); } diff --git a/spec/frontend/diffs/mock_data/diff_code_quality.js b/spec/frontend/diffs/mock_data/diff_code_quality.js index 7558592f6a4..29f16da8d89 100644 --- a/spec/frontend/diffs/mock_data/diff_code_quality.js +++ b/spec/frontend/diffs/mock_data/diff_code_quality.js @@ -24,6 +24,11 @@ export const multipleFindingsArr = [ description: 'mocked blocker Issue', line: 3, }, + { + severity: 'unknown', + description: 'mocked unknown Issue', + line: 3, + }, ]; export 
const fiveFindings = { diff --git a/spec/frontend/diffs/mock_data/findings_drawer.js b/spec/frontend/diffs/mock_data/findings_drawer.js new file mode 100644 index 00000000000..d7e7e957c83 --- /dev/null +++ b/spec/frontend/diffs/mock_data/findings_drawer.js @@ -0,0 +1,21 @@ +export default { + line: 7, + description: + "Unused method argument - `c`. If it's necessary, use `_` or `_c` as an argument name to indicate that it won't be used.", + severity: 'minor', + engineName: 'testengine name', + categories: ['testcategory 1', 'testcategory 2'], + content: { + body: 'Duplicated Code Duplicated code', + }, + location: { + path: 'workhorse/config_test.go', + lines: { begin: 221, end: 284 }, + }, + otherLocations: [ + { path: 'testpath', href: 'http://testlink.com' }, + { path: 'testpath 1', href: 'http://testlink.com' }, + { path: 'testpath2', href: 'http://testlink.com' }, + ], + type: 'issue', +}; diff --git a/spec/frontend/diffs/store/actions_spec.js b/spec/frontend/diffs/store/actions_spec.js index b00076504e3..f3581c3dd74 100644 --- a/spec/frontend/diffs/store/actions_spec.js +++ b/spec/frontend/diffs/store/actions_spec.js @@ -1,5 +1,6 @@ import MockAdapter from 'axios-mock-adapter'; import Cookies from '~/lib/utils/cookies'; +import waitForPromises from 'helpers/wait_for_promises'; import { useLocalStorageSpy } from 'helpers/local_storage_helper'; import { TEST_HOST } from 'helpers/test_constants'; import testAction from 'helpers/vuex_action_helper'; @@ -9,6 +10,7 @@ import { INLINE_DIFF_VIEW_TYPE, PARALLEL_DIFF_VIEW_TYPE, } from '~/diffs/constants'; +import { LOAD_SINGLE_DIFF_FAILED } from '~/diffs/i18n'; import * as diffActions from '~/diffs/store/actions'; import * as types from '~/diffs/store/mutation_types'; import * as utils from '~/diffs/store/utils'; @@ -24,10 +26,16 @@ import { } from '~/lib/utils/http_status'; import { mergeUrlParams } from '~/lib/utils/url_utility'; import eventHub from '~/notes/event_hub'; +import diffsEventHub from 
'~/diffs/event_hub'; import { diffMetadata } from '../mock_data/diff_metadata'; jest.mock('~/alert'); +jest.mock('~/lib/utils/secret_detection', () => ({ + confirmSensitiveAction: jest.fn(() => Promise.resolve(false)), + containsSensitiveToken: jest.requireActual('~/lib/utils/secret_detection').containsSensitiveToken, +})); + describe('DiffsStoreActions', () => { let mock; @@ -135,6 +143,112 @@ describe('DiffsStoreActions', () => { }); }); + describe('fetchFileByFile', () => { + beforeEach(() => { + window.location.hash = 'e334a2a10f036c00151a04cea7938a5d4213a818'; + }); + + it('should do nothing if there is no tree entry for the file ID', () => { + return testAction(diffActions.fetchFileByFile, {}, { flatBlobsList: [] }, [], []); + }); + + it('should do nothing if the tree entry for the file ID has already been marked as loaded', () => { + return testAction( + diffActions.fetchFileByFile, + {}, + { + flatBlobsList: [ + { fileHash: 'e334a2a10f036c00151a04cea7938a5d4213a818', diffLoaded: true }, + ], + }, + [], + [], + ); + }); + + describe('when a tree entry exists for the file, but it has not been marked as loaded', () => { + let state; + let commit; + let hubSpy; + const endpointDiffForPath = '/diffs/set/endpoint/path'; + const diffForPath = mergeUrlParams( + { + old_path: 'old/123', + new_path: 'new/123', + w: '1', + view: 'inline', + }, + endpointDiffForPath, + ); + const treeEntry = { + fileHash: 'e334a2a10f036c00151a04cea7938a5d4213a818', + filePaths: { old: 'old/123', new: 'new/123' }, + }; + const fileResult = { + diff_files: [{ file_hash: 'e334a2a10f036c00151a04cea7938a5d4213a818' }], + }; + const getters = { + flatBlobsList: [treeEntry], + getDiffFileByHash(hash) { + return state.diffFiles?.find((entry) => entry.file_hash === hash); + }, + }; + + beforeEach(() => { + commit = jest.fn(); + state = { + endpointDiffForPath, + diffFiles: [], + }; + getters.flatBlobsList = [treeEntry]; + hubSpy = jest.spyOn(diffsEventHub, '$emit'); + }); + + it('does nothing 
if the file already exists in the loaded diff files', () => { + state.diffFiles = fileResult.diff_files; + + return testAction(diffActions.fetchFileByFile, state, getters, [], []); + }); + + it('does some standard work every time', async () => { + mock.onGet(diffForPath).reply(HTTP_STATUS_OK, fileResult); + + await diffActions.fetchFileByFile({ state, getters, commit }); + + expect(commit).toHaveBeenCalledWith(types.SET_BATCH_LOADING_STATE, 'loading'); + expect(commit).toHaveBeenCalledWith(types.SET_RETRIEVING_BATCHES, true); + + // wait for the mocked network request to return and start processing the .then + await waitForPromises(); + + expect(commit).toHaveBeenCalledWith(types.SET_DIFF_DATA_BATCH, fileResult); + expect(commit).toHaveBeenCalledWith(types.SET_BATCH_LOADING_STATE, 'loaded'); + + expect(hubSpy).toHaveBeenCalledWith('diffFilesModified'); + }); + + it.each` + urlHash | diffFiles | expected + ${treeEntry.fileHash} | ${[]} | ${''} + ${'abcdef1234567890'} | ${fileResult.diff_files} | ${'e334a2a10f036c00151a04cea7938a5d4213a818'} + `( + "sets the current file to the first diff file ('$id') if it's not a note hash and there isn't a current ID set", + async ({ urlHash, diffFiles, expected }) => { + window.location.hash = urlHash; + mock.onGet(diffForPath).reply(HTTP_STATUS_OK, fileResult); + state.diffFiles = diffFiles; + + await diffActions.fetchFileByFile({ state, getters, commit }); + + // wait for the mocked network request to return and start processing the .then + await waitForPromises(); + + expect(commit).toHaveBeenCalledWith(types.SET_CURRENT_DIFF_FILE, expected); + }, + ); + }); + }); + describe('fetchDiffFilesBatch', () => { it('should fetch batch diff files', () => { const endpointBatch = '/fetch/diffs_batch'; @@ -818,31 +932,32 @@ describe('DiffsStoreActions', () => { }); describe('saveDiffDiscussion', () => { - it('dispatches actions', () => { - const commitId = 'something'; - const formData = { - diffFile: getDiffFileMock(), - noteableData: 
{}, - }; - const note = {}; - const state = { - commit: { - id: commitId, - }, - }; - const dispatch = jest.fn((name) => { - switch (name) { - case 'saveNote': - return Promise.resolve({ - discussion: 'test', - }); - case 'updateDiscussion': - return Promise.resolve('discussion'); - default: - return Promise.resolve({}); - } - }); + const dispatch = jest.fn((name) => { + switch (name) { + case 'saveNote': + return Promise.resolve({ + discussion: 'test', + }); + case 'updateDiscussion': + return Promise.resolve('discussion'); + default: + return Promise.resolve({}); + } + }); + const commitId = 'something'; + const formData = { + diffFile: getDiffFileMock(), + noteableData: {}, + }; + const note = {}; + const state = { + commit: { + id: commitId, + }, + }; + + it('dispatches actions', () => { return diffActions.saveDiffDiscussion({ state, dispatch }, { note, formData }).then(() => { expect(dispatch).toHaveBeenCalledTimes(5); expect(dispatch).toHaveBeenNthCalledWith(1, 'saveNote', expect.any(Object), { @@ -856,6 +971,16 @@ describe('DiffsStoreActions', () => { expect(dispatch).toHaveBeenNthCalledWith(3, 'assignDiscussionsToDiff', ['discussion']); }); }); + + it('should not add note with sensitive token', async () => { + const sensitiveMessage = 'token: glpat-1234567890abcdefghij'; + + await diffActions.saveDiffDiscussion( + { state, dispatch }, + { note: sensitiveMessage, formData }, + ); + expect(dispatch).not.toHaveBeenCalled(); + }); }); describe('toggleTreeOpen', () => { @@ -870,6 +995,104 @@ describe('DiffsStoreActions', () => { }); }); + describe('goToFile', () => { + const getters = {}; + const file = { path: 'path' }; + const fileHash = 'test'; + let state; + let dispatch; + let commit; + + beforeEach(() => { + getters.isTreePathLoaded = () => false; + state = { + viewDiffsFileByFile: true, + treeEntries: { + path: { + fileHash, + }, + }, + }; + commit = jest.fn(); + dispatch = jest.fn().mockResolvedValue(); + }); + + it('immediately defers to scrollToFile if 
the app is not in file-by-file mode', () => { + state.viewDiffsFileByFile = false; + + diffActions.goToFile({ state, dispatch }, file); + + expect(dispatch).toHaveBeenCalledWith('scrollToFile', file); + }); + + describe('when the app is in fileByFile mode', () => { + describe('when the singleFileFileByFile feature flag is enabled', () => { + it('commits SET_CURRENT_DIFF_FILE', () => { + diffActions.goToFile( + { state, commit, dispatch, getters }, + { path: file.path, singleFile: true }, + ); + + expect(commit).toHaveBeenCalledWith(types.SET_CURRENT_DIFF_FILE, fileHash); + }); + + it('does nothing more if the path has already been loaded', () => { + getters.isTreePathLoaded = () => true; + + diffActions.goToFile( + { state, dispatch, getters, commit }, + { path: file.path, singleFile: true }, + ); + + expect(commit).toHaveBeenCalledWith(types.SET_CURRENT_DIFF_FILE, fileHash); + expect(dispatch).toHaveBeenCalledTimes(0); + }); + + describe('when the tree entry has not been loaded', () => { + it('updates location hash', () => { + diffActions.goToFile( + { state, commit, getters, dispatch }, + { path: file.path, singleFile: true }, + ); + + expect(document.location.hash).toBe('#test'); + }); + + it('loads the file and then scrolls to it', async () => { + diffActions.goToFile( + { state, commit, getters, dispatch }, + { path: file.path, singleFile: true }, + ); + + // Wait for the fetchFileByFile dispatch to return, to trigger scrollToFile + await waitForPromises(); + + expect(dispatch).toHaveBeenCalledWith('fetchFileByFile'); + expect(dispatch).toHaveBeenCalledWith('scrollToFile', file); + expect(dispatch).toHaveBeenCalledTimes(2); + }); + + it('shows an alert when there was an error fetching the file', async () => { + dispatch = jest.fn().mockRejectedValue(); + + diffActions.goToFile( + { state, commit, getters, dispatch }, + { path: file.path, singleFile: true }, + ); + + // Wait for the fetchFileByFile dispatch to return, to trigger the catch + await 
waitForPromises(); + + expect(createAlert).toHaveBeenCalledTimes(1); + expect(createAlert).toHaveBeenCalledWith({ + message: expect.stringMatching(LOAD_SINGLE_DIFF_FAILED), + }); + }); + }); + }); + }); + }); + describe('scrollToFile', () => { let commit; const getters = { isVirtualScrollingEnabled: false }; @@ -1392,6 +1615,54 @@ describe('DiffsStoreActions', () => { ); }); + describe('rereadNoteHash', () => { + beforeEach(() => { + window.location.hash = 'note_123'; + }); + + it('dispatches setCurrentDiffFileIdFromNote if the hash is a note URL', () => { + window.location.hash = 'note_123'; + + return testAction( + diffActions.rereadNoteHash, + {}, + {}, + [], + [{ type: 'setCurrentDiffFileIdFromNote', payload: '123' }], + ); + }); + + it('dispatches fetchFileByFile if the app is in fileByFile mode', () => { + window.location.hash = 'note_123'; + + return testAction( + diffActions.rereadNoteHash, + {}, + { viewDiffsFileByFile: true }, + [], + [{ type: 'setCurrentDiffFileIdFromNote', payload: '123' }, { type: 'fetchFileByFile' }], + ); + }); + + it('does not try to fetch the diff file if the app is not in fileByFile mode', () => { + window.location.hash = 'note_123'; + + return testAction( + diffActions.rereadNoteHash, + {}, + { viewDiffsFileByFile: false }, + [], + [{ type: 'setCurrentDiffFileIdFromNote', payload: '123' }], + ); + }); + + it('does nothing if the hash is not a note URL', () => { + window.location.hash = 'abcdef1234567890'; + + return testAction(diffActions.rereadNoteHash, {}, {}, [], []); + }); + }); + describe('setCurrentDiffFileIdFromNote', () => { it('commits SET_CURRENT_DIFF_FILE', () => { const commit = jest.fn(); @@ -1436,12 +1707,22 @@ describe('DiffsStoreActions', () => { it('commits SET_CURRENT_DIFF_FILE', () => { return testAction( diffActions.navigateToDiffFileIndex, - 0, + { index: 0, singleFile: false }, { flatBlobsList: [{ fileHash: '123' }] }, [{ type: types.SET_CURRENT_DIFF_FILE, payload: '123' }], [], ); }); + + it('dispatches the 
fetchFileByFile action when the state value viewDiffsFileByFile is true and the single-file file-by-file feature flag is enabled', () => { + return testAction( + diffActions.navigateToDiffFileIndex, + { index: 0, singleFile: true }, + { viewDiffsFileByFile: true, flatBlobsList: [{ fileHash: '123' }] }, + [{ type: types.SET_CURRENT_DIFF_FILE, payload: '123' }], + [{ type: 'fetchFileByFile' }], + ); + }); }); describe('setFileByFile', () => { diff --git a/spec/frontend/diffs/utils/merge_request_spec.js b/spec/frontend/diffs/utils/merge_request_spec.js index c070e8c004d..21599a3be45 100644 --- a/spec/frontend/diffs/utils/merge_request_spec.js +++ b/spec/frontend/diffs/utils/merge_request_spec.js @@ -1,4 +1,8 @@ -import { getDerivedMergeRequestInformation } from '~/diffs/utils/merge_request'; +import { + updateChangesTabCount, + getDerivedMergeRequestInformation, +} from '~/diffs/utils/merge_request'; +import { ZERO_CHANGES_ALT_DISPLAY } from '~/diffs/constants'; import { diffMetadata } from '../mock_data/diff_metadata'; describe('Merge Request utilities', () => { @@ -24,6 +28,56 @@ describe('Merge Request utilities', () => { ...noVersion, }; + describe('updateChangesTabCount', () => { + let dummyTab; + let badge; + + beforeEach(() => { + dummyTab = document.createElement('div'); + dummyTab.classList.add('js-diffs-tab'); + dummyTab.insertAdjacentHTML('afterbegin', 'ERROR'); + badge = dummyTab.querySelector('.gl-badge'); + }); + + afterEach(() => { + dummyTab.remove(); + dummyTab = null; + badge = null; + }); + + it('uses the alt hyphen display when the new changes are falsey', () => { + updateChangesTabCount({ count: 0, badge }); + + expect(dummyTab.textContent).toBe(ZERO_CHANGES_ALT_DISPLAY); + + updateChangesTabCount({ badge }); + + expect(dummyTab.textContent).toBe(ZERO_CHANGES_ALT_DISPLAY); + + updateChangesTabCount({ count: false, badge }); + + expect(dummyTab.textContent).toBe(ZERO_CHANGES_ALT_DISPLAY); + }); + + it('uses the actual value for display when the 
value is truthy', () => { + updateChangesTabCount({ count: 42, badge }); + + expect(dummyTab.textContent).toBe('42'); + + updateChangesTabCount({ count: '999+', badge }); + + expect(dummyTab.textContent).toBe('999+'); + }); + + it('selects the proper element to modify by default', () => { + document.body.insertAdjacentElement('afterbegin', dummyTab); + + updateChangesTabCount({ count: 42 }); + + expect(dummyTab.textContent).toBe('42'); + }); + }); + describe('getDerivedMergeRequestInformation', () => { let endpoint = `${diffMetadata.latest_version_path}.json?searchParam=irrelevant`; diff --git a/spec/frontend/drawio/drawio_editor_spec.js b/spec/frontend/drawio/drawio_editor_spec.js index 5ef26c04204..d7d75922e1e 100644 --- a/spec/frontend/drawio/drawio_editor_spec.js +++ b/spec/frontend/drawio/drawio_editor_spec.js @@ -108,7 +108,7 @@ describe('drawio/drawio_editor', () => { await waitForDrawioIFrameMessage(); }); - it('sends configure action to the draw.io iframe', async () => { + it('sends configure action to the draw.io iframe', () => { expectDrawioIframeMessage({ expectation: { action: 'configure', @@ -121,7 +121,7 @@ describe('drawio/drawio_editor', () => { }); }); - it('does not remove the iframe after the load error timeouts run', async () => { + it('does not remove the iframe after the load error timeouts run', () => { jest.runAllTimers(); expect(findDrawioIframe()).not.toBe(null); @@ -227,7 +227,7 @@ describe('drawio/drawio_editor', () => { postMessageToParentWindow({ event: 'init' }); }); - it('displays an error alert indicating that the image is not a diagram', async () => { + it('displays an error alert indicating that the image is not a diagram', () => { expect(createAlert).toHaveBeenCalledWith({ message: errorMessage, error: expect.any(Error), @@ -248,7 +248,7 @@ describe('drawio/drawio_editor', () => { postMessageToParentWindow({ event: 'init' }); }); - it('displays an error alert indicating the failure', async () => { + it('displays an error alert 
indicating the failure', () => { expect(createAlert).toHaveBeenCalledWith({ message: 'Cannot load the diagram into the diagrams.net editor', error: expect.any(Error), diff --git a/spec/frontend/dropzone_input_spec.js b/spec/frontend/dropzone_input_spec.js index fdd157dd09f..179ba917e7f 100644 --- a/spec/frontend/dropzone_input_spec.js +++ b/spec/frontend/dropzone_input_spec.js @@ -48,9 +48,9 @@ describe('dropzone_input', () => { }; beforeEach(() => { - loadHTMLFixture('issues/new-issue.html'); + loadHTMLFixture('milestones/new-milestone.html'); - form = $('#new_issue'); + form = $('#new_milestone'); form.data('uploads-path', TEST_UPLOAD_PATH); dropzoneInput(form); }); diff --git a/spec/frontend/editor/components/source_editor_toolbar_button_spec.js b/spec/frontend/editor/components/source_editor_toolbar_button_spec.js index 79692ab4557..b5944a52af7 100644 --- a/spec/frontend/editor/components/source_editor_toolbar_button_spec.js +++ b/spec/frontend/editor/components/source_editor_toolbar_button_spec.js @@ -33,17 +33,17 @@ describe('Source Editor Toolbar button', () => { it('does not render the button if the props have not been passed', () => { createComponent({}); - expect(findButton().vm).toBeUndefined(); + expect(findButton().exists()).toBe(false); }); - it('renders a default button without props', async () => { + it('renders a default button without props', () => { createComponent(); const btn = findButton(); expect(btn.exists()).toBe(true); expect(btn.props()).toMatchObject(defaultProps); }); - it('renders a button based on the props passed', async () => { + it('renders a button based on the props passed', () => { createComponent({ button: customProps, }); @@ -107,34 +107,31 @@ describe('Source Editor Toolbar button', () => { }); describe('click handler', () => { - let clickEvent; - - beforeEach(() => { - clickEvent = new Event('click'); - }); - it('fires the click handler on the button when available', async () => { - const spy = jest.fn(); + const clickSpy = 
jest.fn(); + const clickEvent = new Event('click'); createComponent({ button: { - onClick: spy, + onClick: clickSpy, }, }); - expect(spy).not.toHaveBeenCalled(); + expect(wrapper.emitted('click')).toEqual(undefined); findButton().vm.$emit('click', clickEvent); await nextTick(); - expect(spy).toHaveBeenCalledWith(clickEvent); + + expect(wrapper.emitted('click')).toEqual([[clickEvent]]); + expect(clickSpy).toHaveBeenCalledWith(clickEvent); }); + it('emits the "click" event, passing the event itself', async () => { createComponent(); - jest.spyOn(wrapper.vm, '$emit'); - expect(wrapper.vm.$emit).not.toHaveBeenCalled(); + expect(wrapper.emitted('click')).toEqual(undefined); - findButton().vm.$emit('click', clickEvent); + findButton().vm.$emit('click'); await nextTick(); - expect(wrapper.vm.$emit).toHaveBeenCalledWith('click', clickEvent); + expect(wrapper.emitted('click')).toHaveLength(1); }); }); }); diff --git a/spec/frontend/editor/components/source_editor_toolbar_spec.js b/spec/frontend/editor/components/source_editor_toolbar_spec.js index f737340a317..95dc29c7916 100644 --- a/spec/frontend/editor/components/source_editor_toolbar_spec.js +++ b/spec/frontend/editor/components/source_editor_toolbar_spec.js @@ -104,19 +104,16 @@ describe('Source Editor Toolbar', () => { group: EDITOR_TOOLBAR_BUTTON_GROUPS.settings, }); createComponentWithApollo([item1, item2, item3]); - jest.spyOn(wrapper.vm, '$emit'); - expect(wrapper.vm.$emit).not.toHaveBeenCalled(); + expect(wrapper.emitted('click')).toEqual(undefined); findButtons().at(0).vm.$emit('click'); - expect(wrapper.vm.$emit).toHaveBeenCalledWith('click', item1); + expect(wrapper.emitted('click')).toEqual([[item1]]); findButtons().at(1).vm.$emit('click'); - expect(wrapper.vm.$emit).toHaveBeenCalledWith('click', item2); + expect(wrapper.emitted('click')).toEqual([[item1], [item2]]); findButtons().at(2).vm.$emit('click'); - expect(wrapper.vm.$emit).toHaveBeenCalledWith('click', item3); - - 
expect(wrapper.vm.$emit.mock.calls).toHaveLength(3); + expect(wrapper.emitted('click')).toEqual([[item1], [item2], [item3]]); }); }); }); diff --git a/spec/frontend/editor/source_editor_markdown_livepreview_ext_spec.js b/spec/frontend/editor/source_editor_markdown_livepreview_ext_spec.js index 895eb87833d..fb5fce92482 100644 --- a/spec/frontend/editor/source_editor_markdown_livepreview_ext_spec.js +++ b/spec/frontend/editor/source_editor_markdown_livepreview_ext_spec.js @@ -1,6 +1,5 @@ import MockAdapter from 'axios-mock-adapter'; import { Emitter } from 'monaco-editor'; -import { useFakeRequestAnimationFrame } from 'helpers/fake_request_animation_frame'; import { setHTMLFixture, resetHTMLFixture } from 'helpers/fixtures'; import waitForPromises from 'helpers/wait_for_promises'; import { @@ -28,6 +27,7 @@ describe('Markdown Live Preview Extension for Source Editor', () => { let panelSpy; let mockAxios; let extension; + let resizeCallback; const previewMarkdownPath = '/gitlab/fooGroup/barProj/preview_markdown'; const firstLine = 'This is a'; const secondLine = 'multiline'; @@ -35,6 +35,8 @@ describe('Markdown Live Preview Extension for Source Editor', () => { const text = `${firstLine}\n${secondLine}\n${thirdLine}`; const markdownPath = 'foo.md'; const responseData = '
    FooBar
    '; + const observeSpy = jest.fn(); + const disconnectSpy = jest.fn(); const togglePreview = async () => { instance.togglePreview(); @@ -43,8 +45,22 @@ describe('Markdown Live Preview Extension for Source Editor', () => { beforeEach(() => { mockAxios = new MockAdapter(axios); - setHTMLFixture('
    '); + setHTMLFixture( + '
    ', + ); editorEl = document.getElementById('editor'); + global.ResizeObserver = class { + constructor(callback) { + resizeCallback = callback; + this.observe = (node) => { + return observeSpy(node); + }; + this.disconnect = () => { + return disconnectSpy(); + }; + } + }; + editor = new SourceEditor(); instance = editor.createInstance({ el: editorEl, @@ -77,9 +93,6 @@ describe('Markdown Live Preview Extension for Source Editor', () => { actions: expect.any(Object), shown: false, modelChangeListener: undefined, - layoutChangeListener: { - dispose: expect.anything(), - }, path: previewMarkdownPath, actionShowPreviewCondition: expect.any(Object), eventEmitter: expect.any(Object), @@ -94,36 +107,64 @@ describe('Markdown Live Preview Extension for Source Editor', () => { expect(panelSpy).toHaveBeenCalled(); }); - describe('onDidLayoutChange', () => { - const emitter = new Emitter(); - let layoutSpy; + describe('ResizeObserver handler', () => { + it('sets a ResizeObserver to observe the container DOM node', () => { + observeSpy.mockClear(); + instance.togglePreview(); + expect(observeSpy).toHaveBeenCalledWith(instance.getContainerDomNode()); + }); - useFakeRequestAnimationFrame(); + describe('disconnects the ResizeObserver when…', () => { + beforeEach(() => { + instance.togglePreview(); + instance.markdownPreview.modelChangeListener = { + dispose: jest.fn(), + }; + }); - beforeEach(() => { - instance.unuse(extension); - instance.onDidLayoutChange = emitter.event; - extension = instance.use({ - definition: EditorMarkdownPreviewExtension, - setupOptions: { previewMarkdownPath }, + it('the preview gets closed', () => { + expect(disconnectSpy).not.toHaveBeenCalled(); + instance.togglePreview(); + expect(disconnectSpy).toHaveBeenCalled(); }); - layoutSpy = jest.spyOn(instance, 'layout'); - }); - it('does not trigger the layout when the preview is not active [default]', async () => { - expect(instance.markdownPreview.shown).toBe(false); - 
expect(layoutSpy).not.toHaveBeenCalled(); - await emitter.fire(); - expect(layoutSpy).not.toHaveBeenCalled(); + it('the extension is unused', () => { + expect(disconnectSpy).not.toHaveBeenCalled(); + instance.unuse(extension); + expect(disconnectSpy).toHaveBeenCalled(); + }); }); - it('triggers the layout if the preview panel is opened', async () => { - expect(layoutSpy).not.toHaveBeenCalled(); - instance.togglePreview(); - layoutSpy.mockReset(); + describe('layout behavior', () => { + let layoutSpy; + let instanceDimensions; + let newInstanceWidth; - await emitter.fire(); - expect(layoutSpy).toHaveBeenCalledTimes(1); + beforeEach(() => { + instanceDimensions = instance.getLayoutInfo(); + }); + + it('does not trigger the layout if the preview panel is closed', () => { + layoutSpy = jest.spyOn(instance, 'layout'); + newInstanceWidth = instanceDimensions.width + 100; + + // Manually trigger the resize event + resizeCallback([{ contentRect: { width: newInstanceWidth } }]); + expect(layoutSpy).not.toHaveBeenCalled(); + }); + + it('triggers the layout if the preview panel is opened, and width of the editor has changed', () => { + instance.togglePreview(); + layoutSpy = jest.spyOn(instance, 'layout'); + newInstanceWidth = instanceDimensions.width + 100; + + // Manually trigger the resize event + resizeCallback([{ contentRect: { width: newInstanceWidth } }]); + expect(layoutSpy).toHaveBeenCalledWith({ + width: newInstanceWidth * EXTENSION_MARKDOWN_PREVIEW_PANEL_WIDTH, + height: instanceDimensions.height, + }); + }); }); }); @@ -226,11 +267,10 @@ describe('Markdown Live Preview Extension for Source Editor', () => { expect(newWidth === width / EXTENSION_MARKDOWN_PREVIEW_PANEL_WIDTH).toBe(true); }); - it('disposes the layoutChange listener and does not re-layout on layout changes', () => { - expect(instance.markdownPreview.layoutChangeListener).toBeDefined(); + it('disconnects the ResizeObserver', () => { instance.unuse(extension); - 
expect(instance.markdownPreview?.layoutChangeListener).toBeUndefined(); + expect(disconnectSpy).toHaveBeenCalled(); }); it('does not trigger the re-layout after instance is unused', async () => { diff --git a/spec/frontend/editor/utils_spec.js b/spec/frontend/editor/utils_spec.js index 13b8a9804b0..c9d6cbcaaa6 100644 --- a/spec/frontend/editor/utils_spec.js +++ b/spec/frontend/editor/utils_spec.js @@ -1,6 +1,8 @@ import { editor as monacoEditor } from 'monaco-editor'; import { setHTMLFixture, resetHTMLFixture } from 'helpers/fixtures'; import * as utils from '~/editor/utils'; +import languages from '~/ide/lib/languages'; +import { registerLanguages } from '~/ide/utils'; import { DEFAULT_THEME } from '~/ide/lib/themes'; describe('Source Editor utils', () => { @@ -53,13 +55,19 @@ describe('Source Editor utils', () => { }); describe('getBlobLanguage', () => { + beforeEach(() => { + registerLanguages(...languages); + }); + it.each` - path | expectedLanguage - ${'foo.js'} | ${'javascript'} - ${'foo.js.rb'} | ${'ruby'} - ${'foo.bar'} | ${'plaintext'} - ${undefined} | ${'plaintext'} - ${'foo/bar/foo.js'} | ${'javascript'} + path | expectedLanguage + ${'foo.js'} | ${'javascript'} + ${'foo.js.rb'} | ${'ruby'} + ${'foo.bar'} | ${'plaintext'} + ${undefined} | ${'plaintext'} + ${'foo/bar/foo.js'} | ${'javascript'} + ${'CODEOWNERS'} | ${'codeowners'} + ${'.gitlab/CODEOWNERS'} | ${'codeowners'} `(`returns '$expectedLanguage' for '$path' path`, ({ path, expectedLanguage }) => { const language = utils.getBlobLanguage(path); diff --git a/spec/frontend/emoji/awards_app/store/actions_spec.js b/spec/frontend/emoji/awards_app/store/actions_spec.js index 3e9b49707ed..65f2e813f19 100644 --- a/spec/frontend/emoji/awards_app/store/actions_spec.js +++ b/spec/frontend/emoji/awards_app/store/actions_spec.js @@ -119,7 +119,7 @@ describe('Awards app actions', () => { mock.onPost(`${relativeRootUrl || ''}/awards`).reply(HTTP_STATUS_OK, { id: 1 }); }); - it('adds an optimistic award, removes it, 
and then commits ADD_NEW_AWARD', async () => { + it('adds an optimistic award, removes it, and then commits ADD_NEW_AWARD', () => { testAction(actions.toggleAward, null, { path: '/awards', awards: [] }, [ makeOptimisticAddMutation(), makeOptimisticRemoveMutation(), @@ -156,7 +156,7 @@ describe('Awards app actions', () => { mock.onDelete(`${relativeRootUrl || ''}/awards/1`).reply(HTTP_STATUS_OK); }); - it('commits REMOVE_AWARD', async () => { + it('commits REMOVE_AWARD', () => { testAction( actions.toggleAward, 'thumbsup', diff --git a/spec/frontend/environment.js b/spec/frontend/environment.js index 7b160c48501..4e341b2bb2f 100644 --- a/spec/frontend/environment.js +++ b/spec/frontend/environment.js @@ -21,8 +21,17 @@ class CustomEnvironment extends TestEnvironment { // https://gitlab.com/gitlab-org/gitlab/-/merge_requests/39496#note_503084332 setGlobalDateToFakeDate(); + const { error: originalErrorFn } = context.console; Object.assign(context.console, { error(...args) { + if ( + args?.[0]?.includes('[Vue warn]: Missing required prop') || + args?.[0]?.includes('[Vue warn]: Invalid prop') + ) { + originalErrorFn.apply(context.console, args); + return; + } + throw new ErrorWithStack( `Unexpected call of console.error() with:\n\n${args.join(', ')}`, this.error, @@ -30,7 +39,7 @@ class CustomEnvironment extends TestEnvironment { }, warn(...args) { - if (args[0].includes('The updateQuery callback for fetchMore is deprecated')) { + if (args?.[0]?.includes('The updateQuery callback for fetchMore is deprecated')) { return; } throw new ErrorWithStack( diff --git a/spec/frontend/environments/deploy_board_component_spec.js b/spec/frontend/environments/deploy_board_component_spec.js index 73a366457fb..f50efada91a 100644 --- a/spec/frontend/environments/deploy_board_component_spec.js +++ b/spec/frontend/environments/deploy_board_component_spec.js @@ -61,7 +61,7 @@ describe('Deploy Board', () => { const icon = iconSpan.findComponent(GlIcon); 
expect(tooltip.props('target')()).toBe(iconSpan.element); - expect(icon.props('name')).toBe('question'); + expect(icon.props('name')).toBe('question-o'); }); it('renders the canary weight selector', () => { @@ -116,7 +116,7 @@ describe('Deploy Board', () => { const icon = iconSpan.findComponent(GlIcon); expect(tooltip.props('target')()).toBe(iconSpan.element); - expect(icon.props('name')).toBe('question'); + expect(icon.props('name')).toBe('question-o'); }); it('renders the canary weight selector', () => { diff --git a/spec/frontend/environments/environment_actions_spec.js b/spec/frontend/environments/environment_actions_spec.js index 3c9b4144e45..dcfefbb2072 100644 --- a/spec/frontend/environments/environment_actions_spec.js +++ b/spec/frontend/environments/environment_actions_spec.js @@ -1,14 +1,8 @@ -import { GlDropdown, GlDropdownItem, GlLoadingIcon, GlIcon } from '@gitlab/ui'; -import { shallowMount, mount } from '@vue/test-utils'; -import Vue, { nextTick } from 'vue'; -import VueApollo from 'vue-apollo'; +import { GlDisclosureDropdown, GlDisclosureDropdownItem, GlIcon } from '@gitlab/ui'; +import { mount } from '@vue/test-utils'; import { TEST_HOST } from 'helpers/test_constants'; -import { createMockDirective, getBinding } from 'helpers/vue_mock_directive'; import EnvironmentActions from '~/environments/components/environment_actions.vue'; -import eventHub from '~/environments/event_hub'; -import actionMutation from '~/environments/graphql/mutations/action.mutation.graphql'; import { confirmAction } from '~/lib/utils/confirm_via_gl_modal/confirm_via_gl_modal'; -import createMockApollo from 'helpers/mock_apollo_helper'; jest.mock('~/lib/utils/confirm_via_gl_modal/confirm_via_gl_modal'); @@ -29,15 +23,9 @@ const expiredJobAction = { describe('EnvironmentActions Component', () => { let wrapper; - const findEnvironmentActionsButton = () => - wrapper.find('[data-testid="environment-actions-button"]'); - - function createComponent(props, { mountFn = shallowMount, 
options = {} } = {}) { - wrapper = mountFn(EnvironmentActions, { + function createComponent(props, { options = {} } = {}) { + wrapper = mount(EnvironmentActions, { propsData: { actions: [], ...props }, - directives: { - GlTooltip: createMockDirective('gl-tooltip'), - }, ...options, }); } @@ -46,9 +34,10 @@ describe('EnvironmentActions Component', () => { return createComponent({ actions: [scheduledJobAction, expiredJobAction] }, opts); } + const findDropdownItems = () => wrapper.findAllComponents(GlDisclosureDropdownItem); const findDropdownItem = (action) => { - const buttons = wrapper.findAllComponents(GlDropdownItem); - return buttons.filter((button) => button.text().startsWith(action.name)).at(0); + const items = findDropdownItems(); + return items.filter((item) => item.text().startsWith(action.name)).at(0); }; afterEach(() => { @@ -56,19 +45,15 @@ describe('EnvironmentActions Component', () => { }); it('should render a dropdown button with 2 icons', () => { - createComponent({}, { mountFn: mount }); - expect(wrapper.findComponent(GlDropdown).findAllComponents(GlIcon).length).toBe(2); - }); - - it('should render a dropdown button with aria-label description', () => { createComponent(); - expect(wrapper.findComponent(GlDropdown).attributes('aria-label')).toBe('Deploy to...'); + expect(wrapper.findComponent(GlDisclosureDropdown).findAllComponents(GlIcon).length).toBe(2); }); - it('should render a tooltip', () => { + it('should render a dropdown button with aria-label description', () => { createComponent(); - const tooltip = getBinding(findEnvironmentActionsButton().element, 'gl-tooltip'); - expect(tooltip).toBeDefined(); + expect(wrapper.findComponent(GlDisclosureDropdown).attributes('aria-label')).toBe( + 'Deploy to...', + ); }); describe('manual actions', () => { @@ -93,96 +78,31 @@ describe('EnvironmentActions Component', () => { }); it('should render a dropdown with the provided list of actions', () => { - 
expect(wrapper.findAllComponents(GlDropdownItem)).toHaveLength(actions.length); + expect(findDropdownItems()).toHaveLength(actions.length); }); it("should render a disabled action when it's not playable", () => { - const dropdownItems = wrapper.findAllComponents(GlDropdownItem); + const dropdownItems = findDropdownItems(); const lastDropdownItem = dropdownItems.at(dropdownItems.length - 1); - expect(lastDropdownItem.attributes('disabled')).toBe('true'); + expect(lastDropdownItem.find('button').attributes('disabled')).toBe('disabled'); }); }); describe('scheduled jobs', () => { - let emitSpy; - - const clickAndConfirm = async ({ confirm = true } = {}) => { - confirmAction.mockResolvedValueOnce(confirm); - - findDropdownItem(scheduledJobAction).vm.$emit('click'); - await nextTick(); - }; - beforeEach(() => { - emitSpy = jest.fn(); - eventHub.$on('postAction', emitSpy); jest.spyOn(Date, 'now').mockImplementation(() => new Date('2063-04-04T00:42:00Z').getTime()); }); - describe('when postAction event is confirmed', () => { - beforeEach(async () => { - createComponentWithScheduledJobs({ mountFn: mount }); - clickAndConfirm(); - }); - - it('emits postAction event', () => { - expect(confirmAction).toHaveBeenCalled(); - expect(emitSpy).toHaveBeenCalledWith({ endpoint: scheduledJobAction.playPath }); - }); - - it('should render a dropdown button with a loading icon', () => { - expect(wrapper.findComponent(GlLoadingIcon).isVisible()).toBe(true); - }); - }); - - describe('when postAction event is denied', () => { - beforeEach(async () => { - createComponentWithScheduledJobs({ mountFn: mount }); - clickAndConfirm({ confirm: false }); - }); - - it('does not emit postAction event if confirmation is cancelled', () => { - expect(confirmAction).toHaveBeenCalled(); - expect(emitSpy).not.toHaveBeenCalled(); - }); - }); - it('displays the remaining time in the dropdown', () => { + confirmAction.mockResolvedValueOnce(true); createComponentWithScheduledJobs(); 
expect(findDropdownItem(scheduledJobAction).text()).toContain('24:00:00'); }); it('displays 00:00:00 for expired jobs in the dropdown', () => { + confirmAction.mockResolvedValueOnce(true); createComponentWithScheduledJobs(); expect(findDropdownItem(expiredJobAction).text()).toContain('00:00:00'); }); }); - - describe('graphql', () => { - Vue.use(VueApollo); - - const action = { - name: 'bar', - play_path: 'https://gitlab.com/play', - }; - - let mockApollo; - - beforeEach(() => { - mockApollo = createMockApollo(); - createComponent( - { actions: [action], graphql: true }, - { options: { apolloProvider: mockApollo } }, - ); - }); - - it('should trigger a graphql mutation on click', () => { - jest.spyOn(mockApollo.defaultClient, 'mutate'); - findDropdownItem(action).vm.$emit('click'); - expect(mockApollo.defaultClient.mutate).toHaveBeenCalledWith({ - mutation: actionMutation, - variables: { action }, - }); - }); - }); }); diff --git a/spec/frontend/environments/environment_details/components/deployment_actions_spec.js b/spec/frontend/environments/environment_details/components/deployment_actions_spec.js index 725c8c6479e..a0eb4c494e6 100644 --- a/spec/frontend/environments/environment_details/components/deployment_actions_spec.js +++ b/spec/frontend/environments/environment_details/components/deployment_actions_spec.js @@ -1,8 +1,15 @@ +import Vue from 'vue'; +import VueApollo from 'vue-apollo'; +import { GlButton } from '@gitlab/ui'; import DeploymentActions from '~/environments/environment_details/components/deployment_actions.vue'; import { mountExtended } from 'helpers/vue_test_utils_helper'; +import createMockApollo from 'helpers/mock_apollo_helper'; +import waitForPromises from 'helpers/wait_for_promises'; +import { translations } from '~/environments/environment_details/constants'; import ActionsComponent from '~/environments/components/environment_actions.vue'; describe('~/environments/environment_details/components/deployment_actions.vue', () => { + 
Vue.use(VueApollo); let wrapper; const actionsData = [ @@ -14,34 +21,116 @@ describe('~/environments/environment_details/components/deployment_actions.vue', }, ]; - const createWrapper = ({ actions }) => { + const rollbackData = { + id: '123', + name: 'enironment-name', + lastDeployment: { + commit: { + shortSha: 'abcd1234', + }, + isLast: true, + }, + retryUrl: 'deployment/retry', + }; + + const mockSetEnvironmentToRollback = jest.fn(); + const mockResolvers = { + Mutation: { + setEnvironmentToRollback: mockSetEnvironmentToRollback, + }, + }; + const createWrapper = ({ actions, rollback, approvalEnvironment }) => { + const mockApollo = createMockApollo([], mockResolvers); return mountExtended(DeploymentActions, { + apolloProvider: mockApollo, + provide: { + projectPath: 'fullProjectPath', + }, propsData: { actions, + rollback, + approvalEnvironment, }, }); }; - describe('when there is no actions provided', () => { - beforeEach(() => { - wrapper = createWrapper({ actions: [] }); + const findRollbackButton = () => wrapper.findComponent(GlButton); + + describe('deployment actions', () => { + describe('when there is no actions provided', () => { + beforeEach(() => { + wrapper = createWrapper({ actions: [] }); + }); + + it('should not render actions component', () => { + const actionsComponent = wrapper.findComponent(ActionsComponent); + expect(actionsComponent.exists()).toBe(false); + }); }); - it('should not render actions component', () => { - const actionsComponent = wrapper.findComponent(ActionsComponent); - expect(actionsComponent.exists()).toBe(false); + describe('when there are actions provided', () => { + beforeEach(() => { + wrapper = createWrapper({ actions: actionsData }); + }); + + it('should render actions component', () => { + const actionsComponent = wrapper.findComponent(ActionsComponent); + expect(actionsComponent.exists()).toBe(true); + expect(actionsComponent.props().actions).toBe(actionsData); + }); }); }); - describe('when there are actions 
provided', () => { - beforeEach(() => { - wrapper = createWrapper({ actions: actionsData }); + describe('rollback action', () => { + describe('when there is no rollback data available', () => { + it('should not show a rollback button', () => { + wrapper = createWrapper({ actions: [] }); + const button = findRollbackButton(); + expect(button.exists()).toBe(false); + }); }); - it('should render actions component', () => { - const actionsComponent = wrapper.findComponent(ActionsComponent); - expect(actionsComponent.exists()).toBe(true); - expect(actionsComponent.props().actions).toBe(actionsData); - }); + describe.each([ + { isLast: true, buttonTitle: translations.redeployButtonTitle, icon: 'repeat' }, + { isLast: false, buttonTitle: translations.rollbackButtonTitle, icon: 'redo' }, + ])( + `when there is a rollback data available and the deployment isLast=$isLast`, + ({ isLast, buttonTitle, icon }) => { + let rollback; + beforeEach(() => { + const lastDeployment = { ...rollbackData.lastDeployment, isLast }; + rollback = { ...rollbackData }; + rollback.lastDeployment = lastDeployment; + wrapper = createWrapper({ actions: [], rollback }); + }); + + it('should show the rollback button', () => { + const button = findRollbackButton(); + expect(button.exists()).toBe(true); + }); + + it(`the rollback button should have "${icon}" icon`, () => { + const button = findRollbackButton(); + expect(button.props().icon).toBe(icon); + }); + + it(`the rollback button should have "${buttonTitle}" title`, () => { + const button = findRollbackButton(); + expect(button.attributes().title).toBe(buttonTitle); + }); + + it(`the rollback button click should send correct mutation`, async () => { + const button = findRollbackButton(); + button.vm.$emit('click'); + await waitForPromises(); + expect(mockSetEnvironmentToRollback).toHaveBeenCalledWith( + expect.anything(), + { environment: rollback }, + expect.anything(), + expect.anything(), + ); + }); + }, + ); }); }); diff --git 
a/spec/frontend/environments/environment_details/page_spec.js b/spec/frontend/environments/environment_details/page_spec.js index 3a1a3238abe..ed7e0feb6ed 100644 --- a/spec/frontend/environments/environment_details/page_spec.js +++ b/spec/frontend/environments/environment_details/page_spec.js @@ -15,19 +15,40 @@ describe('~/environments/environment_details/page.vue', () => { let wrapper; + const emptyEnvironmentToRollbackData = { id: '', name: '', lastDeployment: null, retryUrl: '' }; + const environmentToRollbackMock = jest.fn(); + + const mockResolvers = { + Query: { + environmentToRollback: environmentToRollbackMock, + }, + }; + const defaultWrapperParameters = { resolvedData: resolvedEnvironmentDetails, + environmentToRollbackData: emptyEnvironmentToRollbackData, }; - const createWrapper = ({ resolvedData } = defaultWrapperParameters) => { - const mockApollo = createMockApollo([ - [getEnvironmentDetails, jest.fn().mockResolvedValue(resolvedData)], - ]); + const createWrapper = ({ + resolvedData, + environmentToRollbackData, + } = defaultWrapperParameters) => { + const mockApollo = createMockApollo( + [[getEnvironmentDetails, jest.fn().mockResolvedValue(resolvedData)]], + mockResolvers, + ); + environmentToRollbackMock.mockReturnValue( + environmentToRollbackData || emptyEnvironmentToRollbackData, + ); + const projectFullPath = 'gitlab-group/test-project'; return mountExtended(EnvironmentsDetailPage, { apolloProvider: mockApollo, + provide: { + projectPath: projectFullPath, + }, propsData: { - projectFullPath: 'gitlab-group/test-project', + projectFullPath, environmentName: 'test-environment-name', }, }); @@ -48,7 +69,7 @@ describe('~/environments/environment_details/page.vue', () => { wrapper = createWrapper(); await waitForPromises(); }); - it('should render a table when query is loaded', async () => { + it('should render a table when query is loaded', () => { expect(wrapper.findComponent(GlLoadingIcon).exists()).not.toBe(true); 
expect(wrapper.findComponent(GlTableLite).exists()).toBe(true); }); @@ -60,7 +81,7 @@ describe('~/environments/environment_details/page.vue', () => { await waitForPromises(); }); - it('should render empty state component', async () => { + it('should render empty state component', () => { expect(wrapper.findComponent(GlTableLite).exists()).toBe(false); expect(wrapper.findComponent(EmptyState).exists()).toBe(true); }); diff --git a/spec/frontend/environments/environment_folder_spec.js b/spec/frontend/environments/environment_folder_spec.js index 279ff32a13d..4716f807657 100644 --- a/spec/frontend/environments/environment_folder_spec.js +++ b/spec/frontend/environments/environment_folder_spec.js @@ -38,7 +38,7 @@ describe('~/environments/components/environments_folder.vue', () => { provide: { helpPagePath: '/help', projectId: '1' }, }); - beforeEach(async () => { + beforeEach(() => { environmentFolderMock = jest.fn(); [nestedEnvironment] = resolvedEnvironmentsApp.environments; environmentFolderMock.mockReturnValue(resolvedFolder); diff --git a/spec/frontend/environments/environment_stop_spec.js b/spec/frontend/environments/environment_stop_spec.js index 851e24c22cc..3e27b8822e1 100644 --- a/spec/frontend/environments/environment_stop_spec.js +++ b/spec/frontend/environments/environment_stop_spec.js @@ -73,7 +73,7 @@ describe('Stop Component', () => { }); }); - it('should show a loading icon if the environment is currently stopping', async () => { + it('should show a loading icon if the environment is currently stopping', () => { expect(findButton().props('loading')).toBe(true); }); }); diff --git a/spec/frontend/environments/environments_app_spec.js b/spec/frontend/environments/environments_app_spec.js index a843f801da5..6f2ee6f06cd 100644 --- a/spec/frontend/environments/environments_app_spec.js +++ b/spec/frontend/environments/environments_app_spec.js @@ -422,7 +422,7 @@ describe('~/environments/components/environments_app.vue', () => { ); }); - it('should sync 
search term from query params on load', async () => { + it('should sync search term from query params on load', () => { expect(searchBox.element.value).toBe('prod'); }); }); diff --git a/spec/frontend/environments/graphql/mock_data.js b/spec/frontend/environments/graphql/mock_data.js index b5435990042..8d91ffe5ffc 100644 --- a/spec/frontend/environments/graphql/mock_data.js +++ b/spec/frontend/environments/graphql/mock_data.js @@ -801,6 +801,14 @@ export const resolvedDeploymentDetails = { export const agent = { project: 'agent-project', - id: '1', + id: 'gid://gitlab/ClusterAgent/1', name: 'agent-name', + kubernetesNamespace: 'agent-namespace', }; + +const runningPod = { status: { phase: 'Running' } }; +const pendingPod = { status: { phase: 'Pending' } }; +const succeededPod = { status: { phase: 'Succeeded' } }; +const failedPod = { status: { phase: 'Failed' } }; + +export const k8sPodsMock = [runningPod, runningPod, pendingPod, succeededPod, failedPod, failedPod]; diff --git a/spec/frontend/environments/graphql/resolvers_spec.js b/spec/frontend/environments/graphql/resolvers_spec.js index 2c223d3a1a7..c66844f5f24 100644 --- a/spec/frontend/environments/graphql/resolvers_spec.js +++ b/spec/frontend/environments/graphql/resolvers_spec.js @@ -1,4 +1,5 @@ import MockAdapter from 'axios-mock-adapter'; +import { CoreV1Api } from '@gitlab/cluster-client'; import { s__ } from '~/locale'; import axios from '~/lib/utils/axios_utils'; import { HTTP_STATUS_INTERNAL_SERVER_ERROR, HTTP_STATUS_OK } from '~/lib/utils/http_status'; @@ -17,6 +18,7 @@ import { resolvedEnvironment, folder, resolvedFolder, + k8sPodsMock, } from './mock_data'; const ENDPOINT = `${TEST_HOST}/environments`; @@ -143,6 +145,61 @@ describe('~/frontend/environments/graphql/resolvers', () => { expect(environmentFolder).toEqual(resolvedFolder); }); }); + describe('k8sPods', () => { + const namespace = 'default'; + const configuration = { + basePath: 'kas-proxy/', + baseOptions: { + headers: { 
'GitLab-Agent-Id': '1' }, + }, + }; + + const mockPodsListFn = jest.fn().mockImplementation(() => { + return Promise.resolve({ + data: { + items: k8sPodsMock, + }, + }); + }); + + const mockNamespacedPodsListFn = jest.fn().mockImplementation(mockPodsListFn); + const mockAllPodsListFn = jest.fn().mockImplementation(mockPodsListFn); + + beforeEach(() => { + jest + .spyOn(CoreV1Api.prototype, 'listCoreV1NamespacedPod') + .mockImplementation(mockNamespacedPodsListFn); + jest + .spyOn(CoreV1Api.prototype, 'listCoreV1PodForAllNamespaces') + .mockImplementation(mockAllPodsListFn); + }); + + it('should request namespaced pods from the cluster_client library if namespace is specified', async () => { + const pods = await mockResolvers.Query.k8sPods(null, { configuration, namespace }); + + expect(mockNamespacedPodsListFn).toHaveBeenCalledWith(namespace); + expect(mockAllPodsListFn).not.toHaveBeenCalled(); + + expect(pods).toEqual(k8sPodsMock); + }); + it('should request all pods from the cluster_client library if namespace is not specified', async () => { + const pods = await mockResolvers.Query.k8sPods(null, { configuration, namespace: '' }); + + expect(mockAllPodsListFn).toHaveBeenCalled(); + expect(mockNamespacedPodsListFn).not.toHaveBeenCalled(); + + expect(pods).toEqual(k8sPodsMock); + }); + it('should throw an error if the API call fails', async () => { + jest + .spyOn(CoreV1Api.prototype, 'listCoreV1PodForAllNamespaces') + .mockRejectedValue(new Error('API error')); + + await expect(mockResolvers.Query.k8sPods(null, { configuration })).rejects.toThrow( + 'API error', + ); + }); + }); describe('stopEnvironment', () => { it('should post to the stop environment path', async () => { mock.onPost(ENDPOINT).reply(HTTP_STATUS_OK); diff --git a/spec/frontend/environments/helpers/__snapshots__/deployment_data_transformation_helper_spec.js.snap b/spec/frontend/environments/helpers/__snapshots__/deployment_data_transformation_helper_spec.js.snap index 326a28bd769..ec0fe0c5541 
100644 --- a/spec/frontend/environments/helpers/__snapshots__/deployment_data_transformation_helper_spec.js.snap +++ b/spec/frontend/environments/helpers/__snapshots__/deployment_data_transformation_helper_spec.js.snap @@ -26,11 +26,37 @@ Object { }, "created": "2022-10-17T07:44:17Z", "deployed": "2022-10-17T07:44:43Z", + "deploymentApproval": Object { + "isApprovalActionAvailable": false, + }, "id": "31", "job": Object { "label": "deploy-prod (#860)", "webPath": "/gitlab-org/pipelinestest/-/jobs/860", }, + "rollback": Object { + "id": "gid://gitlab/Deployment/76", + "lastDeployment": Object { + "commit": Object { + "author": Object { + "avatarUrl": "/uploads/-/system/user/avatar/1/avatar.png", + "id": "gid://gitlab/User/1", + "name": "Administrator", + "webUrl": "http://gdk.test:3000/root", + }, + "authorEmail": "admin@example.com", + "authorGravatar": "https://www.gravatar.com/avatar/e64c7d89f26bd1972efa854d13d7dd61?s=80&d=identicon", + "authorName": "Administrator", + "id": "gid://gitlab/CommitPresenter/0cb48dd5deddb7632fd7c3defb16075fc6c3ca74", + "message": "Update .gitlab-ci.yml file", + "shortId": "0cb48dd5", + "webUrl": "http://gdk.test:3000/gitlab-org/pipelinestest/-/commit/0cb48dd5deddb7632fd7c3defb16075fc6c3ca74", + }, + "isLast": false, + }, + "name": undefined, + "retryUrl": "/gitlab-org/pipelinestest/-/jobs/860/retry", + }, "status": "success", "triggerer": Object { "avatarUrl": "/uploads/-/system/user/avatar/1/avatar.png", @@ -60,8 +86,12 @@ Object { }, "created": "2022-10-17T07:44:17Z", "deployed": "2022-10-17T07:44:43Z", + "deploymentApproval": Object { + "isApprovalActionAvailable": false, + }, "id": "31", "job": undefined, + "rollback": null, "status": "success", "triggerer": Object { "avatarUrl": "/uploads/-/system/user/avatar/1/avatar.png", @@ -91,8 +121,12 @@ Object { }, "created": "2022-10-17T07:44:17Z", "deployed": "", + "deploymentApproval": Object { + "isApprovalActionAvailable": false, + }, "id": "31", "job": null, + "rollback": null, 
"status": "success", "triggerer": Object { "avatarUrl": "/uploads/-/system/user/avatar/1/avatar.png", diff --git a/spec/frontend/environments/kubernetes_overview_spec.js b/spec/frontend/environments/kubernetes_overview_spec.js index 8673c657760..1912fd4a82b 100644 --- a/spec/frontend/environments/kubernetes_overview_spec.js +++ b/spec/frontend/environments/kubernetes_overview_spec.js @@ -1,19 +1,28 @@ import { nextTick } from 'vue'; import { shallowMount } from '@vue/test-utils'; -import { GlCollapse, GlButton } from '@gitlab/ui'; +import { GlCollapse, GlButton, GlAlert } from '@gitlab/ui'; import KubernetesOverview from '~/environments/components/kubernetes_overview.vue'; import KubernetesAgentInfo from '~/environments/components/kubernetes_agent_info.vue'; - -const agent = { - project: 'agent-project', - id: '1', - name: 'agent-name', -}; +import KubernetesPods from '~/environments/components/kubernetes_pods.vue'; +import { agent } from './graphql/mock_data'; +import { mockKasTunnelUrl } from './mock_data'; const propsData = { agentId: agent.id, agentName: agent.name, agentProjectPath: agent.project, + namespace: agent.kubernetesNamespace, +}; + +const provide = { + kasTunnelUrl: mockKasTunnelUrl, +}; + +const configuration = { + basePath: provide.kasTunnelUrl.replace(/\/$/, ''), + baseOptions: { + headers: { 'GitLab-Agent-Id': '1' }, + }, }; describe('~/environments/components/kubernetes_overview.vue', () => { @@ -22,10 +31,13 @@ describe('~/environments/components/kubernetes_overview.vue', () => { const findCollapse = () => wrapper.findComponent(GlCollapse); const findCollapseButton = () => wrapper.findComponent(GlButton); const findAgentInfo = () => wrapper.findComponent(KubernetesAgentInfo); + const findKubernetesPods = () => wrapper.findComponent(KubernetesPods); + const findAlert = () => wrapper.findComponent(GlAlert); const createWrapper = () => { wrapper = shallowMount(KubernetesOverview, { propsData, + provide, }); }; @@ -57,6 +69,7 @@ 
describe('~/environments/components/kubernetes_overview.vue', () => { it("doesn't render components when the collapse is not visible", () => { expect(findAgentInfo().exists()).toBe(false); + expect(findKubernetesPods().exists()).toBe(false); }); it('opens on click', async () => { @@ -70,15 +83,40 @@ describe('~/environments/components/kubernetes_overview.vue', () => { }); describe('when section is expanded', () => { - it('renders kubernetes agent info', async () => { + beforeEach(() => { createWrapper(); - await toggleCollapse(); + toggleCollapse(); + }); + it('renders kubernetes agent info', () => { expect(findAgentInfo().props()).toEqual({ agentName: agent.name, agentId: agent.id, agentProjectPath: agent.project, }); }); + + it('renders kubernetes pods', () => { + expect(findKubernetesPods().props()).toEqual({ + namespace: agent.kubernetesNamespace, + configuration, + }); + }); + }); + + describe('on cluster error', () => { + beforeEach(() => { + createWrapper(); + toggleCollapse(); + }); + + it('shows alert with the error message', async () => { + const error = 'Error message from pods'; + + findKubernetesPods().vm.$emit('cluster-error', error); + await nextTick(); + + expect(findAlert().text()).toBe(error); + }); }); }); diff --git a/spec/frontend/environments/kubernetes_pods_spec.js b/spec/frontend/environments/kubernetes_pods_spec.js new file mode 100644 index 00000000000..137309d7853 --- /dev/null +++ b/spec/frontend/environments/kubernetes_pods_spec.js @@ -0,0 +1,114 @@ +import Vue from 'vue'; +import VueApollo from 'vue-apollo'; +import { shallowMount } from '@vue/test-utils'; +import { GlLoadingIcon } from '@gitlab/ui'; +import { GlSingleStat } from '@gitlab/ui/dist/charts'; +import waitForPromises from 'helpers/wait_for_promises'; +import createMockApollo from 'helpers/mock_apollo_helper'; +import KubernetesPods from '~/environments/components/kubernetes_pods.vue'; +import { mockKasTunnelUrl } from './mock_data'; +import { k8sPodsMock } from 
'./graphql/mock_data'; + +Vue.use(VueApollo); + +describe('~/environments/components/kubernetes_pods.vue', () => { + let wrapper; + + const namespace = 'my-kubernetes-namespace'; + const configuration = { + basePath: mockKasTunnelUrl, + baseOptions: { + headers: { 'GitLab-Agent-Id': '1' }, + }, + }; + + const findLoadingIcon = () => wrapper.findComponent(GlLoadingIcon); + const findAllStats = () => wrapper.findAllComponents(GlSingleStat); + const findSingleStat = (at) => findAllStats().at(at); + + const createApolloProvider = () => { + const mockResolvers = { + Query: { + k8sPods: jest.fn().mockReturnValue(k8sPodsMock), + }, + }; + + return createMockApollo([], mockResolvers); + }; + + const createWrapper = (apolloProvider = createApolloProvider()) => { + wrapper = shallowMount(KubernetesPods, { + propsData: { namespace, configuration }, + apolloProvider, + }); + }; + + describe('mounted', () => { + it('shows the loading icon', () => { + createWrapper(); + + expect(findLoadingIcon().exists()).toBe(true); + }); + + it('hides the loading icon when the list of pods loaded', async () => { + createWrapper(); + await waitForPromises(); + + expect(findLoadingIcon().exists()).toBe(false); + }); + }); + + describe('when gets pods data', () => { + it('renders stats', async () => { + createWrapper(); + await waitForPromises(); + + expect(findAllStats()).toHaveLength(4); + }); + + it.each` + count | title | index + ${2} | ${KubernetesPods.i18n.runningPods} | ${0} + ${1} | ${KubernetesPods.i18n.pendingPods} | ${1} + ${1} | ${KubernetesPods.i18n.succeededPods} | ${2} + ${2} | ${KubernetesPods.i18n.failedPods} | ${3} + `( + 'renders stat with title "$title" and count "$count" at index $index', + async ({ count, title, index }) => { + createWrapper(); + await waitForPromises(); + + expect(findSingleStat(index).props()).toMatchObject({ + value: count, + title, + }); + }, + ); + }); + + describe('when gets an error from the cluster_client API', () => { + const error = new 
Error('Error from the cluster_client API'); + const createErroredApolloProvider = () => { + const mockResolvers = { + Query: { + k8sPods: jest.fn().mockRejectedValueOnce(error), + }, + }; + + return createMockApollo([], mockResolvers); + }; + + beforeEach(async () => { + createWrapper(createErroredApolloProvider()); + await waitForPromises(); + }); + + it("doesn't show pods stats", () => { + expect(findAllStats()).toHaveLength(0); + }); + + it('emits an error message', () => { + expect(wrapper.emitted('cluster-error')).toMatchObject([[error]]); + }); + }); +}); diff --git a/spec/frontend/environments/mock_data.js b/spec/frontend/environments/mock_data.js index a6d67c26304..bd2c6b7c892 100644 --- a/spec/frontend/environments/mock_data.js +++ b/spec/frontend/environments/mock_data.js @@ -313,6 +313,8 @@ const createEnvironment = (data = {}) => ({ ...data, }); +const mockKasTunnelUrl = 'https://kas.gitlab.com/k8s-proxy'; + export { environment, environmentsList, @@ -321,4 +323,5 @@ export { tableData, deployBoardMockData, createEnvironment, + mockKasTunnelUrl, }; diff --git a/spec/frontend/environments/new_environment_item_spec.js b/spec/frontend/environments/new_environment_item_spec.js index c04ff896794..b4f5263a151 100644 --- a/spec/frontend/environments/new_environment_item_spec.js +++ b/spec/frontend/environments/new_environment_item_spec.js @@ -7,10 +7,12 @@ import { stubTransition } from 'helpers/stub_transition'; import { formatDate, getTimeago } from '~/lib/utils/datetime_utility'; import { __, s__, sprintf } from '~/locale'; import EnvironmentItem from '~/environments/components/new_environment_item.vue'; +import EnvironmentActions from '~/environments/components/environment_actions.vue'; import Deployment from '~/environments/components/deployment.vue'; import DeployBoardWrapper from '~/environments/components/deploy_board_wrapper.vue'; import KubernetesOverview from '~/environments/components/kubernetes_overview.vue'; import { resolvedEnvironment, 
rolloutStatus, agent } from './graphql/mock_data'; +import { mockKasTunnelUrl } from './mock_data'; Vue.use(VueApollo); @@ -25,11 +27,18 @@ describe('~/environments/components/new_environment_item.vue', () => { mountExtended(EnvironmentItem, { apolloProvider, propsData: { environment: resolvedEnvironment, ...propsData }, - provide: { helpPagePath: '/help', projectId: '1', projectPath: '/1', ...provideData }, + provide: { + helpPagePath: '/help', + projectId: '1', + projectPath: '/1', + kasTunnelUrl: mockKasTunnelUrl, + ...provideData, + }, stubs: { transition: stubTransition() }, }); const findDeployment = () => wrapper.findComponent(Deployment); + const findActions = () => wrapper.findComponent(EnvironmentActions); const findKubernetesOverview = () => wrapper.findComponent(KubernetesOverview); const expandCollapsedSection = async () => { @@ -124,9 +133,7 @@ describe('~/environments/components/new_environment_item.vue', () => { it('shows a dropdown if there are actions to perform', () => { wrapper = createWrapper({ apolloProvider: createApolloProvider() }); - const actions = wrapper.findByRole('button', { name: __('Deploy to...') }); - - expect(actions.exists()).toBe(true); + expect(findActions().exists()).toBe(true); }); it('does not show a dropdown if there are no actions to perform', () => { @@ -140,17 +147,15 @@ describe('~/environments/components/new_environment_item.vue', () => { }, }); - const actions = wrapper.findByRole('button', { name: __('Deploy to...') }); - - expect(actions.exists()).toBe(false); + expect(findActions().exists()).toBe(false); }); it('passes all the actions down to the action component', () => { wrapper = createWrapper({ apolloProvider: createApolloProvider() }); - const action = wrapper.findByRole('menuitem', { name: 'deploy-staging' }); - - expect(action.exists()).toBe(true); + expect(findActions().props('actions')).toMatchObject( + resolvedEnvironment.lastDeployment.manualActions, + ); }); }); @@ -382,6 +387,7 @@ 
describe('~/environments/components/new_environment_item.vue', () => { const button = await expandCollapsedSection(); expect(button.attributes('aria-label')).toBe(__('Collapse')); + expect(button.props('category')).toBe('secondary'); expect(collapse.attributes('visible')).toBe('visible'); expect(icon.props('name')).toBe('chevron-lg-down'); expect(environmentName.classes('gl-font-weight-bold')).toBe(true); @@ -537,6 +543,7 @@ describe('~/environments/components/new_environment_item.vue', () => { agentProjectPath: agent.project, agentName: agent.name, agentId: agent.id, + namespace: agent.kubernetesNamespace, }); }); diff --git a/spec/frontend/environments/stop_stale_environments_modal_spec.js b/spec/frontend/environments/stop_stale_environments_modal_spec.js index ddf6670db12..3d28ceba318 100644 --- a/spec/frontend/environments/stop_stale_environments_modal_spec.js +++ b/spec/frontend/environments/stop_stale_environments_modal_spec.js @@ -40,12 +40,12 @@ describe('~/environments/components/stop_stale_environments_modal.vue', () => { jest.resetAllMocks(); }); - it('sets the correct min and max dates', async () => { + it('sets the correct min and max dates', () => { expect(before.props().minDate.toISOString()).toBe(TEN_YEARS_AGO.toISOString()); expect(before.props().maxDate.toISOString()).toBe(ONE_WEEK_AGO.toISOString()); }); - it('requests cleanup when submit is clicked', async () => { + it('requests cleanup when submit is clicked', () => { mock.onPost().replyOnce(HTTP_STATUS_OK); wrapper.findComponent(GlModal).vm.$emit('primary'); const url = STOP_STALE_ENVIRONMENTS_PATH.replace(':id', 1).replace(':version', 'v4'); diff --git a/spec/frontend/error_tracking/components/error_tracking_list_spec.js b/spec/frontend/error_tracking/components/error_tracking_list_spec.js index 31473899145..c9a2551d11c 100644 --- a/spec/frontend/error_tracking/components/error_tracking_list_spec.js +++ b/spec/frontend/error_tracking/components/error_tracking_list_spec.js @@ -98,12 +98,6 @@ 
describe('ErrorTrackingList', () => { }); }); - afterEach(() => { - if (wrapper) { - wrapper.destroy(); - } - }); - describe('loading', () => { beforeEach(() => { store.state.list.loading = true; @@ -452,32 +446,34 @@ describe('ErrorTrackingList', () => { describe('When pagination is required', () => { describe('and previous cursor is not available', () => { - beforeEach(async () => { + beforeEach(() => { store.state.list.loading = false; delete store.state.list.pagination.previous; mountComponent(); }); - it('disables Prev button in the pagination', async () => { + it('disables Prev button in the pagination', () => { expect(findPagination().props('prevPage')).toBe(null); expect(findPagination().props('nextPage')).not.toBe(null); }); }); describe('and next cursor is not available', () => { - beforeEach(async () => { + beforeEach(() => { store.state.list.loading = false; delete store.state.list.pagination.next; mountComponent(); }); - it('disables Next button in the pagination', async () => { + it('disables Next button in the pagination', () => { expect(findPagination().props('prevPage')).not.toBe(null); expect(findPagination().props('nextPage')).toBe(null); }); }); describe('and the user is not on the first page', () => { describe('and the previous button is clicked', () => { - beforeEach(async () => { + const currentPage = 2; + + beforeEach(() => { store.state.list.loading = false; mountComponent({ stubs: { @@ -485,15 +481,12 @@ describe('ErrorTrackingList', () => { GlPagination: false, }, }); - // setData usage is discouraged. 
See https://gitlab.com/groups/gitlab-org/-/epics/7330 for details - // eslint-disable-next-line no-restricted-syntax - wrapper.setData({ pageValue: 2 }); - await nextTick(); + findPagination().vm.$emit('input', currentPage); }); it('fetches the previous page of results', () => { expect(wrapper.find('.prev-page-item').attributes('aria-disabled')).toBe(undefined); - wrapper.vm.goToPrevPage(); + findPagination().vm.$emit('input', currentPage - 1); expect(actions.fetchPaginatedResults).toHaveBeenCalled(); expect(actions.fetchPaginatedResults).toHaveBeenLastCalledWith( expect.anything(), diff --git a/spec/frontend/feature_flags/components/configure_feature_flags_modal_spec.js b/spec/frontend/feature_flags/components/configure_feature_flags_modal_spec.js index b06e0340991..a12c25c6897 100644 --- a/spec/frontend/feature_flags/components/configure_feature_flags_modal_spec.js +++ b/spec/frontend/feature_flags/components/configure_feature_flags_modal_spec.js @@ -128,11 +128,11 @@ describe('Configure Feature Flags Modal', () => { expect(findSecondaryAction()).toBe(null); }); - it('should not display regenerating instance ID', async () => { + it('should not display regenerating instance ID', () => { expect(findDangerGlAlert().exists()).toBe(false); }); - it('should disable the project name input', async () => { + it('should disable the project name input', () => { expect(findProjectNameInput().exists()).toBe(false); }); }); @@ -142,7 +142,7 @@ describe('Configure Feature Flags Modal', () => { factory({ hasRotateError: true }); }); - it('should display an error', async () => { + it('should display an error', () => { expect(wrapper.findByTestId('rotate-error').exists()).toBe(true); expect(wrapper.find('[name="warning"]').exists()).toBe(true); }); @@ -151,7 +151,7 @@ describe('Configure Feature Flags Modal', () => { describe('is rotating', () => { beforeEach(factory.bind(null, { isRotating: true })); - it('should disable the project name input', async () => { + it('should disable 
the project name input', () => { expect(findProjectNameInput().attributes('disabled')).toBe('true'); }); }); diff --git a/spec/frontend/feature_flags/components/feature_flags_spec.js b/spec/frontend/feature_flags/components/feature_flags_spec.js index 23e86d0eb2f..8492fe7bdde 100644 --- a/spec/frontend/feature_flags/components/feature_flags_spec.js +++ b/spec/frontend/feature_flags/components/feature_flags_spec.js @@ -94,7 +94,7 @@ describe('Feature flags', () => { await limitAlert().vm.$emit('dismiss'); }); - it('hides the alert', async () => { + it('hides the alert', () => { expect(limitAlert().exists()).toBe(false); }); @@ -176,7 +176,7 @@ describe('Feature flags', () => { emptyState = wrapper.findComponent(GlEmptyState); }); - it('should render the empty state', async () => { + it('should render the empty state', () => { expect(emptyState.exists()).toBe(true); }); diff --git a/spec/frontend/filtered_search/dropdown_user_spec.js b/spec/frontend/filtered_search/dropdown_user_spec.js index 02ef813883f..8ddf8390431 100644 --- a/spec/frontend/filtered_search/dropdown_user_spec.js +++ b/spec/frontend/filtered_search/dropdown_user_spec.js @@ -1,4 +1,5 @@ -import { loadHTMLFixture, resetHTMLFixture } from 'helpers/fixtures'; +import htmlMergeRequestList from 'test_fixtures/merge_requests/merge_request_list.html'; +import { setHTMLFixture, resetHTMLFixture } from 'helpers/fixtures'; import DropdownUser from '~/filtered_search/dropdown_user'; import DropdownUtils from '~/filtered_search/dropdown_utils'; import FilteredSearchTokenizer from '~/filtered_search/filtered_search_tokenizer'; @@ -71,13 +72,11 @@ describe('Dropdown User', () => { }); describe('hideCurrentUser', () => { - const fixtureTemplate = 'merge_requests/merge_request_list.html'; - let dropdown; let authorFilterDropdownElement; beforeEach(() => { - loadHTMLFixture(fixtureTemplate); + setHTMLFixture(htmlMergeRequestList); authorFilterDropdownElement = document.querySelector('#js-dropdown-author'); const 
dummyInput = document.createElement('div'); dropdown = new DropdownUser({ diff --git a/spec/frontend/filtered_search/dropdown_utils_spec.js b/spec/frontend/filtered_search/dropdown_utils_spec.js index 2030b45b44c..d8a5b493b7a 100644 --- a/spec/frontend/filtered_search/dropdown_utils_spec.js +++ b/spec/frontend/filtered_search/dropdown_utils_spec.js @@ -1,12 +1,11 @@ -import { loadHTMLFixture, setHTMLFixture, resetHTMLFixture } from 'helpers/fixtures'; +import htmlMergeRequestList from 'test_fixtures/merge_requests/merge_request_list.html'; +import { setHTMLFixture, resetHTMLFixture } from 'helpers/fixtures'; import FilteredSearchSpecHelper from 'helpers/filtered_search_spec_helper'; import DropdownUtils from '~/filtered_search/dropdown_utils'; import FilteredSearchDropdownManager from '~/filtered_search/filtered_search_dropdown_manager'; import IssuableFilteredSearchTokenKeys from '~/filtered_search/issuable_filtered_search_token_keys'; describe('Dropdown Utils', () => { - const issuableListFixture = 'merge_requests/merge_request_list.html'; - describe('getEscapedText', () => { it('should return same word when it has no space', () => { const escaped = DropdownUtils.getEscapedText('textWithoutSpace'); @@ -355,7 +354,7 @@ describe('Dropdown Utils', () => { let authorToken; beforeEach(() => { - loadHTMLFixture(issuableListFixture); + setHTMLFixture(htmlMergeRequestList); authorToken = FilteredSearchSpecHelper.createFilterVisualToken('author', '=', '@user'); const searchTermToken = FilteredSearchSpecHelper.createSearchVisualToken('search term'); diff --git a/spec/frontend/fixtures/api_projects.rb b/spec/frontend/fixtures/api_projects.rb index d1dfd223419..24c47d8d139 100644 --- a/spec/frontend/fixtures/api_projects.rb +++ b/spec/frontend/fixtures/api_projects.rb @@ -6,10 +6,11 @@ RSpec.describe API::Projects, '(JavaScript fixtures)', type: :request do include ApiHelpers include JavaScriptFixturesHelpers - let(:namespace) { create(:namespace, name: 'gitlab-test') } - 
let(:project) { create(:project, :repository, namespace: namespace, path: 'lorem-ipsum') } - let(:project_empty) { create(:project_empty_repo, namespace: namespace, path: 'lorem-ipsum-empty') } - let(:user) { project.owner } + let_it_be(:namespace) { create(:namespace, name: 'gitlab-test') } + let_it_be(:project) { create(:project, :repository, namespace: namespace, path: 'lorem-ipsum') } + let_it_be(:project_empty) { create(:project_empty_repo, namespace: namespace, path: 'lorem-ipsum-empty') } + let_it_be(:user) { project.owner } + let_it_be(:personal_projects) { create_list(:project, 3, namespace: user.namespace, topics: create_list(:topic, 5)) } it 'api/projects/get.json' do get api("/projects/#{project.id}", user) @@ -28,4 +29,10 @@ RSpec.describe API::Projects, '(JavaScript fixtures)', type: :request do expect(response).to be_successful end + + it 'api/users/projects/get.json' do + get api("/users/#{user.id}/projects", user) + + expect(response).to be_successful + end end diff --git a/spec/frontend/fixtures/comment_templates.rb b/spec/frontend/fixtures/comment_templates.rb new file mode 100644 index 00000000000..32f425d7ebd --- /dev/null +++ b/spec/frontend/fixtures/comment_templates.rb @@ -0,0 +1,74 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe GraphQL::Query, type: :request, feature_category: :user_profile do + include JavaScriptFixturesHelpers + include ApiHelpers + include GraphqlHelpers + + let_it_be(:current_user) { create(:user) } + + before do + sign_in(current_user) + end + + context 'when user has no comment templates' do + base_input_path = 'comment_templates/queries/' + base_output_path = 'graphql/comment_templates/' + query_name = 'saved_replies.query.graphql' + + it "#{base_output_path}saved_replies_empty.query.graphql.json" do + query = get_graphql_query_as_string("#{base_input_path}#{query_name}") + + post_graphql(query, current_user: current_user) + + expect_graphql_errors_to_be_empty + end + end + + context 
'when user has comment templates' do + base_input_path = 'comment_templates/queries/' + base_output_path = 'graphql/comment_templates/' + query_name = 'saved_replies.query.graphql' + + it "#{base_output_path}saved_replies.query.graphql.json" do + create(:saved_reply, user: current_user) + create(:saved_reply, user: current_user) + + query = get_graphql_query_as_string("#{base_input_path}#{query_name}") + + post_graphql(query, current_user: current_user) + + expect_graphql_errors_to_be_empty + end + end + + context 'when user creates comment template' do + base_input_path = 'comment_templates/queries/' + base_output_path = 'graphql/comment_templates/' + query_name = 'create_saved_reply.mutation.graphql' + + it "#{base_output_path}#{query_name}.json" do + query = get_graphql_query_as_string("#{base_input_path}#{query_name}") + + post_graphql(query, current_user: current_user, variables: { name: "Test", content: "Test content" }) + + expect_graphql_errors_to_be_empty + end + end + + context 'when user creates comment template and it errors' do + base_input_path = 'comment_templates/queries/' + base_output_path = 'graphql/comment_templates/' + query_name = 'create_saved_reply.mutation.graphql' + + it "#{base_output_path}create_saved_reply_with_errors.mutation.graphql.json" do + query = get_graphql_query_as_string("#{base_input_path}#{query_name}") + + post_graphql(query, current_user: current_user, variables: { name: nil, content: nil }) + + expect(flattened_errors).not_to be_empty + end + end +end diff --git a/spec/frontend/fixtures/issues.rb b/spec/frontend/fixtures/issues.rb index 1e6baf30a76..e85e683b599 100644 --- a/spec/frontend/fixtures/issues.rb +++ b/spec/frontend/fixtures/issues.rb @@ -20,15 +20,6 @@ RSpec.describe Projects::IssuesController, '(JavaScript fixtures)', :with_licens remove_repository(project) end - it 'issues/new-issue.html' do - get :new, params: { - namespace_id: project.namespace.to_param, - project_id: project - } - - expect(response).to 
be_successful - end - it 'issues/open-issue.html' do render_issue(create(:issue, project: project)) end diff --git a/spec/frontend/fixtures/job_artifacts.rb b/spec/frontend/fixtures/job_artifacts.rb index e53cdbbaaa5..6dadd6750f1 100644 --- a/spec/frontend/fixtures/job_artifacts.rb +++ b/spec/frontend/fixtures/job_artifacts.rb @@ -12,7 +12,7 @@ RSpec.describe 'Job Artifacts (GraphQL fixtures)' do let_it_be(:pipeline) { create(:ci_pipeline, project: project) } let_it_be(:user) { create(:user) } - job_artifacts_query_path = 'artifacts/graphql/queries/get_job_artifacts.query.graphql' + job_artifacts_query_path = 'ci/artifacts/graphql/queries/get_job_artifacts.query.graphql' it "graphql/#{job_artifacts_query_path}.json" do create(:ci_build, :failed, :artifacts, :trace_artifact, pipeline: pipeline) diff --git a/spec/frontend/fixtures/jobs.rb b/spec/frontend/fixtures/jobs.rb index 3583beb83c2..376c04cd629 100644 --- a/spec/frontend/fixtures/jobs.rb +++ b/spec/frontend/fixtures/jobs.rb @@ -48,49 +48,49 @@ RSpec.describe 'Jobs (JavaScript fixtures)' do let!(:with_artifact) { create(:ci_build, :success, name: 'with_artifact', job_artifacts: [artifact], pipeline: pipeline) } let!(:with_coverage) { create(:ci_build, :success, name: 'with_coverage', coverage: 40.0, pipeline: pipeline) } - fixtures_path = 'graphql/jobs/' - get_jobs_query = 'get_jobs.query.graphql' - full_path = 'frontend-fixtures/builds-project' + shared_examples 'graphql queries' do |path, jobs_query| + let_it_be(:variables) { {} } - let_it_be(:query) do - get_graphql_query_as_string("jobs/components/table/graphql/queries/#{get_jobs_query}") - end + let_it_be(:query) do + get_graphql_query_as_string("#{path}/#{jobs_query}") + end - it "#{fixtures_path}#{get_jobs_query}.json" do - post_graphql(query, current_user: user, variables: { - fullPath: full_path - }) + fixtures_path = 'graphql/jobs/' - expect_graphql_errors_to_be_empty - end + it "#{fixtures_path}#{jobs_query}.json" do + post_graphql(query, 
current_user: user, variables: variables) - it "#{fixtures_path}#{get_jobs_query}.as_guest.json" do - guest = create(:user) - project.add_guest(guest) + expect_graphql_errors_to_be_empty + end - post_graphql(query, current_user: guest, variables: { - fullPath: full_path - }) + it "#{fixtures_path}#{jobs_query}.as_guest.json" do + guest = create(:user) + project.add_guest(guest) - expect_graphql_errors_to_be_empty - end + post_graphql(query, current_user: guest, variables: variables) - it "#{fixtures_path}#{get_jobs_query}.paginated.json" do - post_graphql(query, current_user: user, variables: { - fullPath: full_path, - first: 2 - }) + expect_graphql_errors_to_be_empty + end - expect_graphql_errors_to_be_empty + it "#{fixtures_path}#{jobs_query}.paginated.json" do + post_graphql(query, current_user: user, variables: variables.merge({ first: 2 })) + + expect_graphql_errors_to_be_empty + end + + it "#{fixtures_path}#{jobs_query}.empty.json" do + post_graphql(query, current_user: user, variables: variables.merge({ first: 0 })) + + expect_graphql_errors_to_be_empty + end end - it "#{fixtures_path}#{get_jobs_query}.empty.json" do - post_graphql(query, current_user: user, variables: { - fullPath: full_path, - first: 0 - }) + it_behaves_like 'graphql queries', 'jobs/components/table/graphql/queries', 'get_jobs.query.graphql' do + let(:variables) { { fullPath: 'frontend-fixtures/builds-project' } } + end - expect_graphql_errors_to_be_empty + it_behaves_like 'graphql queries', 'pages/admin/jobs/components/table/graphql/queries', 'get_all_jobs.query.graphql' do + let(:user) { create(:admin) } end end diff --git a/spec/frontend/fixtures/milestones.rb b/spec/frontend/fixtures/milestones.rb new file mode 100644 index 00000000000..5e39dcf190a --- /dev/null +++ b/spec/frontend/fixtures/milestones.rb @@ -0,0 +1,43 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe Projects::MilestonesController, '(JavaScript fixtures)', :with_license, feature_category: 
:team_planning, type: :controller do + include JavaScriptFixturesHelpers + + let_it_be(:user) { create(:user, feed_token: 'feedtoken:coldfeed') } + let_it_be(:namespace) { create(:namespace, name: 'frontend-fixtures') } + let_it_be(:project) { create(:project_empty_repo, namespace: namespace, path: 'milestones-project') } + + render_views + + before do + project.add_maintainer(user) + sign_in(user) + end + + after do + remove_repository(project) + end + + it 'milestones/new-milestone.html' do + get :new, params: { + namespace_id: project.namespace.to_param, + project_id: project + } + + expect(response).to be_successful + end + + private + + def render_milestone(milestone) + get :show, params: { + namespace_id: project.namespace.to_param, + project_id: project, + id: milestone.to_param + } + + expect(response).to be_successful + end +end diff --git a/spec/frontend/fixtures/pipelines.rb b/spec/frontend/fixtures/pipelines.rb index 768934d6278..24a6f6f7de6 100644 --- a/spec/frontend/fixtures/pipelines.rb +++ b/spec/frontend/fixtures/pipelines.rb @@ -3,6 +3,8 @@ require 'spec_helper' RSpec.describe Projects::PipelinesController, '(JavaScript fixtures)', type: :controller do + include ApiHelpers + include GraphqlHelpers include JavaScriptFixturesHelpers let_it_be(:namespace) { create(:namespace, name: 'frontend-fixtures') } @@ -56,4 +58,27 @@ RSpec.describe Projects::PipelinesController, '(JavaScript fixtures)', type: :co expect(response).to be_successful end + + describe GraphQL::Query, type: :request do + fixtures_path = 'graphql/pipelines/' + get_pipeline_actions_query = 'get_pipeline_actions.query.graphql' + + let!(:pipeline_with_manual_actions) { create(:ci_pipeline, project: project, user: user) } + let!(:build_scheduled) { create(:ci_build, :scheduled, pipeline: pipeline_with_manual_actions, stage: 'test') } + let!(:build_manual) { create(:ci_build, :manual, pipeline: pipeline_with_manual_actions, stage: 'build') } + let!(:build_manual_cannot_play) do + 
create(:ci_build, :manual, :skipped, pipeline: pipeline_with_manual_actions, stage: 'build') + end + + let_it_be(:query) do + get_graphql_query_as_string("pipelines/graphql/queries/#{get_pipeline_actions_query}") + end + + it "#{fixtures_path}#{get_pipeline_actions_query}.json" do + post_graphql(query, current_user: user, + variables: { fullPath: project.full_path, iid: pipeline_with_manual_actions.iid }) + + expect_graphql_errors_to_be_empty + end + end end diff --git a/spec/frontend/fixtures/projects.rb b/spec/frontend/fixtures/projects.rb index 2ccf2c0392f..8cd651c5b36 100644 --- a/spec/frontend/fixtures/projects.rb +++ b/spec/frontend/fixtures/projects.rb @@ -67,7 +67,7 @@ RSpec.describe 'Projects (JavaScript fixtures)', type: :controller do end end - describe 'Storage', feature_category: :subscription_cost_management do + describe 'Storage', feature_category: :consumables_cost_management do describe GraphQL::Query, type: :request do include GraphqlHelpers context 'project storage statistics query' do diff --git a/spec/frontend/fixtures/runner.rb b/spec/frontend/fixtures/runner.rb index 1581bc58289..099df607487 100644 --- a/spec/frontend/fixtures/runner.rb +++ b/spec/frontend/fixtures/runner.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe 'Runner (JavaScript fixtures)' do +RSpec.describe 'Runner (JavaScript fixtures)', feature_category: :runner_fleet do include AdminModeHelper include ApiHelpers include JavaScriptFixturesHelpers @@ -13,7 +13,7 @@ RSpec.describe 'Runner (JavaScript fixtures)' do let_it_be(:project) { create(:project, :repository, :public) } let_it_be(:project_2) { create(:project, :repository, :public) } - let_it_be(:runner) { create(:ci_runner, :instance, description: 'My Runner', version: '1.0.0') } + let_it_be(:runner) { create(:ci_runner, :instance, description: 'My Runner', creator: admin, version: '1.0.0') } let_it_be(:group_runner) { create(:ci_runner, :group, groups: [group], version: '2.0.0') } let_it_be(:group_runner_2) { 
create(:ci_runner, :group, groups: [group], version: '2.0.0') } let_it_be(:project_runner) { create(:ci_runner, :project, projects: [project, project_2], version: '2.0.0') } @@ -58,6 +58,13 @@ RSpec.describe 'Runner (JavaScript fixtures)' do expect_graphql_errors_to_be_empty end + + it "#{fixtures_path}#{all_runners_query}.with_creator.json" do + # "last: 1" fetches the first runner created, with admin as "creator" + post_graphql(query, current_user: admin, variables: { last: 1 }) + + expect_graphql_errors_to_be_empty + end end describe 'all_runners_count.query.graphql', type: :request do @@ -169,14 +176,17 @@ RSpec.describe 'Runner (JavaScript fixtures)' do get_graphql_query_as_string("#{query_path}#{runner_create_mutation}") end - it "#{fixtures_path}#{runner_create_mutation}.json" do - post_graphql(query, current_user: admin, variables: { - input: { - description: 'My dummy runner' - } - }) + context 'with runnerType set to INSTANCE_TYPE' do + it "#{fixtures_path}#{runner_create_mutation}.json" do + post_graphql(query, current_user: admin, variables: { + input: { + runnerType: 'INSTANCE_TYPE', + description: 'My dummy runner' + } + }) - expect_graphql_errors_to_be_empty + expect_graphql_errors_to_be_empty + end end end end diff --git a/spec/frontend/fixtures/saved_replies.rb b/spec/frontend/fixtures/saved_replies.rb deleted file mode 100644 index 613e4a1b447..00000000000 --- a/spec/frontend/fixtures/saved_replies.rb +++ /dev/null @@ -1,74 +0,0 @@ -# frozen_string_literal: true - -require 'spec_helper' - -RSpec.describe GraphQL::Query, type: :request, feature_category: :user_profile do - include JavaScriptFixturesHelpers - include ApiHelpers - include GraphqlHelpers - - let_it_be(:current_user) { create(:user) } - - before do - sign_in(current_user) - end - - context 'when user has no saved replies' do - base_input_path = 'saved_replies/queries/' - base_output_path = 'graphql/saved_replies/' - query_name = 'saved_replies.query.graphql' - - it 
"#{base_output_path}saved_replies_empty.query.graphql.json" do - query = get_graphql_query_as_string("#{base_input_path}#{query_name}") - - post_graphql(query, current_user: current_user) - - expect_graphql_errors_to_be_empty - end - end - - context 'when user has saved replies' do - base_input_path = 'saved_replies/queries/' - base_output_path = 'graphql/saved_replies/' - query_name = 'saved_replies.query.graphql' - - it "#{base_output_path}saved_replies.query.graphql.json" do - create(:saved_reply, user: current_user) - create(:saved_reply, user: current_user) - - query = get_graphql_query_as_string("#{base_input_path}#{query_name}") - - post_graphql(query, current_user: current_user) - - expect_graphql_errors_to_be_empty - end - end - - context 'when user creates saved reply' do - base_input_path = 'saved_replies/queries/' - base_output_path = 'graphql/saved_replies/' - query_name = 'create_saved_reply.mutation.graphql' - - it "#{base_output_path}#{query_name}.json" do - query = get_graphql_query_as_string("#{base_input_path}#{query_name}") - - post_graphql(query, current_user: current_user, variables: { name: "Test", content: "Test content" }) - - expect_graphql_errors_to_be_empty - end - end - - context 'when user creates saved reply and it errors' do - base_input_path = 'saved_replies/queries/' - base_output_path = 'graphql/saved_replies/' - query_name = 'create_saved_reply.mutation.graphql' - - it "#{base_output_path}create_saved_reply_with_errors.mutation.graphql.json" do - query = get_graphql_query_as_string("#{base_input_path}#{query_name}") - - post_graphql(query, current_user: current_user, variables: { name: nil, content: nil }) - - expect(flattened_errors).not_to be_empty - end - end -end diff --git a/spec/frontend/fixtures/startup_css.rb b/spec/frontend/fixtures/startup_css.rb index 18a4aa58c00..5b09e1c9495 100644 --- a/spec/frontend/fixtures/startup_css.rb +++ b/spec/frontend/fixtures/startup_css.rb @@ -40,21 +40,6 @@ RSpec.describe 'Startup CSS 
fixtures', type: :controller do expect(response).to be_successful end - # This Feature Flag is on by default - # This ensures that the correct css is generated - # When the feature flag is on, the general startup will capture it - # This will be removed as part of https://gitlab.com/gitlab-org/gitlab/-/issues/339348 - it "startup_css/project-#{type}-search-ff-off.html" do - stub_feature_flags(new_header_search: false) - - get :show, params: { - namespace_id: project.namespace.to_param, - id: project - } - - expect(response).to be_successful - end - # This Feature Flag is off by default # This ensures that the correct css is generated for super sidebar # When the feature flag is off, the general startup will capture it diff --git a/spec/frontend/fixtures/static/oauth_remember_me.html b/spec/frontend/fixtures/static/oauth_remember_me.html index 0b4d482925d..60277ecf66e 100644 --- a/spec/frontend/fixtures/static/oauth_remember_me.html +++ b/spec/frontend/fixtures/static/oauth_remember_me.html @@ -1,5 +1,5 @@
    - +
    -

    - - Model candidate details - -

    - @@ -162,12 +96,11 @@ exports[`MlCandidatesShow renders correctly 1`] = ` diff --git a/spec/frontend/ml/experiment_tracking/routes/candidates/show/ml_candidates_show_spec.js b/spec/frontend/ml/experiment_tracking/routes/candidates/show/ml_candidates_show_spec.js index 36455339041..7d03ab3b509 100644 --- a/spec/frontend/ml/experiment_tracking/routes/candidates/show/ml_candidates_show_spec.js +++ b/spec/frontend/ml/experiment_tracking/routes/candidates/show/ml_candidates_show_spec.js @@ -1,6 +1,7 @@ -import { GlAlert } from '@gitlab/ui'; -import { mountExtended } from 'helpers/vue_test_utils_helper'; +import { shallowMountExtended } from 'helpers/vue_test_utils_helper'; import MlCandidatesShow from '~/ml/experiment_tracking/routes/candidates/show'; +import DeleteButton from '~/ml/experiment_tracking/components/delete_button.vue'; +import IncubationAlert from '~/vue_shared/components/incubation/incubation_alert.vue'; describe('MlCandidatesShow', () => { let wrapper; @@ -25,23 +26,31 @@ describe('MlCandidatesShow', () => { experiment_name: 'The Experiment', experiment_path: 'path/to/experiment', status: 'SUCCESS', + path: 'path_to_candidate', }, }; - return mountExtended(MlCandidatesShow, { propsData: { candidate } }); + wrapper = shallowMountExtended(MlCandidatesShow, { propsData: { candidate } }); }; - const findAlert = () => wrapper.findComponent(GlAlert); + beforeEach(createWrapper); - it('shows incubation warning', () => { - wrapper = createWrapper(); + const findAlert = () => wrapper.findComponent(IncubationAlert); + const findDeleteButton = () => wrapper.findComponent(DeleteButton); + it('shows incubation warning', () => { expect(findAlert().exists()).toBe(true); }); - it('renders correctly', () => { - wrapper = createWrapper(); + it('shows delete button', () => { + expect(findDeleteButton().exists()).toBe(true); + }); + it('passes the delete path to delete button', () => { + expect(findDeleteButton().props('deletePath')).toBe('path_to_candidate'); + }); + 
+ it('renders correctly', () => { expect(wrapper.element).toMatchSnapshot(); }); }); diff --git a/spec/frontend/ml/experiment_tracking/routes/experiments/show/components/experiment_header_spec.js b/spec/frontend/ml/experiment_tracking/routes/experiments/show/components/experiment_header_spec.js new file mode 100644 index 00000000000..b56755043fb --- /dev/null +++ b/spec/frontend/ml/experiment_tracking/routes/experiments/show/components/experiment_header_spec.js @@ -0,0 +1,55 @@ +import { GlButton } from '@gitlab/ui'; +import { mountExtended } from 'helpers/vue_test_utils_helper'; +import ExperimentHeader from '~/ml/experiment_tracking/routes/experiments/show/components/experiment_header.vue'; +import DeleteButton from '~/ml/experiment_tracking/components/delete_button.vue'; +import setWindowLocation from 'helpers/set_window_location_helper'; +import * as urlHelpers from '~/lib/utils/url_utility'; +import { MOCK_EXPERIMENT } from '../mock_data'; + +const DELETE_INFO = { + deletePath: '/delete', + deleteConfirmationText: 'MODAL_BODY', + actionPrimaryText: 'Delete!', + modalTitle: 'MODAL_TITLE', +}; + +describe('~/ml/experiment_tracking/routes/experiments/show/components/experiment_header.vue', () => { + let wrapper; + + const createWrapper = () => { + wrapper = mountExtended(ExperimentHeader, { + propsData: { title: MOCK_EXPERIMENT.name, deleteInfo: DELETE_INFO }, + }); + }; + + const findDeleteButton = () => wrapper.findComponent(DeleteButton); + const findButton = () => wrapper.findComponent(GlButton); + + beforeEach(createWrapper); + + describe('Delete', () => { + it('shows delete button', () => { + expect(findDeleteButton().exists()).toBe(true); + }); + + it('passes the right props', () => { + expect(findDeleteButton().props()).toMatchObject(DELETE_INFO); + }); + }); + + describe('CSV download', () => { + it('shows download CSV button', () => { + expect(findDeleteButton().exists()).toBe(true); + }); + + it('calls the action to download the CSV', () => { + 
setWindowLocation('https://blah.com/something/1?name=query&orderBy=name'); + jest.spyOn(urlHelpers, 'visitUrl').mockImplementation(() => {}); + + findButton().vm.$emit('click'); + + expect(urlHelpers.visitUrl).toHaveBeenCalledTimes(1); + expect(urlHelpers.visitUrl).toHaveBeenCalledWith('/something/1.csv?name=query&orderBy=name'); + }); + }); +}); diff --git a/spec/frontend/ml/experiment_tracking/routes/experiments/show/ml_experiments_show_spec.js b/spec/frontend/ml/experiment_tracking/routes/experiments/show/ml_experiments_show_spec.js index 97a5049ea88..38b3d96ed11 100644 --- a/spec/frontend/ml/experiment_tracking/routes/experiments/show/ml_experiments_show_spec.js +++ b/spec/frontend/ml/experiment_tracking/routes/experiments/show/ml_experiments_show_spec.js @@ -1,11 +1,12 @@ import { GlAlert, GlTableLite, GlLink, GlEmptyState } from '@gitlab/ui'; import { mountExtended } from 'helpers/vue_test_utils_helper'; import MlExperimentsShow from '~/ml/experiment_tracking/routes/experiments/show/ml_experiments_show.vue'; +import ExperimentHeader from '~/ml/experiment_tracking/routes/experiments/show/components/experiment_header.vue'; import RegistrySearch from '~/vue_shared/components/registry/registry_search.vue'; import Pagination from '~/vue_shared/components/incubation/pagination.vue'; import setWindowLocation from 'helpers/set_window_location_helper'; import * as urlHelpers from '~/lib/utils/url_utility'; -import { MOCK_START_CURSOR, MOCK_PAGE_INFO, MOCK_CANDIDATES } from './mock_data'; +import { MOCK_START_CURSOR, MOCK_PAGE_INFO, MOCK_CANDIDATES, MOCK_EXPERIMENT } from './mock_data'; describe('MlExperimentsShow', () => { let wrapper; @@ -15,9 +16,10 @@ describe('MlExperimentsShow', () => { metricNames = [], paramNames = [], pageInfo = MOCK_PAGE_INFO, + experiment = MOCK_EXPERIMENT, ) => { wrapper = mountExtended(MlExperimentsShow, { - propsData: { candidates, metricNames, paramNames, pageInfo }, + propsData: { experiment, candidates, metricNames, paramNames, 
pageInfo }, }); }; @@ -34,6 +36,8 @@ describe('MlExperimentsShow', () => { const findTableRows = () => findTable().findAll('tbody > tr'); const findNthTableRow = (idx) => findTableRows().at(idx); const findColumnInRow = (row, col) => findNthTableRow(row).findAll('td').at(col); + const findExperimentHeader = () => wrapper.findComponent(ExperimentHeader); + const hrefInRowAndColumn = (row, col) => findColumnInRow(row, col).findComponent(GlLink).attributes().href; @@ -44,7 +48,7 @@ describe('MlExperimentsShow', () => { }); describe('default inputs', () => { - beforeEach(async () => { + beforeEach(() => { createWrapper(); }); @@ -56,6 +60,14 @@ describe('MlExperimentsShow', () => { expect(findPagination().exists()).toBe(false); }); + it('shows experiment header', () => { + expect(findExperimentHeader().exists()).toBe(true); + }); + + it('passes the correct title to experiment header', () => { + expect(findExperimentHeader().props('title')).toBe(MOCK_EXPERIMENT.name); + }); + it('does not show table', () => { expect(findTable().exists()).toBe(false); }); diff --git a/spec/frontend/ml/experiment_tracking/routes/experiments/show/mock_data.js b/spec/frontend/ml/experiment_tracking/routes/experiments/show/mock_data.js index 66378cd3f0d..adfb3dbf773 100644 --- a/spec/frontend/ml/experiment_tracking/routes/experiments/show/mock_data.js +++ b/spec/frontend/ml/experiment_tracking/routes/experiments/show/mock_data.js @@ -7,6 +7,8 @@ export const MOCK_PAGE_INFO = { hasPreviousPage: true, }; +export const MOCK_EXPERIMENT = { name: 'experiment', path: '/path/to/experiment' }; + export const MOCK_CANDIDATES = [ { rmse: 1, diff --git a/spec/frontend/monitoring/components/variables/dropdown_field_spec.js b/spec/frontend/monitoring/components/variables/dropdown_field_spec.js index 96b228fd3b2..e6c5569fa19 100644 --- a/spec/frontend/monitoring/components/variables/dropdown_field_spec.js +++ b/spec/frontend/monitoring/components/variables/dropdown_field_spec.js @@ -53,7 +53,7 @@ 
describe('Custom variable component', () => { expect(findDropdown().exists()).toBe(true); }); - it('changing dropdown items triggers update', async () => { + it('changing dropdown items triggers update', () => { createShallowWrapper(); findDropdownItems().at(1).vm.$emit('click'); diff --git a/spec/frontend/monitoring/pages/dashboard_page_spec.js b/spec/frontend/monitoring/pages/dashboard_page_spec.js index c5a8b50ee60..3de99673e71 100644 --- a/spec/frontend/monitoring/pages/dashboard_page_spec.js +++ b/spec/frontend/monitoring/pages/dashboard_page_spec.js @@ -2,6 +2,7 @@ import { shallowMount } from '@vue/test-utils'; import Dashboard from '~/monitoring/components/dashboard.vue'; import DashboardPage from '~/monitoring/pages/dashboard_page.vue'; import { createStore } from '~/monitoring/stores'; +import { assertProps } from 'helpers/assert_props'; import { dashboardProps } from '../fixture_data'; describe('monitoring/pages/dashboard_page', () => { @@ -45,7 +46,7 @@ describe('monitoring/pages/dashboard_page', () => { }); it('throws errors if dashboard props are not passed', () => { - expect(() => buildWrapper()).toThrow('Missing required prop: "dashboardProps"'); + expect(() => assertProps(DashboardPage, {})).toThrow('Missing required prop: "dashboardProps"'); }); it('renders the dashboard page with dashboard component', () => { diff --git a/spec/frontend/monitoring/store/actions_spec.js b/spec/frontend/monitoring/store/actions_spec.js index 8097857f226..b3b198d6b51 100644 --- a/spec/frontend/monitoring/store/actions_spec.js +++ b/spec/frontend/monitoring/store/actions_spec.js @@ -260,7 +260,7 @@ describe('Monitoring store actions', () => { }); }); - it('does not show an alert error when showErrorBanner is disabled', async () => { + it('does not show an alert when showErrorBanner is disabled', async () => { state.showErrorBanner = false; await result(); diff --git a/spec/frontend/nav/components/new_nav_toggle_spec.js 
b/spec/frontend/nav/components/new_nav_toggle_spec.js index fe543a346b5..cf8e59d6522 100644 --- a/spec/frontend/nav/components/new_nav_toggle_spec.js +++ b/spec/frontend/nav/components/new_nav_toggle_spec.js @@ -9,6 +9,7 @@ import NewNavToggle from '~/nav/components/new_nav_toggle.vue'; import waitForPromises from 'helpers/wait_for_promises'; import { createAlert } from '~/alert'; import { s__ } from '~/locale'; +import { mockTracking } from 'helpers/tracking_helper'; jest.mock('~/alert'); @@ -18,6 +19,7 @@ describe('NewNavToggle', () => { useMockLocationHelper(); let wrapper; + let trackingSpy; const findToggle = () => wrapper.findComponent(GlToggle); const findDisclosureItem = () => wrapper.findComponent(GlDisclosureDropdownItem); @@ -29,6 +31,8 @@ describe('NewNavToggle', () => { ...propsData, }, }); + + trackingSpy = mockTracking(undefined, wrapper.element, jest.spyOn); }; const getByText = (text, options) => @@ -61,15 +65,17 @@ describe('NewNavToggle', () => { }); describe.each` - desc | actFn - ${'when toggle button is clicked'} | ${() => findToggle().trigger('click')} - ${'on menu item action'} | ${() => findDisclosureItem().vm.$emit('action')} - `('$desc', ({ actFn }) => { + desc | actFn | toggleValue | trackingLabel | trackingProperty + ${'when toggle button is clicked'} | ${() => findToggle().trigger('click')} | ${false} | ${'enable_new_nav_beta'} | ${'navigation_top'} + ${'when menu item text is clicked'} | ${() => getByText('New navigation').trigger('click')} | ${false} | ${'enable_new_nav_beta'} | ${'navigation_top'} + ${'when toggle button is clicked'} | ${() => findToggle().trigger('click')} | ${true} | ${'disable_new_nav_beta'} | ${'nav_user_menu'} + ${'when menu item text is clicked'} | ${() => getByText('New navigation').trigger('click')} | ${true} | ${'disable_new_nav_beta'} | ${'nav_user_menu'} + `('$desc', ({ actFn, toggleValue, trackingLabel, trackingProperty }) => { let mock; beforeEach(() => { mock = new MockAdapter(axios); - 
createComponent({ enabled: false, newNavigation: true }); + createComponent({ enabled: toggleValue }); }); it('reloads the page on success', async () => { @@ -100,7 +106,17 @@ describe('NewNavToggle', () => { it('changes the toggle', async () => { await actFn(); - expect(findToggle().props('value')).toBe(true); + expect(findToggle().props('value')).toBe(!toggleValue); + }); + + it('tracks the Snowplow event', async () => { + mock.onPut(TEST_ENDPONT).reply(HTTP_STATUS_OK); + await actFn(); + await waitForPromises(); + expect(trackingSpy).toHaveBeenCalledWith(undefined, 'click_toggle', { + label: trackingLabel, + property: trackingProperty, + }); }); afterEach(() => { @@ -136,15 +152,17 @@ describe('NewNavToggle', () => { }); describe.each` - desc | actFn - ${'when toggle button is clicked'} | ${() => findToggle().trigger('click')} - ${'when menu item text is clicked'} | ${() => getByText('New navigation').trigger('click')} - `('$desc', ({ actFn }) => { + desc | actFn | toggleValue | trackingLabel | trackingProperty + ${'when toggle button is clicked'} | ${() => findToggle().trigger('click')} | ${false} | ${'enable_new_nav_beta'} | ${'navigation_top'} + ${'when menu item text is clicked'} | ${() => getByText('New navigation').trigger('click')} | ${false} | ${'enable_new_nav_beta'} | ${'navigation_top'} + ${'when toggle button is clicked'} | ${() => findToggle().trigger('click')} | ${true} | ${'disable_new_nav_beta'} | ${'nav_user_menu'} + ${'when menu item text is clicked'} | ${() => getByText('New navigation').trigger('click')} | ${true} | ${'disable_new_nav_beta'} | ${'nav_user_menu'} + `('$desc', ({ actFn, toggleValue, trackingLabel, trackingProperty }) => { let mock; beforeEach(() => { mock = new MockAdapter(axios); - createComponent({ enabled: false }); + createComponent({ enabled: toggleValue }); }); it('reloads the page on success', async () => { @@ -175,7 +193,17 @@ describe('NewNavToggle', () => { it('changes the toggle', async () => { await actFn(); - 
expect(findToggle().props('value')).toBe(true); + expect(findToggle().props('value')).toBe(!toggleValue); + }); + + it('tracks the Snowplow event', async () => { + mock.onPut(TEST_ENDPONT).reply(HTTP_STATUS_OK); + await actFn(); + await waitForPromises(); + expect(trackingSpy).toHaveBeenCalledWith(undefined, 'click_toggle', { + label: trackingLabel, + property: trackingProperty, + }); }); afterEach(() => { diff --git a/spec/frontend/new_branch_spec.js b/spec/frontend/new_branch_spec.js index 5a09598059d..766e840cac5 100644 --- a/spec/frontend/new_branch_spec.js +++ b/spec/frontend/new_branch_spec.js @@ -11,7 +11,7 @@ describe('Branch', () => { describe('create a new branch', () => { function fillNameWith(value) { document.querySelector('.js-branch-name').value = value; - const event = new CustomEvent('blur'); + const event = new CustomEvent('change'); document.querySelector('.js-branch-name').dispatchEvent(event); } diff --git a/spec/frontend/notebook/cells/output/dataframe_spec.js b/spec/frontend/notebook/cells/output/dataframe_spec.js new file mode 100644 index 00000000000..abf6631353c --- /dev/null +++ b/spec/frontend/notebook/cells/output/dataframe_spec.js @@ -0,0 +1,59 @@ +import { shallowMount } from '@vue/test-utils'; +import DataframeOutput from '~/notebook/cells/output/dataframe.vue'; +import JSONTable from '~/behaviors/components/json_table.vue'; +import { outputWithDataframe } from '../../mock_data'; + +describe('~/notebook/cells/output/DataframeOutput', () => { + let wrapper; + + function createComponent(rawCode) { + wrapper = shallowMount(DataframeOutput, { + propsData: { + rawCode, + count: 0, + index: 0, + }, + }); + } + + const findTable = () => wrapper.findComponent(JSONTable); + + describe('with valid dataframe', () => { + beforeEach(() => createComponent(outputWithDataframe.data['text/html'].join(''))); + + it('mounts the table', () => { + expect(findTable().exists()).toBe(true); + }); + + it('table caption is empty', () => { + 
expect(findTable().props().caption).toEqual(''); + }); + + it('allows filtering', () => { + expect(findTable().props().hasFilter).toBe(true); + }); + + it('sets the correct fields', () => { + expect(findTable().props().fields).toEqual([ + { key: 'index', label: '', sortable: true }, + { key: 'column_1', label: 'column_1', sortable: true }, + { key: 'column_2', label: 'column_2', sortable: true }, + ]); + }); + + it('sets the correct items', () => { + expect(findTable().props().items).toEqual([ + { index: 0, column_1: 'abc de f', column_2: 'a' }, + { index: 1, column_1: 'True', column_2: '0.1' }, + ]); + }); + }); + + describe('invalid dataframe', () => { + it('still displays the table', () => { + createComponent('dataframe'); + + expect(findTable().exists()).toBe(true); + }); + }); +}); diff --git a/spec/frontend/notebook/cells/output/dataframe_util_spec.js b/spec/frontend/notebook/cells/output/dataframe_util_spec.js new file mode 100644 index 00000000000..ddc1b3cfe26 --- /dev/null +++ b/spec/frontend/notebook/cells/output/dataframe_util_spec.js @@ -0,0 +1,113 @@ +import { isDataframe, convertHtmlTableToJson } from '~/notebook/cells/output/dataframe_util'; +import { outputWithDataframeContent } from '../../mock_data'; +import sanitizeTests from './html_sanitize_fixtures'; + +describe('notebook/cells/output/dataframe_utils', () => { + describe('isDataframe', () => { + describe('when output data has no text/html', () => { + it('is is not a dataframe', () => { + const input = { data: { 'image/png': ['blah'] } }; + + expect(isDataframe(input)).toBe(false); + }); + }); + + describe('when output data has no text/html, but no mention of dataframe', () => { + it('is is not a dataframe', () => { + const input = { data: { 'text/html': ['blah'] } }; + + expect(isDataframe(input)).toBe(false); + }); + }); + + describe('when output data has text/html, but no mention of dataframe in the first 20 lines', () => { + it('is is not a dataframe', () => { + const input = { data: { 
'text/html': [...new Array(20).fill('a'), 'dataframe'] } }; + + expect(isDataframe(input)).toBe(false); + }); + }); + + describe('when output data has text/html, and includes "dataframe" within the first 20 lines', () => { + it('is is not a dataframe', () => { + const input = { data: { 'text/html': ['dataframe'] } }; + + expect(isDataframe(input)).toBe(true); + }); + }); + }); + + describe('convertHtmlTableToJson', () => { + it('converts table correctly', () => { + const input = outputWithDataframeContent; + + const output = { + fields: [ + { key: 'index', label: '', sortable: true }, + { key: 'column_1', label: 'column_1', sortable: true }, + { key: 'column_2', label: 'column_2', sortable: true }, + ], + items: [ + { index: 0, column_1: 'abc de f', column_2: 'a' }, + { index: 1, column_1: 'True', column_2: '0.1' }, + ], + }; + + expect(convertHtmlTableToJson(input)).toEqual(output); + }); + + describe('sanitizes input before parsing table', () => { + it('sanitizes input html', () => { + const parser = new DOMParser(); + const spy = jest.spyOn(parser, 'parseFromString'); + const input = 'hello'; + + convertHtmlTableToJson(input, parser); + + expect(spy).toHaveBeenCalledWith('hello', 'text/html'); + }); + }); + + describe('does not include harmful html', () => { + const makeDataframeWithHtml = (html) => { + return [ + '
    - + The Experiment - +
    - Artifacts - +
    \n', + ' \n', + ' \n', + ' \n', + ' \n', + ' \n', + ' \n', + ' \n', + ' \n', + ' \n', + ` \n`, + ' \n', + ' \n', + '
    column_1
    0${html}
    \n', + '
    ', + ]; + }; + + it.each([ + ['table', 0], + ['style', 1], + ['iframe', 2], + ['svg', 3], + ])('sanitizes output for: %p', (tag, index) => { + const inputHtml = makeDataframeWithHtml(sanitizeTests[index][1].input); + const convertedHtml = convertHtmlTableToJson(inputHtml).items[0].column_1; + + expect(convertedHtml).not.toContain(tag); + }); + }); + + describe('when dataframe is invalid', () => { + it('returns empty', () => { + const input = [' dataframe', ' blah']; + + expect(convertHtmlTableToJson(input)).toEqual({ fields: [], items: [] }); + }); + }); + }); +}); diff --git a/spec/frontend/notebook/cells/output/index_spec.js b/spec/frontend/notebook/cells/output/index_spec.js index 1241c133b89..efbdfca8d8c 100644 --- a/spec/frontend/notebook/cells/output/index_spec.js +++ b/spec/frontend/notebook/cells/output/index_spec.js @@ -2,7 +2,13 @@ import { mount } from '@vue/test-utils'; import json from 'test_fixtures/blob/notebook/basic.json'; import Output from '~/notebook/cells/output/index.vue'; import MarkdownOutput from '~/notebook/cells/output/markdown.vue'; -import { relativeRawPath, markdownCellContent } from '../../mock_data'; +import DataframeOutput from '~/notebook/cells/output/dataframe.vue'; +import { + relativeRawPath, + markdownCellContent, + outputWithDataframe, + outputWithDataframeContent, +} from '../../mock_data'; describe('Output component', () => { let wrapper; @@ -105,6 +111,16 @@ describe('Output component', () => { }); }); + describe('Dataframe output', () => { + it('renders DataframeOutput component', () => { + createComponent(outputWithDataframe); + + expect(wrapper.findComponent(DataframeOutput).props('rawCode')).toBe( + outputWithDataframeContent.join(''), + ); + }); + }); + describe('default to plain text', () => { beforeEach(() => { const unknownType = json.cells[6]; diff --git a/spec/frontend/notebook/mock_data.js b/spec/frontend/notebook/mock_data.js index 5c47cb5aa9b..15db2931b3c 100644 --- a/spec/frontend/notebook/mock_data.js +++ 
b/spec/frontend/notebook/mock_data.js @@ -6,3 +6,47 @@ export const errorOutputContent = [ '\u001b[0;32m/var/folders/cq/l637k4x13gx6y9p_gfs4c_gc0000gn/T/ipykernel_79203/294318627.py\u001b[0m in \u001b[0;36m\u001b[0;34m\u001b[0m\n\u001b[0;32m----> 1\u001b[0;31m \u001b[0mTo\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m', "\u001b[0;31mNameError\u001b[0m: name 'To' is not defined", ]; +export const outputWithDataframeContent = [ + '
    \n', + '\n', + '\n', + ' \n', + ' \n', + ' \n', + ' \n', + ' \n', + ' \n', + ' \n', + ' \n', + ' \n', + ' \n', + ' \n', + ' \n', + ' \n', + ' \n', + ' \n', + ' \n', + ' \n', + ' \n', + ' \n', + '
    column_1column_2
    0abc de fa
    1True0.1
    \n', + '
    ', +]; + +export const outputWithDataframe = { + data: { + 'text/html': outputWithDataframeContent, + }, +}; diff --git a/spec/frontend/notes/components/comment_form_spec.js b/spec/frontend/notes/components/comment_form_spec.js index 062cd098640..04143bb5b60 100644 --- a/spec/frontend/notes/components/comment_form_spec.js +++ b/spec/frontend/notes/components/comment_form_spec.js @@ -5,6 +5,7 @@ import MockAdapter from 'axios-mock-adapter'; import Vue, { nextTick } from 'vue'; import Vuex from 'vuex'; import { extendedWrapper } from 'helpers/vue_test_utils_helper'; +import { useLocalStorageSpy } from 'helpers/local_storage_helper'; import batchComments from '~/batch_comments/stores/modules/batch_comments'; import { refreshUserMergeRequestCounts } from '~/commons/nav/user_merge_requests'; import { createAlert } from '~/alert'; @@ -27,6 +28,8 @@ jest.mock('~/alert'); Vue.use(Vuex); describe('issue_comment_form component', () => { + useLocalStorageSpy(); + let store; let wrapper; let axiosMock; @@ -649,6 +652,37 @@ describe('issue_comment_form component', () => { }); }); + describe('check sensitive tokens', () => { + const sensitiveMessage = 'token: glpat-1234567890abcdefghij'; + const nonSensitiveMessage = 'text'; + + it('should not save note when it contains sensitive token', () => { + mountComponent({ + mountFunction: mount, + initialData: { note: sensitiveMessage }, + }); + + jest.spyOn(wrapper.vm, 'saveNote').mockResolvedValue(); + + clickCommentButton(); + + expect(wrapper.vm.saveNote).not.toHaveBeenCalled(); + }); + + it('should save note it does not contain sensitive token', () => { + mountComponent({ + mountFunction: mount, + initialData: { note: nonSensitiveMessage }, + }); + + jest.spyOn(wrapper.vm, 'saveNote').mockResolvedValue(); + + clickCommentButton(); + + expect(wrapper.vm.saveNote).toHaveBeenCalled(); + }); + }); + describe('user is not logged in', () => { beforeEach(() => { mountComponent({ userData: null, noteableData: loggedOutnoteableData, 
mountFunction: mount }); diff --git a/spec/frontend/notes/components/discussion_filter_spec.js b/spec/frontend/notes/components/discussion_filter_spec.js index ed1ced1b3d1..28e5e65c177 100644 --- a/spec/frontend/notes/components/discussion_filter_spec.js +++ b/spec/frontend/notes/components/discussion_filter_spec.js @@ -1,4 +1,4 @@ -import { GlDropdown } from '@gitlab/ui'; +import { GlDropdown, GlDropdownItem } from '@gitlab/ui'; import { mount } from '@vue/test-utils'; import Vue, { nextTick } from 'vue'; import AxiosMockAdapter from 'axios-mock-adapter'; @@ -77,17 +77,16 @@ describe('DiscussionFilter component', () => { // as it doesn't matter for our tests here mock.onGet(DISCUSSION_PATH).reply(HTTP_STATUS_OK, ''); window.mrTabs = undefined; - wrapper = mountComponent(); jest.spyOn(Tracking, 'event'); }); afterEach(() => { - wrapper.vm.$destroy(); mock.restore(); }); describe('default', () => { beforeEach(() => { + wrapper = mountComponent(); jest.spyOn(store, 'dispatch').mockImplementation(); }); @@ -104,6 +103,7 @@ describe('DiscussionFilter component', () => { describe('when asc', () => { beforeEach(() => { + wrapper = mountComponent(); jest.spyOn(store, 'dispatch').mockImplementation(); }); @@ -123,6 +123,7 @@ describe('DiscussionFilter component', () => { describe('when desc', () => { beforeEach(() => { + wrapper = mountComponent(); store.state.discussionSortOrder = DESC; jest.spyOn(store, 'dispatch').mockImplementation(); }); @@ -145,56 +146,62 @@ describe('DiscussionFilter component', () => { }); }); - it('renders the all filters', () => { - expect(wrapper.findAll('.discussion-filter-container .dropdown-item').length).toBe( - discussionFiltersMock.length, - ); - }); + describe('discussion filter functionality', () => { + beforeEach(() => { + wrapper = mountComponent(); + }); - it('renders the default selected item', () => { - expect(wrapper.find('.discussion-filter-container .dropdown-item').text().trim()).toBe( - discussionFiltersMock[0].title, - ); - 
}); + it('renders the all filters', () => { + expect(wrapper.findAll('.discussion-filter-container .dropdown-item').length).toBe( + discussionFiltersMock.length, + ); + }); - it('disables the dropdown when discussions are loading', () => { - store.state.isLoading = true; + it('renders the default selected item', () => { + expect(wrapper.find('.discussion-filter-container .dropdown-item').text().trim()).toBe( + discussionFiltersMock[0].title, + ); + }); - expect(wrapper.findComponent(GlDropdown).props('disabled')).toBe(true); - }); + it('disables the dropdown when discussions are loading', () => { + store.state.isLoading = true; - it('updates to the selected item', () => { - const filterItem = findFilter(DISCUSSION_FILTER_TYPES.ALL); + expect(wrapper.findComponent(GlDropdown).props('disabled')).toBe(true); + }); - filterItem.trigger('click'); + it('updates to the selected item', () => { + const filterItem = findFilter(DISCUSSION_FILTER_TYPES.ALL); - expect(wrapper.vm.currentFilter.title).toBe(filterItem.text().trim()); - }); + filterItem.trigger('click'); - it('only updates when selected filter changes', () => { - findFilter(DISCUSSION_FILTER_TYPES.ALL).trigger('click'); + expect(filterItem.text().trim()).toBe('Show all activity'); + }); - expect(filterDiscussion).not.toHaveBeenCalled(); - }); + it('only updates when selected filter changes', () => { + findFilter(DISCUSSION_FILTER_TYPES.ALL).trigger('click'); + + expect(filterDiscussion).not.toHaveBeenCalled(); + }); - it('disables timeline view if it was enabled', () => { - store.state.isTimelineEnabled = true; + it('disables timeline view if it was enabled', () => { + store.state.isTimelineEnabled = true; - findFilter(DISCUSSION_FILTER_TYPES.HISTORY).trigger('click'); + findFilter(DISCUSSION_FILTER_TYPES.HISTORY).trigger('click'); - expect(wrapper.vm.$store.state.isTimelineEnabled).toBe(false); - }); + expect(store.state.isTimelineEnabled).toBe(false); + }); - it('disables commenting when "Show history only" 
filter is applied', () => { - findFilter(DISCUSSION_FILTER_TYPES.HISTORY).trigger('click'); + it('disables commenting when "Show history only" filter is applied', () => { + findFilter(DISCUSSION_FILTER_TYPES.HISTORY).trigger('click'); - expect(wrapper.vm.$store.state.commentsDisabled).toBe(true); - }); + expect(store.state.commentsDisabled).toBe(true); + }); - it('enables commenting when "Show history only" filter is not applied', () => { - findFilter(DISCUSSION_FILTER_TYPES.ALL).trigger('click'); + it('enables commenting when "Show history only" filter is not applied', () => { + findFilter(DISCUSSION_FILTER_TYPES.ALL).trigger('click'); - expect(wrapper.vm.$store.state.commentsDisabled).toBe(false); + expect(store.state.commentsDisabled).toBe(false); + }); }); describe('Merge request tabs', () => { @@ -222,52 +229,41 @@ describe('DiscussionFilter component', () => { }); describe('URL with Links to notes', () => { + const findDropdownItems = () => wrapper.findAllComponents(GlDropdownItem); + afterEach(() => { window.location.hash = ''; }); - it('updates the filter when the URL links to a note', async () => { - window.location.hash = `note_${discussionMock.notes[0].id}`; - wrapper.vm.currentValue = discussionFiltersMock[2].value; - wrapper.vm.handleLocationHash(); - - await nextTick(); - expect(wrapper.vm.currentValue).toBe(DISCUSSION_FILTERS_DEFAULT_VALUE); - }); - it('does not update the filter when the current filter is "Show all activity"', async () => { window.location.hash = `note_${discussionMock.notes[0].id}`; - wrapper.vm.handleLocationHash(); + wrapper = mountComponent(); await nextTick(); - expect(wrapper.vm.currentValue).toBe(DISCUSSION_FILTERS_DEFAULT_VALUE); + const filtered = findDropdownItems().filter((el) => el.classes('is-active')); + + expect(filtered).toHaveLength(1); + expect(filtered.at(0).text()).toBe(discussionFiltersMock[0].title); }); it('only updates filter when the URL links to a note', async () => { window.location.hash = `testing123`; - 
wrapper.vm.handleLocationHash(); + wrapper = mountComponent(); await nextTick(); - expect(wrapper.vm.currentValue).toBe(DISCUSSION_FILTERS_DEFAULT_VALUE); - }); + const filtered = findDropdownItems().filter((el) => el.classes('is-active')); - it('fetches discussions when there is a hash', async () => { - window.location.hash = `note_${discussionMock.notes[0].id}`; - wrapper.vm.currentValue = discussionFiltersMock[2].value; - jest.spyOn(wrapper.vm, 'selectFilter').mockImplementation(() => {}); - wrapper.vm.handleLocationHash(); - - await nextTick(); - expect(wrapper.vm.selectFilter).toHaveBeenCalled(); + expect(filtered).toHaveLength(1); + expect(filtered.at(0).text()).toBe(discussionFiltersMock[0].title); }); it('does not fetch discussions when there is no hash', async () => { window.location.hash = ''; - jest.spyOn(wrapper.vm, 'selectFilter').mockImplementation(() => {}); - wrapper.vm.handleLocationHash(); + const selectFilterSpy = jest.spyOn(wrapper.vm, 'selectFilter').mockImplementation(() => {}); + wrapper = mountComponent(); await nextTick(); - expect(wrapper.vm.selectFilter).not.toHaveBeenCalled(); + expect(selectFilterSpy).not.toHaveBeenCalled(); }); }); }); diff --git a/spec/frontend/notes/components/note_actions/timeline_event_button_spec.js b/spec/frontend/notes/components/note_actions/timeline_event_button_spec.js index bee08ee0605..7860e9d45da 100644 --- a/spec/frontend/notes/components/note_actions/timeline_event_button_spec.js +++ b/spec/frontend/notes/components/note_actions/timeline_event_button_spec.js @@ -22,7 +22,7 @@ describe('NoteTimelineEventButton', () => { const findTimelineButton = () => wrapper.findComponent(GlButton); - it('emits click-promote-comment-to-event', async () => { + it('emits click-promote-comment-to-event', () => { findTimelineButton().vm.$emit('click'); expect(wrapper.emitted('click-promote-comment-to-event')).toEqual([[emitData]]); diff --git a/spec/frontend/notes/components/note_awards_list_spec.js 
b/spec/frontend/notes/components/note_awards_list_spec.js index 89ac0216f41..0107b27f980 100644 --- a/spec/frontend/notes/components/note_awards_list_spec.js +++ b/spec/frontend/notes/components/note_awards_list_spec.js @@ -1,76 +1,110 @@ import AxiosMockAdapter from 'axios-mock-adapter'; import Vue from 'vue'; +import Vuex from 'vuex'; import { TEST_HOST } from 'helpers/test_constants'; +import { mountExtended } from 'helpers/vue_test_utils_helper'; +import { userDataMock } from 'jest/notes/mock_data'; +import EmojiPicker from '~/emoji/components/picker.vue'; import axios from '~/lib/utils/axios_utils'; import { HTTP_STATUS_OK } from '~/lib/utils/http_status'; import awardsNote from '~/notes/components/note_awards_list.vue'; import createStore from '~/notes/stores'; -import { noteableDataMock, notesDataMock } from '../mock_data'; -describe('note_awards_list component', () => { - let store; - let vm; - let awardsMock; - let mock; - - const toggleAwardPath = `${TEST_HOST}/gitlab-org/gitlab-foss/notes/545/toggle_award_emoji`; - - beforeEach(() => { - mock = new AxiosMockAdapter(axios); - - mock.onPost(toggleAwardPath).reply(HTTP_STATUS_OK, ''); +Vue.use(Vuex); - const Component = Vue.extend(awardsNote); - - store = createStore(); - store.dispatch('setNoteableData', noteableDataMock); - store.dispatch('setNotesData', notesDataMock); - awardsMock = [ - { - name: 'flag_tz', - user: { id: 1, name: 'Administrator', username: 'root' }, - }, - { - name: 'cartwheel_tone3', - user: { id: 12, name: 'Bobbie Stehr', username: 'erin' }, - }, - ]; +describe('Note Awards List', () => { + let wrapper; + let mock; - vm = new Component({ + const awardsMock = [ + { + name: 'flag_tz', + user: { id: 1, name: 'Administrator', username: 'root' }, + }, + { + name: 'cartwheel_tone3', + user: { id: 12, name: 'Bobbie Stehr', username: 'erin' }, + }, + ]; + const toggleAwardPathMock = `${TEST_HOST}/gitlab-org/gitlab-foss/notes/545/toggle_award_emoji`; + + const defaultProps = { + awards: 
awardsMock, + noteAuthorId: 2, + noteId: '545', + canAwardEmoji: false, + toggleAwardPath: '/gitlab-org/gitlab-foss/notes/545/toggle_award_emoji', + }; + + const findAddAward = () => wrapper.find('.js-add-award'); + const findAwardButton = () => wrapper.findByTestId('award-button'); + const findAllEmojiAwards = () => wrapper.findAll('gl-emoji'); + const findEmojiPicker = () => wrapper.findComponent(EmojiPicker); + + const createComponent = (props = defaultProps, store = createStore()) => { + wrapper = mountExtended(awardsNote, { store, propsData: { - awards: awardsMock, - noteAuthorId: 2, - noteId: '545', - canAwardEmoji: true, - toggleAwardPath, + ...props, }, - }).$mount(); - }); + }); + }; + + describe('Note Awards functionality', () => { + const toggleAwardRequestSpy = jest.fn(); + const fakeStore = () => { + return new Vuex.Store({ + getters: { + getUserData: () => userDataMock, + }, + actions: { + toggleAwardRequest: toggleAwardRequestSpy, + }, + }); + }; - afterEach(() => { - mock.restore(); - vm.$destroy(); - }); + beforeEach(() => { + mock = new AxiosMockAdapter(axios); + mock.onPost(toggleAwardPathMock).reply(HTTP_STATUS_OK, ''); - it('should render awarded emojis', () => { - expect(vm.$el.querySelector('.js-awards-block button [data-name="flag_tz"]')).toBeDefined(); - expect( - vm.$el.querySelector('.js-awards-block button [data-name="cartwheel_tone3"]'), - ).toBeDefined(); - }); + createComponent( + { + awards: awardsMock, + noteAuthorId: 2, + noteId: '545', + canAwardEmoji: true, + toggleAwardPath: '/gitlab-org/gitlab-foss/notes/545/toggle_award_emoji', + }, + fakeStore(), + ); + }); - it('should be possible to remove awarded emoji', () => { - jest.spyOn(vm, 'handleAward'); - jest.spyOn(vm, 'toggleAwardRequest'); - vm.$el.querySelector('.js-awards-block button').click(); + afterEach(() => { + mock.restore(); + }); - expect(vm.handleAward).toHaveBeenCalledWith('flag_tz'); - expect(vm.toggleAwardRequest).toHaveBeenCalled(); - }); + it('should render 
awarded emojis', () => { + const emojiAwards = findAllEmojiAwards(); + + expect(emojiAwards).toHaveLength(awardsMock.length); + expect(emojiAwards.at(0).attributes('data-name')).toBe('flag_tz'); + expect(emojiAwards.at(1).attributes('data-name')).toBe('cartwheel_tone3'); + }); + + it('should be possible to add new emoji', () => { + expect(findEmojiPicker().exists()).toBe(true); + }); + + it('should be possible to remove awarded emoji', async () => { + await findAwardButton().vm.$emit('click'); - it('should be possible to add new emoji', () => { - expect(vm.$el.querySelector('.js-add-award')).toBeDefined(); + const { toggleAwardPath, noteId } = defaultProps; + expect(toggleAwardRequestSpy).toHaveBeenCalledWith(expect.anything(), { + awardName: awardsMock[0].name, + endpoint: toggleAwardPath, + noteId, + }); + }); }); describe('when the user name contains special HTML characters', () => { @@ -79,85 +113,69 @@ describe('note_awards_list component', () => { user: { id: index, name: `&<>"\`'-${index}`, username: `user-${index}` }, }); - const mountComponent = () => { - const Component = Vue.extend(awardsNote); - vm = new Component({ - store, - propsData: { - awards: awardsMock, - noteAuthorId: 0, - noteId: '545', - canAwardEmoji: true, - toggleAwardPath: '/gitlab-org/gitlab-foss/notes/545/toggle_award_emoji', - }, - }).$mount(); + const customProps = { + awards: awardsMock, + noteAuthorId: 0, + noteId: '545', + canAwardEmoji: true, + toggleAwardPath: '/gitlab-org/gitlab-foss/notes/545/toggle_award_emoji', }; - const findTooltip = () => vm.$el.querySelector('[title]').getAttribute('title'); - - it('should only escape & and " characters', () => { - awardsMock = [...new Array(1)].map(createAwardEmoji); - mountComponent(); - const escapedName = awardsMock[0].user.name.replace(/&/g, '&').replace(/"/g, '"'); - - expect(vm.$el.querySelector('[title]').outerHTML).toContain(escapedName); - }); - it('should not escape special HTML characters twice when only 1 person awarded', () 
=> { - awardsMock = [...new Array(1)].map(createAwardEmoji); - mountComponent(); + const awardsCopy = [...new Array(1)].map(createAwardEmoji); + createComponent({ + ...customProps, + awards: awardsCopy, + }); - awardsMock.forEach((award) => { - expect(findTooltip()).toContain(award.user.name); + awardsCopy.forEach((award) => { + expect(findAwardButton().attributes('title')).toContain(award.user.name); }); }); it('should not escape special HTML characters twice when 2 people awarded', () => { - awardsMock = [...new Array(2)].map(createAwardEmoji); - mountComponent(); + const awardsCopy = [...new Array(2)].map(createAwardEmoji); + createComponent({ + ...customProps, + awards: awardsCopy, + }); - awardsMock.forEach((award) => { - expect(findTooltip()).toContain(award.user.name); + awardsCopy.forEach((award) => { + expect(findAwardButton().attributes('title')).toContain(award.user.name); }); }); it('should not escape special HTML characters twice when more than 10 people awarded', () => { - awardsMock = [...new Array(11)].map(createAwardEmoji); - mountComponent(); + const awardsCopy = [...new Array(11)].map(createAwardEmoji); + createComponent({ + ...customProps, + awards: awardsCopy, + }); // Testing only the first 10 awards since 11 onward will not be displayed. 
- awardsMock.slice(0, 10).forEach((award) => { - expect(findTooltip()).toContain(award.user.name); + awardsCopy.slice(0, 10).forEach((award) => { + expect(findAwardButton().attributes('title')).toContain(award.user.name); }); }); }); - describe('when the user cannot award emoji', () => { + describe('when the user cannot award an emoji', () => { beforeEach(() => { - const Component = Vue.extend(awardsNote); - - vm = new Component({ - store, - propsData: { - awards: awardsMock, - noteAuthorId: 2, - noteId: '545', - canAwardEmoji: false, - toggleAwardPath: '/gitlab-org/gitlab-foss/notes/545/toggle_award_emoji', - }, - }).$mount(); + createComponent({ + awards: awardsMock, + noteAuthorId: 2, + noteId: '545', + canAwardEmoji: false, + toggleAwardPath: '/gitlab-org/gitlab-foss/notes/545/toggle_award_emoji', + }); }); - it('should not be possible to remove awarded emoji', () => { - jest.spyOn(vm, 'toggleAwardRequest'); - - vm.$el.querySelector('.js-awards-block button').click(); - - expect(vm.toggleAwardRequest).not.toHaveBeenCalled(); + it('should display an award emoji button with a disabled class', () => { + expect(findAwardButton().classes()).toContain('disabled'); }); it('should not be possible to add new emoji', () => { - expect(vm.$el.querySelector('.js-add-award')).toBeNull(); + expect(findAddAward().exists()).toBe(false); }); }); }); diff --git a/spec/frontend/notes/components/note_body_spec.js b/spec/frontend/notes/components/note_body_spec.js index b4f185004bb..c4f8e50b969 100644 --- a/spec/frontend/notes/components/note_body_spec.js +++ b/spec/frontend/notes/components/note_body_spec.js @@ -7,10 +7,7 @@ import NoteAwardsList from '~/notes/components/note_awards_list.vue'; import NoteForm from '~/notes/components/note_form.vue'; import createStore from '~/notes/stores'; import notes from '~/notes/stores/modules/index'; -import Autosave from '~/autosave'; - import Suggestions from '~/vue_shared/components/markdown/suggestions.vue'; - import { noteableDataMock, 
notesDataMock, note } from '../mock_data'; jest.mock('~/autosave'); @@ -82,11 +79,6 @@ describe('issue_note_body component', () => { expect(wrapper.findComponent(NoteForm).props('saveButtonTitle')).toBe(buttonText); }); - it('adds autosave', () => { - // passing undefined instead of an element because of shallowMount - expect(Autosave).toHaveBeenCalledWith(undefined, ['Note', note.noteable_type, note.id]); - }); - describe('isInternalNote', () => { beforeEach(() => { wrapper.setProps({ isInternalNote: true }); diff --git a/spec/frontend/notes/components/note_form_spec.js b/spec/frontend/notes/components/note_form_spec.js index 59362e18098..d6413d33c99 100644 --- a/spec/frontend/notes/components/note_form_spec.js +++ b/spec/frontend/notes/components/note_form_spec.js @@ -1,42 +1,39 @@ -import { GlLink } from '@gitlab/ui'; -import { mount } from '@vue/test-utils'; +import { GlLink, GlFormCheckbox } from '@gitlab/ui'; import { nextTick } from 'vue'; import batchComments from '~/batch_comments/stores/modules/batch_comments'; -import { getDraft, updateDraft } from '~/lib/utils/autosave'; import NoteForm from '~/notes/components/note_form.vue'; import createStore from '~/notes/stores'; import MarkdownField from '~/vue_shared/components/markdown/field.vue'; import { AT_WHO_ACTIVE_CLASS } from '~/gfm_auto_complete'; +import eventHub from '~/environments/event_hub'; +import { mountExtended } from 'helpers/vue_test_utils_helper'; import { noteableDataMock, notesDataMock, discussionMock, note } from '../mock_data'; jest.mock('~/lib/utils/autosave'); describe('issue_note_form component', () => { - const dummyAutosaveKey = 'some-autosave-key'; - const dummyDraft = 'dummy draft content'; - let store; let wrapper; let props; - const createComponentWrapper = () => { - return mount(NoteForm, { + const createComponentWrapper = (propsData = {}, provide = {}) => { + wrapper = mountExtended(NoteForm, { store, - propsData: props, + propsData: { + ...props, + ...propsData, + }, + 
provide: { + glFeatures: provide, + }, }); }; - const findCancelButton = () => wrapper.find('[data-testid="cancel"]'); + const findCancelButton = () => wrapper.findByTestId('cancel'); + const findCancelCommentButton = () => wrapper.findByTestId('cancelBatchCommentsEnabled'); + const findMarkdownField = () => wrapper.findComponent(MarkdownField); beforeEach(() => { - getDraft.mockImplementation((key) => { - if (key === dummyAutosaveKey) { - return dummyDraft; - } - - return null; - }); - store = createStore(); store.dispatch('setNoteableData', noteableDataMock); store.dispatch('setNotesData', notesDataMock); @@ -50,27 +47,37 @@ describe('issue_note_form component', () => { describe('noteHash', () => { beforeEach(() => { - wrapper = createComponentWrapper(); + createComponentWrapper(); }); it('returns note hash string based on `noteId`', () => { expect(wrapper.vm.noteHash).toBe(`#note_${props.noteId}`); }); - it('return note hash as `#` when `noteId` is empty', async () => { - wrapper.setProps({ - ...props, + it('return note hash as `#` when `noteId` is empty', () => { + createComponentWrapper({ noteId: '', }); - await nextTick(); expect(wrapper.vm.noteHash).toBe('#'); }); }); + it('hides content editor switcher if feature flag content_editor_on_issues is off', () => { + createComponentWrapper({}, { contentEditorOnIssues: false }); + + expect(wrapper.text()).not.toContain('Rich text'); + }); + + it('shows content editor switcher if feature flag content_editor_on_issues is on', () => { + createComponentWrapper({}, { contentEditorOnIssues: true }); + + expect(wrapper.text()).toContain('Rich text'); + }); + describe('conflicts editing', () => { beforeEach(() => { - wrapper = createComponentWrapper(); + createComponentWrapper(); }); it('should show conflict message if note changes outside the component', async () => { @@ -94,15 +101,13 @@ describe('issue_note_form component', () => { describe('form', () => { beforeEach(() => { - wrapper = createComponentWrapper(); + 
createComponentWrapper(); }); it('should render text area with placeholder', () => { const textarea = wrapper.find('textarea'); - expect(textarea.attributes('placeholder')).toEqual( - 'Write a comment or drag your files here…', - ); + expect(textarea.attributes('placeholder')).toBe('Write a comment or drag your files here…'); }); it('should set data-supports-quick-actions to enable autocomplete', () => { @@ -117,23 +122,21 @@ describe('issue_note_form component', () => { ${true} | ${'Write an internal note or drag your files here…'} `( 'should set correct textarea placeholder text when discussion confidentiality is $internal', - ({ internal, placeholder }) => { + async ({ internal, placeholder }) => { props.note = { ...note, internal, }; - wrapper = createComponentWrapper(); + createComponentWrapper(); + + await nextTick(); expect(wrapper.find('textarea').attributes('placeholder')).toBe(placeholder); }, ); it('should link to markdown docs', () => { - const { markdownDocsPath } = notesDataMock; - const markdownField = wrapper.findComponent(MarkdownField); - const markdownFieldProps = markdownField.props(); - - expect(markdownFieldProps.markdownDocsPath).toBe(markdownDocsPath); + expect(findMarkdownField().props('markdownDocsPath')).toBe(notesDataMock.markdownDocsPath); }); describe('keyboard events', () => { @@ -146,12 +149,11 @@ describe('issue_note_form component', () => { describe('up', () => { it('should ender edit mode', () => { - // TODO: do not spy on vm - jest.spyOn(wrapper.vm, 'editMyLastNote'); + const eventHubSpy = jest.spyOn(eventHub, '$emit'); textarea.trigger('keydown.up'); - expect(wrapper.vm.editMyLastNote).toHaveBeenCalled(); + expect(eventHubSpy).not.toHaveBeenCalled(); }); }); @@ -159,17 +161,13 @@ describe('issue_note_form component', () => { it('should save note when cmd+enter is pressed', () => { textarea.trigger('keydown.enter', { metaKey: true }); - const { handleFormUpdate } = wrapper.emitted(); - - expect(handleFormUpdate.length).toBe(1); + 
expect(wrapper.emitted('handleFormUpdate')).toHaveLength(1); }); it('should save note when ctrl+enter is pressed', () => { textarea.trigger('keydown.enter', { ctrlKey: true }); - const { handleFormUpdate } = wrapper.emitted(); - - expect(handleFormUpdate.length).toBe(1); + expect(wrapper.emitted('handleFormUpdate')).toHaveLength(1); }); it('should disable textarea when ctrl+enter is pressed', async () => { @@ -185,151 +183,62 @@ describe('issue_note_form component', () => { }); describe('actions', () => { - it('should be possible to cancel', async () => { - wrapper.setProps({ - ...props, - }); - await nextTick(); + it('should be possible to cancel', () => { + createComponentWrapper(); - const cancelButton = findCancelButton(); - cancelButton.vm.$emit('click'); - await nextTick(); + findCancelButton().vm.$emit('click'); - expect(wrapper.emitted().cancelForm).toHaveLength(1); + expect(wrapper.emitted('cancelForm')).toHaveLength(1); }); it('will not cancel form if there is an active at-who-active class', async () => { - wrapper.setProps({ - ...props, - }); - await nextTick(); + createComponentWrapper(); - const textareaEl = wrapper.vm.$refs.textarea; + const textareaEl = wrapper.vm.$refs.markdownEditor.$el.querySelector('textarea'); const cancelButton = findCancelButton(); textareaEl.classList.add(AT_WHO_ACTIVE_CLASS); cancelButton.vm.$emit('click'); await nextTick(); - expect(wrapper.emitted().cancelForm).toBeUndefined(); + expect(wrapper.emitted('cancelForm')).toBeUndefined(); }); - it('should be possible to update the note', async () => { - wrapper.setProps({ - ...props, - }); - await nextTick(); + it('should be possible to update the note', () => { + createComponentWrapper(); const textarea = wrapper.find('textarea'); textarea.setValue('Foo'); const saveButton = wrapper.find('.js-vue-issue-save'); saveButton.vm.$emit('click'); - expect(wrapper.vm.isSubmitting).toBe(true); + expect(wrapper.emitted('handleFormUpdate')).toHaveLength(1); }); }); }); - describe('with 
autosaveKey', () => { - describe('with draft', () => { - beforeEach(() => { - Object.assign(props, { - noteBody: '', - autosaveKey: dummyAutosaveKey, - }); - wrapper = createComponentWrapper(); - - return nextTick(); - }); - - it('displays the draft in textarea', () => { - const textarea = wrapper.find('textarea'); - - expect(textarea.element.value).toBe(dummyDraft); - }); - }); - - describe('without draft', () => { - beforeEach(() => { - Object.assign(props, { - noteBody: '', - autosaveKey: 'some key without draft', - }); - wrapper = createComponentWrapper(); - - return nextTick(); - }); - - it('leaves the textarea empty', () => { - const textarea = wrapper.find('textarea'); - - expect(textarea.element.value).toBe(''); - }); - }); - - it('updates the draft if textarea content changes', () => { - Object.assign(props, { - noteBody: '', - autosaveKey: dummyAutosaveKey, - }); - wrapper = createComponentWrapper(); - const textarea = wrapper.find('textarea'); - const dummyContent = 'some new content'; - - textarea.setValue(dummyContent); - - expect(updateDraft).toHaveBeenCalledWith(dummyAutosaveKey, dummyContent); - }); - - it('does not save draft when ctrl+enter is pressed', () => { - const options = { - noteBody: '', - autosaveKey: dummyAutosaveKey, - }; - - props = { ...props, ...options }; - wrapper = createComponentWrapper(); - - // setData usage is discouraged. 
See https://gitlab.com/groups/gitlab-org/-/epics/7330 for details - // eslint-disable-next-line no-restricted-syntax - wrapper.setData({ isSubmittingWithKeydown: true }); - - const textarea = wrapper.find('textarea'); - textarea.setValue('some content'); - textarea.trigger('keydown.enter', { metaKey: true }); - - expect(updateDraft).not.toHaveBeenCalled(); - }); - }); - describe('with batch comments', () => { beforeEach(() => { store.registerModule('batchComments', batchComments()); - wrapper = createComponentWrapper(); - wrapper.setProps({ - ...props, + createComponentWrapper({ isDraft: true, noteId: '', discussion: { ...discussionMock, for_commit: false }, }); }); - it('should be possible to cancel', async () => { - jest.spyOn(wrapper.vm, 'cancelHandler'); + it('should be possible to cancel', () => { + findCancelCommentButton().vm.$emit('click'); - await nextTick(); - const cancelButton = wrapper.find('[data-testid="cancelBatchCommentsEnabled"]'); - cancelButton.vm.$emit('click'); - - expect(wrapper.vm.cancelHandler).toHaveBeenCalledWith(true); + expect(wrapper.emitted('cancelForm')).toEqual([[true, false]]); }); it('shows resolve checkbox', () => { - expect(wrapper.find('.js-resolve-checkbox').exists()).toBe(true); + expect(wrapper.findComponent(GlFormCheckbox).exists()).toBe(true); }); - it('hides resolve checkbox', async () => { - wrapper.setProps({ + it('hides resolve checkbox', () => { + createComponentWrapper({ isDraft: false, discussion: { ...discussionMock, @@ -344,15 +253,11 @@ describe('issue_note_form component', () => { }, }); - await nextTick(); - - expect(wrapper.find('.js-resolve-checkbox').exists()).toBe(false); + expect(wrapper.findComponent(GlFormCheckbox).exists()).toBe(false); }); - it('hides actions for commits', async () => { - wrapper.setProps({ discussion: { for_commit: true } }); - - await nextTick(); + it('hides actions for commits', () => { + createComponentWrapper({ discussion: { for_commit: true } }); 
expect(wrapper.find('.note-form-actions').text()).not.toContain('Start a review'); }); @@ -361,13 +266,12 @@ describe('issue_note_form component', () => { it('should start review or add to review when cmd+enter is pressed', async () => { const textarea = wrapper.find('textarea'); - jest.spyOn(wrapper.vm, 'handleAddToReview'); - textarea.setValue('Foo'); textarea.trigger('keydown.enter', { metaKey: true }); await nextTick(); - expect(wrapper.vm.handleAddToReview).toHaveBeenCalled(); + + expect(wrapper.emitted('handleFormUpdateAddToReview')).toEqual([['Foo', false]]); }); }); }); diff --git a/spec/frontend/notes/components/noteable_note_spec.js b/spec/frontend/notes/components/noteable_note_spec.js index b158cfff10d..bce335aa035 100644 --- a/spec/frontend/notes/components/noteable_note_spec.js +++ b/spec/frontend/notes/components/noteable_note_spec.js @@ -375,6 +375,17 @@ describe('issue_note', () => { expect(wrapper.emitted('handleUpdateNote')).toHaveLength(1); }); + it('should not update note with sensitive token', () => { + const sensitiveMessage = 'token: glpat-1234567890abcdefghij'; + + createWrapper(); + updateActions(); + wrapper + .findComponent(NoteBody) + .vm.$emit('handleFormUpdate', { ...params, noteText: sensitiveMessage }); + expect(updateNote).not.toHaveBeenCalled(); + }); + it('does not stringify empty position', () => { createWrapper(); updateActions(); diff --git a/spec/frontend/notes/components/notes_app_spec.js b/spec/frontend/notes/components/notes_app_spec.js index 832264aa7d3..3fe31506223 100644 --- a/spec/frontend/notes/components/notes_app_spec.js +++ b/spec/frontend/notes/components/notes_app_spec.js @@ -174,7 +174,7 @@ describe('note_app', () => { }); describe('while fetching data', () => { - beforeEach(async () => { + beforeEach(() => { wrapper = mountComponent(); }); diff --git a/spec/frontend/notes/deprecated_notes_spec.js b/spec/frontend/notes/deprecated_notes_spec.js index 40f10ca901b..355ecb78187 100644 --- 
a/spec/frontend/notes/deprecated_notes_spec.js +++ b/spec/frontend/notes/deprecated_notes_spec.js @@ -1,9 +1,11 @@ /* eslint-disable import/no-commonjs, no-new */ -import MockAdapter from 'axios-mock-adapter'; import $ from 'jquery'; +import MockAdapter from 'axios-mock-adapter'; +import htmlPipelineSchedulesEditSnippets from 'test_fixtures/snippets/show.html'; +import htmlPipelineSchedulesEditCommit from 'test_fixtures/commit/show.html'; import '~/behaviors/markdown/render_gfm'; -import { loadHTMLFixture, resetHTMLFixture } from 'helpers/fixtures'; +import { setHTMLFixture, resetHTMLFixture } from 'helpers/fixtures'; import { TEST_HOST } from 'helpers/test_constants'; import waitForPromises from 'helpers/wait_for_promises'; import axios from '~/lib/utils/axios_utils'; @@ -19,7 +21,6 @@ const Notes = require('~/deprecated_notes').default; const FLASH_TYPE_ALERT = 'alert'; const NOTES_POST_PATH = /(.*)\/notes\?html=true$/; -const fixture = 'snippets/show.html'; let mockAxios; window.project_uploads_path = `${TEST_HOST}/uploads`; @@ -36,7 +37,7 @@ function wrappedDiscussionNote(note) { // eslint-disable-next-line jest/no-disabled-tests describe.skip('Old Notes (~/deprecated_notes.js)', () => { beforeEach(() => { - loadHTMLFixture(fixture); + setHTMLFixture(htmlPipelineSchedulesEditSnippets); // Re-declare this here so that test_setup.js#beforeEach() doesn't // overwrite it. 
@@ -671,7 +672,7 @@ describe.skip('Old Notes (~/deprecated_notes.js)', () => { let $notesContainer; beforeEach(() => { - loadHTMLFixture('commit/show.html'); + setHTMLFixture(htmlPipelineSchedulesEditCommit); mockAxios.onPost(NOTES_POST_PATH).reply(HTTP_STATUS_OK, note); new Notes('', []); diff --git a/spec/frontend/notes/stores/actions_spec.js b/spec/frontend/notes/stores/actions_spec.js index 0d3ebea7af2..97249d232dc 100644 --- a/spec/frontend/notes/stores/actions_spec.js +++ b/spec/frontend/notes/stores/actions_spec.js @@ -257,14 +257,14 @@ describe('Actions Notes Store', () => { axiosMock.onGet(notesDataMock.notesPath).reply(HTTP_STATUS_OK, pollResponse, pollHeaders); const failureMock = () => axiosMock.onGet(notesDataMock.notesPath).reply(HTTP_STATUS_INTERNAL_SERVER_ERROR); - const advanceAndRAF = async (time) => { + const advanceAndRAF = (time) => { if (time) { jest.advanceTimersByTime(time); } return waitForPromises(); }; - const advanceXMoreIntervals = async (number) => { + const advanceXMoreIntervals = (number) => { const timeoutLength = pollInterval * number; return advanceAndRAF(timeoutLength); @@ -273,7 +273,7 @@ describe('Actions Notes Store', () => { await store.dispatch('poll'); await advanceAndRAF(2); }; - const cleanUp = async () => { + const cleanUp = () => { jest.clearAllTimers(); return store.dispatch('stopPolling'); diff --git a/spec/frontend/notifications/components/custom_notifications_modal_spec.js b/spec/frontend/notifications/components/custom_notifications_modal_spec.js index 0fbd073191e..480d617fcb2 100644 --- a/spec/frontend/notifications/components/custom_notifications_modal_spec.js +++ b/spec/frontend/notifications/components/custom_notifications_modal_spec.js @@ -103,7 +103,7 @@ describe('CustomNotificationsModal', () => { ${1} | ${'new_note'} | ${'New note'} | ${false} | ${false} `( 'renders a checkbox for "$eventName" with checked=$enabled', - async ({ index, eventName, enabled, loading }) => { + ({ index, eventName, enabled, 
loading }) => { const checkbox = findCheckboxAt(index); expect(checkbox.text()).toContain(eventName); expect(checkbox.vm.$attrs.checked).toBe(enabled); diff --git a/spec/frontend/oauth_application/components/oauth_secret_spec.js b/spec/frontend/oauth_application/components/oauth_secret_spec.js new file mode 100644 index 00000000000..c38bd066da8 --- /dev/null +++ b/spec/frontend/oauth_application/components/oauth_secret_spec.js @@ -0,0 +1,116 @@ +import { GlButton, GlModal } from '@gitlab/ui'; +import { shallowMount } from '@vue/test-utils'; +import MockAdapter from 'axios-mock-adapter'; +import waitForPromises from 'helpers/wait_for_promises'; +import { createAlert, VARIANT_SUCCESS, VARIANT_WARNING } from '~/alert'; +import axios from '~/lib/utils/axios_utils'; +import { HTTP_STATUS_INTERNAL_SERVER_ERROR, HTTP_STATUS_OK } from '~/lib/utils/http_status'; +import OAuthSecret from '~/oauth_application/components/oauth_secret.vue'; +import { + RENEW_SECRET_FAILURE, + RENEW_SECRET_SUCCESS, + WARNING_NO_SECRET, +} from '~/oauth_application/constants'; +import InputCopyToggleVisibility from '~/vue_shared/components/form/input_copy_toggle_visibility.vue'; + +jest.mock('~/alert'); +const mockEvent = { preventDefault: jest.fn() }; + +describe('OAuthSecret', () => { + let wrapper; + const renewPath = '/applications/1/renew'; + + const createComponent = (provide = {}) => { + wrapper = shallowMount(OAuthSecret, { + provide: { + initialSecret: undefined, + renewPath, + ...provide, + }, + }); + }; + + const findInputCopyToggleVisibility = () => wrapper.findComponent(InputCopyToggleVisibility); + const findRenewSecretButton = () => wrapper.findComponent(GlButton); + const findModal = () => wrapper.findComponent(GlModal); + + describe('when secret is provided', () => { + const initialSecret = 'my secret'; + beforeEach(() => { + createComponent({ initialSecret }); + }); + + it('shows the masked secret', () => { + 
expect(findInputCopyToggleVisibility().props('value')).toBe(initialSecret); + }); + + it('shows the renew secret button', () => { + expect(findRenewSecretButton().exists()).toBe(true); + }); + }); + + describe('when secret is not provided', () => { + beforeEach(() => { + createComponent(); + }); + + it('shows an alert', () => { + expect(createAlert).toHaveBeenCalledWith({ + message: WARNING_NO_SECRET, + variant: VARIANT_WARNING, + }); + }); + + it('shows the renew secret button', () => { + expect(findRenewSecretButton().exists()).toBe(true); + }); + + describe('when renew secret button is selected', () => { + beforeEach(() => { + createComponent(); + findRenewSecretButton().vm.$emit('click'); + }); + + it('shows a modal', () => { + expect(findModal().props('visible')).toBe(true); + }); + + describe('when secret renewal succeeds', () => { + const initialSecret = 'my secret'; + + beforeEach(async () => { + const mockAxios = new MockAdapter(axios); + mockAxios.onPut().reply(HTTP_STATUS_OK, { secret: initialSecret }); + findModal().vm.$emit('primary', mockEvent); + await waitForPromises(); + }); + + it('shows an alert', () => { + expect(createAlert).toHaveBeenCalledWith({ + message: RENEW_SECRET_SUCCESS, + variant: VARIANT_SUCCESS, + }); + }); + + it('shows the new secret', () => { + expect(findInputCopyToggleVisibility().props('value')).toBe(initialSecret); + }); + }); + + describe('when secret renewal fails', () => { + beforeEach(async () => { + const mockAxios = new MockAdapter(axios); + mockAxios.onPut().reply(HTTP_STATUS_INTERNAL_SERVER_ERROR); + findModal().vm.$emit('primary', mockEvent); + await waitForPromises(); + }); + + it('creates an alert', () => { + expect(createAlert).toHaveBeenCalledWith({ + message: RENEW_SECRET_FAILURE, + }); + }); + }); + }); + }); +}); diff --git a/spec/frontend/oauth_remember_me_spec.js b/spec/frontend/oauth_remember_me_spec.js index 1fa0e0aa8f6..7be3d441eb3 100644 --- a/spec/frontend/oauth_remember_me_spec.js +++ 
b/spec/frontend/oauth_remember_me_spec.js @@ -17,19 +17,16 @@ describe('OAuthRememberMe', () => { resetHTMLFixture(); }); - it('adds the "remember_me" query parameter to all OAuth login buttons', () => { - $('#oauth-container #remember_me').click(); + it('adds and removes the "remember_me" query parameter from all OAuth login buttons', () => { + $('#oauth-container #remember_me_omniauth').click(); expect(findFormAction('.twitter')).toBe('http://example.com/?remember_me=1'); expect(findFormAction('.github')).toBe('http://example.com/?remember_me=1'); expect(findFormAction('.facebook')).toBe( 'http://example.com/?redirect_fragment=L1&remember_me=1', ); - }); - it('removes the "remember_me" query parameter from all OAuth login buttons', () => { - $('#oauth-container #remember_me').click(); - $('#oauth-container #remember_me').click(); + $('#oauth-container #remember_me_omniauth').click(); expect(findFormAction('.twitter')).toBe('http://example.com/'); expect(findFormAction('.github')).toBe('http://example.com/'); diff --git a/spec/frontend/observability/index_spec.js b/spec/frontend/observability/index_spec.js index 83f72ff72b5..25eb048c62b 100644 --- a/spec/frontend/observability/index_spec.js +++ b/spec/frontend/observability/index_spec.js @@ -52,7 +52,7 @@ describe('renderObservability', () => { ); }); - it('handle route-update events', async () => { + it('handle route-update events', () => { component.vm.$router.push('/something?foo=bar'); component.vm.$emit('route-update', { url: '/some_path' }); expect(component.vm.$router.currentRoute.path).toBe('/something'); diff --git a/spec/frontend/operation_settings/components/metrics_settings_spec.js b/spec/frontend/operation_settings/components/metrics_settings_spec.js index ee450dfc851..6ea08d4a9a5 100644 --- a/spec/frontend/operation_settings/components/metrics_settings_spec.js +++ b/spec/frontend/operation_settings/components/metrics_settings_spec.js @@ -198,7 +198,7 @@ describe('operation settings external dashboard 
component', () => { expect(refreshCurrentPage).toHaveBeenCalled(); }); - it('creates alert banner on error', async () => { + it('creates an alert on error', async () => { mountComponent(false); const message = 'mockErrorMessage'; axios.patch.mockRejectedValue({ response: { data: { message } } }); diff --git a/spec/frontend/packages_and_registries/container_registry/explorer/components/details_page/details_header_spec.js b/spec/frontend/packages_and_registries/container_registry/explorer/components/details_page/details_header_spec.js index 9e443234c34..01089422376 100644 --- a/spec/frontend/packages_and_registries/container_registry/explorer/components/details_page/details_header_spec.js +++ b/spec/frontend/packages_and_registries/container_registry/explorer/components/details_page/details_header_spec.js @@ -1,10 +1,10 @@ import { GlDropdownItem, GlIcon, GlDropdown } from '@gitlab/ui'; -import { shallowMount } from '@vue/test-utils'; import VueApollo from 'vue-apollo'; import Vue, { nextTick } from 'vue'; import { numberToHumanSize } from '~/lib/utils/number_utils'; import { useFakeDate } from 'helpers/fake_date'; import createMockApollo from 'helpers/mock_apollo_helper'; +import { shallowMountExtended } from 'helpers/vue_test_utils_helper'; import { createMockDirective, getBinding } from 'helpers/vue_mock_directive'; import waitForPromises from 'helpers/wait_for_promises'; import component from '~/packages_and_registries/container_registry/explorer/components/details_page/details_header.vue'; @@ -22,37 +22,27 @@ import { } from '~/packages_and_registries/container_registry/explorer/constants'; import getContainerRepositoryMetadata from '~/packages_and_registries/container_registry/explorer/graphql/queries/get_container_repository_metadata.query.graphql'; import TitleArea from '~/vue_shared/components/registry/title_area.vue'; -import { imageTagsCountMock } from '../../mock_data'; +import { containerRepositoryMock, imageTagsCountMock } from '../../mock_data'; 
describe('Details Header', () => { let wrapper; let apolloProvider; const defaultImage = { - name: 'foo', - updatedAt: '2020-11-03T13:29:21Z', - canDelete: true, - project: { - visibility: 'public', - path: 'path', - containerExpirationPolicy: { - enabled: false, - }, - }, + ...containerRepositoryMock, }; // set the date to Dec 4, 2020 useFakeDate(2020, 11, 4); - const findByTestId = (testId) => wrapper.find(`[data-testid="${testId}"]`); - const findLastUpdatedAndVisibility = () => findByTestId('updated-and-visibility'); - const findTitle = () => findByTestId('title'); - const findTagsCount = () => findByTestId('tags-count'); - const findCleanup = () => findByTestId('cleanup'); + const findCreatedAndVisibility = () => wrapper.findByTestId('created-and-visibility'); + const findTitle = () => wrapper.findByTestId('title'); + const findTagsCount = () => wrapper.findByTestId('tags-count'); + const findCleanup = () => wrapper.findByTestId('cleanup'); const findDeleteButton = () => wrapper.findComponent(GlDropdownItem); const findInfoIcon = () => wrapper.findComponent(GlIcon); const findMenu = () => wrapper.findComponent(GlDropdown); - const findSize = () => findByTestId('image-size'); + const findSize = () => wrapper.findByTestId('image-size'); const waitForMetadataItems = async () => { // Metadata items are printed by a loop in the title-area and it takes two ticks for them to be available @@ -69,7 +59,7 @@ describe('Details Header', () => { const requestHandlers = [[getContainerRepositoryMetadata, resolver]]; apolloProvider = createMockApollo(requestHandlers); - wrapper = shallowMount(component, { + wrapper = shallowMountExtended(component, { apolloProvider, propsData, directives: { @@ -97,7 +87,7 @@ describe('Details Header', () => { }); it('root image shows project path name', () => { - expect(findTitle().text()).toBe('path'); + expect(findTitle().text()).toBe('gitlab-test'); }); it('has an icon', () => { @@ -119,7 +109,7 @@ describe('Details Header', () => { }); 
it('shows image.name', () => { - expect(findTitle().text()).toContain('foo'); + expect(findTitle().text()).toContain('rails-12009'); }); it('has no icon', () => { @@ -247,7 +237,7 @@ describe('Details Header', () => { expect(findCleanup().props('icon')).toBe('expire'); }); - it('when the expiration policy is disabled', async () => { + it('when cleanup is not scheduled', async () => { mountComponent(); await waitForMetadataItems(); @@ -287,12 +277,12 @@ describe('Details Header', () => { ); }); - describe('visibility and updated at', () => { - it('has last updated text', async () => { + describe('visibility and created at', () => { + it('has created text', async () => { mountComponent(); await waitForMetadataItems(); - expect(findLastUpdatedAndVisibility().props('text')).toBe('Last updated 1 month ago'); + expect(findCreatedAndVisibility().props('text')).toBe('Created Nov 3, 2020 13:29'); }); describe('visibility icon', () => { @@ -300,7 +290,7 @@ describe('Details Header', () => { mountComponent(); await waitForMetadataItems(); - expect(findLastUpdatedAndVisibility().props('icon')).toBe('eye'); + expect(findCreatedAndVisibility().props('icon')).toBe('eye'); }); it('shows an eye slashed when the project is not public', async () => { mountComponent({ @@ -308,7 +298,7 @@ describe('Details Header', () => { }); await waitForMetadataItems(); - expect(findLastUpdatedAndVisibility().props('icon')).toBe('eye-slash'); + expect(findCreatedAndVisibility().props('icon')).toBe('eye-slash'); }); }); }); diff --git a/spec/frontend/packages_and_registries/container_registry/explorer/components/details_page/tags_list_spec.js b/spec/frontend/packages_and_registries/container_registry/explorer/components/details_page/tags_list_spec.js index 09d0370efbf..0cbb9eab018 100644 --- a/spec/frontend/packages_and_registries/container_registry/explorer/components/details_page/tags_list_spec.js +++ 
b/spec/frontend/packages_and_registries/container_registry/explorer/components/details_page/tags_list_spec.js @@ -4,13 +4,15 @@ import { GlEmptyState } from '@gitlab/ui'; import VueApollo from 'vue-apollo'; import createMockApollo from 'helpers/mock_apollo_helper'; import waitForPromises from 'helpers/wait_for_promises'; - +import Tracking from '~/tracking'; import component from '~/packages_and_registries/container_registry/explorer/components/details_page/tags_list.vue'; import TagsListRow from '~/packages_and_registries/container_registry/explorer/components/details_page/tags_list_row.vue'; import TagsLoader from '~/packages_and_registries/shared/components/tags_loader.vue'; import RegistryList from '~/packages_and_registries/shared/components/registry_list.vue'; import PersistedSearch from '~/packages_and_registries/shared/components/persisted_search.vue'; import getContainerRepositoryTagsQuery from '~/packages_and_registries/container_registry/explorer/graphql/queries/get_container_repository_tags.query.graphql'; +import deleteContainerRepositoryTagsMutation from '~/packages_and_registries/container_registry/explorer/graphql/mutations/delete_container_repository_tags.mutation.graphql'; + import { GRAPHQL_PAGE_SIZE, NO_TAGS_TITLE, @@ -19,7 +21,13 @@ import { NO_TAGS_MATCHING_FILTERS_DESCRIPTION, } from '~/packages_and_registries/container_registry/explorer/constants/index'; import { FILTERED_SEARCH_TERM } from '~/vue_shared/components/filtered_search_bar/constants'; -import { tagsMock, imageTagsMock, tagsPageInfo } from '../../mock_data'; +import { + graphQLDeleteImageRepositoryTagsMock, + tagsMock, + imageTagsMock, + tagsPageInfo, +} from '../../mock_data'; +import { DeleteModal } from '../../stubs'; describe('Tags List', () => { let wrapper; @@ -31,6 +39,7 @@ describe('Tags List', () => { noContainersImage: 'noContainersImage', }; + const findDeleteModal = () => wrapper.findComponent(DeleteModal); const findPersistedSearch = () => 
wrapper.findComponent(PersistedSearch); const findTagsListRow = () => wrapper.findAllComponents(TagsListRow); const findRegistryList = () => wrapper.findComponent(RegistryList); @@ -42,20 +51,23 @@ describe('Tags List', () => { }; const waitForApolloRequestRender = async () => { + fireFirstSortUpdate(); await waitForPromises(); - await nextTick(); }; - const mountComponent = ({ propsData = { isMobile: false, id: 1 } } = {}) => { + const mountComponent = ({ propsData = { isMobile: false, id: 1 }, mutationResolver } = {}) => { Vue.use(VueApollo); - const requestHandlers = [[getContainerRepositoryTagsQuery, resolver]]; + const requestHandlers = [ + [getContainerRepositoryTagsQuery, resolver], + [deleteContainerRepositoryTagsMutation, mutationResolver], + ]; apolloProvider = createMockApollo(requestHandlers); wrapper = shallowMount(component, { apolloProvider, propsData, - stubs: { RegistryList }, + stubs: { RegistryList, DeleteModal }, provide() { return { config: defaultConfig, @@ -66,12 +78,12 @@ describe('Tags List', () => { beforeEach(() => { resolver = jest.fn().mockResolvedValue(imageTagsMock()); + jest.spyOn(Tracking, 'event'); }); describe('registry list', () => { beforeEach(async () => { mountComponent(); - fireFirstSortUpdate(); await waitForApolloRequestRender(); }); @@ -126,11 +138,46 @@ describe('Tags List', () => { }); }); - it('emits a delete event when list emits delete', () => { - const eventPayload = 'foo'; - findRegistryList().vm.$emit('delete', eventPayload); + describe('delete event', () => { + describe('single item', () => { + beforeEach(() => { + findRegistryList().vm.$emit('delete', [tags[0]]); + }); + + it('opens the modal', () => { + expect(DeleteModal.methods.show).toHaveBeenCalled(); + }); + + it('sets modal props', () => { + expect(findDeleteModal().props('itemsToBeDeleted')).toMatchObject([tags[0]]); + }); + + it('tracks a single delete event', () => { + expect(Tracking.event).toHaveBeenCalledWith(undefined, 'click_button', { + label: 
'registry_tag_delete', + }); + }); + }); + + describe('multiple items', () => { + beforeEach(() => { + findRegistryList().vm.$emit('delete', tags); + }); + + it('opens the modal', () => { + expect(DeleteModal.methods.show).toHaveBeenCalled(); + }); - expect(wrapper.emitted('delete')).toEqual([[eventPayload]]); + it('sets modal props', () => { + expect(findDeleteModal().props('itemsToBeDeleted')).toMatchObject(tags); + }); + + it('tracks multiple delete event', () => { + expect(Tracking.event).toHaveBeenCalledWith(undefined, 'click_button', { + label: 'bulk_registry_tag_delete', + }); + }); + }); }); }); }); @@ -138,7 +185,6 @@ describe('Tags List', () => { describe('list rows', () => { it('one row exist for each tag', async () => { mountComponent(); - fireFirstSortUpdate(); await waitForApolloRequestRender(); @@ -147,7 +193,6 @@ describe('Tags List', () => { it('the correct props are bound to it', async () => { mountComponent({ propsData: { disabled: true, id: 1 } }); - fireFirstSortUpdate(); await waitForApolloRequestRender(); @@ -162,7 +207,6 @@ describe('Tags List', () => { describe('events', () => { it('select event update the selected items', async () => { mountComponent(); - fireFirstSortUpdate(); await waitForApolloRequestRender(); findTagsListRow().at(0).vm.$emit('select'); @@ -172,13 +216,44 @@ describe('Tags List', () => { expect(findTagsListRow().at(0).attributes('selected')).toBe('true'); }); - it('delete event emit a delete event', async () => { - mountComponent(); - fireFirstSortUpdate(); - await waitForApolloRequestRender(); + describe('delete event', () => { + let mutationResolver; + + beforeEach(async () => { + mutationResolver = jest.fn().mockResolvedValue(graphQLDeleteImageRepositoryTagsMock); + resolver = jest.fn().mockResolvedValue(imageTagsMock()); + mountComponent({ mutationResolver }); - findTagsListRow().at(0).vm.$emit('delete'); - expect(wrapper.emitted('delete')[0][0][0].name).toBe(tags[0].name); + await waitForApolloRequestRender(); + 
findTagsListRow().at(0).vm.$emit('delete'); + }); + + it('opens the modal', () => { + expect(DeleteModal.methods.show).toHaveBeenCalled(); + }); + + it('tracks a single delete event', () => { + expect(Tracking.event).toHaveBeenCalledWith(undefined, 'click_button', { + label: 'registry_tag_delete', + }); + }); + + it('confirmDelete event calls apollo mutation with the right parameters and refetches the tags list query', async () => { + findDeleteModal().vm.$emit('confirmDelete'); + + expect(mutationResolver).toHaveBeenCalledWith( + expect.objectContaining({ tagNames: [tags[0].name] }), + ); + + await waitForPromises(); + + expect(resolver).toHaveBeenLastCalledWith({ + first: GRAPHQL_PAGE_SIZE, + name: '', + sort: 'NAME_ASC', + id: '1', + }); + }); }); }); }); @@ -187,7 +262,6 @@ describe('Tags List', () => { it('sets registry list hiddenDelete prop to true', async () => { resolver = jest.fn().mockResolvedValue(imageTagsMock({ canDelete: false })); mountComponent(); - fireFirstSortUpdate(); await waitForApolloRequestRender(); expect(findRegistryList().props('hiddenDelete')).toBe(true); @@ -198,7 +272,6 @@ describe('Tags List', () => { beforeEach(async () => { resolver = jest.fn().mockResolvedValue(imageTagsMock({ nodes: [] })); mountComponent(); - fireFirstSortUpdate(); await waitForApolloRequestRender(); }); @@ -225,7 +298,7 @@ describe('Tags List', () => { filters: [{ type: FILTERED_SEARCH_TERM, value: { data: 'foo' } }], }); - await waitForApolloRequestRender(); + await waitForPromises(); expect(findEmptyState().props()).toMatchObject({ svgPath: defaultConfig.noContainersImage, @@ -236,6 +309,175 @@ describe('Tags List', () => { }); }); + describe('modal', () => { + it('exists', async () => { + mountComponent(); + await waitForApolloRequestRender(); + + expect(findDeleteModal().exists()).toBe(true); + }); + + describe('cancel event', () => { + it('tracks cancel_delete', async () => { + mountComponent(); + await waitForApolloRequestRender(); + + 
findDeleteModal().vm.$emit('cancel'); + + expect(Tracking.event).toHaveBeenCalledWith(undefined, 'cancel_delete', { + label: 'registry_tag_delete', + }); + }); + }); + + describe('confirmDelete event', () => { + let mutationResolver; + + describe('when mutation', () => { + beforeEach(() => { + mutationResolver = jest.fn().mockResolvedValue(graphQLDeleteImageRepositoryTagsMock); + mountComponent({ mutationResolver }); + + return waitForApolloRequestRender(); + }); + + it('is started renders loader', async () => { + findRegistryList().vm.$emit('delete', [tags[0]]); + + findDeleteModal().vm.$emit('confirmDelete'); + await nextTick(); + + expect(findTagsLoader().exists()).toBe(true); + expect(findTagsListRow().exists()).toBe(false); + }); + + it('ends, loader is hidden', async () => { + findRegistryList().vm.$emit('delete', [tags[0]]); + + findDeleteModal().vm.$emit('confirmDelete'); + await waitForPromises(); + + expect(findTagsLoader().exists()).toBe(false); + expect(findTagsListRow().exists()).toBe(true); + }); + }); + + describe.each([ + { + description: 'rejection', + mutationMock: jest.fn().mockRejectedValue(), + }, + { + description: 'error', + mutationMock: jest.fn().mockResolvedValue({ + data: { + destroyContainerRepositoryTags: { + errors: [new Error()], + }, + }, + }), + }, + ])('when mutation fails with $description', ({ mutationMock }) => { + beforeEach(() => { + mutationResolver = mutationMock; + mountComponent({ mutationResolver }); + + return waitForApolloRequestRender(); + }); + + it('when one item is selected to be deleted calls apollo mutation with the right parameters and emits delete event with right arguments', async () => { + findRegistryList().vm.$emit('delete', [tags[0]]); + + resolver.mockClear(); + + findDeleteModal().vm.$emit('confirmDelete'); + + expect(mutationResolver).toHaveBeenCalledWith( + expect.objectContaining({ tagNames: [tags[0].name] }), + ); + + expect(resolver).not.toHaveBeenCalled(); + + await waitForPromises(); + + 
expect(wrapper.emitted('delete')).toHaveLength(1); + expect(wrapper.emitted('delete')[0][0]).toEqual('danger_tag'); + }); + + it('when more than one item is selected to be deleted calls apollo mutation with the right parameters and emits delete event with right arguments', async () => { + findRegistryList().vm.$emit('delete', tagsMock); + resolver.mockClear(); + + findDeleteModal().vm.$emit('confirmDelete'); + + expect(mutationResolver).toHaveBeenCalledWith( + expect.objectContaining({ tagNames: tagsMock.map((t) => t.name) }), + ); + + expect(resolver).not.toHaveBeenCalled(); + + await waitForPromises(); + + expect(wrapper.emitted('delete')).toHaveLength(1); + expect(wrapper.emitted('delete')[0][0]).toEqual('danger_tags'); + }); + }); + + describe('when mutation is successful', () => { + beforeEach(() => { + mutationResolver = jest.fn().mockResolvedValue(graphQLDeleteImageRepositoryTagsMock); + mountComponent({ mutationResolver }); + + return waitForApolloRequestRender(); + }); + + it('and one item is selected to be deleted calls apollo mutation with the right parameters and refetches the tags list query', async () => { + findRegistryList().vm.$emit('delete', [tags[0]]); + + findDeleteModal().vm.$emit('confirmDelete'); + + expect(mutationResolver).toHaveBeenCalledWith( + expect.objectContaining({ tagNames: [tags[0].name] }), + ); + + expect(resolver).toHaveBeenLastCalledWith({ + first: GRAPHQL_PAGE_SIZE, + name: '', + sort: 'NAME_ASC', + id: '1', + }); + + await waitForPromises(); + + expect(wrapper.emitted('delete')).toHaveLength(1); + expect(wrapper.emitted('delete')[0][0]).toEqual('success_tag'); + }); + + it('and more than one item is selected to be deleted calls apollo mutation with the right parameters and refetches the tags list query', async () => { + findRegistryList().vm.$emit('delete', tagsMock); + + findDeleteModal().vm.$emit('confirmDelete'); + + expect(mutationResolver).toHaveBeenCalledWith( + expect.objectContaining({ tagNames: tagsMock.map((t) => 
t.name) }), + ); + + expect(resolver).toHaveBeenLastCalledWith({ + first: GRAPHQL_PAGE_SIZE, + name: '', + sort: 'NAME_ASC', + id: '1', + }); + + await waitForPromises(); + + expect(wrapper.emitted('delete')).toHaveLength(1); + expect(wrapper.emitted('delete')[0][0]).toEqual('success_tags'); + }); + }); + }); + }); + describe('loading state', () => { it.each` isImageLoading | queryExecuting | loadingVisible @@ -247,7 +489,6 @@ describe('Tags List', () => { 'when the isImageLoading is $isImageLoading, and is $queryExecuting that the query is still executing is $loadingVisible that the loader is shown', async ({ isImageLoading, queryExecuting, loadingVisible }) => { mountComponent({ propsData: { isImageLoading, isMobile: false, id: 1 } }); - fireFirstSortUpdate(); if (!queryExecuting) { await waitForApolloRequestRender(); } diff --git a/spec/frontend/packages_and_registries/container_registry/explorer/components/list_page/registry_header_spec.js b/spec/frontend/packages_and_registries/container_registry/explorer/components/list_page/registry_header_spec.js index 45304cc2329..b7f3698e155 100644 --- a/spec/frontend/packages_and_registries/container_registry/explorer/components/list_page/registry_header_spec.js +++ b/spec/frontend/packages_and_registries/container_registry/explorer/components/list_page/registry_header_spec.js @@ -81,7 +81,7 @@ describe('registry_header', () => { }); }); - describe('expiration policy', () => { + describe('cleanup policy', () => { it('when is disabled', async () => { await mountComponent({ expirationPolicy: { enabled: false }, @@ -111,11 +111,11 @@ describe('registry_header', () => { const cleanupLink = findSetupCleanUpLink(); expect(text.exists()).toBe(true); - expect(text.props('text')).toBe('Expiration policy will run in '); + expect(text.props('text')).toBe('Cleanup will run in '); expect(cleanupLink.exists()).toBe(true); expect(cleanupLink.text()).toBe(SET_UP_CLEANUP); }); - it('when the expiration policy is completely disabled', 
async () => { + it('when the cleanup policy is not scheduled', async () => { await mountComponent({ expirationPolicy: { enabled: true }, expirationPolicyHelpPagePath: 'foo', diff --git a/spec/frontend/packages_and_registries/container_registry/explorer/mock_data.js b/spec/frontend/packages_and_registries/container_registry/explorer/mock_data.js index cd54b856c97..8ca74f5077e 100644 --- a/spec/frontend/packages_and_registries/container_registry/explorer/mock_data.js +++ b/spec/frontend/packages_and_registries/container_registry/explorer/mock_data.js @@ -127,7 +127,6 @@ export const containerRepositoryMock = { location: 'host.docker.internal:5000/gitlab-org/gitlab-test/rails-12009', canDelete: true, createdAt: '2020-11-03T13:29:21Z', - updatedAt: '2020-11-03T13:29:21Z', expirationPolicyStartedAt: null, expirationPolicyCleanupStatus: 'UNSCHEDULED', project: { diff --git a/spec/frontend/packages_and_registries/container_registry/explorer/pages/details_spec.js b/spec/frontend/packages_and_registries/container_registry/explorer/pages/details_spec.js index 888c3e5bffa..7fed81acead 100644 --- a/spec/frontend/packages_and_registries/container_registry/explorer/pages/details_spec.js +++ b/spec/frontend/packages_and_registries/container_registry/explorer/pages/details_spec.js @@ -22,22 +22,15 @@ import { MISSING_OR_DELETED_IMAGE_TITLE, MISSING_OR_DELETED_IMAGE_MESSAGE, } from '~/packages_and_registries/container_registry/explorer/constants'; -import deleteContainerRepositoryTagsMutation from '~/packages_and_registries/container_registry/explorer/graphql/mutations/delete_container_repository_tags.mutation.graphql'; import getContainerRepositoryDetailsQuery from '~/packages_and_registries/container_registry/explorer/graphql/queries/get_container_repository_details.query.graphql'; -import getContainerRepositoryTagsQuery from '~/packages_and_registries/container_registry/explorer/graphql/queries/get_container_repository_tags.query.graphql'; -import getContainerRepositoriesDetails 
from '~/packages_and_registries/container_registry/explorer/graphql/queries/get_container_repositories_details.query.graphql'; import component from '~/packages_and_registries/container_registry/explorer/pages/details.vue'; import Tracking from '~/tracking'; import { graphQLImageDetailsMock, - graphQLDeleteImageRepositoryTagsMock, - graphQLProjectImageRepositoriesDetailsMock, containerRepositoryMock, graphQLEmptyImageDetailsMock, - tagsMock, - imageTagsMock, } from '../mock_data'; import { DeleteModal } from '../stubs'; @@ -69,13 +62,6 @@ describe('Details Page', () => { isGroupPage: false, }; - const cleanTags = tagsMock.map((t) => { - const result = { ...t }; - // eslint-disable-next-line no-underscore-dangle - delete result.__typename; - return result; - }); - const waitForApolloRequestRender = async () => { await waitForPromises(); await nextTick(); @@ -83,20 +69,12 @@ describe('Details Page', () => { const mountComponent = ({ resolver = jest.fn().mockResolvedValue(graphQLImageDetailsMock()), - mutationResolver = jest.fn().mockResolvedValue(graphQLDeleteImageRepositoryTagsMock), - tagsResolver = jest.fn().mockResolvedValue(graphQLImageDetailsMock(imageTagsMock())), - detailsResolver = jest.fn().mockResolvedValue(graphQLProjectImageRepositoriesDetailsMock), options, config = defaultConfig, } = {}) => { Vue.use(VueApollo); - const requestHandlers = [ - [getContainerRepositoryDetailsQuery, resolver], - [deleteContainerRepositoryTagsMutation, mutationResolver], - [getContainerRepositoryTagsQuery, tagsResolver], - [getContainerRepositoriesDetails, detailsResolver], - ]; + const requestHandlers = [[getContainerRepositoryDetailsQuery, resolver]]; apolloProvider = createMockApollo(requestHandlers); @@ -184,50 +162,6 @@ describe('Details Page', () => { isMobile: false, }); }); - - describe('deleteEvent', () => { - describe('single item', () => { - let tagToBeDeleted; - beforeEach(async () => { - mountComponent(); - - await waitForApolloRequestRender(); - - 
[tagToBeDeleted] = cleanTags; - findTagsList().vm.$emit('delete', [tagToBeDeleted]); - }); - - it('open the modal', async () => { - expect(DeleteModal.methods.show).toHaveBeenCalled(); - }); - - it('tracks a single delete event', () => { - expect(Tracking.event).toHaveBeenCalledWith(undefined, 'click_button', { - label: 'registry_tag_delete', - }); - }); - }); - - describe('multiple items', () => { - beforeEach(async () => { - mountComponent(); - - await waitForApolloRequestRender(); - - findTagsList().vm.$emit('delete', cleanTags); - }); - - it('open the modal', () => { - expect(DeleteModal.methods.show).toHaveBeenCalled(); - }); - - it('tracks a single delete event', () => { - expect(Tracking.event).toHaveBeenCalledWith(undefined, 'click_button', { - label: 'bulk_registry_tag_delete', - }); - }); - }); - }); }); describe('modal', () => { @@ -248,61 +182,24 @@ describe('Details Page', () => { findDeleteModal().vm.$emit('cancel'); expect(Tracking.event).toHaveBeenCalledWith(undefined, 'cancel_delete', { - label: 'registry_tag_delete', + label: 'registry_image_delete', }); }); }); - describe('confirmDelete event', () => { - let mutationResolver; - let tagsResolver; - let detailsResolver; - + describe('tags list delete event', () => { beforeEach(() => { - mutationResolver = jest.fn().mockResolvedValue(graphQLDeleteImageRepositoryTagsMock); - tagsResolver = jest.fn().mockResolvedValue(graphQLImageDetailsMock(imageTagsMock())); - detailsResolver = jest.fn().mockResolvedValue(graphQLProjectImageRepositoriesDetailsMock); - mountComponent({ mutationResolver, tagsResolver, detailsResolver }); + mountComponent(); return waitForApolloRequestRender(); }); - describe('when one item is selected to be deleted', () => { - it('calls apollo mutation with the right parameters and refetches the tags list query', async () => { - findTagsList().vm.$emit('delete', [cleanTags[0]]); - - await nextTick(); - - findDeleteModal().vm.$emit('confirmDelete'); - - 
expect(mutationResolver).toHaveBeenCalledWith( - expect.objectContaining({ tagNames: [cleanTags[0].name] }), - ); - - await waitForPromises(); - - expect(tagsResolver).toHaveBeenCalled(); - expect(detailsResolver).toHaveBeenCalled(); - }); - }); - - describe('when more than one item is selected to be deleted', () => { - it('calls apollo mutation with the right parameters and refetches the tags list query', async () => { - findTagsList().vm.$emit('delete', tagsMock); - - await nextTick(); + it('sets delete alert modal deleteAlertType value', async () => { + findTagsList().vm.$emit('delete', 'success_tag'); - findDeleteModal().vm.$emit('confirmDelete'); - - expect(mutationResolver).toHaveBeenCalledWith( - expect.objectContaining({ tagNames: tagsMock.map((t) => t.name) }), - ); - - await waitForPromises(); + await nextTick(); - expect(tagsResolver).toHaveBeenCalled(); - expect(detailsResolver).toHaveBeenCalled(); - }); + expect(findDeleteAlert().props('deleteAlertType')).toBe('success_tag'); }); }); }); diff --git a/spec/frontend/packages_and_registries/dependency_proxy/app_spec.js b/spec/frontend/packages_and_registries/dependency_proxy/app_spec.js index c2ae34ce697..2e7195aa59b 100644 --- a/spec/frontend/packages_and_registries/dependency_proxy/app_spec.js +++ b/spec/frontend/packages_and_registries/dependency_proxy/app_spec.js @@ -5,7 +5,6 @@ import { GlFormInputGroup, GlFormGroup, GlModal, - GlSkeletonLoader, GlSprintf, GlEmptyState, } from '@gitlab/ui'; @@ -72,8 +71,6 @@ describe('DependencyProxyApp', () => { const findClipBoardButton = () => wrapper.findComponent(ClipboardButton); const findFormGroup = () => wrapper.findComponent(GlFormGroup); const findFormInputGroup = () => wrapper.findComponent(GlFormInputGroup); - const findSkeletonLoader = () => wrapper.findComponent(GlSkeletonLoader); - const findMainArea = () => wrapper.findByTestId('main-area'); const findProxyCountText = () => wrapper.findByTestId('proxy-count'); const findManifestList = () => 
wrapper.findComponent(ManifestsList); const findEmptyState = () => wrapper.findComponent(GlEmptyState); @@ -99,23 +96,11 @@ describe('DependencyProxyApp', () => { describe('when the dependency proxy is available', () => { describe('when is loading', () => { - it('renders the skeleton loader', () => { - createComponent(); - - expect(findSkeletonLoader().exists()).toBe(true); - }); - it('does not render a form group with label', () => { createComponent(); expect(findFormGroup().exists()).toBe(false); }); - - it('does not show the main section', () => { - createComponent(); - - expect(findMainArea().exists()).toBe(false); - }); }); describe('when the app is loaded', () => { @@ -125,10 +110,6 @@ describe('DependencyProxyApp', () => { return waitForPromises(); }); - it('renders the main area', () => { - expect(findMainArea().exists()).toBe(true); - }); - it('renders a form group with a label', () => { expect(findFormGroup().attributes('label')).toBe( DependencyProxyApp.i18n.proxyImagePrefix, @@ -213,13 +194,6 @@ describe('DependencyProxyApp', () => { }); describe('triggering page event on list', () => { - it('re-renders the skeleton loader', async () => { - findManifestList().vm.$emit('next-page'); - await nextTick(); - - expect(findSkeletonLoader().exists()).toBe(true); - }); - it('renders form group with label', async () => { findManifestList().vm.$emit('next-page'); await nextTick(); @@ -228,13 +202,6 @@ describe('DependencyProxyApp', () => { expect.stringMatching(DependencyProxyApp.i18n.proxyImagePrefix), ); }); - - it('does not show the main section', async () => { - findManifestList().vm.$emit('next-page'); - await nextTick(); - - expect(findMainArea().exists()).toBe(false); - }); }); it('shows the clear cache dropdown list', () => { diff --git a/spec/frontend/packages_and_registries/dependency_proxy/components/manifest_list_spec.js b/spec/frontend/packages_and_registries/dependency_proxy/components/manifest_list_spec.js index 639a4fbb99d..0d8af42bae3 100644 --- 
a/spec/frontend/packages_and_registries/dependency_proxy/components/manifest_list_spec.js +++ b/spec/frontend/packages_and_registries/dependency_proxy/components/manifest_list_spec.js @@ -1,7 +1,6 @@ -import { GlKeysetPagination } from '@gitlab/ui'; +import { GlKeysetPagination, GlSkeletonLoader } from '@gitlab/ui'; import { shallowMountExtended } from 'helpers/vue_test_utils_helper'; import ManifestRow from '~/packages_and_registries/dependency_proxy/components/manifest_row.vue'; - import Component from '~/packages_and_registries/dependency_proxy/components/manifests_list.vue'; import { proxyManifests, @@ -14,6 +13,7 @@ describe('Manifests List', () => { const defaultProps = { manifests: proxyManifests(), pagination: pagination(), + loading: false, }; const createComponent = (propsData = defaultProps) => { @@ -24,6 +24,8 @@ describe('Manifests List', () => { const findRows = () => wrapper.findAllComponents(ManifestRow); const findPagination = () => wrapper.findComponent(GlKeysetPagination); + const findMainArea = () => wrapper.findByTestId('main-area'); + const findSkeletonLoader = () => wrapper.findComponent(GlSkeletonLoader); it('has the correct title', () => { createComponent(); @@ -45,6 +47,19 @@ describe('Manifests List', () => { }); }); + describe('loading', () => { + it.each` + loading | expectLoader | expectContent + ${false} | ${false} | ${true} + ${true} | ${true} | ${false} + `('when loading is $loading', ({ loading, expectLoader, expectContent }) => { + createComponent({ ...defaultProps, loading }); + + expect(findSkeletonLoader().exists()).toBe(expectLoader); + expect(findMainArea().exists()).toBe(expectContent); + }); + }); + describe('pagination', () => { it('is hidden when there is no next or prev pages', () => { createComponent({ ...defaultProps, pagination: {} }); diff --git a/spec/frontend/packages_and_registries/harbor_registry/pages/list_spec.js b/spec/frontend/packages_and_registries/harbor_registry/pages/list_spec.js index 
63ea8feb1e7..1bc2657822e 100644 --- a/spec/frontend/packages_and_registries/harbor_registry/pages/list_spec.js +++ b/spec/frontend/packages_and_registries/harbor_registry/pages/list_spec.js @@ -74,7 +74,7 @@ describe('Harbor List Page', () => { }); describe('isLoading is true', () => { - it('shows the skeleton loader', async () => { + it('shows the skeleton loader', () => { mountComponent(); fireFirstSortUpdate(); @@ -93,7 +93,7 @@ describe('Harbor List Page', () => { expect(findCliCommands().exists()).toBe(false); }); - it('title has the metadataLoading props set to true', async () => { + it('title has the metadataLoading props set to true', () => { mountComponent(); fireFirstSortUpdate(); diff --git a/spec/frontend/packages_and_registries/infrastructure_registry/components/list/components/infrastructure_title_spec.js b/spec/frontend/packages_and_registries/infrastructure_registry/components/list/components/infrastructure_title_spec.js index 7c7faa8a3b0..12859b1d77c 100644 --- a/spec/frontend/packages_and_registries/infrastructure_registry/components/list/components/infrastructure_title_spec.js +++ b/spec/frontend/packages_and_registries/infrastructure_registry/components/list/components/infrastructure_title_spec.js @@ -32,7 +32,7 @@ describe('Infrastructure Title', () => { }); it('has the correct title', () => { - expect(findTitleArea().props('title')).toBe('Infrastructure Registry'); + expect(findTitleArea().props('title')).toBe('Terraform Module Registry'); }); describe('with no modules', () => { diff --git a/spec/frontend/packages_and_registries/package_registry/components/delete_modal_spec.js b/spec/frontend/packages_and_registries/package_registry/components/delete_modal_spec.js index d0817a8678e..b0fc9ef0f0c 100644 --- a/spec/frontend/packages_and_registries/package_registry/components/delete_modal_spec.js +++ b/spec/frontend/packages_and_registries/package_registry/components/delete_modal_spec.js @@ -1,7 +1,14 @@ -import { GlModal as RealGlModal } from 
'@gitlab/ui'; +import { GlModal as RealGlModal, GlSprintf } from '@gitlab/ui'; import { shallowMountExtended } from 'helpers/vue_test_utils_helper'; import { stubComponent } from 'helpers/stub_component'; import DeleteModal from '~/packages_and_registries/package_registry/components/delete_modal.vue'; +import { + DELETE_PACKAGE_MODAL_PRIMARY_ACTION, + DELETE_PACKAGE_REQUEST_FORWARDING_MODAL_CONTENT, + DELETE_PACKAGE_WITH_REQUEST_FORWARDING_PRIMARY_ACTION, + DELETE_PACKAGES_REQUEST_FORWARDING_MODAL_CONTENT, + DELETE_PACKAGES_WITH_REQUEST_FORWARDING_PRIMARY_ACTION, +} from '~/packages_and_registries/package_registry/constants'; const GlModal = stubComponent(RealGlModal, { methods: { @@ -15,21 +22,28 @@ describe('DeleteModal', () => { const defaultItemsToBeDeleted = [ { name: 'package 01', + version: '1.0.0', }, { name: 'package 02', + version: '1.0.0', }, ]; const findModal = () => wrapper.findComponent(GlModal); - const mountComponent = ({ itemsToBeDeleted = defaultItemsToBeDeleted } = {}) => { + const mountComponent = ({ + itemsToBeDeleted = defaultItemsToBeDeleted, + showRequestForwardingContent = false, + } = {}) => { wrapper = shallowMountExtended(DeleteModal, { propsData: { itemsToBeDeleted, + showRequestForwardingContent, }, stubs: { GlModal, + GlSprintf, }, }); }; @@ -50,11 +64,64 @@ describe('DeleteModal', () => { }); it('renders description', () => { - expect(findModal().text()).toContain( + expect(findModal().text()).toMatchInterpolatedText( 'You are about to delete 2 packages. This operation is irreversible.', ); }); + it('with only one item to be deleted renders correct description', () => { + mountComponent({ itemsToBeDeleted: [defaultItemsToBeDeleted[0]] }); + + expect(findModal().text()).toMatchInterpolatedText( + 'You are about to delete version 1.0.0 of package 01. 
Are you sure?', + ); + }); + + it('sets the right action primary text', () => { + expect(findModal().props('actionPrimary')).toMatchObject({ + text: DELETE_PACKAGE_MODAL_PRIMARY_ACTION, + }); + }); + + describe('when showRequestForwardingContent is set', () => { + it('renders correct description', () => { + mountComponent({ showRequestForwardingContent: true }); + + expect(findModal().text()).toMatchInterpolatedText( + DELETE_PACKAGES_REQUEST_FORWARDING_MODAL_CONTENT, + ); + }); + + it('sets the right action primary text', () => { + mountComponent({ showRequestForwardingContent: true }); + + expect(findModal().props('actionPrimary')).toMatchObject({ + text: DELETE_PACKAGES_WITH_REQUEST_FORWARDING_PRIMARY_ACTION, + }); + }); + + describe('and only one item to be deleted', () => { + beforeEach(() => { + mountComponent({ + showRequestForwardingContent: true, + itemsToBeDeleted: [defaultItemsToBeDeleted[0]], + }); + }); + + it('renders correct description', () => { + expect(findModal().text()).toMatchInterpolatedText( + DELETE_PACKAGE_REQUEST_FORWARDING_MODAL_CONTENT, + ); + }); + + it('sets the right action primary text', () => { + expect(findModal().props('actionPrimary')).toMatchObject({ + text: DELETE_PACKAGE_WITH_REQUEST_FORWARDING_PRIMARY_ACTION, + }); + }); + }); + }); + it('emits confirm when primary event is emitted', () => { expect(wrapper.emitted('confirm')).toBeUndefined(); diff --git a/spec/frontend/packages_and_registries/package_registry/components/details/package_versions_list_spec.js b/spec/frontend/packages_and_registries/package_registry/components/details/package_versions_list_spec.js index fc7f5c80d45..a700f42d367 100644 --- a/spec/frontend/packages_and_registries/package_registry/components/details/package_versions_list_spec.js +++ b/spec/frontend/packages_and_registries/package_registry/components/details/package_versions_list_spec.js @@ -1,5 +1,11 @@ +import Vue from 'vue'; +import VueApollo from 'vue-apollo'; +import { GlAlert } from 
'@gitlab/ui'; +import * as Sentry from '@sentry/browser'; import { shallowMountExtended } from 'helpers/vue_test_utils_helper'; +import createMockApollo from 'helpers/mock_apollo_helper'; import { stubComponent } from 'helpers/stub_component'; +import waitForPromises from 'helpers/wait_for_promises'; import DeleteModal from '~/packages_and_registries/package_registry/components/delete_modal.vue'; import DeletePackageModal from '~/packages_and_registries/shared/components/delete_package_modal.vue'; import PackageVersionsList from '~/packages_and_registries/package_registry/components/details/package_versions_list.vue'; @@ -14,24 +20,26 @@ import { DELETE_PACKAGE_VERSIONS_TRACKING_ACTION, REQUEST_DELETE_PACKAGE_VERSION_TRACKING_ACTION, REQUEST_DELETE_PACKAGE_VERSIONS_TRACKING_ACTION, + GRAPHQL_PAGE_SIZE, } from '~/packages_and_registries/package_registry/constants'; -import { packageData } from '../../mock_data'; +import getPackageVersionsQuery from '~/packages_and_registries/package_registry/graphql//queries/get_package_versions.query.graphql'; +import { + emptyPackageVersionsQuery, + packageVersionsQuery, + packageVersions, + pagination, +} from '../../mock_data'; + +Vue.use(VueApollo); describe('PackageVersionsList', () => { let wrapper; + let apolloProvider; const EmptySlotStub = { name: 'empty-slot-stub', template: '
    empty message
    ' }; - const packageList = [ - packageData({ - name: 'version 1', - }), - packageData({ - id: 'gid://gitlab/Packages::Package/112', - name: 'version 2', - }), - ]; const uiElements = { + findAlert: () => wrapper.findComponent(GlAlert), findLoader: () => wrapper.findComponent(PackagesListLoader), findRegistryList: () => wrapper.findComponent(RegistryList), findEmptySlot: () => wrapper.findComponent(EmptySlotStub), @@ -40,12 +48,20 @@ describe('PackageVersionsList', () => { findDeletePackagesModal: () => wrapper.findComponent(DeleteModal), findPackageListDeleteModal: () => wrapper.findComponent(DeletePackageModal), }; - const mountComponent = (props) => { + + const mountComponent = ({ + props = {}, + resolver = jest.fn().mockResolvedValue(packageVersionsQuery()), + } = {}) => { + const requestHandlers = [[getPackageVersionsQuery, resolver]]; + apolloProvider = createMockApollo(requestHandlers); + wrapper = shallowMountExtended(PackageVersionsList, { + apolloProvider, propsData: { - versions: packageList, - pageInfo: {}, - isLoading: false, + packageId: packageVersionsQuery().data.package.id, + isMutationLoading: false, + count: packageVersions().length, ...props, }, stubs: { @@ -62,9 +78,13 @@ describe('PackageVersionsList', () => { }); }; + beforeEach(() => { + jest.spyOn(Sentry, 'captureException').mockImplementation(); + }); + describe('when list is loading', () => { beforeEach(() => { - mountComponent({ isLoading: true, versions: [] }); + mountComponent({ props: { isMutationLoading: true } }); }); it('displays loader', () => { expect(uiElements.findLoader().exists()).toBe(true); @@ -81,11 +101,24 @@ describe('PackageVersionsList', () => { it('does not display registry list', () => { expect(uiElements.findRegistryList().exists()).toBe(false); }); + + it('does not display alert', () => { + expect(uiElements.findAlert().exists()).toBe(false); + }); }); describe('when list is loaded and has no data', () => { - beforeEach(() => { - mountComponent({ isLoading: 
false, versions: [] }); + const resolver = jest.fn().mockResolvedValue(emptyPackageVersionsQuery); + beforeEach(async () => { + mountComponent({ + props: { isMutationLoading: false, count: 0 }, + resolver, + }); + await waitForPromises(); + }); + + it('skips graphql query', () => { + expect(resolver).not.toHaveBeenCalled(); }); it('displays empty slot message', () => { @@ -103,11 +136,44 @@ describe('PackageVersionsList', () => { it('does not display registry list', () => { expect(uiElements.findRegistryList().exists()).toBe(false); }); + + it('does not display alert', () => { + expect(uiElements.findAlert().exists()).toBe(false); + }); + }); + + describe('if load fails, alert', () => { + beforeEach(async () => { + mountComponent({ resolver: jest.fn().mockRejectedValue() }); + + await waitForPromises(); + }); + + it('is displayed', () => { + expect(uiElements.findAlert().exists()).toBe(true); + }); + + it('shows error message', () => { + expect(uiElements.findAlert().text()).toMatchInterpolatedText('Failed to load version data'); + }); + + it('is not dismissible', () => { + expect(uiElements.findAlert().props('dismissible')).toBe(false); + }); + + it('is of variant danger', () => { + expect(uiElements.findAlert().attributes('variant')).toBe('danger'); + }); + + it('error is logged in sentry', () => { + expect(Sentry.captureException).toHaveBeenCalled(); + }); }); describe('when list is loaded with data', () => { - beforeEach(() => { + beforeEach(async () => { mountComponent(); + await waitForPromises(); }); it('displays package registry list', () => { @@ -116,7 +182,7 @@ describe('PackageVersionsList', () => { it('binds the right props', () => { expect(uiElements.findRegistryList().props()).toMatchObject({ - items: packageList, + items: packageVersions(), pagination: {}, isLoading: false, hiddenDelete: true, @@ -125,16 +191,16 @@ describe('PackageVersionsList', () => { it('displays package version rows', () => { 
expect(uiElements.findAllListRow().exists()).toEqual(true); - expect(uiElements.findAllListRow()).toHaveLength(packageList.length); + expect(uiElements.findAllListRow()).toHaveLength(packageVersions().length); }); it('binds the correct props', () => { expect(uiElements.findAllListRow().at(0).props()).toMatchObject({ - packageEntity: expect.objectContaining(packageList[0]), + packageEntity: expect.objectContaining(packageVersions()[0]), }); expect(uiElements.findAllListRow().at(1).props()).toMatchObject({ - packageEntity: expect.objectContaining(packageList[1]), + packageEntity: expect.objectContaining(packageVersions()[1]), }); }); @@ -148,40 +214,52 @@ describe('PackageVersionsList', () => { }); describe('when user interacts with pagination', () => { - beforeEach(() => { - mountComponent({ pageInfo: { hasNextPage: true } }); + const resolver = jest.fn().mockResolvedValue(packageVersionsQuery()); + + beforeEach(async () => { + mountComponent({ resolver }); + await waitForPromises(); }); - it('emits prev-page event when registry list emits prev event', () => { - uiElements.findRegistryList().vm.$emit('prev-page'); + it('when list emits next-page fetches the next set of records', async () => { + uiElements.findRegistryList().vm.$emit('next-page'); + await waitForPromises(); - expect(wrapper.emitted('prev-page')).toHaveLength(1); + expect(resolver).toHaveBeenLastCalledWith( + expect.objectContaining({ after: pagination().endCursor, first: GRAPHQL_PAGE_SIZE }), + ); }); - it('emits next-page when registry list emits next event', () => { - uiElements.findRegistryList().vm.$emit('next-page'); + it('when list emits prev-page fetches the prev set of records', async () => { + uiElements.findRegistryList().vm.$emit('prev-page'); + await waitForPromises(); - expect(wrapper.emitted('next-page')).toHaveLength(1); + expect(resolver).toHaveBeenLastCalledWith( + expect.objectContaining({ before: pagination().startCursor, last: GRAPHQL_PAGE_SIZE }), + ); }); }); describe.each` 
description | finderFunction | deletePayload - ${'when the user can destroy the package'} | ${uiElements.findListRow} | ${packageList[0]} - ${'when the user can bulk destroy packages and deletes only one package'} | ${uiElements.findRegistryList} | ${[packageList[0]]} + ${'when the user can destroy the package'} | ${uiElements.findListRow} | ${packageVersions()[0]} + ${'when the user can bulk destroy packages and deletes only one package'} | ${uiElements.findRegistryList} | ${[packageVersions()[0]]} `('$description', ({ finderFunction, deletePayload }) => { let eventSpy; const category = 'UI::NpmPackages'; const { findPackageListDeleteModal } = uiElements; - beforeEach(() => { + beforeEach(async () => { eventSpy = jest.spyOn(Tracking, 'event'); - mountComponent({ canDestroy: true }); + mountComponent({ props: { canDestroy: true } }); + await waitForPromises(); finderFunction().vm.$emit('delete', deletePayload); }); it('passes itemToBeDeleted to the modal', () => { - expect(findPackageListDeleteModal().props('itemToBeDeleted')).toStrictEqual(packageList[0]); + expect(findPackageListDeleteModal().props('itemToBeDeleted')).toStrictEqual( + packageVersions()[0], + ); }); it('requesting delete tracks the right action', () => { @@ -198,7 +276,7 @@ describe('PackageVersionsList', () => { }); it('emits delete when modal confirms', () => { - expect(wrapper.emitted('delete')[0][0]).toEqual([packageList[0]]); + expect(wrapper.emitted('delete')[0][0]).toEqual([packageVersions()[0]]); }); it('tracks the right action', () => { @@ -231,14 +309,15 @@ describe('PackageVersionsList', () => { let eventSpy; const { findDeletePackagesModal, findRegistryList } = uiElements; - beforeEach(() => { + beforeEach(async () => { eventSpy = jest.spyOn(Tracking, 'event'); - mountComponent({ canDestroy: true }); + mountComponent({ props: { canDestroy: true } }); + await waitForPromises(); }); it('binds the right props', () => { expect(uiElements.findRegistryList().props()).toMatchObject({ - items: 
packageList, + items: packageVersions(), pagination: {}, isLoading: false, hiddenDelete: false, @@ -248,11 +327,13 @@ describe('PackageVersionsList', () => { describe('upon deletion', () => { beforeEach(() => { - findRegistryList().vm.$emit('delete', packageList); + findRegistryList().vm.$emit('delete', packageVersions()); }); it('passes itemsToBeDeleted to the modal', () => { - expect(findDeletePackagesModal().props('itemsToBeDeleted')).toStrictEqual(packageList); + expect(findDeletePackagesModal().props('itemsToBeDeleted')).toStrictEqual( + packageVersions(), + ); expect(wrapper.emitted('delete')).toBeUndefined(); }); @@ -270,7 +351,7 @@ describe('PackageVersionsList', () => { }); it('emits delete event', () => { - expect(wrapper.emitted('delete')[0]).toEqual([packageList]); + expect(wrapper.emitted('delete')[0]).toEqual([packageVersions()]); }); it('tracks the right action', () => { diff --git a/spec/frontend/packages_and_registries/package_registry/components/list/package_list_row_spec.js b/spec/frontend/packages_and_registries/package_registry/components/list/package_list_row_spec.js index 91417d2fc9f..52d222ed07b 100644 --- a/spec/frontend/packages_and_registries/package_registry/components/list/package_list_row_spec.js +++ b/spec/frontend/packages_and_registries/package_registry/components/list/package_list_row_spec.js @@ -132,7 +132,7 @@ describe('packages_list_row', () => { }); }); - it('emits the delete event when the delete button is clicked', async () => { + it('emits the delete event when the delete button is clicked', () => { mountComponent({ packageEntity: packageWithoutTags }); findDeleteDropdown().vm.$emit('click'); diff --git a/spec/frontend/packages_and_registries/package_registry/components/list/packages_list_spec.js b/spec/frontend/packages_and_registries/package_registry/components/list/packages_list_spec.js index ae990f3ea00..483b7a9383d 100644 --- a/spec/frontend/packages_and_registries/package_registry/components/list/packages_list_spec.js 
+++ b/spec/frontend/packages_and_registries/package_registry/components/list/packages_list_spec.js @@ -4,7 +4,6 @@ import { shallowMountExtended } from 'helpers/vue_test_utils_helper'; import { stubComponent } from 'helpers/stub_component'; import PackagesListRow from '~/packages_and_registries/package_registry/components/list/package_list_row.vue'; import PackagesListLoader from '~/packages_and_registries/shared/components/packages_list_loader.vue'; -import DeletePackageModal from '~/packages_and_registries/shared/components/delete_package_modal.vue'; import DeleteModal from '~/packages_and_registries/package_registry/components/delete_modal.vue'; import RegistryList from '~/packages_and_registries/shared/components/registry_list.vue'; import { @@ -17,7 +16,7 @@ import { } from '~/packages_and_registries/package_registry/constants'; import PackagesList from '~/packages_and_registries/package_registry/components/list/packages_list.vue'; import Tracking from '~/tracking'; -import { packageData } from '../../mock_data'; +import { defaultPackageGroupSettings, packageData } from '../../mock_data'; describe('packages_list', () => { let wrapper; @@ -39,18 +38,20 @@ describe('packages_list', () => { list: [firstPackage, secondPackage], isLoading: false, pageInfo: {}, + groupSettings: defaultPackageGroupSettings, }; const EmptySlotStub = { name: 'empty-slot-stub', template: '
    bar
    ' }; const findPackagesListLoader = () => wrapper.findComponent(PackagesListLoader); - const findPackageListDeleteModal = () => wrapper.findComponent(DeletePackageModal); const findEmptySlot = () => wrapper.findComponent(EmptySlotStub); const findRegistryList = () => wrapper.findComponent(RegistryList); const findPackagesListRow = () => wrapper.findComponent(PackagesListRow); const findErrorPackageAlert = () => wrapper.findComponent(GlAlert); const findDeletePackagesModal = () => wrapper.findComponent(DeleteModal); + const showMock = jest.fn(); + const mountComponent = (props) => { wrapper = shallowMountExtended(PackagesList, { propsData: { @@ -58,10 +59,9 @@ describe('packages_list', () => { ...props, }, stubs: { - DeletePackageModal, DeleteModal: stubComponent(DeleteModal, { methods: { - show: jest.fn(), + show: showMock, }, }), GlSprintf, @@ -119,15 +119,20 @@ describe('packages_list', () => { }); describe('layout', () => { - it("doesn't contain a visible modal component", () => { + beforeEach(() => { mountComponent(); + }); - expect(findPackageListDeleteModal().props('itemToBeDeleted')).toBeNull(); + it('modal component is not shown', () => { + expect(showMock).not.toHaveBeenCalled(); }); - it('does not have an error alert displayed', () => { - mountComponent(); + it('modal component props is empty', () => { + expect(findDeletePackagesModal().props('itemsToBeDeleted')).toEqual([]); + expect(findDeletePackagesModal().props('showRequestForwardingContent')).toBe(false); + }); + it('does not have an error alert displayed', () => { expect(findErrorPackageAlert().exists()).toBe(false); }); }); @@ -146,8 +151,8 @@ describe('packages_list', () => { finderFunction().vm.$emit('delete', deletePayload); }); - it('passes itemToBeDeleted to the modal', () => { - expect(findPackageListDeleteModal().props('itemToBeDeleted')).toStrictEqual(firstPackage); + it('passes itemsToBeDeleted to the modal', () => { + 
expect(findDeletePackagesModal().props('itemsToBeDeleted')).toStrictEqual([firstPackage]); }); it('requesting delete tracks the right action', () => { @@ -158,9 +163,13 @@ describe('packages_list', () => { ); }); + it('modal component is shown', () => { + expect(showMock).toHaveBeenCalledTimes(1); + }); + describe('when modal confirms', () => { beforeEach(() => { - findPackageListDeleteModal().vm.$emit('ok'); + findDeletePackagesModal().vm.$emit('confirm'); }); it('emits delete when modal confirms', () => { @@ -176,14 +185,14 @@ describe('packages_list', () => { }); }); - it.each(['ok', 'cancel'])('resets itemToBeDeleted when modal emits %s', async (event) => { - await findPackageListDeleteModal().vm.$emit(event); + it.each(['confirm', 'cancel'])('resets itemsToBeDeleted when modal emits %s', async (event) => { + await findDeletePackagesModal().vm.$emit(event); - expect(findPackageListDeleteModal().props('itemToBeDeleted')).toBeNull(); + expect(findDeletePackagesModal().props('itemsToBeDeleted')).toEqual([]); }); it('canceling delete tracks the right action', () => { - findPackageListDeleteModal().vm.$emit('cancel'); + findDeletePackagesModal().vm.$emit('cancel'); expect(eventSpy).toHaveBeenCalledWith( category, @@ -237,7 +246,7 @@ describe('packages_list', () => { it.each(['confirm', 'cancel'])('resets itemsToBeDeleted when modal emits %s', async (event) => { await findDeletePackagesModal().vm.$emit(event); - expect(findDeletePackagesModal().props('itemsToBeDeleted')).toHaveLength(0); + expect(findDeletePackagesModal().props('itemsToBeDeleted')).toEqual([]); }); it('canceling delete tracks the right action', () => { @@ -258,7 +267,7 @@ describe('packages_list', () => { return nextTick(); }); - it('should display an alert message', () => { + it('should display an alert', () => { expect(findErrorPackageAlert().exists()).toBe(true); expect(findErrorPackageAlert().props('title')).toBe( 'There was an error publishing a error package package', @@ -273,7 +282,9 @@ 
describe('packages_list', () => { await nextTick(); - expect(findPackageListDeleteModal().text()).toContain(errorPackage.name); + expect(showMock).toHaveBeenCalledTimes(1); + + expect(findDeletePackagesModal().props('itemsToBeDeleted')).toStrictEqual([errorPackage]); }); }); diff --git a/spec/frontend/packages_and_registries/package_registry/components/list/packages_search_spec.js b/spec/frontend/packages_and_registries/package_registry/components/list/packages_search_spec.js index 1250ecaf61f..82fa5b76367 100644 --- a/spec/frontend/packages_and_registries/package_registry/components/list/packages_search_spec.js +++ b/spec/frontend/packages_and_registries/package_registry/components/list/packages_search_spec.js @@ -54,7 +54,7 @@ describe('Package Search', () => { expect(findRegistrySearch().exists()).toBe(true); }); - it('registry search is mounted after mount', async () => { + it('registry search is mounted after mount', () => { mountComponent(); expect(findRegistrySearch().exists()).toBe(false); diff --git a/spec/frontend/packages_and_registries/package_registry/mock_data.js b/spec/frontend/packages_and_registries/package_registry/mock_data.js index 19c098e1f82..9054e4998bb 100644 --- a/spec/frontend/packages_and_registries/package_registry/mock_data.js +++ b/spec/frontend/packages_and_registries/package_registry/mock_data.js @@ -103,12 +103,20 @@ export const linksData = { }, }; +export const defaultPackageGroupSettings = { + mavenPackageRequestsForwarding: true, + npmPackageRequestsForwarding: true, + pypiPackageRequestsForwarding: true, + __typename: 'PackageSettings', +}; + export const packageVersions = () => [ { createdAt: '2021-08-10T09:33:54Z', id: 'gid://gitlab/Packages::Package/243', name: '@gitlab-org/package-15', status: 'DEFAULT', + packageType: 'NPM', canDestroy: true, tags: { nodes: packageTags() }, version: '1.0.1', @@ -120,6 +128,7 @@ export const packageVersions = () => [ id: 'gid://gitlab/Packages::Package/244', name: '@gitlab-org/package-15', 
status: 'DEFAULT', + packageType: 'NPM', canDestroy: true, tags: { nodes: packageTags() }, version: '1.0.2', @@ -130,7 +139,7 @@ export const packageVersions = () => [ export const packageData = (extend) => ({ __typename: 'Package', - id: 'gid://gitlab/Packages::Package/111', + id: 'gid://gitlab/Packages::Package/1', canDestroy: true, name: '@gitlab-org/package-15', packageType: 'NPM', @@ -244,14 +253,6 @@ export const packageDetailsQuery = (extendPackage) => ({ }, versions: { count: packageVersions().length, - nodes: packageVersions(), - pageInfo: { - hasNextPage: true, - hasPreviousPage: false, - endCursor: 'endCursor', - startCursor: 'startCursor', - }, - __typename: 'PackageConnection', }, dependencyLinks: { nodes: dependencyLinks(), @@ -298,6 +299,41 @@ export const packageMetadataQuery = (packageType) => { }; }; +export const packageVersionsQuery = (versions = packageVersions()) => ({ + data: { + package: { + id: 'gid://gitlab/Packages::Package/111', + versions: { + count: versions.length, + nodes: versions, + pageInfo: pagination(), + __typename: 'PackageConnection', + }, + __typename: 'PackageDetailsType', + }, + }, +}); + +export const emptyPackageVersionsQuery = { + data: { + package: { + id: 'gid://gitlab/Packages::Package/111', + versions: { + count: 0, + nodes: [], + pageInfo: { + hasNextPage: false, + hasPreviousPage: false, + endCursor: 'endCursor', + startCursor: 'startCursor', + }, + __typename: 'PackageConnection', + }, + __typename: 'PackageDetailsType', + }, + }, +}; + export const packagesDestroyMutation = () => ({ data: { destroyPackages: { @@ -352,7 +388,12 @@ export const packageDestroyFilesMutationError = () => ({ ], }); -export const packagesListQuery = ({ type = 'group', extend = {}, extendPagination = {} } = {}) => ({ +export const packagesListQuery = ({ + type = 'group', + extend = {}, + extendPagination = {}, + packageSettings = defaultPackageGroupSettings, +} = {}) => ({ data: { [type]: { id: '1', @@ -379,6 +420,14 @@ export const 
packagesListQuery = ({ type = 'group', extend = {}, extendPaginatio pageInfo: pagination(extendPagination), __typename: 'PackageConnection', }, + ...(type === 'group' && { packageSettings }), + ...(type === 'project' && { + group: { + id: '1', + packageSettings, + __typename: 'Group', + }, + }), ...extend, __typename: capitalize(type), }, diff --git a/spec/frontend/packages_and_registries/package_registry/pages/details_spec.js b/spec/frontend/packages_and_registries/package_registry/pages/details_spec.js index 49f69a46395..e1765917035 100644 --- a/spec/frontend/packages_and_registries/package_registry/pages/details_spec.js +++ b/spec/frontend/packages_and_registries/package_registry/pages/details_spec.js @@ -1,4 +1,4 @@ -import { GlEmptyState, GlTabs, GlTab, GlSprintf } from '@gitlab/ui'; +import { GlEmptyState, GlModal, GlTabs, GlTab, GlSprintf } from '@gitlab/ui'; import Vue, { nextTick } from 'vue'; import VueApollo from 'vue-apollo'; @@ -7,7 +7,7 @@ import { useMockLocationHelper } from 'helpers/mock_window_location_helper'; import { shallowMountExtended } from 'helpers/vue_test_utils_helper'; import waitForPromises from 'helpers/wait_for_promises'; import { createAlert } from '~/alert'; - +import { stubComponent } from 'helpers/stub_component'; import AdditionalMetadata from '~/packages_and_registries/package_registry/components/details/additional_metadata.vue'; import PackagesApp from '~/packages_and_registries/package_registry/pages/details.vue'; import DependencyRow from '~/packages_and_registries/package_registry/components/details/dependency_row.vue'; @@ -33,6 +33,7 @@ import { import destroyPackageFilesMutation from '~/packages_and_registries/package_registry/graphql/mutations/destroy_package_files.mutation.graphql'; import getPackageDetails from '~/packages_and_registries/package_registry/graphql/queries/get_package_details.query.graphql'; +import getPackageVersionsQuery from 
'~/packages_and_registries/package_registry/graphql//queries/get_package_versions.query.graphql'; import { packageDetailsQuery, packageData, @@ -42,12 +43,13 @@ import { packageFiles, packageDestroyFilesMutation, packageDestroyFilesMutationError, - pagination, } from '../mock_data'; jest.mock('~/alert'); useMockLocationHelper(); +Vue.use(VueApollo); + describe('PackagesApp', () => { let wrapper; let apolloProvider; @@ -57,7 +59,7 @@ describe('PackagesApp', () => { }; const provide = { - packageId: '111', + packageId: '1', emptyListIllustration: 'svgPath', projectListUrl: 'projectListUrl', groupListUrl: 'groupListUrl', @@ -66,14 +68,13 @@ describe('PackagesApp', () => { }; const { __typename, ...packageWithoutTypename } = packageData(); + const showMock = jest.fn(); function createComponent({ resolver = jest.fn().mockResolvedValue(packageDetailsQuery()), filesDeleteMutationResolver = jest.fn().mockResolvedValue(packageDestroyFilesMutation()), routeId = '1', } = {}) { - Vue.use(VueApollo); - const requestHandlers = [ [getPackageDetails, resolver], [destroyPackageFilesMutation, filesDeleteMutationResolver], @@ -86,17 +87,11 @@ describe('PackagesApp', () => { stubs: { PackageTitle, DeletePackages, - GlModal: { - template: ` -
    - -

    -
    - `, + GlModal: stubComponent(GlModal, { methods: { - show: jest.fn(), + show: showMock, }, - }, + }), GlSprintf, GlTabs, GlTab, @@ -251,7 +246,7 @@ describe('PackagesApp', () => { await findDeleteButton().trigger('click'); - expect(findDeleteModal().find('p').text()).toBe( + expect(findDeleteModal().text()).toBe( 'You are about to delete version 1.0.0 of @gitlab-org/package-15. Are you sure?', ); }); @@ -318,7 +313,7 @@ describe('PackagesApp', () => { describe('deleting a file', () => { const [fileToDelete] = packageFiles(); - const doDeleteFile = async () => { + const doDeleteFile = () => { findPackageFiles().vm.$emit('delete-files', [fileToDelete]); findDeleteFileModal().vm.$emit('primary'); @@ -331,13 +326,15 @@ describe('PackagesApp', () => { await waitForPromises(); - const showDeleteFileSpy = jest.spyOn(wrapper.vm.$refs.deleteFileModal, 'show'); - const showDeletePackageSpy = jest.spyOn(wrapper.vm.$refs.deleteModal, 'show'); - findPackageFiles().vm.$emit('delete-files', [fileToDelete]); - expect(showDeletePackageSpy).not.toHaveBeenCalled(); - expect(showDeleteFileSpy).toHaveBeenCalled(); + expect(showMock).toHaveBeenCalledTimes(1); + + await waitForPromises(); + + expect(findDeleteFileModal().text()).toBe( + 'You are about to delete foo-1.0.1.tgz. This is a destructive action that may render your package unusable. 
Are you sure?', + ); }); it('when its the only file opens delete package confirmation modal', async () => { @@ -360,17 +357,13 @@ describe('PackagesApp', () => { await waitForPromises(); - const showDeleteFileSpy = jest.spyOn(wrapper.vm.$refs.deleteFileModal, 'show'); - const showDeletePackageSpy = jest.spyOn(wrapper.vm.$refs.deleteModal, 'show'); - findPackageFiles().vm.$emit('delete-files', [fileToDelete]); - expect(showDeletePackageSpy).toHaveBeenCalled(); - expect(showDeleteFileSpy).not.toHaveBeenCalled(); + expect(showMock).toHaveBeenCalledTimes(1); await waitForPromises(); - expect(findDeleteModal().find('p').text()).toBe( + expect(findDeleteModal().text()).toBe( 'Deleting the last package asset will remove version 1.0.0 of @gitlab-org/package-15. Are you sure?', ); }); @@ -440,7 +433,7 @@ describe('PackagesApp', () => { }); describe('deleting multiple files', () => { - const doDeleteFiles = async () => { + const doDeleteFiles = () => { findPackageFiles().vm.$emit('delete-files', packageFiles()); findDeleteFilesModal().vm.$emit('primary'); @@ -482,6 +475,8 @@ describe('PackagesApp', () => { await doDeleteFiles(); + expect(resolver).toHaveBeenCalledTimes(2); + expect(createAlert).toHaveBeenCalledWith( expect.objectContaining({ message: DELETE_PACKAGE_FILES_SUCCESS_MESSAGE, @@ -542,15 +537,13 @@ describe('PackagesApp', () => { await waitForPromises(); - const showDeletePackageSpy = jest.spyOn(wrapper.vm.$refs.deleteModal, 'show'); - findPackageFiles().vm.$emit('delete-files', packageFiles()); - expect(showDeletePackageSpy).toHaveBeenCalled(); + expect(showMock).toHaveBeenCalledTimes(1); await waitForPromises(); - expect(findDeleteModal().find('p').text()).toBe( + expect(findDeleteModal().text()).toBe( 'Deleting all package assets will remove version 1.0.0 of @gitlab-org/package-15. 
Are you sure?', ); }); @@ -574,8 +567,6 @@ describe('PackagesApp', () => { packageDetailsQuery({ versions: { count: 0, - nodes: [], - pageInfo: pagination({ hasNextPage: false, hasPreviousPage: false }), }, }), ), @@ -591,61 +582,61 @@ describe('PackagesApp', () => { }); it('binds the correct props', async () => { - const versionNodes = packageVersions(); createComponent(); await waitForPromises(); expect(findVersionsList().props()).toMatchObject({ canDestroy: true, - versions: expect.arrayContaining(versionNodes), + count: packageVersions().length, + isMutationLoading: false, + packageId: 'gid://gitlab/Packages::Package/1', }); }); describe('delete packages', () => { - it('exists and has the correct props', async () => { + beforeEach(async () => { createComponent(); - await waitForPromises(); - - expect(findDeletePackages().props()).toMatchObject({ - refetchQueries: [{ query: getPackageDetails, variables: {} }], - showSuccessAlert: true, - }); }); - it('deletePackages is bound to package-versions-list delete event', async () => { - createComponent(); - - await waitForPromises(); + it('exists and has the correct props', () => { + expect(findDeletePackages().props('showSuccessAlert')).toBe(true); + expect(findDeletePackages().props('refetchQueries')).toEqual([ + { + query: getPackageVersionsQuery, + variables: { + first: 20, + id: 'gid://gitlab/Packages::Package/1', + }, + }, + ]); + }); + it('deletePackages is bound to package-versions-list delete event', () => { findVersionsList().vm.$emit('delete', [{ id: 1 }]); expect(findDeletePackages().emitted('start')).toEqual([[]]); }); it('start and end event set loading correctly', async () => { - createComponent(); - - await waitForPromises(); - findDeletePackages().vm.$emit('start'); await nextTick(); - expect(findVersionsList().props('isLoading')).toBe(true); + expect(findVersionsList().props('isMutationLoading')).toBe(true); findDeletePackages().vm.$emit('end'); await nextTick(); - 
expect(findVersionsList().props('isLoading')).toBe(false); + expect(findVersionsList().props('isMutationLoading')).toBe(false); }); }); }); describe('dependency links', () => { - it('does not show the dependency links for a non nuget package', async () => { + it('does not show the dependency links for a non nuget package', () => { createComponent(); expect(findDependenciesCountBadge().exists()).toBe(false); diff --git a/spec/frontend/packages_and_registries/package_registry/pages/list_spec.js b/spec/frontend/packages_and_registries/package_registry/pages/list_spec.js index 60bb055b1db..2ee24200ed3 100644 --- a/spec/frontend/packages_and_registries/package_registry/pages/list_spec.js +++ b/spec/frontend/packages_and_registries/package_registry/pages/list_spec.js @@ -1,9 +1,11 @@ -import { GlEmptyState, GlSprintf, GlLink } from '@gitlab/ui'; +import { GlButton, GlEmptyState, GlSprintf, GlLink } from '@gitlab/ui'; import Vue, { nextTick } from 'vue'; import VueApollo from 'vue-apollo'; import { shallowMountExtended } from 'helpers/vue_test_utils_helper'; import createMockApollo from 'helpers/mock_apollo_helper'; import waitForPromises from 'helpers/wait_for_promises'; +import { createMockDirective, getBinding } from 'helpers/vue_mock_directive'; +import { s__ } from '~/locale'; import { WORKSPACE_GROUP, WORKSPACE_PROJECT } from '~/issues/constants'; import ListPage from '~/packages_and_registries/package_registry/pages/list.vue'; import PackageTitle from '~/packages_and_registries/package_registry/components/list/package_title.vue'; @@ -30,6 +32,7 @@ describe('PackagesListApp', () => { emptyListIllustration: 'emptyListIllustration', isGroupPage: true, fullPath: 'gitlab-org', + settingsPath: 'settings-path', }; const PackageList = { @@ -49,6 +52,7 @@ describe('PackagesListApp', () => { const findListComponent = () => wrapper.findComponent(PackageList); const findEmptyState = () => wrapper.findComponent(GlEmptyState); const findDeletePackages = () => 
wrapper.findComponent(DeletePackages); + const findSettingsLink = () => wrapper.findComponent(GlButton); const mountComponent = ({ resolver = jest.fn().mockResolvedValue(packagesListQuery()), @@ -71,13 +75,17 @@ describe('PackagesListApp', () => { GlLoadingIcon, GlSprintf, GlLink, + PackageTitle, PackageList, DeletePackages, }, + directives: { + GlTooltip: createMockDirective('gl-tooltip'), + }, }); }; - const waitForFirstRequest = async () => { + const waitForFirstRequest = () => { // emit a search update so the query is executed findSearch().vm.$emit('update', { sort: 'NAME_DESC', filters: [] }); return waitForPromises(); @@ -103,6 +111,52 @@ describe('PackagesListApp', () => { }); }); + describe('link to settings', () => { + describe('when settings path is not provided', () => { + beforeEach(() => { + mountComponent({ + provide: { + ...defaultProvide, + settingsPath: '', + }, + }); + }); + + it('is not rendered', () => { + expect(findSettingsLink().exists()).toBe(false); + }); + }); + + describe('when settings path is provided', () => { + const label = s__('PackageRegistry|Configure in settings'); + + beforeEach(() => { + mountComponent(); + }); + + it('is rendered', () => { + expect(findSettingsLink().exists()).toBe(true); + }); + + it('has the right icon', () => { + expect(findSettingsLink().props('icon')).toBe('settings'); + }); + + it('has the right attributes', () => { + expect(findSettingsLink().attributes()).toMatchObject({ + 'aria-label': label, + href: defaultProvide.settingsPath, + }); + }); + + it('sets tooltip with right label', () => { + const tooltip = getBinding(findSettingsLink().element, 'gl-tooltip'); + + expect(tooltip.value).toBe(label); + }); + }); + }); + describe('search component', () => { it('exists', () => { mountComponent(); @@ -141,6 +195,11 @@ describe('PackagesListApp', () => { list: expect.arrayContaining([expect.objectContaining({ id: packageData().id })]), isLoading: false, pageInfo: expect.objectContaining({ endCursor: 
pagination().endCursor }), + groupSettings: expect.objectContaining({ + mavenPackageRequestsForwarding: true, + npmPackageRequestsForwarding: true, + pypiPackageRequestsForwarding: true, + }), }); }); @@ -191,6 +250,16 @@ describe('PackagesListApp', () => { expect.objectContaining({ isGroupPage, [sortType]: 'NAME_DESC' }), ); }); + + it('list component has group settings prop set', () => { + expect(findListComponent().props()).toMatchObject({ + groupSettings: expect.objectContaining({ + mavenPackageRequestsForwarding: true, + npmPackageRequestsForwarding: true, + pypiPackageRequestsForwarding: true, + }), + }); + }); }); describe.each` diff --git a/spec/frontend/packages_and_registries/settings/group/components/package_settings_spec.js b/spec/frontend/packages_and_registries/settings/group/components/package_settings_spec.js index 22e42f8c0ab..49e76cfbae0 100644 --- a/spec/frontend/packages_and_registries/settings/group/components/package_settings_spec.js +++ b/spec/frontend/packages_and_registries/settings/group/components/package_settings_spec.js @@ -177,7 +177,7 @@ describe('Packages Settings', () => { }); }); - it('renders ExceptionsInput and assigns duplication allowness and exception props', async () => { + it('renders ExceptionsInput and assigns duplication allowness and exception props', () => { mountComponent({ mountFn: mountExtended }); const { genericDuplicatesAllowed, genericDuplicateExceptionRegex } = packageSettings; @@ -192,7 +192,7 @@ describe('Packages Settings', () => { }); }); - it('on update event calls the mutation', async () => { + it('on update event calls the mutation', () => { const mutationResolver = jest.fn().mockResolvedValue(groupPackageSettingsMutationMock()); mountComponent({ mountFn: mountExtended, mutationResolver }); diff --git a/spec/frontend/packages_and_registries/settings/group/components/packages_forwarding_settings_spec.js b/spec/frontend/packages_and_registries/settings/group/components/packages_forwarding_settings_spec.js 
index d57077b31c8..8a66a685733 100644 --- a/spec/frontend/packages_and_registries/settings/group/components/packages_forwarding_settings_spec.js +++ b/spec/frontend/packages_and_registries/settings/group/components/packages_forwarding_settings_spec.js @@ -1,12 +1,13 @@ import Vue from 'vue'; -import { GlButton } from '@gitlab/ui'; +import { GlButton, GlLink, GlSprintf } from '@gitlab/ui'; import VueApollo from 'vue-apollo'; import { shallowMountExtended } from 'helpers/vue_test_utils_helper'; import createMockApollo from 'helpers/mock_apollo_helper'; import waitForPromises from 'helpers/wait_for_promises'; +import { s__ } from '~/locale'; import component from '~/packages_and_registries/settings/group/components/packages_forwarding_settings.vue'; import { - PACKAGE_FORWARDING_SETTINGS_DESCRIPTION, + REQUEST_FORWARDING_HELP_PAGE_PATH, PACKAGE_FORWARDING_SETTINGS_HEADER, } from '~/packages_and_registries/settings/group/constants'; @@ -60,6 +61,7 @@ describe('Packages Forwarding Settings', () => { forwardSettings, }, stubs: { + GlSprintf, SettingsBlock, }, }); @@ -72,6 +74,7 @@ describe('Packages Forwarding Settings', () => { const findMavenForwardingSettings = () => wrapper.findByTestId('maven'); const findNpmForwardingSettings = () => wrapper.findByTestId('npm'); const findPyPiForwardingSettings = () => wrapper.findByTestId('pypi'); + const findRequestForwardingDocsLink = () => wrapper.findComponent(GlLink); const fillApolloCache = () => { apolloProvider.defaultClient.cache.writeQuery({ @@ -111,8 +114,18 @@ describe('Packages Forwarding Settings', () => { it('has the correct description text', () => { mountComponent(); - expect(findDescription().text()).toMatchInterpolatedText( - PACKAGE_FORWARDING_SETTINGS_DESCRIPTION, + expect(findDescription().text()).toBe( + s__( + 'PackageRegistry|Forward package requests to a public registry if the packages are not found in the GitLab package registry.', + ), + ); + }); + + it('has the right help link', () => { + 
mountComponent(); + + expect(findRequestForwardingDocsLink().attributes('href')).toBe( + REQUEST_FORWARDING_HELP_PAGE_PATH, ); }); diff --git a/spec/frontend/packages_and_registries/settings/project/settings/components/cleanup_image_tags_spec.js b/spec/frontend/packages_and_registries/settings/project/settings/components/cleanup_image_tags_spec.js index 49e8601da88..cbe68df5343 100644 --- a/spec/frontend/packages_and_registries/settings/project/settings/components/cleanup_image_tags_spec.js +++ b/spec/frontend/packages_and_registries/settings/project/settings/components/cleanup_image_tags_spec.js @@ -126,7 +126,7 @@ describe('Cleanup image tags project settings', () => { }); describe('an admin is visiting the page', () => { - it('shows the admin part of the alert message', async () => { + it('shows the admin part of the alert', async () => { mountComponentWithApollo({ provide: { ...defaultProvidedValues, isAdmin: true }, resolver: jest.fn().mockResolvedValue(nullExpirationPolicyPayload()), diff --git a/spec/frontend/packages_and_registries/settings/project/settings/components/container_expiration_policy_form_spec.js b/spec/frontend/packages_and_registries/settings/project/settings/components/container_expiration_policy_form_spec.js index 57b48407174..a68087f7f57 100644 --- a/spec/frontend/packages_and_registries/settings/project/settings/components/container_expiration_policy_form_spec.js +++ b/spec/frontend/packages_and_registries/settings/project/settings/components/container_expiration_policy_form_spec.js @@ -46,7 +46,7 @@ describe('Container Expiration Policy Settings Form', () => { const findOlderThanDropdown = () => wrapper.find('[data-testid="older-than-dropdown"]'); const findRemoveRegexInput = () => wrapper.find('[data-testid="remove-regex-input"]'); - const submitForm = async () => { + const submitForm = () => { findForm().trigger('submit'); return waitForPromises(); }; diff --git 
a/spec/frontend/packages_and_registries/settings/project/settings/components/container_expiration_policy_spec.js b/spec/frontend/packages_and_registries/settings/project/settings/components/container_expiration_policy_spec.js index 19f25d0aef7..c9dd9ce7a45 100644 --- a/spec/frontend/packages_and_registries/settings/project/settings/components/container_expiration_policy_spec.js +++ b/spec/frontend/packages_and_registries/settings/project/settings/components/container_expiration_policy_spec.js @@ -109,7 +109,7 @@ describe('Container expiration policy project settings', () => { }); describe('an admin is visiting the page', () => { - it('shows the admin part of the alert message', async () => { + it('shows the admin part of the alert', async () => { mountComponentWithApollo({ provide: { ...defaultProvidedValues, isAdmin: true }, resolver: jest.fn().mockResolvedValue(nullExpirationPolicyPayload()), diff --git a/spec/frontend/packages_and_registries/settings/project/settings/components/packages_cleanup_policy_form_spec.js b/spec/frontend/packages_and_registries/settings/project/settings/components/packages_cleanup_policy_form_spec.js index b9c0c38bf9e..50b72d3ad72 100644 --- a/spec/frontend/packages_and_registries/settings/project/settings/components/packages_cleanup_policy_form_spec.js +++ b/spec/frontend/packages_and_registries/settings/project/settings/components/packages_cleanup_policy_form_spec.js @@ -48,7 +48,7 @@ describe('Packages Cleanup Policy Settings Form', () => { wrapper.findByTestId('keep-n-duplicated-package-files-dropdown'); const findNextRunAt = () => wrapper.findByTestId('next-run-at'); - const submitForm = async () => { + const submitForm = () => { findForm().trigger('submit'); return waitForPromises(); }; diff --git a/spec/frontend/packages_and_registries/settings/project/settings/components/registry_settings_app_spec.js b/spec/frontend/packages_and_registries/settings/project/settings/components/registry_settings_app_spec.js index 
54655acdf2a..12425909454 100644 --- a/spec/frontend/packages_and_registries/settings/project/settings/components/registry_settings_app_spec.js +++ b/spec/frontend/packages_and_registries/settings/project/settings/components/registry_settings_app_spec.js @@ -79,7 +79,7 @@ describe('Registry Settings app', () => { ${false} | ${true} ${false} | ${false} `( - 'container expiration policy $showContainerRegistrySettings and package cleanup policy is $showPackageRegistrySettings', + 'container cleanup policy $showContainerRegistrySettings and package cleanup policy is $showPackageRegistrySettings', ({ showContainerRegistrySettings, showPackageRegistrySettings }) => { mountComponent({ showContainerRegistrySettings, diff --git a/spec/frontend/packages_and_registries/shared/components/__snapshots__/registry_breadcrumb_spec.js.snap b/spec/frontend/packages_and_registries/shared/components/__snapshots__/registry_breadcrumb_spec.js.snap index e6e89806ce0..e9ee6ebdb5c 100644 --- a/spec/frontend/packages_and_registries/shared/components/__snapshots__/registry_breadcrumb_spec.js.snap +++ b/spec/frontend/packages_and_registries/shared/components/__snapshots__/registry_breadcrumb_spec.js.snap @@ -5,7 +5,6 @@ exports[`Registry Breadcrumb when is not rootRoute renders 1`] = ` aria-label="Breadcrumb" class="gl-breadcrumbs" > -
    diff --git a/spec/frontend/vue_shared/components/registry/__snapshots__/history_item_spec.js.snap b/spec/frontend/vue_shared/components/registry/__snapshots__/history_item_spec.js.snap index 66cf2354bc7..5c487754b87 100644 --- a/spec/frontend/vue_shared/components/registry/__snapshots__/history_item_spec.js.snap +++ b/spec/frontend/vue_shared/components/registry/__snapshots__/history_item_spec.js.snap @@ -8,7 +8,7 @@ exports[`History Item renders the correct markup 1`] = ` class="timeline-entry-inner" >
    { }; function createComponent(props = {}) { - wrapper = mount(CodeInstruction, { + wrapper = shallowMount(CodeInstruction, { propsData: { ...defaultProps, ...props, diff --git a/spec/frontend/vue_shared/components/resizable_chart/__snapshots__/skeleton_loader_spec.js.snap b/spec/frontend/vue_shared/components/resizable_chart/__snapshots__/skeleton_loader_spec.js.snap index 623f7d083c5..65427374e1b 100644 --- a/spec/frontend/vue_shared/components/resizable_chart/__snapshots__/skeleton_loader_spec.js.snap +++ b/spec/frontend/vue_shared/components/resizable_chart/__snapshots__/skeleton_loader_spec.js.snap @@ -15,8 +15,8 @@ exports[`Resizable Skeleton Loader default setup renders the bars, labels, and g { await waitForPromises(); }); - it('should not show alert', async () => { + it('should not show alert', () => { expect(findAlert().exists()).toBe(false); }); @@ -85,13 +85,13 @@ describe('RunnerCliInstructions component', () => { }); }); - it('binary instructions are shown', async () => { + it('binary instructions are shown', () => { const instructions = findBinaryInstructions().text(); expect(instructions).toBe(installInstructions.trim()); }); - it('register command is shown with a replaced token', async () => { + it('register command is shown with a replaced token', () => { const command = findRegisterCommand().text(); expect(command).toBe( diff --git a/spec/frontend/vue_shared/components/runner_instructions/runner_instructions_modal_spec.js b/spec/frontend/vue_shared/components/runner_instructions/runner_instructions_modal_spec.js index cb35cbd35ad..cd4ebe334c0 100644 --- a/spec/frontend/vue_shared/components/runner_instructions/runner_instructions_modal_spec.js +++ b/spec/frontend/vue_shared/components/runner_instructions/runner_instructions_modal_spec.js @@ -80,7 +80,7 @@ describe('RunnerInstructionsModal component', () => { await waitForPromises(); }); - it('should not show alert', async () => { + it('should not show alert', () => { 
expect(findAlert().exists()).toBe(false); }); @@ -202,7 +202,7 @@ describe('RunnerInstructionsModal component', () => { expect(findAlert().exists()).toBe(true); }); - it('should show alert when instructions cannot be loaded', async () => { + it('should show an alert when instructions cannot be loaded', async () => { createComponent(); await waitForPromises(); diff --git a/spec/frontend/vue_shared/components/slot_switch_spec.js b/spec/frontend/vue_shared/components/slot_switch_spec.js index f25b9877aba..daca4977817 100644 --- a/spec/frontend/vue_shared/components/slot_switch_spec.js +++ b/spec/frontend/vue_shared/components/slot_switch_spec.js @@ -1,4 +1,5 @@ import { shallowMount } from '@vue/test-utils'; +import { assertProps } from 'helpers/assert_props'; import SlotSwitch from '~/vue_shared/components/slot_switch.vue'; @@ -26,7 +27,9 @@ describe('SlotSwitch', () => { }); it('throws an error if activeSlotNames is missing', () => { - expect(createComponent).toThrow('[Vue warn]: Missing required prop: "activeSlotNames"'); + expect(() => assertProps(SlotSwitch, {})).toThrow( + '[Vue warn]: Missing required prop: "activeSlotNames"', + ); }); it('renders no slots if activeSlotNames is empty', () => { diff --git a/spec/frontend/vue_shared/components/source_viewer/components/chunk_line_spec.js b/spec/frontend/vue_shared/components/source_viewer/components/chunk_line_spec.js index 6c8fc244fa0..9a38a96663d 100644 --- a/spec/frontend/vue_shared/components/source_viewer/components/chunk_line_spec.js +++ b/spec/frontend/vue_shared/components/source_viewer/components/chunk_line_spec.js @@ -10,16 +10,10 @@ const DEFAULT_PROPS = { describe('Chunk Line component', () => { let wrapper; - const fileLineBlame = true; const createComponent = (props = {}) => { wrapper = shallowMountExtended(ChunkLine, { propsData: { ...DEFAULT_PROPS, ...props }, - provide: { - glFeatures: { - fileLineBlame, - }, - }, }); }; diff --git 
a/spec/frontend/vue_shared/components/source_viewer/components/chunk_spec.js b/spec/frontend/vue_shared/components/source_viewer/components/chunk_spec.js index 59880496d74..ff50326917f 100644 --- a/spec/frontend/vue_shared/components/source_viewer/components/chunk_spec.js +++ b/spec/frontend/vue_shared/components/source_viewer/components/chunk_spec.js @@ -11,7 +11,6 @@ describe('Chunk component', () => { const createComponent = (props = {}) => { wrapper = shallowMountExtended(Chunk, { propsData: { ...CHUNK_1, ...props }, - provide: { glFeatures: { fileLineBlame: true } }, }); }; diff --git a/spec/frontend/vue_shared/components/source_viewer/source_viewer_deprecated_spec.js b/spec/frontend/vue_shared/components/source_viewer/source_viewer_deprecated_spec.js index c911e3d308b..4cec129b6e4 100644 --- a/spec/frontend/vue_shared/components/source_viewer/source_viewer_deprecated_spec.js +++ b/spec/frontend/vue_shared/components/source_viewer/source_viewer_deprecated_spec.js @@ -168,7 +168,7 @@ describe('Source Viewer component', () => { }); describe('LineHighlighter', () => { - it('instantiates the lineHighlighter class', async () => { + it('instantiates the lineHighlighter class', () => { expect(LineHighlighter).toHaveBeenCalledWith({ scrollBehavior: 'auto' }); }); }); diff --git a/spec/frontend/vue_shared/components/split_button_spec.js b/spec/frontend/vue_shared/components/split_button_spec.js index 6b869db4058..ffa25ae8448 100644 --- a/spec/frontend/vue_shared/components/split_button_spec.js +++ b/spec/frontend/vue_shared/components/split_button_spec.js @@ -2,6 +2,7 @@ import { GlDropdown, GlDropdownItem } from '@gitlab/ui'; import { shallowMount } from '@vue/test-utils'; import { nextTick } from 'vue'; +import { assertProps } from 'helpers/assert_props'; import SplitButton from '~/vue_shared/components/split_button.vue'; const mockActionItems = [ @@ -42,12 +43,12 @@ describe('SplitButton', () => { it('fails for empty actionItems', () => { const actionItems = []; - 
expect(() => createComponent({ actionItems })).toThrow(); + expect(() => assertProps(SplitButton, { actionItems })).toThrow(); }); it('fails for single actionItems', () => { const actionItems = [mockActionItems[0]]; - expect(() => createComponent({ actionItems })).toThrow(); + expect(() => assertProps(SplitButton, { actionItems })).toThrow(); }); it('renders actionItems', () => { diff --git a/spec/frontend/vue_shared/components/tooltip_on_truncate_spec.js b/spec/frontend/vue_shared/components/tooltip_on_truncate_spec.js index 3807bb4cc63..f5da498a205 100644 --- a/spec/frontend/vue_shared/components/tooltip_on_truncate_spec.js +++ b/spec/frontend/vue_shared/components/tooltip_on_truncate_spec.js @@ -79,12 +79,12 @@ describe('TooltipOnTruncate component', () => { }; describe('when truncated', () => { - beforeEach(async () => { + beforeEach(() => { hasHorizontalOverflow.mockReturnValueOnce(true); createComponent(); }); - it('renders tooltip', async () => { + it('renders tooltip', () => { expect(hasHorizontalOverflow).toHaveBeenLastCalledWith(wrapper.element); expect(getTooltipValue()).toStrictEqual({ title: MOCK_TITLE, @@ -96,7 +96,7 @@ describe('TooltipOnTruncate component', () => { }); describe('with default target', () => { - beforeEach(async () => { + beforeEach(() => { hasHorizontalOverflow.mockReturnValueOnce(false); createComponent(); }); diff --git a/spec/frontend/vue_shared/components/user_callout_dismisser_spec.js b/spec/frontend/vue_shared/components/user_callout_dismisser_spec.js index b04e578c931..a4efbda06ce 100644 --- a/spec/frontend/vue_shared/components/user_callout_dismisser_spec.js +++ b/spec/frontend/vue_shared/components/user_callout_dismisser_spec.js @@ -31,18 +31,18 @@ describe('UserCalloutDismisser', () => { const MOCK_FEATURE_NAME = 'mock_feature_name'; // Query handlers - const successHandlerFactory = (dismissedCallouts = []) => async () => - userCalloutsResponse(dismissedCallouts); - const anonUserHandler = async () => 
anonUserCalloutsResponse(); + const successHandlerFactory = (dismissedCallouts = []) => () => + Promise.resolve(userCalloutsResponse(dismissedCallouts)); + const anonUserHandler = () => Promise.resolve(anonUserCalloutsResponse()); const errorHandler = () => Promise.reject(new Error('query error')); const pendingHandler = () => new Promise(() => {}); // Mutation handlers - const mutationSuccessHandlerSpy = jest.fn(async (variables) => - userCalloutMutationResponse(variables), + const mutationSuccessHandlerSpy = jest.fn((variables) => + Promise.resolve(userCalloutMutationResponse(variables)), ); - const mutationErrorHandlerSpy = jest.fn(async (variables) => - userCalloutMutationResponse(variables, ['mutation error']), + const mutationErrorHandlerSpy = jest.fn((variables) => + Promise.resolve(userCalloutMutationResponse(variables, ['mutation error'])), ); const defaultScopedSlotSpy = jest.fn(); diff --git a/spec/frontend/vue_shared/components/user_deletion_obstacles/user_deletion_obstacles_list_spec.js b/spec/frontend/vue_shared/components/user_deletion_obstacles/user_deletion_obstacles_list_spec.js index 6491e5a66cd..d77e357a50c 100644 --- a/spec/frontend/vue_shared/components/user_deletion_obstacles/user_deletion_obstacles_list_spec.js +++ b/spec/frontend/vue_shared/components/user_deletion_obstacles/user_deletion_obstacles_list_spec.js @@ -61,7 +61,7 @@ describe('User deletion obstacles list', () => { ${true} | ${'You are currently a part of:'} | ${'Removing yourself may put your on-call team at risk of missing a notification.'} ${false} | ${`User ${userName} is currently part of:`} | ${'Removing this user may put their on-call team at risk of missing a notification.'} `('when current user', ({ isCurrentUser, titleText, footerText }) => { - it(`${isCurrentUser ? 'is' : 'is not'} a part of on-call management`, async () => { + it(`${isCurrentUser ? 
'is' : 'is not'} a part of on-call management`, () => { createComponent({ isCurrentUser, }); diff --git a/spec/frontend/vue_shared/components/user_popover/user_popover_spec.js b/spec/frontend/vue_shared/components/user_popover/user_popover_spec.js index 8ecab5cc043..41181ab9a68 100644 --- a/spec/frontend/vue_shared/components/user_popover/user_popover_spec.js +++ b/spec/frontend/vue_shared/components/user_popover/user_popover_spec.js @@ -1,5 +1,6 @@ import { GlSkeletonLoader, GlIcon } from '@gitlab/ui'; -import { loadHTMLFixture, resetHTMLFixture } from 'helpers/fixtures'; +import mrDiffCommentFixture from 'test_fixtures/merge_requests/diff_comment.html'; +import { setHTMLFixture, resetHTMLFixture } from 'helpers/fixtures'; import { sprintf } from '~/locale'; import { mountExtended } from 'helpers/vue_test_utils_helper'; import { AVAILABILITY_STATUS } from '~/set_status_modal/constants'; @@ -41,12 +42,10 @@ const DEFAULT_PROPS = { }; describe('User Popover Component', () => { - const fixtureTemplate = 'merge_requests/diff_comment.html'; - let wrapper; beforeEach(() => { - loadHTMLFixture(fixtureTemplate); + setHTMLFixture(mrDiffCommentFixture); gon.features = {}; }); @@ -276,7 +275,7 @@ describe('User Popover Component', () => { createWrapper({ user }); - expect(wrapper.findByText('(Busy)').exists()).toBe(true); + expect(wrapper.findByText('Busy').exists()).toBe(true); }); it('should hide the busy status for any other status', () => { @@ -287,7 +286,7 @@ describe('User Popover Component', () => { createWrapper({ user }); - expect(wrapper.findByText('(Busy)').exists()).toBe(false); + expect(wrapper.findByText('Busy').exists()).toBe(false); }); it('shows pronouns when user has them set', () => { diff --git a/spec/frontend/vue_shared/components/vuex_module_provider_spec.js b/spec/frontend/vue_shared/components/vuex_module_provider_spec.js index acbb931b7b6..e24c5a4609d 100644 --- a/spec/frontend/vue_shared/components/vuex_module_provider_spec.js +++ 
b/spec/frontend/vue_shared/components/vuex_module_provider_spec.js @@ -3,10 +3,10 @@ import Vue from 'vue'; import VueApollo from 'vue-apollo'; import VuexModuleProvider from '~/vue_shared/components/vuex_module_provider.vue'; -const TestComponent = Vue.extend({ +const TestComponent = { inject: ['vuexModule'], template: `
    {{ vuexModule }}
    `, -}); +}; const TEST_VUEX_MODULE = 'testVuexModule'; @@ -32,6 +32,13 @@ describe('~/vue_shared/components/vuex_module_provider', () => { expect(findProvidedVuexModule()).toBe(TEST_VUEX_MODULE); }); + it('provides "vuexModel" set from "vuex-module" prop when using @vue/compat', () => { + createComponent({ + propsData: { 'vuex-module': TEST_VUEX_MODULE }, + }); + expect(findProvidedVuexModule()).toBe(TEST_VUEX_MODULE); + }); + it('does not blow up when used with vue-apollo', () => { // See https://github.com/vuejs/vue-apollo/pull/1153 for details Vue.use(VueApollo); diff --git a/spec/frontend/vue_shared/components/web_ide_link_spec.js b/spec/frontend/vue_shared/components/web_ide_link_spec.js index f6eb11aaddf..4b2ce24a49f 100644 --- a/spec/frontend/vue_shared/components/web_ide_link_spec.js +++ b/spec/frontend/vue_shared/components/web_ide_link_spec.js @@ -345,7 +345,7 @@ describe('Web IDE link component', () => { it.each(testActions)( 'emits the correct event when an action handler is called', - async ({ props, expectedEventPayload }) => { + ({ props, expectedEventPayload }) => { createComponent({ ...props, needsToFork: true, disableForkModal: true }); findActionsButton().props('actions')[0].handle(); @@ -354,7 +354,7 @@ describe('Web IDE link component', () => { }, ); - it.each(testActions)('renders the fork confirmation modal', async ({ props }) => { + it.each(testActions)('renders the fork confirmation modal', ({ props }) => { createComponent({ ...props, needsToFork: true }); expect(findForkConfirmModal().exists()).toBe(true); diff --git a/spec/frontend/vue_shared/directives/track_event_spec.js b/spec/frontend/vue_shared/directives/track_event_spec.js index 4bf84b06246..fc69e884258 100644 --- a/spec/frontend/vue_shared/directives/track_event_spec.js +++ b/spec/frontend/vue_shared/directives/track_event_spec.js @@ -1,50 +1,47 @@ import { shallowMount } from '@vue/test-utils'; -import Vue, { nextTick } from 'vue'; +import Vue from 'vue'; import Tracking from 
'~/tracking'; import TrackEvent from '~/vue_shared/directives/track_event'; jest.mock('~/tracking'); -const Component = Vue.component('DummyElement', { - directives: { - TrackEvent, - }, - data() { - return { - trackingOptions: null, - }; - }, - template: '', -}); +describe('TrackEvent directive', () => { + let wrapper; -let wrapper; -let button; + const clickButton = () => wrapper.find('button').trigger('click'); -describe('Error Tracking directive', () => { - beforeEach(() => { - wrapper = shallowMount(Component); - button = wrapper.find('#trackable'); - }); + const createComponent = (trackingOptions) => + Vue.component('DummyElement', { + directives: { + TrackEvent, + }, + data() { + return { + trackingOptions, + }; + }, + template: '', + }); + + const mountComponent = (trackingOptions) => shallowMount(createComponent(trackingOptions)); + + it('does not track the event if required arguments are not provided', () => { + wrapper = mountComponent(); + clickButton(); - it('should not track the event if required arguments are not provided', () => { - button.trigger('click'); expect(Tracking.event).not.toHaveBeenCalled(); }); - it('should track event on click if tracking info provided', async () => { - const trackingOptions = { + it('tracks event on click if tracking info provided', () => { + wrapper = mountComponent({ category: 'Tracking', action: 'click_trackable_btn', label: 'Trackable Info', - }; - - // setData usage is discouraged. 
See https://gitlab.com/groups/gitlab-org/-/epics/7330 for details - // eslint-disable-next-line no-restricted-syntax - wrapper.setData({ trackingOptions }); - const { category, action, label, property, value } = trackingOptions; + }); + clickButton(); - await nextTick(); - button.trigger('click'); - expect(Tracking.event).toHaveBeenCalledWith(category, action, { label, property, value }); + expect(Tracking.event).toHaveBeenCalledWith('Tracking', 'click_trackable_btn', { + label: 'Trackable Info', + }); }); }); diff --git a/spec/frontend/vue_shared/issuable/issuable_blocked_icon_spec.js b/spec/frontend/vue_shared/issuable/issuable_blocked_icon_spec.js index 61e6d2a420a..d5603d4ba4b 100644 --- a/spec/frontend/vue_shared/issuable/issuable_blocked_icon_spec.js +++ b/spec/frontend/vue_shared/issuable/issuable_blocked_icon_spec.js @@ -139,7 +139,7 @@ describe('IssuableBlockedIcon', () => { expect(wrapper.findComponent(GlLoadingIcon).exists()).toBe(true); }); - it('should not query for blocking issuables by default', async () => { + it('should not query for blocking issuables by default', () => { createWrapperWithApollo(); expect(findGlPopover().text()).not.toContain(mockBlockingIssue1.title); @@ -195,18 +195,18 @@ describe('IssuableBlockedIcon', () => { await mouseenter(); }); - it('should render a title of the issuable', async () => { + it('should render a title of the issuable', () => { expect(findIssuableTitle().text()).toBe(mockBlockingIssue1.title); }); - it('should render issuable reference and link to the issuable', async () => { + it('should render issuable reference and link to the issuable', () => { const formattedRef = mockBlockingIssue1.reference.split('/')[1]; expect(findGlLink().text()).toBe(formattedRef); expect(findGlLink().attributes('href')).toBe(mockBlockingIssue1.webUrl); }); - it('should render popover title with correct blocking issuable count', async () => { + it('should render popover title with correct blocking issuable count', () => { 
expect(findPopoverTitle().text()).toBe('Blocked by 1 issue'); }); }); @@ -241,7 +241,7 @@ describe('IssuableBlockedIcon', () => { expect(wrapper.html()).toMatchSnapshot(); }); - it('should render popover title with correct blocking issuable count', async () => { + it('should render popover title with correct blocking issuable count', () => { expect(findPopoverTitle().text()).toBe('Blocked by 4 issues'); }); @@ -249,7 +249,7 @@ describe('IssuableBlockedIcon', () => { expect(findHiddenBlockingCount().text()).toBe('+ 1 more issue'); }); - it('should link to the blocked issue page at the related issue anchor', async () => { + it('should link to the blocked issue page at the related issue anchor', () => { expect(findViewAllIssuableLink().text()).toBe('View all blocking issues'); expect(findViewAllIssuableLink().attributes('href')).toBe( `${mockBlockedIssue2.webUrl}#related-issues`, diff --git a/spec/frontend/vue_shared/issuable/list/components/issuable_item_spec.js b/spec/frontend/vue_shared/issuable/list/components/issuable_item_spec.js index 45daf0dc34b..502fa609ebc 100644 --- a/spec/frontend/vue_shared/issuable/list/components/issuable_item_spec.js +++ b/spec/frontend/vue_shared/issuable/list/components/issuable_item_spec.js @@ -331,7 +331,7 @@ describe('IssuableItem', () => { }); }); - it('renders spam icon when issuable is hidden', async () => { + it('renders spam icon when issuable is hidden', () => { wrapper = createComponent({ issuable: { ...mockIssuable, hidden: true } }); const hiddenIcon = wrapper.findComponent(GlIcon); diff --git a/spec/frontend/vue_shared/issuable/list/components/issuable_list_root_spec.js b/spec/frontend/vue_shared/issuable/list/components/issuable_list_root_spec.js index 9a4636e0f4d..ec975dfdcb5 100644 --- a/spec/frontend/vue_shared/issuable/list/components/issuable_list_root_spec.js +++ b/spec/frontend/vue_shared/issuable/list/components/issuable_list_root_spec.js @@ -333,7 +333,7 @@ describe('IssuableListRoot', () => { describe('alert', 
() => { const error = 'oopsie!'; - it('shows alert when there is an error', () => { + it('shows an alert when there is an error', () => { wrapper = createComponent({ props: { error } }); expect(findAlert().text()).toBe(error); @@ -504,7 +504,7 @@ describe('IssuableListRoot', () => { }); }); - it('has the page size change component', async () => { + it('has the page size change component', () => { expect(findPageSizeSelector().exists()).toBe(true); }); diff --git a/spec/frontend/vue_shared/issuable/list/mock_data.js b/spec/frontend/vue_shared/issuable/list/mock_data.js index b67bd0f42fe..964b48f4275 100644 --- a/spec/frontend/vue_shared/issuable/list/mock_data.js +++ b/spec/frontend/vue_shared/issuable/list/mock_data.js @@ -60,6 +60,12 @@ export const mockIssuable = { type: 'issue', }; +export const mockIssuableItems = (n) => + [...Array(n).keys()].map((i) => ({ + id: i, + ...mockIssuable, + })); + export const mockIssuables = [ mockIssuable, { diff --git a/spec/frontend/vue_shared/issuable/show/components/issuable_body_spec.js b/spec/frontend/vue_shared/issuable/show/components/issuable_body_spec.js index 7e665b7c76e..02e729a00bd 100644 --- a/spec/frontend/vue_shared/issuable/show/components/issuable_body_spec.js +++ b/spec/frontend/vue_shared/issuable/show/components/issuable_body_spec.js @@ -1,5 +1,5 @@ +import { GlLink } from '@gitlab/ui'; import { shallowMount } from '@vue/test-utils'; -import { nextTick } from 'vue'; import { useFakeDate } from 'helpers/fake_date'; import IssuableBody from '~/vue_shared/issuable/show/components/issuable_body.vue'; @@ -14,96 +14,76 @@ import { mockIssuableShowProps, mockIssuable } from '../mock_data'; jest.mock('~/autosave'); jest.mock('~/alert'); +jest.mock('~/task_list'); const issuableBodyProps = { ...mockIssuableShowProps, issuable: mockIssuable, }; -const createComponent = (propsData = issuableBodyProps) => - shallowMount(IssuableBody, { - propsData, - stubs: { - IssuableTitle, - IssuableDescription, - IssuableEditForm, - 
TimeAgoTooltip, - }, - slots: { - 'status-badge': 'Open', - 'edit-form-actions': ` - - - `, - }, - }); - describe('IssuableBody', () => { // Some assertions expect a date later than our default useFakeDate(2020, 11, 11); let wrapper; + const createComponent = (propsData = {}) => { + wrapper = shallowMount(IssuableBody, { + propsData: { + ...issuableBodyProps, + ...propsData, + }, + stubs: { + IssuableTitle, + IssuableDescription, + IssuableEditForm, + TimeAgoTooltip, + }, + slots: { + 'status-badge': 'Open', + 'edit-form-actions': ` + + + `, + }, + }); + }; + + const findUpdatedLink = () => wrapper.findComponent(GlLink); + const findIssuableEditForm = () => wrapper.findComponent(IssuableEditForm); + const findIssuableEditFormButton = (type) => findIssuableEditForm().find(`button.js-${type}`); + const findIssuableTitle = () => wrapper.findComponent(IssuableTitle); + beforeEach(() => { - wrapper = createComponent(); + createComponent(); + TaskList.mockClear(); }); describe('computed', () => { - describe('isUpdated', () => { - it.each` - updatedAt | returnValue - ${mockIssuable.updatedAt} | ${true} - ${null} | ${false} - ${''} | ${false} - `( - 'returns $returnValue when value of `updateAt` prop is `$updatedAt`', - async ({ updatedAt, returnValue }) => { - wrapper.setProps({ - issuable: { - ...mockIssuable, - updatedAt, - }, - }); - - await nextTick(); - - expect(wrapper.vm.isUpdated).toBe(returnValue); - }, - ); - }); - describe('updatedBy', () => { it('returns value of `issuable.updatedBy`', () => { - expect(wrapper.vm.updatedBy).toBe(mockIssuable.updatedBy); + expect(findUpdatedLink().text()).toBe(mockIssuable.updatedBy.name); + expect(findUpdatedLink().attributes('href')).toBe(mockIssuable.updatedBy.webUrl); }); }); }); describe('watchers', () => { describe('editFormVisible', () => { - it('calls initTaskList in nextTick', async () => { - jest.spyOn(wrapper.vm, 'initTaskList'); - wrapper.setProps({ - editFormVisible: true, - }); - - await nextTick(); - - 
wrapper.setProps({ + it('calls initTaskList in nextTick', () => { + createComponent({ editFormVisible: false, }); - await nextTick(); - - expect(wrapper.vm.initTaskList).toHaveBeenCalled(); + expect(TaskList).toHaveBeenCalled(); }); }); }); describe('mounted', () => { it('initializes TaskList instance when enabledEdit and enableTaskList props are true', () => { - expect(wrapper.vm.taskList instanceof TaskList).toBe(true); - expect(wrapper.vm.taskList).toMatchObject({ + createComponent(); + expect(TaskList).toHaveBeenCalledWith({ dataType: 'issue', fieldName: 'description', lockVersion: issuableBodyProps.taskListLockVersion, @@ -114,14 +94,12 @@ describe('IssuableBody', () => { }); it('does not initialize TaskList instance when either enabledEdit or enableTaskList prop is false', () => { - const wrapperNoTaskList = createComponent({ + createComponent({ ...issuableBodyProps, enableTaskList: false, }); - expect(wrapperNoTaskList.vm.taskList).not.toBeDefined(); - - wrapperNoTaskList.destroy(); + expect(TaskList).toHaveBeenCalledTimes(0); }); }); @@ -150,10 +128,8 @@ describe('IssuableBody', () => { describe('template', () => { it('renders issuable-title component', () => { - const titleEl = wrapper.findComponent(IssuableTitle); - - expect(titleEl.exists()).toBe(true); - expect(titleEl.props()).toMatchObject({ + expect(findIssuableTitle().exists()).toBe(true); + expect(findIssuableTitle().props()).toMatchObject({ issuable: issuableBodyProps.issuable, statusIcon: issuableBodyProps.statusIcon, enableEdit: issuableBodyProps.enableEdit, @@ -168,42 +144,37 @@ describe('IssuableBody', () => { }); it('renders issuable edit info', () => { - const editedEl = wrapper.find('small'); - - expect(editedEl.text()).toMatchInterpolatedText('Edited 3 months ago by Administrator'); + expect(wrapper.find('small').text()).toMatchInterpolatedText( + 'Edited 3 months ago by Administrator', + ); }); - it('renders issuable-edit-form when `editFormVisible` prop is true', async () => { - 
wrapper.setProps({ + it('renders issuable-edit-form when `editFormVisible` prop is true', () => { + createComponent({ editFormVisible: true, }); - await nextTick(); - - const editFormEl = wrapper.findComponent(IssuableEditForm); - expect(editFormEl.exists()).toBe(true); - expect(editFormEl.props()).toMatchObject({ + expect(findIssuableEditForm().exists()).toBe(true); + expect(findIssuableEditForm().props()).toMatchObject({ issuable: issuableBodyProps.issuable, enableAutocomplete: issuableBodyProps.enableAutocomplete, descriptionPreviewPath: issuableBodyProps.descriptionPreviewPath, descriptionHelpPath: issuableBodyProps.descriptionHelpPath, }); - expect(editFormEl.find('button.js-save').exists()).toBe(true); - expect(editFormEl.find('button.js-cancel').exists()).toBe(true); + expect(findIssuableEditFormButton('save').exists()).toBe(true); + expect(findIssuableEditFormButton('cancel').exists()).toBe(true); }); describe('events', () => { it('component emits `edit-issuable` event bubbled via issuable-title', () => { - const issuableTitle = wrapper.findComponent(IssuableTitle); - - issuableTitle.vm.$emit('edit-issuable'); + findIssuableTitle().vm.$emit('edit-issuable'); expect(wrapper.emitted('edit-issuable')).toHaveLength(1); }); it.each(['keydown-title', 'keydown-description'])( 'component emits `%s` event with event object and issuableMeta params via issuable-edit-form', - async (eventName) => { + (eventName) => { const eventObj = { preventDefault: jest.fn(), stopPropagation: jest.fn(), @@ -213,15 +184,11 @@ describe('IssuableBody', () => { issuableDescription: 'foobar', }; - wrapper.setProps({ + createComponent({ editFormVisible: true, }); - await nextTick(); - - const issuableEditForm = wrapper.findComponent(IssuableEditForm); - - issuableEditForm.vm.$emit(eventName, eventObj, issuableMeta); + findIssuableEditForm().vm.$emit(eventName, eventObj, issuableMeta); expect(wrapper.emitted(eventName)).toHaveLength(1); 
expect(wrapper.emitted(eventName)[0]).toMatchObject([eventObj, issuableMeta]); diff --git a/spec/frontend/vue_shared/issuable/show/components/issuable_edit_form_spec.js b/spec/frontend/vue_shared/issuable/show/components/issuable_edit_form_spec.js index 0d6cd1ad00b..4a52c2a8dad 100644 --- a/spec/frontend/vue_shared/issuable/show/components/issuable_edit_form_spec.js +++ b/spec/frontend/vue_shared/issuable/show/components/issuable_edit_form_spec.js @@ -165,7 +165,7 @@ describe('IssuableEditForm', () => { stopPropagation: jest.fn(), }; - it('component emits `keydown-title` event with event object and issuableMeta params via gl-form-input', async () => { + it('component emits `keydown-title` event with event object and issuableMeta params via gl-form-input', () => { const titleInputEl = wrapper.findComponent(GlFormInput); titleInputEl.vm.$emit('keydown', eventObj, 'title'); @@ -179,7 +179,7 @@ describe('IssuableEditForm', () => { ]); }); - it('component emits `keydown-description` event with event object and issuableMeta params via textarea', async () => { + it('component emits `keydown-description` event with event object and issuableMeta params via textarea', () => { const descriptionInputEl = wrapper.find('[data-testid="description"] textarea'); descriptionInputEl.trigger('keydown', eventObj, 'description'); diff --git a/spec/frontend/vue_shared/issuable/show/components/issuable_header_spec.js b/spec/frontend/vue_shared/issuable/show/components/issuable_header_spec.js index d9f1b6c15a8..fa38ab8d44d 100644 --- a/spec/frontend/vue_shared/issuable/show/components/issuable_header_spec.js +++ b/spec/frontend/vue_shared/issuable/show/components/issuable_header_spec.js @@ -1,4 +1,4 @@ -import { GlBadge, GlIcon, GlAvatarLabeled } from '@gitlab/ui'; +import { GlButton, GlBadge, GlIcon, GlAvatarLabeled, GlAvatarLink } from '@gitlab/ui'; import { shallowMountExtended } from 'helpers/vue_test_utils_helper'; import { setHTMLFixture, resetHTMLFixture } from 'helpers/fixtures'; 
import IssuableHeader from '~/vue_shared/issuable/show/components/issuable_header.vue'; @@ -13,7 +13,10 @@ const issuableHeaderProps = { describe('IssuableHeader', () => { let wrapper; + const findAvatar = () => wrapper.findByTestId('avatar'); const findTaskStatusEl = () => wrapper.findByTestId('task-status'); + const findButton = () => wrapper.findComponent(GlButton); + const findGlAvatarLink = () => wrapper.findComponent(GlAvatarLink); const createComponent = (props = {}, { stubs } = {}) => { wrapper = shallowMountExtended(IssuableHeader, { @@ -40,7 +43,7 @@ describe('IssuableHeader', () => { describe('authorId', () => { it('returns numeric ID from GraphQL ID of `author` prop', () => { createComponent(); - expect(wrapper.vm.authorId).toBe(1); + expect(findGlAvatarLink().attributes('data-user-id')).toBe('1'); }); }); }); @@ -52,12 +55,14 @@ describe('IssuableHeader', () => { it('dispatches `click` event on sidebar toggle button', () => { createComponent(); - wrapper.vm.toggleSidebarButtonEl = document.querySelector('.js-toggle-right-sidebar-button'); - jest.spyOn(wrapper.vm.toggleSidebarButtonEl, 'dispatchEvent').mockImplementation(jest.fn); + const toggleSidebarButtonEl = document.querySelector('.js-toggle-right-sidebar-button'); + const dispatchEvent = jest + .spyOn(toggleSidebarButtonEl, 'dispatchEvent') + .mockImplementation(jest.fn); - wrapper.vm.handleRightSidebarToggleClick(); + findButton().vm.$emit('click'); - expect(wrapper.vm.toggleSidebarButtonEl.dispatchEvent).toHaveBeenCalledWith( + expect(dispatchEvent).toHaveBeenCalledWith( expect.objectContaining({ type: 'click', }), @@ -77,7 +82,7 @@ describe('IssuableHeader', () => { expect(statusBoxEl.text()).toContain('Open'); }); - it('renders blocked icon when issuable is blocked', async () => { + it('renders blocked icon when issuable is blocked', () => { createComponent({ blocked: true, }); @@ -88,7 +93,7 @@ describe('IssuableHeader', () => { 
expect(blockedEl.findComponent(GlIcon).props('name')).toBe('lock'); }); - it('renders confidential icon when issuable is confidential', async () => { + it('renders confidential icon when issuable is confidential', () => { createComponent({ confidential: true, }); @@ -109,7 +114,7 @@ describe('IssuableHeader', () => { href: webUrl, target: '_blank', }; - const avatarEl = wrapper.findByTestId('avatar'); + const avatarEl = findAvatar(); expect(avatarEl.exists()).toBe(true); expect(avatarEl.attributes()).toMatchObject(avatarElAttrs); expect(avatarEl.findComponent(GlAvatarLabeled).attributes()).toMatchObject({ diff --git a/spec/frontend/vue_shared/security_reports/components/manage_via_mr_spec.js b/spec/frontend/vue_shared/security_reports/components/manage_via_mr_spec.js index 6345393951c..5cdb6612487 100644 --- a/spec/frontend/vue_shared/security_reports/components/manage_via_mr_spec.js +++ b/spec/frontend/vue_shared/security_reports/components/manage_via_mr_spec.js @@ -8,7 +8,9 @@ import { extendedWrapper } from 'helpers/vue_test_utils_helper'; import waitForPromises from 'helpers/wait_for_promises'; import { humanize } from '~/lib/utils/text_utility'; import { redirectTo } from '~/lib/utils/url_utility'; -import ManageViaMr from '~/vue_shared/security_configuration/components/manage_via_mr.vue'; +import ManageViaMr, { + i18n, +} from '~/vue_shared/security_configuration/components/manage_via_mr.vue'; import { REPORT_TYPE_SAST } from '~/vue_shared/security_reports/constants'; import { buildConfigureSecurityFeatureMockFactory } from './apollo_mocks'; @@ -17,6 +19,7 @@ jest.mock('~/lib/utils/url_utility'); Vue.use(VueApollo); const projectFullPath = 'namespace/project'; +const ufErrorPrefix = 'Foo:'; describe('ManageViaMr component', () => { let wrapper; @@ -56,6 +59,10 @@ describe('ManageViaMr component', () => { ); } + beforeEach(() => { + gon.uf_error_prefix = ufErrorPrefix; + }); + // This component supports different report types/mutations depending on // whether 
it's in a CE or EE context. This makes sure we are only testing // the ones available in the current test context. @@ -72,15 +79,19 @@ describe('ManageViaMr component', () => { const buildConfigureSecurityFeatureMock = buildConfigureSecurityFeatureMockFactory( mutationId, ); - const successHandler = jest.fn(async () => buildConfigureSecurityFeatureMock()); - const noSuccessPathHandler = async () => + const successHandler = jest.fn().mockResolvedValue(buildConfigureSecurityFeatureMock()); + const noSuccessPathHandler = jest.fn().mockResolvedValue( buildConfigureSecurityFeatureMock({ successPath: '', - }); - const errorHandler = async () => - buildConfigureSecurityFeatureMock({ - errors: ['foo'], - }); + }), + ); + const errorHandler = (message = 'foo') => { + return Promise.resolve( + buildConfigureSecurityFeatureMock({ + errors: [message], + }), + ); + }; const pendingHandler = () => new Promise(() => {}); describe('when feature is configured', () => { @@ -147,9 +158,12 @@ describe('ManageViaMr component', () => { }); describe.each` - handler | message - ${noSuccessPathHandler} | ${`${featureName} merge request creation mutation failed`} - ${errorHandler} | ${'foo'} + handler | message + ${noSuccessPathHandler} | ${`${featureName} merge request creation mutation failed`} + ${errorHandler.bind(null, `${ufErrorPrefix} message`)} | ${'message'} + ${errorHandler.bind(null, 'Blah: message')} | ${i18n.genericErrorText} + ${errorHandler.bind(null, 'message')} | ${i18n.genericErrorText} + ${errorHandler} | ${i18n.genericErrorText} `('given an error response', ({ handler, message }) => { beforeEach(() => { const apolloProvider = createMockApolloProvider(mutation, handler); diff --git a/spec/frontend/webhooks/components/push_events_spec.js b/spec/frontend/webhooks/components/push_events_spec.js index ccb61c4049a..6889d48e904 100644 --- a/spec/frontend/webhooks/components/push_events_spec.js +++ b/spec/frontend/webhooks/components/push_events_spec.js @@ -61,7 +61,7 @@ 
describe('Webhook push events form editor component', () => { await nextTick(); }); - it('all_branches should be selected by default', async () => { + it('all_branches should be selected by default', () => { expect(findPushEventRulesGroup().element).toMatchSnapshot(); }); diff --git a/spec/frontend/webhooks/components/test_dropdown_spec.js b/spec/frontend/webhooks/components/test_dropdown_spec.js index 2f62ca13469..36777b0ba64 100644 --- a/spec/frontend/webhooks/components/test_dropdown_spec.js +++ b/spec/frontend/webhooks/components/test_dropdown_spec.js @@ -1,6 +1,6 @@ import { GlDisclosureDropdown } from '@gitlab/ui'; -import { mount } from '@vue/test-utils'; -import { getByRole } from '@testing-library/dom'; +import { mountExtended } from 'helpers/vue_test_utils_helper'; + import HookTestDropdown from '~/webhooks/components/test_dropdown.vue'; const mockItems = [ @@ -14,17 +14,14 @@ describe('HookTestDropdown', () => { let wrapper; const findDisclosure = () => wrapper.findComponent(GlDisclosureDropdown); - const clickItem = (itemText) => { - const item = getByRole(wrapper.element, 'button', { name: itemText }); - item.dispatchEvent(new MouseEvent('click')); - }; const createComponent = (props) => { - wrapper = mount(HookTestDropdown, { + wrapper = mountExtended(HookTestDropdown, { propsData: { items: mockItems, ...props, }, + attachTo: document.body, }); }; @@ -55,7 +52,7 @@ describe('HookTestDropdown', () => { }); }); - clickItem(mockItems[0].text); + wrapper.findByTestId('disclosure-dropdown-item').find('a').trigger('click'); return railsEventPromise; }); diff --git a/spec/frontend/whats_new/utils/notification_spec.js b/spec/frontend/whats_new/utils/notification_spec.js index dac02ee07bd..8b5663ee764 100644 --- a/spec/frontend/whats_new/utils/notification_spec.js +++ b/spec/frontend/whats_new/utils/notification_spec.js @@ -1,4 +1,5 @@ -import { loadHTMLFixture, resetHTMLFixture } from 'helpers/fixtures'; +import htmlWhatsNewNotification from 
'test_fixtures_static/whats_new_notification.html'; +import { setHTMLFixture, resetHTMLFixture } from 'helpers/fixtures'; import { useLocalStorageSpy } from 'helpers/local_storage_helper'; import { setNotification, getVersionDigest } from '~/whats_new/utils/notification'; @@ -12,7 +13,7 @@ describe('~/whats_new/utils/notification', () => { const getAppEl = () => wrapper.querySelector('.app'); beforeEach(() => { - loadHTMLFixture('static/whats_new_notification.html'); + setHTMLFixture(htmlWhatsNewNotification); wrapper = document.querySelector('.whats-new-notification-fixture-root'); }); diff --git a/spec/frontend/work_items/components/item_title_spec.js b/spec/frontend/work_items/components/item_title_spec.js index aef310319ab..3a84ba4bd5e 100644 --- a/spec/frontend/work_items/components/item_title_spec.js +++ b/spec/frontend/work_items/components/item_title_spec.js @@ -47,7 +47,7 @@ describe('ItemTitle', () => { expect(wrapper.emitted(eventName)).toBeDefined(); }); - it('renders only the text content from clipboard', async () => { + it('renders only the text content from clipboard', () => { const htmlContent = 'bold text'; const buildClipboardData = (data = {}) => ({ clipboardData: { diff --git a/spec/frontend/work_items/components/notes/work_item_add_note_spec.js b/spec/frontend/work_items/components/notes/work_item_add_note_spec.js index 2a65e91a906..a97164f9dce 100644 --- a/spec/frontend/work_items/components/notes/work_item_add_note_spec.js +++ b/spec/frontend/work_items/components/notes/work_item_add_note_spec.js @@ -1,7 +1,6 @@ -import { GlButton } from '@gitlab/ui'; -import { shallowMount } from '@vue/test-utils'; import Vue from 'vue'; import VueApollo from 'vue-apollo'; +import { shallowMountExtended } from 'helpers/vue_test_utils_helper'; import createMockApollo from 'helpers/mock_apollo_helper'; import { mockTracking } from 'helpers/tracking_helper'; import waitForPromises from 'helpers/wait_for_promises'; @@ -28,7 +27,7 @@ 
jest.mock('~/lib/utils/autosave'); const workItemId = workItemQueryResponse.data.workItem.id; -describe('WorkItemCommentForm', () => { +describe('Work item add note', () => { let wrapper; Vue.use(VueApollo); @@ -38,6 +37,7 @@ describe('WorkItemCommentForm', () => { let workItemResponseHandler; const findCommentForm = () => wrapper.findComponent(WorkItemCommentForm); + const findTextarea = () => wrapper.findByTestId('note-reply-textarea'); const createComponent = async ({ mutationHandler = mutationSuccessHandler, @@ -50,7 +50,6 @@ describe('WorkItemCommentForm', () => { workItemType = 'Task', } = {}) => { workItemResponseHandler = jest.fn().mockResolvedValue(workItemResponse); - if (signedIn) { window.gon.current_user_id = '1'; window.gon.current_user_avatar_url = 'avatar.png'; @@ -76,7 +75,7 @@ describe('WorkItemCommentForm', () => { }); const { id } = workItemQueryResponse.data.workItem; - wrapper = shallowMount(WorkItemAddNote, { + wrapper = shallowMountExtended(WorkItemAddNote, { apolloProvider, propsData: { workItemId: id, @@ -84,6 +83,8 @@ describe('WorkItemCommentForm', () => { queryVariables, fetchByIid, workItemType, + markdownPreviewPath: '/group/project/preview_markdown?target_type=WorkItem', + autocompleteDataSources: {}, }, stubs: { WorkItemCommentLocked, @@ -93,7 +94,7 @@ describe('WorkItemCommentForm', () => { await waitForPromises(); if (isEditing) { - wrapper.findComponent(GlButton).vm.$emit('click'); + findTextarea().trigger('click'); } }; @@ -209,6 +210,38 @@ describe('WorkItemCommentForm', () => { expect(wrapper.emitted('error')).toEqual([[error]]); }); + + it('ignores errors when mutation returns additional information as errors for quick actions', async () => { + await createComponent({ + isEditing: true, + mutationHandler: jest.fn().mockResolvedValue({ + data: { + createNote: { + note: { + id: 'gid://gitlab/Discussion/c872ba2d7d3eb780d2255138d67ca8b04f65b122', + discussion: { + id: 
'gid://gitlab/Discussion/c872ba2d7d3eb780d2255138d67ca8b04f65b122', + notes: { + nodes: [], + __typename: 'NoteConnection', + }, + __typename: 'Discussion', + }, + __typename: 'Note', + }, + __typename: 'CreateNotePayload', + errors: ['Commands only Removed assignee @foobar.', 'Command names ["unassign"]'], + }, + }, + }), + }); + + findCommentForm().vm.$emit('submitForm', 'updated desc'); + + await waitForPromises(); + + expect(clearDraft).toHaveBeenCalledWith('gid://gitlab/WorkItem/1-comment'); + }); }); it('calls the global ID work item query when `fetchByIid` prop is false', async () => { diff --git a/spec/frontend/work_items/components/notes/work_item_comment_form_spec.js b/spec/frontend/work_items/components/notes/work_item_comment_form_spec.js index 23a9f285804..147f2904761 100644 --- a/spec/frontend/work_items/components/notes/work_item_comment_form_spec.js +++ b/spec/frontend/work_items/components/notes/work_item_comment_form_spec.js @@ -1,11 +1,23 @@ import { shallowMount } from '@vue/test-utils'; -import { nextTick } from 'vue'; +import Vue, { nextTick } from 'vue'; +import VueApollo from 'vue-apollo'; import waitForPromises from 'helpers/wait_for_promises'; import * as autosave from '~/lib/utils/autosave'; import { ESC_KEY, ENTER_KEY } from '~/lib/utils/keys'; +import { + STATE_OPEN, + STATE_CLOSED, + STATE_EVENT_REOPEN, + STATE_EVENT_CLOSE, +} from '~/work_items/constants'; import * as confirmViaGlModal from '~/lib/utils/confirm_via_gl_modal/confirm_via_gl_modal'; import WorkItemCommentForm from '~/work_items/components/notes/work_item_comment_form.vue'; import MarkdownEditor from '~/vue_shared/components/markdown/markdown_editor.vue'; +import createMockApollo from 'helpers/mock_apollo_helper'; +import updateWorkItemMutation from '~/work_items/graphql/update_work_item.mutation.graphql'; +import { updateWorkItemMutationResponse, workItemQueryResponse } from 'jest/work_items/mock_data'; + +Vue.use(VueApollo); const draftComment = 'draft comment'; @@ 
-18,6 +30,8 @@ jest.mock('~/lib/utils/confirm_via_gl_modal/confirm_via_gl_modal', () => ({ confirmAction: jest.fn().mockResolvedValue(true), })); +const workItemId = 'gid://gitlab/WorkItem/1'; + describe('Work item comment form component', () => { let wrapper; @@ -27,14 +41,29 @@ describe('Work item comment form component', () => { const findCancelButton = () => wrapper.find('[data-testid="cancel-button"]'); const findConfirmButton = () => wrapper.find('[data-testid="confirm-button"]'); - const createComponent = ({ isSubmitting = false, initialValue = '' } = {}) => { + const mutationSuccessHandler = jest.fn().mockResolvedValue(updateWorkItemMutationResponse); + + const createComponent = ({ + isSubmitting = false, + initialValue = '', + isNewDiscussion = false, + workItemState = STATE_OPEN, + workItemType = 'Task', + mutationHandler = mutationSuccessHandler, + } = {}) => { wrapper = shallowMount(WorkItemCommentForm, { + apolloProvider: createMockApollo([[updateWorkItemMutation, mutationHandler]]), propsData: { - workItemType: 'Issue', + workItemState, + workItemId, + workItemType, ariaLabel: 'test-aria-label', autosaveKey: mockAutosaveKey, isSubmitting, initialValue, + markdownPreviewPath: '/group/project/preview_markdown?target_type=WorkItem', + autocompleteDataSources: {}, + isNewDiscussion, }, provide: { fullPath: 'test-project-path', @@ -42,11 +71,11 @@ describe('Work item comment form component', () => { }); }; - it('passes correct markdown preview path to markdown editor', () => { + it('passes markdown preview path to markdown editor', () => { createComponent(); expect(findMarkdownEditor().props('renderMarkdownPath')).toBe( - '/test-project-path/preview_markdown?target_type=Issue', + '/group/project/preview_markdown?target_type=WorkItem', ); }); @@ -99,7 +128,7 @@ describe('Work item comment form component', () => { expect(findMarkdownEditor().props('value')).toBe('new comment'); }); - it('calls `updateDraft` with correct parameters', async () => { + it('calls 
`updateDraft` with correct parameters', () => { findMarkdownEditor().vm.$emit('input', 'new comment'); expect(autosave.updateDraft).toHaveBeenCalledWith(mockAutosaveKey, 'new comment'); @@ -161,4 +190,63 @@ describe('Work item comment form component', () => { expect(wrapper.emitted('submitForm')).toEqual([[draftComment]]); }); + + describe('when used as a top level/is a new discussion', () => { + describe('cancel button text', () => { + it.each` + workItemState | workItemType | buttonText + ${STATE_OPEN} | ${'Task'} | ${'Close task'} + ${STATE_CLOSED} | ${'Task'} | ${'Reopen task'} + ${STATE_OPEN} | ${'Objective'} | ${'Close objective'} + ${STATE_CLOSED} | ${'Objective'} | ${'Reopen objective'} + ${STATE_OPEN} | ${'Key result'} | ${'Close key result'} + ${STATE_CLOSED} | ${'Key result'} | ${'Reopen key result'} + `( + 'is "$buttonText" when "$workItemType" state is "$workItemState"', + ({ workItemState, workItemType, buttonText }) => { + createComponent({ isNewDiscussion: true, workItemState, workItemType }); + + expect(findCancelButton().text()).toBe(buttonText); + }, + ); + }); + + describe('Close/reopen button click', () => { + it.each` + workItemState | stateEvent + ${STATE_OPEN} | ${STATE_EVENT_CLOSE} + ${STATE_CLOSED} | ${STATE_EVENT_REOPEN} + `( + 'calls mutation with "$stateEvent" when workItemState is "$workItemState"', + async ({ workItemState, stateEvent }) => { + createComponent({ isNewDiscussion: true, workItemState }); + + findCancelButton().vm.$emit('click'); + + await waitForPromises(); + + expect(mutationSuccessHandler).toHaveBeenCalledWith({ + input: { + id: workItemQueryResponse.data.workItem.id, + stateEvent, + }, + }); + }, + ); + + it('emits an error message when the mutation was unsuccessful', async () => { + createComponent({ + isNewDiscussion: true, + mutationHandler: jest.fn().mockRejectedValue('Error!'), + }); + findCancelButton().vm.$emit('click'); + + await waitForPromises(); + + expect(wrapper.emitted('error')).toEqual([ + ['Something 
went wrong while updating the task. Please try again.'], + ]); + }); + }); + }); }); diff --git a/spec/frontend/work_items/components/notes/work_item_discussion_spec.js b/spec/frontend/work_items/components/notes/work_item_discussion_spec.js index 6b95da0910b..568b76150c4 100644 --- a/spec/frontend/work_items/components/notes/work_item_discussion_spec.js +++ b/spec/frontend/work_items/components/notes/work_item_discussion_spec.js @@ -41,6 +41,8 @@ describe('Work Item Discussion', () => { fetchByIid, fullPath, workItemType, + markdownPreviewPath: '/group/project/preview_markdown?target_type=WorkItem', + autocompleteDataSources: {}, }, }); }; @@ -70,7 +72,7 @@ describe('Work Item Discussion', () => { expect(findToggleRepliesWidget().exists()).toBe(true); }); - it('the number of threads should be equal to the response length', async () => { + it('the number of threads should be equal to the response length', () => { expect(findAllThreads()).toHaveLength( mockWorkItemNotesWidgetResponseWithComments.discussions.nodes[0].notes.nodes.length, ); @@ -104,7 +106,7 @@ describe('Work Item Discussion', () => { await findWorkItemAddNote().vm.$emit('replying', 'reply text'); }); - it('should show optimistic behavior when replying', async () => { + it('should show optimistic behavior when replying', () => { expect(findAllThreads()).toHaveLength(2); expect(findWorkItemNoteReplying().exists()).toBe(true); }); diff --git a/spec/frontend/work_items/components/notes/work_item_note_actions_spec.js b/spec/frontend/work_items/components/notes/work_item_note_actions_spec.js index b293127b6af..b406c9d843a 100644 --- a/spec/frontend/work_items/components/notes/work_item_note_actions_spec.js +++ b/spec/frontend/work_items/components/notes/work_item_note_actions_spec.js @@ -1,3 +1,4 @@ +import { GlDropdown } from '@gitlab/ui'; import { shallowMount } from '@vue/test-utils'; import Vue from 'vue'; import VueApollo from 'vue-apollo'; @@ -17,6 +18,10 @@ describe('Work Item Note Actions', () => { 
const findReplyButton = () => wrapper.findComponent(ReplyButton); const findEditButton = () => wrapper.find('[data-testid="edit-work-item-note"]'); const findEmojiButton = () => wrapper.find('[data-testid="note-emoji-button"]'); + const findDropdown = () => wrapper.findComponent(GlDropdown); + const findDeleteNoteButton = () => wrapper.find('[data-testid="delete-note-action"]'); + const findCopyLinkButton = () => wrapper.find('[data-testid="copy-link-action"]'); + const findAssignUnassignButton = () => wrapper.find('[data-testid="assign-note-action"]'); const addEmojiMutationResolver = jest.fn().mockResolvedValue({ data: { @@ -29,13 +34,19 @@ describe('Work Item Note Actions', () => { template: '
    ', }; - const createComponent = ({ showReply = true, showEdit = true, showAwardEmoji = true } = {}) => { + const createComponent = ({ + showReply = true, + showEdit = true, + showAwardEmoji = true, + showAssignUnassign = false, + } = {}) => { wrapper = shallowMount(WorkItemNoteActions, { propsData: { showReply, showEdit, noteId, showAwardEmoji, + showAssignUnassign, }, provide: { glFeatures: { @@ -113,4 +124,75 @@ describe('Work Item Note Actions', () => { }); }); }); + + describe('delete note', () => { + it('should display the `Delete comment` dropdown item if user has a permission to delete a note', () => { + createComponent({ + showEdit: true, + }); + + expect(findDropdown().exists()).toBe(true); + expect(findDeleteNoteButton().exists()).toBe(true); + }); + + it('should not display the `Delete comment` dropdown item if user has no permission to delete a note', () => { + createComponent({ + showEdit: false, + }); + + expect(findDropdown().exists()).toBe(true); + expect(findDeleteNoteButton().exists()).toBe(false); + }); + + it('should emit `deleteNote` event when delete note action is clicked', () => { + createComponent({ + showEdit: true, + }); + + findDeleteNoteButton().vm.$emit('click'); + + expect(wrapper.emitted('deleteNote')).toEqual([[]]); + }); + }); + + describe('copy link', () => { + beforeEach(() => { + createComponent({}); + }); + it('should display Copy link always', () => { + expect(findCopyLinkButton().exists()).toBe(true); + }); + + it('should emit `notifyCopyDone` event when copy link note action is clicked', () => { + findCopyLinkButton().vm.$emit('click'); + + expect(wrapper.emitted('notifyCopyDone')).toEqual([[]]); + }); + }); + + describe('assign/unassign to commenting user', () => { + it('should not display assign/unassign by default', () => { + createComponent(); + + expect(findAssignUnassignButton().exists()).toBe(false); + }); + + it('should display assign/unassign when the props is true', () => { + createComponent({ + 
showAssignUnassign: true, + }); + + expect(findAssignUnassignButton().exists()).toBe(true); + }); + + it('should emit `assignUser` event when assign note action is clicked', () => { + createComponent({ + showAssignUnassign: true, + }); + + findAssignUnassignButton().vm.$emit('click'); + + expect(wrapper.emitted('assignUser')).toEqual([[]]); + }); + }); }); diff --git a/spec/frontend/work_items/components/notes/work_item_note_spec.js b/spec/frontend/work_items/components/notes/work_item_note_spec.js index 8e574dc1a81..69b7c7b0828 100644 --- a/spec/frontend/work_items/components/notes/work_item_note_spec.js +++ b/spec/frontend/work_items/components/notes/work_item_note_spec.js @@ -1,10 +1,9 @@ -import { GlAvatarLink, GlDropdown } from '@gitlab/ui'; import { shallowMount } from '@vue/test-utils'; import Vue, { nextTick } from 'vue'; import VueApollo from 'vue-apollo'; import mockApollo from 'helpers/mock_apollo_helper'; import waitForPromises from 'helpers/wait_for_promises'; -import { updateDraft } from '~/lib/utils/autosave'; +import { updateDraft, clearDraft } from '~/lib/utils/autosave'; import EditedAt from '~/issues/show/components/edited.vue'; import WorkItemNote from '~/work_items/components/notes/work_item_note.vue'; import TimelineEntryItem from '~/vue_shared/components/notes/timeline_entry_item.vue'; @@ -12,8 +11,17 @@ import NoteBody from '~/work_items/components/notes/work_item_note_body.vue'; import NoteHeader from '~/notes/components/note_header.vue'; import NoteActions from '~/work_items/components/notes/work_item_note_actions.vue'; import WorkItemCommentForm from '~/work_items/components/notes/work_item_comment_form.vue'; +import workItemQuery from '~/work_items/graphql/work_item.query.graphql'; import updateWorkItemNoteMutation from '~/work_items/graphql/notes/update_work_item_note.mutation.graphql'; -import { mockWorkItemCommentNote } from 'jest/work_items/mock_data'; +import updateWorkItemMutation from 
'~/work_items/graphql/update_work_item.mutation.graphql'; +import { + mockAssignees, + mockWorkItemCommentNote, + updateWorkItemMutationResponse, + workItemQueryResponse, +} from 'jest/work_items/mock_data'; +import { i18n, TRACKING_CATEGORY_SHOW } from '~/work_items/constants'; +import { mockTracking } from 'helpers/tracking_helper'; Vue.use(VueApollo); jest.mock('~/lib/utils/autosave'); @@ -22,6 +30,7 @@ describe('Work Item Note', () => { let wrapper; const updatedNoteText = '# Some title'; const updatedNoteBody = '

    Some title

    '; + const mockWorkItemId = workItemQueryResponse.data.workItem.id; const successHandler = jest.fn().mockResolvedValue({ data: { @@ -35,32 +44,51 @@ describe('Work Item Note', () => { }, }, }); + + const workItemResponseHandler = jest.fn().mockResolvedValue(workItemQueryResponse); + + const updateWorkItemMutationSuccessHandler = jest + .fn() + .mockResolvedValue(updateWorkItemMutationResponse); + const errorHandler = jest.fn().mockRejectedValue('Oops'); - const findAuthorAvatarLink = () => wrapper.findComponent(GlAvatarLink); const findTimelineEntryItem = () => wrapper.findComponent(TimelineEntryItem); const findNoteHeader = () => wrapper.findComponent(NoteHeader); const findNoteBody = () => wrapper.findComponent(NoteBody); const findNoteActions = () => wrapper.findComponent(NoteActions); - const findDropdown = () => wrapper.findComponent(GlDropdown); const findCommentForm = () => wrapper.findComponent(WorkItemCommentForm); const findEditedAt = () => wrapper.findComponent(EditedAt); - - const findDeleteNoteButton = () => wrapper.find('[data-testid="delete-note-action"]'); const findNoteWrapper = () => wrapper.find('[data-testid="note-wrapper"]'); const createComponent = ({ note = mockWorkItemCommentNote, isFirstNote = false, updateNoteMutationHandler = successHandler, + workItemId = mockWorkItemId, + updateWorkItemMutationHandler = updateWorkItemMutationSuccessHandler, + assignees = mockAssignees, + queryVariables = { id: mockWorkItemId }, + fetchByIid = false, } = {}) => { wrapper = shallowMount(WorkItemNote, { propsData: { + workItemId, note, isFirstNote, workItemType: 'Task', + markdownPreviewPath: '/group/project/preview_markdown?target_type=WorkItem', + autocompleteDataSources: {}, + assignees, + queryVariables, + fetchByIid, + fullPath: 'test-project-path', }, - apolloProvider: mockApollo([[updateWorkItemNoteMutation, updateNoteMutationHandler]]), + apolloProvider: mockApollo([ + [workItemQuery, workItemResponseHandler], + [updateWorkItemNoteMutation, 
updateNoteMutationHandler], + [updateWorkItemMutation, updateWorkItemMutationHandler], + ]), }); }; @@ -124,6 +152,7 @@ describe('Work Item Note', () => { await waitForPromises(); expect(findCommentForm().exists()).toBe(false); + expect(clearDraft).toHaveBeenCalledWith(`${mockWorkItemCommentNote.id}-comment`); }); describe('when mutation fails', () => { @@ -178,8 +207,7 @@ describe('Work Item Note', () => { }, }); - expect(findEditedAt().exists()).toBe(true); - expect(findEditedAt().props()).toEqual({ + expect(findEditedAt().props()).toMatchObject({ updatedAt: '2023-02-12T07:47:40Z', updatedByName: 'Administrator', updatedByPath: 'test-path', @@ -215,45 +243,62 @@ describe('Work Item Note', () => { expect(findNoteActions().exists()).toBe(true); }); - it('should have the Avatar link for comment threads', () => { - expect(findAuthorAvatarLink().exists()).toBe(true); - }); - it('should not have the reply button props', () => { expect(findNoteActions().props('showReply')).toBe(false); }); }); - it('should display the `Delete comment` dropdown item if user has a permission to delete a note', () => { - createComponent({ - note: { - ...mockWorkItemCommentNote, - userPermissions: { ...mockWorkItemCommentNote.userPermissions, adminNote: true }, - }, + describe('assign/unassign to commenting user', () => { + it('calls a mutation with correct variables', async () => { + createComponent({ assignees: mockAssignees }); + await waitForPromises(); + findNoteActions().vm.$emit('assignUser'); + + await waitForPromises(); + + expect(updateWorkItemMutationSuccessHandler).toHaveBeenCalledWith({ + input: { + id: mockWorkItemId, + assigneesWidget: { + assigneeIds: [mockAssignees[1].id], + }, + }, + }); }); - expect(findDropdown().exists()).toBe(true); - expect(findDeleteNoteButton().exists()).toBe(true); - }); + it('emits an error and resets assignees if mutation was rejected', async () => { + createComponent({ + updateWorkItemMutationHandler: errorHandler, + assignees: 
[mockAssignees[0]], + }); - it('should not display the `Delete comment` dropdown item if user has no permission to delete a note', () => { - createComponent(); + await waitForPromises(); - expect(findDropdown().exists()).toBe(true); - expect(findDeleteNoteButton().exists()).toBe(false); - }); + expect(findNoteActions().props('isAuthorAnAssignee')).toEqual(true); - it('should emit `deleteNote` event when delete note action is clicked', () => { - createComponent({ - note: { - ...mockWorkItemCommentNote, - userPermissions: { ...mockWorkItemCommentNote.userPermissions, adminNote: true }, - }, + findNoteActions().vm.$emit('assignUser'); + + await waitForPromises(); + + expect(wrapper.emitted('error')).toEqual([[i18n.updateError]]); + expect(findNoteActions().props('isAuthorAnAssignee')).toEqual(true); }); - findDeleteNoteButton().vm.$emit('click'); + it('tracks the event', async () => { + createComponent(); + await waitForPromises(); + const trackingSpy = mockTracking(undefined, wrapper.element, jest.spyOn); + + findNoteActions().vm.$emit('assignUser'); - expect(wrapper.emitted('deleteNote')).toEqual([[]]); + await waitForPromises(); + + expect(trackingSpy).toHaveBeenCalledWith(TRACKING_CATEGORY_SHOW, 'unassigned_user', { + category: TRACKING_CATEGORY_SHOW, + label: 'work_item_note_actions', + property: 'type_Task', + }); + }); }); }); }); diff --git a/spec/frontend/work_items/components/widget_wrapper_spec.js b/spec/frontend/work_items/components/widget_wrapper_spec.js index a87233300fc..87fbd1b3830 100644 --- a/spec/frontend/work_items/components/widget_wrapper_spec.js +++ b/spec/frontend/work_items/components/widget_wrapper_spec.js @@ -30,7 +30,7 @@ describe('WidgetWrapper component', () => { expect(findWidgetBody().exists()).toBe(false); }); - it('shows alert when list loading fails', () => { + it('shows an alert when list loading fails', () => { const error = 'Some error'; createComponent({ error }); diff --git 
a/spec/frontend/work_items/components/work_item_actions_spec.js b/spec/frontend/work_items/components/work_item_actions_spec.js index a0db8172bf6..a5006b46063 100644 --- a/spec/frontend/work_items/components/work_item_actions_spec.js +++ b/spec/frontend/work_items/components/work_item_actions_spec.js @@ -1,17 +1,35 @@ -import { GlDropdownDivider, GlModal } from '@gitlab/ui'; +import { GlDropdownDivider, GlModal, GlToggle } from '@gitlab/ui'; +import Vue from 'vue'; +import VueApollo from 'vue-apollo'; +import createMockApollo from 'helpers/mock_apollo_helper'; +import waitForPromises from 'helpers/wait_for_promises'; import { shallowMountExtended } from 'helpers/vue_test_utils_helper'; +import { isLoggedIn } from '~/lib/utils/common_utils'; +import toast from '~/vue_shared/plugins/global_toast'; import WorkItemActions from '~/work_items/components/work_item_actions.vue'; +import { + TEST_ID_CONFIDENTIALITY_TOGGLE_ACTION, + TEST_ID_NOTIFICATIONS_TOGGLE_ACTION, + TEST_ID_NOTIFICATIONS_TOGGLE_FORM, + TEST_ID_DELETE_ACTION, +} from '~/work_items/constants'; +import updateWorkItemNotificationsMutation from '~/work_items/graphql/update_work_item_notifications.mutation.graphql'; +import { workItemResponseFactory } from '../mock_data'; -const TEST_ID_CONFIDENTIALITY_TOGGLE_ACTION = 'confidentiality-toggle-action'; -const TEST_ID_DELETE_ACTION = 'delete-action'; +jest.mock('~/lib/utils/common_utils'); +jest.mock('~/vue_shared/plugins/global_toast'); describe('WorkItemActions component', () => { + Vue.use(VueApollo); + let wrapper; let glModalDirective; const findModal = () => wrapper.findComponent(GlModal); const findConfidentialityToggleButton = () => wrapper.findByTestId(TEST_ID_CONFIDENTIALITY_TOGGLE_ACTION); + const findNotificationsToggleButton = () => + wrapper.findByTestId(TEST_ID_NOTIFICATIONS_TOGGLE_ACTION); const findDeleteButton = () => wrapper.findByTestId(TEST_ID_DELETE_ACTION); const findDropdownItems = () => 
wrapper.findAll('[data-testid="work-item-actions-dropdown"] > *'); const findDropdownItemsActual = () => @@ -25,20 +43,27 @@ describe('WorkItemActions component', () => { text: x.text(), }; }); + const findNotificationsToggle = () => wrapper.findComponent(GlToggle); const createComponent = ({ canUpdate = true, canDelete = true, isConfidential = false, + subscribed = false, isParentConfidential = false, + notificationsMock = [updateWorkItemNotificationsMutation, jest.fn()], } = {}) => { + const handlers = [notificationsMock]; glModalDirective = jest.fn(); wrapper = shallowMountExtended(WorkItemActions, { + apolloProvider: createMockApollo(handlers), + isLoggedIn: isLoggedIn(), propsData: { - workItemId: '123', + workItemId: 'gid://gitlab/WorkItem/1', canUpdate, canDelete, isConfidential, + subscribed, isParentConfidential, workItemType: 'Task', }, @@ -52,6 +77,10 @@ describe('WorkItemActions component', () => { }); }; + beforeEach(() => { + isLoggedIn.mockReturnValue(true); + }); + it('renders modal', () => { createComponent(); @@ -63,6 +92,13 @@ describe('WorkItemActions component', () => { createComponent(); expect(findDropdownItemsActual()).toEqual([ + { + testId: TEST_ID_NOTIFICATIONS_TOGGLE_FORM, + text: '', + }, + { + divider: true, + }, { testId: TEST_ID_CONFIDENTIALITY_TOGGLE_ACTION, text: 'Turn on confidentiality', @@ -133,7 +169,110 @@ describe('WorkItemActions component', () => { }); expect(findDeleteButton().exists()).toBe(false); - expect(wrapper.findComponent(GlDropdownDivider).exists()).toBe(false); + }); + }); + + describe('notifications action', () => { + const errorMessage = 'Failed to subscribe'; + const notificationToggledOffMessage = 'Notifications turned off.'; + const notificationToggledOnMessage = 'Notifications turned on.'; + + const workItemQueryResponse = workItemResponseFactory({ canUpdate: true, canDelete: true }); + const inputVariablesOff = { + id: workItemQueryResponse.data.workItem.id, + notificationsWidget: { + subscribed: false, + 
}, + }; + + const inputVariablesOn = { + id: workItemQueryResponse.data.workItem.id, + notificationsWidget: { + subscribed: true, + }, + }; + + const notificationsOffExpectedResponse = workItemResponseFactory({ + subscribed: false, + }); + + const toggleNotificationsOffHandler = jest.fn().mockResolvedValue({ + data: { + workItemUpdate: { + workItem: notificationsOffExpectedResponse.data.workItem, + errors: [], + }, + }, + }); + + const notificationsOnExpectedResponse = workItemResponseFactory({ + subscribed: true, + }); + + const toggleNotificationsOnHandler = jest.fn().mockResolvedValue({ + data: { + workItemUpdate: { + workItem: notificationsOnExpectedResponse.data.workItem, + errors: [], + }, + }, + }); + + const toggleNotificationsFailureHandler = jest.fn().mockRejectedValue(new Error(errorMessage)); + + const notificationsOffMock = [ + updateWorkItemNotificationsMutation, + toggleNotificationsOffHandler, + ]; + + const notificationsOnMock = [updateWorkItemNotificationsMutation, toggleNotificationsOnHandler]; + + const notificationsFailureMock = [ + updateWorkItemNotificationsMutation, + toggleNotificationsFailureHandler, + ]; + + beforeEach(() => { + createComponent(); + isLoggedIn.mockReturnValue(true); + }); + + it('renders toggle button', () => { + expect(findNotificationsToggleButton().exists()).toBe(true); + }); + + it.each` + scenario | subscribedToNotifications | notificationsMock | inputVariables | toastMessage + ${'turned off'} | ${false} | ${notificationsOffMock} | ${inputVariablesOff} | ${notificationToggledOffMessage} + ${'turned on'} | ${true} | ${notificationsOnMock} | ${inputVariablesOn} | ${notificationToggledOnMessage} + `( + 'calls mutation and displays toast when notification toggle is $scenario', + async ({ subscribedToNotifications, notificationsMock, inputVariables, toastMessage }) => { + createComponent({ notificationsMock }); + + await waitForPromises(); + + findNotificationsToggle().vm.$emit('change', subscribedToNotifications); + + 
await waitForPromises(); + + expect(notificationsMock[1]).toHaveBeenCalledWith({ + input: inputVariables, + }); + expect(toast).toHaveBeenCalledWith(toastMessage); + }, + ); + + it('emits error when the update notification mutation fails', async () => { + createComponent({ notificationsMock: notificationsFailureMock }); + + await waitForPromises(); + + findNotificationsToggle().vm.$emit('change', false); + + await waitForPromises(); + + expect(wrapper.emitted('error')).toEqual([[errorMessage]]); }); }); }); diff --git a/spec/frontend/work_items/components/work_item_assignees_spec.js b/spec/frontend/work_items/components/work_item_assignees_spec.js index 2a8159f7294..af97b3680f9 100644 --- a/spec/frontend/work_items/components/work_item_assignees_spec.js +++ b/spec/frontend/work_items/components/work_item_assignees_spec.js @@ -318,7 +318,7 @@ describe('WorkItemAssignees component', () => { return waitForPromises(); }); - it('renders `Assign myself` button', async () => { + it('renders `Assign myself` button', () => { findTokenSelector().trigger('mouseover'); expect(findAssignSelfButton().exists()).toBe(true); }); diff --git a/spec/frontend/work_items/components/work_item_description_spec.js b/spec/frontend/work_items/components/work_item_description_spec.js index b4b7b8989ea..099c45ac683 100644 --- a/spec/frontend/work_items/components/work_item_description_spec.js +++ b/spec/frontend/work_items/components/work_item_description_spec.js @@ -116,10 +116,7 @@ describe('WorkItemDescription', () => { supportsQuickActions: true, renderMarkdownPath: markdownPreviewPath(fullPath, iid), quickActionsDocsPath: wrapper.vm.$options.quickActionsDocsPath, - }); - - expect(findMarkdownEditor().vm.$attrs).toMatchObject({ - 'autocomplete-data-sources': autocompleteDataSources(fullPath, iid), + autocompleteDataSources: autocompleteDataSources(fullPath, iid), }); }); }); @@ -179,7 +176,7 @@ describe('WorkItemDescription', () => { }), }); - expect(findEditedAt().props()).toEqual({ + 
expect(findEditedAt().props()).toMatchObject({ updatedAt: lastEditedAt, updatedByName: lastEditedBy.name, updatedByPath: lastEditedBy.webPath, diff --git a/spec/frontend/work_items/components/work_item_detail_spec.js b/spec/frontend/work_items/components/work_item_detail_spec.js index fe7556f8ec6..8e5b607cee7 100644 --- a/spec/frontend/work_items/components/work_item_detail_spec.js +++ b/spec/frontend/work_items/components/work_item_detail_spec.js @@ -29,12 +29,13 @@ import WorkItemDetailModal from '~/work_items/components/work_item_detail_modal. import { i18n } from '~/work_items/constants'; import workItemQuery from '~/work_items/graphql/work_item.query.graphql'; import workItemByIidQuery from '~/work_items/graphql/work_item_by_iid.query.graphql'; -import workItemDatesSubscription from '~/work_items/graphql/work_item_dates.subscription.graphql'; +import workItemDatesSubscription from '~/graphql_shared/subscriptions/work_item_dates.subscription.graphql'; import workItemTitleSubscription from '~/work_items/graphql/work_item_title.subscription.graphql'; import workItemAssigneesSubscription from '~/work_items/graphql/work_item_assignees.subscription.graphql'; import workItemMilestoneSubscription from '~/work_items/graphql/work_item_milestone.subscription.graphql'; import updateWorkItemMutation from '~/work_items/graphql/update_work_item.mutation.graphql'; import updateWorkItemTaskMutation from '~/work_items/graphql/update_work_item_task.mutation.graphql'; + import { mockParent, workItemDatesSubscriptionResponse, @@ -337,7 +338,7 @@ describe('WorkItemDetail component', () => { expect(findLoadingIcon().exists()).toBe(false); }); - it('shows alert message when mutation fails', async () => { + it('shows an alert when mutation fails', async () => { createComponent({ handler: handlerMock, confidentialityMock: confidentialityFailureMock, @@ -388,11 +389,12 @@ describe('WorkItemDetail component', () => { expect(findParent().exists()).toBe(false); }); - it('shows work item 
type if there is not a parent', async () => { + it('shows work item type with reference when there is no a parent', async () => { createComponent({ handler: jest.fn().mockResolvedValue(workItemQueryResponseWithoutParent) }); await waitForPromises(); expect(findWorkItemType().exists()).toBe(true); + expect(findWorkItemType().text()).toBe('Task #1'); }); describe('with parent', () => { @@ -407,7 +409,7 @@ describe('WorkItemDetail component', () => { expect(findParent().exists()).toBe(true); }); - it('does not show work item type', async () => { + it('does not show work item type', () => { expect(findWorkItemType().exists()).toBe(false); }); diff --git a/spec/frontend/work_items/components/work_item_links/work_item_link_child_metadata_spec.js b/spec/frontend/work_items/components/work_item_links/work_item_link_child_metadata_spec.js index e693ccfb156..07efb1c5ac8 100644 --- a/spec/frontend/work_items/components/work_item_links/work_item_link_child_metadata_spec.js +++ b/spec/frontend/work_items/components/work_item_links/work_item_link_child_metadata_spec.js @@ -1,4 +1,4 @@ -import { GlLabel, GlAvatarsInline } from '@gitlab/ui'; +import { GlAvatarsInline } from '@gitlab/ui'; import { shallowMountExtended } from 'helpers/vue_test_utils_helper'; @@ -8,10 +8,9 @@ import WorkItemLinkChildMetadata from '~/work_items/components/work_item_links/w import { workItemObjectiveMetadataWidgets } from '../../mock_data'; describe('WorkItemLinkChildMetadata', () => { - const { MILESTONE, ASSIGNEES, LABELS } = workItemObjectiveMetadataWidgets; + const { MILESTONE, ASSIGNEES } = workItemObjectiveMetadataWidgets; const mockMilestone = MILESTONE.milestone; const mockAssignees = ASSIGNEES.assignees.nodes; - const mockLabels = LABELS.labels.nodes; let wrapper; const createComponent = ({ metadataWidgets = workItemObjectiveMetadataWidgets } = {}) => { @@ -53,18 +52,4 @@ describe('WorkItemLinkChildMetadata', () => { badgeSrOnlyText: '', }); }); - - it('renders labels', () => { - const labels 
= wrapper.findAllComponents(GlLabel); - const mockLabel = mockLabels[0]; - - expect(labels).toHaveLength(mockLabels.length); - expect(labels.at(0).props()).toMatchObject({ - title: mockLabel.title, - backgroundColor: mockLabel.color, - description: mockLabel.description, - scoped: false, - }); - expect(labels.at(1).props('scoped')).toBe(true); // Second label is scoped - }); }); diff --git a/spec/frontend/work_items/components/work_item_links/work_item_link_child_spec.js b/spec/frontend/work_items/components/work_item_links/work_item_link_child_spec.js index 721436e217e..106f9d46513 100644 --- a/spec/frontend/work_items/components/work_item_links/work_item_link_child_spec.js +++ b/spec/frontend/work_items/components/work_item_links/work_item_link_child_spec.js @@ -1,4 +1,4 @@ -import { GlIcon } from '@gitlab/ui'; +import { GlLabel, GlIcon } from '@gitlab/ui'; import Vue from 'vue'; import VueApollo from 'vue-apollo'; import createMockApollo from 'helpers/mock_apollo_helper'; @@ -11,6 +11,7 @@ import { createAlert } from '~/alert'; import RichTimestampTooltip from '~/vue_shared/components/rich_timestamp_tooltip.vue'; import getWorkItemTreeQuery from '~/work_items/graphql/work_item_tree.query.graphql'; +import updateWorkItemMutation from '~/work_items/graphql/update_work_item.mutation.graphql'; import WorkItemLinkChild from '~/work_items/components/work_item_links/work_item_link_child.vue'; import WorkItemLinksMenu from '~/work_items/components/work_item_links/work_item_links_menu.vue'; import WorkItemTreeChildren from '~/work_items/components/work_item_links/work_item_tree_children.vue'; @@ -29,6 +30,8 @@ import { workItemHierarchyTreeResponse, workItemHierarchyTreeFailureResponse, workItemObjectiveMetadataWidgets, + changeIndirectWorkItemParentMutationResponse, + workItemUpdateFailureResponse, } from '../../mock_data'; jest.mock('~/alert'); @@ -37,6 +40,14 @@ describe('WorkItemLinkChild', () => { const WORK_ITEM_ID = 'gid://gitlab/WorkItem/2'; let wrapper; let 
getWorkItemTreeQueryHandler; + let mutationChangeParentHandler; + const { LABELS } = workItemObjectiveMetadataWidgets; + const mockLabels = LABELS.labels.nodes; + + const $toast = { + show: jest.fn(), + hide: jest.fn(), + }; Vue.use(VueApollo); @@ -49,10 +60,17 @@ describe('WorkItemLinkChild', () => { apolloProvider = null, } = {}) => { getWorkItemTreeQueryHandler = jest.fn().mockResolvedValue(workItemHierarchyTreeResponse); + mutationChangeParentHandler = jest + .fn() + .mockResolvedValue(changeIndirectWorkItemParentMutationResponse); wrapper = shallowMountExtended(WorkItemLinkChild, { apolloProvider: - apolloProvider || createMockApollo([[getWorkItemTreeQuery, getWorkItemTreeQueryHandler]]), + apolloProvider || + createMockApollo([ + [getWorkItemTreeQuery, getWorkItemTreeQueryHandler], + [updateWorkItemMutation, mutationChangeParentHandler], + ]), propsData: { projectPath, canUpdate, @@ -60,6 +78,9 @@ describe('WorkItemLinkChild', () => { childItem, workItemType, }, + mocks: { + $toast, + }, }); }; @@ -165,8 +186,6 @@ describe('WorkItemLinkChild', () => { expect(metadataEl.props()).toMatchObject({ metadataWidgets: workItemObjectiveMetadataWidgets, }); - - expect(wrapper.find('[data-testid="links-child"]').classes()).toContain('gl-py-3'); }); it('does not render item metadata component when item has no metadata present', () => { @@ -176,8 +195,20 @@ describe('WorkItemLinkChild', () => { }); expect(findMetadataComponent().exists()).toBe(false); + }); + + it('renders labels', () => { + const labels = wrapper.findAllComponents(GlLabel); + const mockLabel = mockLabels[0]; - expect(wrapper.find('[data-testid="links-child"]').classes()).toContain('gl-py-0'); + expect(labels).toHaveLength(mockLabels.length); + expect(labels.at(0).props()).toMatchObject({ + title: mockLabel.title, + backgroundColor: mockLabel.color, + description: mockLabel.description, + scoped: false, + }); + expect(labels.at(1).props('scoped')).toBe(true); // Second label is scoped }); }); @@ -216,6 
+247,13 @@ describe('WorkItemLinkChild', () => { const findExpandButton = () => wrapper.findByTestId('expand-child'); const findTreeChildren = () => wrapper.findComponent(WorkItemTreeChildren); + const getWidgetHierarchy = () => + workItemHierarchyTreeResponse.data.workItem.widgets.find( + (widget) => widget.type === WIDGET_TYPE_HIERARCHY, + ); + const getChildrenNodes = () => getWidgetHierarchy().children.nodes; + const findFirstItemId = () => getChildrenNodes()[0].id; + beforeEach(() => { getWorkItemTreeQueryHandler.mockClear(); createComponent({ @@ -238,10 +276,8 @@ describe('WorkItemLinkChild', () => { expect(getWorkItemTreeQueryHandler).toHaveBeenCalled(); expect(findTreeChildren().exists()).toBe(true); - const widgetHierarchy = workItemHierarchyTreeResponse.data.workItem.widgets.find( - (widget) => widget.type === WIDGET_TYPE_HIERARCHY, - ); - expect(findTreeChildren().props('children')).toEqual(widgetHierarchy.children.nodes); + const childrenNodes = getChildrenNodes(); + expect(findTreeChildren().props('children')).toEqual(childrenNodes); }); it('does not fetch children if already fetched once while clicking expand button', async () => { @@ -290,5 +326,74 @@ describe('WorkItemLinkChild', () => { expect(wrapper.emitted('click')).toEqual([['event']]); }); + + it('shows toast on removing child item', async () => { + findExpandButton().vm.$emit('click'); + await waitForPromises(); + + findTreeChildren().vm.$emit('removeChild', findFirstItemId()); + await waitForPromises(); + + expect($toast.show).toHaveBeenCalledWith('Child removed', { + action: { onClick: expect.any(Function), text: 'Undo' }, + }); + }); + + it('renders correct number of children after the removal', async () => { + findExpandButton().vm.$emit('click'); + await waitForPromises(); + + const childrenNodes = getChildrenNodes(); + expect(findTreeChildren().props('children')).toEqual(childrenNodes); + + findTreeChildren().vm.$emit('removeChild', findFirstItemId()); + await waitForPromises(); + + 
expect(findTreeChildren().props('children')).toEqual([]); + }); + + it('calls correct mutation with correct variables', async () => { + const firstItemId = findFirstItemId(); + + findExpandButton().vm.$emit('click'); + await waitForPromises(); + + findTreeChildren().vm.$emit('removeChild', firstItemId); + + expect(mutationChangeParentHandler).toHaveBeenCalledWith({ + input: { + id: firstItemId, + hierarchyWidget: { + parentId: null, + }, + }, + }); + }); + + it('shows the alert when workItem update fails', async () => { + mutationChangeParentHandler = jest.fn().mockRejectedValue(workItemUpdateFailureResponse); + const apolloProvider = createMockApollo([ + [getWorkItemTreeQuery, getWorkItemTreeQueryHandler], + [updateWorkItemMutation, mutationChangeParentHandler], + ]); + + createComponent({ + childItem: workItemObjectiveWithChild, + workItemType: WORK_ITEM_TYPE_VALUE_OBJECTIVE, + apolloProvider, + }); + + findExpandButton().vm.$emit('click'); + await waitForPromises(); + + findTreeChildren().vm.$emit('removeChild', findFirstItemId()); + await waitForPromises(); + + expect(createAlert).toHaveBeenCalledWith({ + captureError: true, + error: expect.any(Object), + message: 'Something went wrong while removing child.', + }); + }); }); }); diff --git a/spec/frontend/work_items/components/work_item_links/work_item_links_menu_spec.js b/spec/frontend/work_items/components/work_item_links/work_item_links_menu_spec.js index 4e53fc2987b..f02a9fbd021 100644 --- a/spec/frontend/work_items/components/work_item_links/work_item_links_menu_spec.js +++ b/spec/frontend/work_items/components/work_item_links/work_item_links_menu_spec.js @@ -13,7 +13,7 @@ describe('WorkItemLinksMenu', () => { const findDropdown = () => wrapper.findComponent(GlDropdown); const findRemoveDropdownItem = () => wrapper.findComponent(GlDropdownItem); - beforeEach(async () => { + beforeEach(() => { createComponent(); }); diff --git a/spec/frontend/work_items/components/work_item_links/work_item_links_spec.js 
b/spec/frontend/work_items/components/work_item_links/work_item_links_spec.js index 99e44b4d89c..e97c2328b83 100644 --- a/spec/frontend/work_items/components/work_item_links/work_item_links_spec.js +++ b/spec/frontend/work_items/components/work_item_links/work_item_links_spec.js @@ -179,7 +179,7 @@ describe('WorkItemLinks', () => { expect(findWorkItemLinkChildItems()).toHaveLength(4); }); - it('shows alert when list loading fails', async () => { + it('shows an alert when list loading fails', async () => { const errorMessage = 'Some error'; await createComponent({ fetchHandler: jest.fn().mockRejectedValue(new Error(errorMessage)), diff --git a/spec/frontend/work_items/components/work_item_notes_spec.js b/spec/frontend/work_items/components/work_item_notes_spec.js index a067923b9fc..3cc6a9813fc 100644 --- a/spec/frontend/work_items/components/work_item_notes_spec.js +++ b/spec/frontend/work_items/components/work_item_notes_spec.js @@ -18,6 +18,7 @@ import workItemNoteUpdatedSubscription from '~/work_items/graphql/notes/work_ite import workItemNoteDeletedSubscription from '~/work_items/graphql/notes/work_item_note_deleted.subscription.graphql'; import { DEFAULT_PAGE_SIZE_NOTES, WIDGET_TYPE_NOTES } from '~/work_items/constants'; import { ASC, DESC } from '~/notes/constants'; +import { autocompleteDataSources, markdownPreviewPath } from '~/work_items/utils'; import { mockWorkItemNotesResponse, workItemQueryResponse, @@ -30,6 +31,7 @@ import { } from '../mock_data'; const mockWorkItemId = workItemQueryResponse.data.workItem.id; +const mockWorkItemIid = workItemQueryResponse.data.workItem.iid; const mockNotesWidgetResponse = mockWorkItemNotesResponse.data.workItem.widgets.find( (widget) => widget.type === WIDGET_TYPE_NOTES, ); @@ -92,6 +94,7 @@ describe('WorkItemNotes component', () => { const createComponent = ({ workItemId = mockWorkItemId, fetchByIid = false, + workItemIid = mockWorkItemIid, defaultWorkItemNotesQueryHandler = workItemNotesQueryHandler, 
deleteWINoteMutationHandler = deleteWorkItemNoteMutationSuccessHandler, } = {}) => { @@ -106,6 +109,7 @@ describe('WorkItemNotes component', () => { ]), propsData: { workItemId, + workItemIid, queryVariables: { id: workItemId, }, @@ -119,7 +123,7 @@ describe('WorkItemNotes component', () => { }); }; - beforeEach(async () => { + beforeEach(() => { createComponent(); }); @@ -258,9 +262,11 @@ describe('WorkItemNotes component', () => { const commentIndex = 0; const firstCommentNote = findWorkItemCommentNoteAtIndex(commentIndex); - expect(firstCommentNote.props('discussion')).toEqual( - mockDiscussions[commentIndex].notes.nodes, - ); + expect(firstCommentNote.props()).toMatchObject({ + discussion: mockDiscussions[commentIndex].notes.nodes, + autocompleteDataSources: autocompleteDataSources('test-path', mockWorkItemIid), + markdownPreviewPath: markdownPreviewPath('test-path', mockWorkItemIid), + }); }); }); diff --git a/spec/frontend/work_items/mock_data.js b/spec/frontend/work_items/mock_data.js index fecf98b2651..c3376556d6e 100644 --- a/spec/frontend/work_items/mock_data.js +++ b/spec/frontend/work_items/mock_data.js @@ -82,6 +82,7 @@ export const workItemQueryResponse = { userPermissions: { deleteWorkItem: false, updateWorkItem: false, + setWorkItemMetadata: false, __typename: 'WorkItemPermissions', }, widgets: [ @@ -183,6 +184,7 @@ export const updateWorkItemMutationResponse = { userPermissions: { deleteWorkItem: false, updateWorkItem: false, + setWorkItemMetadata: false, __typename: 'WorkItemPermissions', }, widgets: [ @@ -286,6 +288,8 @@ export const objectiveType = { export const workItemResponseFactory = ({ canUpdate = false, canDelete = false, + notificationsWidgetPresent = true, + subscribed = true, allowsMultipleAssignees = true, assigneesWidgetPresent = true, datesWidgetPresent = true, @@ -313,7 +317,7 @@ export const workItemResponseFactory = ({ workItem: { __typename: 'WorkItem', id: 'gid://gitlab/WorkItem/1', - iid: 1, + iid: '1', title: 'Updated title', 
state: 'OPEN', description: 'description', @@ -332,6 +336,7 @@ export const workItemResponseFactory = ({ userPermissions: { deleteWorkItem: canDelete, updateWorkItem: canUpdate, + setWorkItemMetadata: canUpdate, __typename: 'WorkItemPermissions', }, widgets: [ @@ -469,6 +474,13 @@ export const workItemResponseFactory = ({ type: 'NOTES', } : { type: 'MOCK TYPE' }, + notificationsWidgetPresent + ? { + __typename: 'WorkItemWidgetNotifications', + type: 'NOTIFICATIONS', + subscribed, + } + : { type: 'MOCK TYPE' }, ], }, }, @@ -542,6 +554,7 @@ export const createWorkItemMutationResponse = { userPermissions: { deleteWorkItem: false, updateWorkItem: false, + setWorkItemMetadata: false, __typename: 'WorkItemPermissions', }, widgets: [], @@ -591,6 +604,7 @@ export const createWorkItemFromTaskMutationResponse = { userPermissions: { deleteWorkItem: false, updateWorkItem: false, + setWorkItemMetadata: false, __typename: 'WorkItemPermissions', }, widgets: [ @@ -632,6 +646,7 @@ export const createWorkItemFromTaskMutationResponse = { userPermissions: { deleteWorkItem: false, updateWorkItem: false, + setWorkItemMetadata: false, __typename: 'WorkItemPermissions', }, widgets: [], @@ -834,7 +849,7 @@ export const workItemHierarchyEmptyResponse = { data: { workItem: { id: 'gid://gitlab/WorkItem/1', - iid: 1, + iid: '1', state: 'OPEN', workItemType: { id: 'gid://gitlab/WorkItems::Type/1', @@ -857,6 +872,7 @@ export const workItemHierarchyEmptyResponse = { userPermissions: { deleteWorkItem: false, updateWorkItem: false, + setWorkItemMetadata: false, __typename: 'WorkItemPermissions', }, confidential: false, @@ -881,7 +897,7 @@ export const workItemHierarchyNoUpdatePermissionResponse = { data: { workItem: { id: 'gid://gitlab/WorkItem/1', - iid: 1, + iid: '1', state: 'OPEN', workItemType: { id: 'gid://gitlab/WorkItems::Type/6', @@ -898,6 +914,7 @@ export const workItemHierarchyNoUpdatePermissionResponse = { userPermissions: { deleteWorkItem: false, updateWorkItem: false, + 
setWorkItemMetadata: false, __typename: 'WorkItemPermissions', }, project: { @@ -1039,6 +1056,7 @@ export const workItemHierarchyResponse = { userPermissions: { deleteWorkItem: true, updateWorkItem: true, + setWorkItemMetadata: true, __typename: 'WorkItemPermissions', }, author: { @@ -1128,6 +1146,7 @@ export const workItemObjectiveWithChild = { userPermissions: { deleteWorkItem: true, updateWorkItem: true, + setWorkItemMetadata: true, __typename: 'WorkItemPermissions', }, author: { @@ -1195,6 +1214,7 @@ export const workItemHierarchyTreeResponse = { userPermissions: { deleteWorkItem: true, updateWorkItem: true, + setWorkItemMetadata: true, __typename: 'WorkItemPermissions', }, confidential: false, @@ -1258,6 +1278,68 @@ export const workItemHierarchyTreeFailureResponse = { ], }; +export const changeIndirectWorkItemParentMutationResponse = { + data: { + workItemUpdate: { + workItem: { + __typename: 'WorkItem', + workItemType: { + id: 'gid://gitlab/WorkItems::Type/2411', + name: 'Objective', + iconName: 'issue-type-objective', + __typename: 'WorkItemType', + }, + userPermissions: { + deleteWorkItem: true, + updateWorkItem: true, + setWorkItemMetadata: true, + __typename: 'WorkItemPermissions', + }, + description: null, + id: 'gid://gitlab/WorkItem/13', + iid: '13', + state: 'OPEN', + title: 'Objective 2', + confidential: false, + createdAt: '2022-08-03T12:41:54Z', + updatedAt: null, + closedAt: null, + author: { + ...mockAssignees[0], + }, + project: { + __typename: 'Project', + id: '1', + fullPath: 'test-project-path', + archived: false, + }, + widgets: [ + { + __typename: 'WorkItemWidgetHierarchy', + type: 'HIERARCHY', + parent: null, + hasChildren: false, + children: { + nodes: [], + }, + }, + ], + }, + errors: [], + __typename: 'WorkItemUpdatePayload', + }, + }, +}; + +export const workItemUpdateFailureResponse = { + data: {}, + errors: [ + { + message: 'Something went wrong', + }, + ], +}; + export const changeWorkItemParentMutationResponse = { data: { 
workItemUpdate: { @@ -1272,6 +1354,7 @@ export const changeWorkItemParentMutationResponse = { userPermissions: { deleteWorkItem: true, updateWorkItem: true, + setWorkItemMetadata: true, __typename: 'WorkItemPermissions', }, description: null, diff --git a/spec/frontend/work_items/pages/work_item_root_spec.js b/spec/frontend/work_items/pages/work_item_root_spec.js index 37326910e13..c480affe484 100644 --- a/spec/frontend/work_items/pages/work_item_root_spec.js +++ b/spec/frontend/work_items/pages/work_item_root_spec.js @@ -75,7 +75,7 @@ describe('Work items root component', () => { expect(visitUrl).toHaveBeenCalledWith(issuesListPath); }); - it('shows alert if delete fails', async () => { + it('shows an alert if delete fails', async () => { const deleteWorkItemHandler = jest.fn().mockRejectedValue(deleteWorkItemFailureResponse); createComponent({ diff --git a/spec/frontend/work_items/router_spec.js b/spec/frontend/work_items/router_spec.js index 5dad7f7c43f..bd75c5be6f1 100644 --- a/spec/frontend/work_items/router_spec.js +++ b/spec/frontend/work_items/router_spec.js @@ -13,7 +13,7 @@ import { } from 'jest/work_items/mock_data'; import App from '~/work_items/components/app.vue'; import workItemQuery from '~/work_items/graphql/work_item.query.graphql'; -import workItemDatesSubscription from '~/work_items/graphql/work_item_dates.subscription.graphql'; +import workItemDatesSubscription from '~/graphql_shared/subscriptions/work_item_dates.subscription.graphql'; import workItemTitleSubscription from '~/work_items/graphql/work_item_title.subscription.graphql'; import workItemAssigneesSubscription from '~/work_items/graphql/work_item_assignees.subscription.graphql'; import workItemLabelsSubscription from 'ee_else_ce/work_items/graphql/work_item_labels.subscription.graphql'; diff --git a/spec/frontend_integration/content_editor/content_editor_integration_spec.js b/spec/frontend_integration/content_editor/content_editor_integration_spec.js index a80c4db19b5..b3997fdb676 
100644 --- a/spec/frontend_integration/content_editor/content_editor_integration_spec.js +++ b/spec/frontend_integration/content_editor/content_editor_integration_spec.js @@ -66,7 +66,7 @@ describe('content_editor', () => { expect(wrapper.findByTestId('content-editor-loading-indicator').exists()).toBe(false); }); - it('displays the initial content', async () => { + it('displays the initial content', () => { expect(wrapper.html()).toContain(initialContent); }); }); diff --git a/spec/frontend_integration/ide/user_opens_ide_spec.js b/spec/frontend_integration/ide/user_opens_ide_spec.js index d4656b1098e..2f89b3c0612 100644 --- a/spec/frontend_integration/ide/user_opens_ide_spec.js +++ b/spec/frontend_integration/ide/user_opens_ide_spec.js @@ -23,7 +23,7 @@ describe('IDE: User opens IDE', () => { resetHTMLFixture(); }); - it('shows loading indicator while the IDE is loading', async () => { + it('shows loading indicator while the IDE is loading', () => { vm = startWebIDE(container); expect(container.querySelectorAll('.multi-file-loading-container')).toHaveLength(3); @@ -52,7 +52,7 @@ describe('IDE: User opens IDE', () => { await screen.findByText('README'); // wait for file tree to load }); - it('shows a list of files in the left sidebar', async () => { + it('shows a list of files in the left sidebar', () => { expect(ideHelper.getFilesList()).toEqual( expect.arrayContaining(['README', 'LICENSE', 'CONTRIBUTING.md']), ); diff --git a/spec/frontend_integration/snippets/snippets_notes_spec.js b/spec/frontend_integration/snippets/snippets_notes_spec.js index 5e9eaa1aada..27be7793ce6 100644 --- a/spec/frontend_integration/snippets/snippets_notes_spec.js +++ b/spec/frontend_integration/snippets/snippets_notes_spec.js @@ -1,12 +1,13 @@ import $ from 'jquery'; +import htmlSnippetsShow from 'test_fixtures/snippets/show.html'; import axios from '~/lib/utils/axios_utils'; import initGFMInput from '~/behaviors/markdown/gfm_auto_complete'; import initDeprecatedNotes from 
'~/init_deprecated_notes'; -import { loadHTMLFixture } from 'helpers/fixtures'; +import { setHTMLFixture } from 'helpers/fixtures'; describe('Integration Snippets notes', () => { - beforeEach(async () => { - loadHTMLFixture('snippets/show.html'); + beforeEach(() => { + setHTMLFixture(htmlSnippetsShow); // Check if we have to Load GFM Input const $gfmInputs = $('.js-gfm-input:not(.js-gfm-input-initialized)'); diff --git a/spec/graphql/graphql_triggers_spec.rb b/spec/graphql/graphql_triggers_spec.rb index 00b5aec366e..a8a37289ddd 100644 --- a/spec/graphql/graphql_triggers_spec.rb +++ b/spec/graphql/graphql_triggers_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe GraphqlTriggers do +RSpec.describe GraphqlTriggers, feature_category: :shared do let_it_be(:issuable, refind: true) { create(:work_item) } describe '.issuable_assignees_updated' do @@ -12,9 +12,9 @@ RSpec.describe GraphqlTriggers do issuable.update!(assignees: assignees) end - it 'triggers the issuableAssigneesUpdated subscription' do + it 'triggers the issuable_assignees_updated subscription' do expect(GitlabSchema.subscriptions).to receive(:trigger).with( - 'issuableAssigneesUpdated', + :issuable_assignees_updated, { issuable_id: issuable.to_gid }, issuable ) @@ -24,9 +24,9 @@ RSpec.describe GraphqlTriggers do end describe '.issuable_title_updated' do - it 'triggers the issuableTitleUpdated subscription' do + it 'triggers the issuable_title_updated subscription' do expect(GitlabSchema.subscriptions).to receive(:trigger).with( - 'issuableTitleUpdated', + :issuable_title_updated, { issuable_id: issuable.to_gid }, issuable ).and_call_original @@ -36,9 +36,9 @@ RSpec.describe GraphqlTriggers do end describe '.issuable_description_updated' do - it 'triggers the issuableDescriptionUpdated subscription' do + it 'triggers the issuable_description_updated subscription' do expect(GitlabSchema.subscriptions).to receive(:trigger).with( - 'issuableDescriptionUpdated', + :issuable_description_updated, { 
issuable_id: issuable.to_gid }, issuable ).and_call_original @@ -54,9 +54,9 @@ RSpec.describe GraphqlTriggers do issuable.update!(labels: labels) end - it 'triggers the issuableLabelsUpdated subscription' do + it 'triggers the issuable_labels_updated subscription' do expect(GitlabSchema.subscriptions).to receive(:trigger).with( - 'issuableLabelsUpdated', + :issuable_labels_updated, { issuable_id: issuable.to_gid }, issuable ) @@ -66,9 +66,9 @@ RSpec.describe GraphqlTriggers do end describe '.issuable_dates_updated' do - it 'triggers the issuableDatesUpdated subscription' do + it 'triggers the issuable_dates_updated subscription' do expect(GitlabSchema.subscriptions).to receive(:trigger).with( - 'issuableDatesUpdated', + :issuable_dates_updated, { issuable_id: issuable.to_gid }, issuable ).and_call_original @@ -78,9 +78,9 @@ RSpec.describe GraphqlTriggers do end describe '.issuable_milestone_updated' do - it 'triggers the issuableMilestoneUpdated subscription' do + it 'triggers the issuable_milestone_updated subscription' do expect(GitlabSchema.subscriptions).to receive(:trigger).with( - 'issuableMilestoneUpdated', + :issuable_milestone_updated, { issuable_id: issuable.to_gid }, issuable ).and_call_original @@ -90,11 +90,11 @@ RSpec.describe GraphqlTriggers do end describe '.merge_request_reviewers_updated' do - it 'triggers the mergeRequestReviewersUpdated subscription' do + it 'triggers the merge_request_reviewers_updated subscription' do merge_request = build_stubbed(:merge_request) expect(GitlabSchema.subscriptions).to receive(:trigger).with( - 'mergeRequestReviewersUpdated', + :merge_request_reviewers_updated, { issuable_id: merge_request.to_gid }, merge_request ).and_call_original @@ -104,25 +104,39 @@ RSpec.describe GraphqlTriggers do end describe '.merge_request_merge_status_updated' do - it 'triggers the mergeRequestMergeStatusUpdated subscription' do + it 'triggers the merge_request_merge_status_updated subscription' do merge_request = 
build_stubbed(:merge_request) expect(GitlabSchema.subscriptions).to receive(:trigger).with( - 'mergeRequestMergeStatusUpdated', + :merge_request_merge_status_updated, { issuable_id: merge_request.to_gid }, merge_request ).and_call_original GraphqlTriggers.merge_request_merge_status_updated(merge_request) end + + context 'when realtime_mr_status_change feature flag is disabled' do + before do + stub_feature_flags(realtime_mr_status_change: false) + end + + it 'does not trigger realtime_mr_status_change subscription' do + merge_request = build_stubbed(:merge_request) + + expect(GitlabSchema.subscriptions).not_to receive(:trigger) + + GraphqlTriggers.merge_request_merge_status_updated(merge_request) + end + end end describe '.merge_request_approval_state_updated' do - it 'triggers the mergeRequestApprovalStateUpdated subscription' do + it 'triggers the merge_request_approval_state_updated subscription' do merge_request = build_stubbed(:merge_request) expect(GitlabSchema.subscriptions).to receive(:trigger).with( - 'mergeRequestApprovalStateUpdated', + :merge_request_approval_state_updated, { issuable_id: merge_request.to_gid }, merge_request ).and_call_original diff --git a/spec/graphql/mutations/achievements/delete_spec.rb b/spec/graphql/mutations/achievements/delete_spec.rb new file mode 100644 index 00000000000..0eb6f5a2e6f --- /dev/null +++ b/spec/graphql/mutations/achievements/delete_spec.rb @@ -0,0 +1,56 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe Mutations::Achievements::Delete, feature_category: :user_profile do + include GraphqlHelpers + + let_it_be(:developer) { create(:user) } + let_it_be(:maintainer) { create(:user) } + let_it_be(:recipient) { create(:user) } + let_it_be(:group) { create(:group) } + + let(:achievement) { create(:achievement, namespace: group) } + + describe '#resolve' do + subject(:resolve_mutation) do + described_class.new(object: nil, context: { current_user: current_user }, field: nil).resolve( + 
achievement_id: achievement&.to_global_id + ) + end + + before_all do + group.add_developer(developer) + group.add_maintainer(maintainer) + end + + context 'when the user does not have permission' do + let(:current_user) { developer } + + it 'raises an error' do + expect { resolve_mutation }.to raise_error(Gitlab::Graphql::Errors::ResourceNotAvailable) + .with_message(Gitlab::Graphql::Authorize::AuthorizeResource::RESOURCE_ACCESS_ERROR) + end + end + + context 'when the user has permission' do + let(:current_user) { maintainer } + + context 'when the params are invalid' do + let(:achievement) { nil } + + it 'returns the validation error' do + expect { resolve_mutation }.to raise_error { Gitlab::Graphql::Errors::ArgumentError } + end + end + + it 'deletes the achievement' do + resolve_mutation + + expect(Achievements::Achievement.find_by(id: achievement.id)).to be_nil + end + end + end + + specify { expect(described_class).to require_graphql_authorizations(:admin_achievement) } +end diff --git a/spec/graphql/mutations/achievements/update_spec.rb b/spec/graphql/mutations/achievements/update_spec.rb new file mode 100644 index 00000000000..b69c8bef478 --- /dev/null +++ b/spec/graphql/mutations/achievements/update_spec.rb @@ -0,0 +1,57 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe Mutations::Achievements::Update, feature_category: :user_profile do + include GraphqlHelpers + + let_it_be(:developer) { create(:user) } + let_it_be(:maintainer) { create(:user) } + let_it_be(:recipient) { create(:user) } + let_it_be(:group) { create(:group) } + + let(:achievement) { create(:achievement, namespace: group) } + let(:name) { 'Hero' } + + describe '#resolve' do + subject(:resolve_mutation) do + described_class.new(object: nil, context: { current_user: current_user }, field: nil).resolve( + achievement_id: achievement&.to_global_id, name: name + ) + end + + before_all do + group.add_developer(developer) + group.add_maintainer(maintainer) + end + + 
context 'when the user does not have permission' do + let(:current_user) { developer } + + it 'raises an error' do + expect { resolve_mutation }.to raise_error(Gitlab::Graphql::Errors::ResourceNotAvailable) + .with_message(Gitlab::Graphql::Authorize::AuthorizeResource::RESOURCE_ACCESS_ERROR) + end + end + + context 'when the user has permission' do + let(:current_user) { maintainer } + + context 'when the params are invalid' do + let(:achievement) { nil } + + it 'returns the validation error' do + expect { resolve_mutation }.to raise_error { Gitlab::Graphql::Errors::ArgumentError } + end + end + + it 'updates the achievement' do + resolve_mutation + + expect(Achievements::Achievement.find_by(id: achievement.id).name).to eq(name) + end + end + end + + specify { expect(described_class).to require_graphql_authorizations(:admin_achievement) } +end diff --git a/spec/graphql/mutations/concerns/mutations/finds_by_gid_spec.rb b/spec/graphql/mutations/concerns/mutations/finds_by_gid_spec.rb deleted file mode 100644 index 451f6d1fe06..00000000000 --- a/spec/graphql/mutations/concerns/mutations/finds_by_gid_spec.rb +++ /dev/null @@ -1,26 +0,0 @@ -# frozen_string_literal: true - -require 'spec_helper' - -RSpec.describe Mutations::FindsByGid do - include GraphqlHelpers - - let(:mutation_class) do - Class.new(Mutations::BaseMutation) do - authorize :read_user - - include Mutations::FindsByGid - end - end - - let(:query) { query_double(schema: GitlabSchema) } - let(:context) { GraphQL::Query::Context.new(query: query, object: nil, values: { current_user: user }) } - let(:user) { create(:user) } - let(:gid) { user.to_global_id } - - subject(:mutation) { mutation_class.new(object: nil, context: context, field: nil) } - - it 'calls GitlabSchema.find_by_gid to find objects during authorized_find!' 
do - expect(mutation.authorized_find!(id: gid)).to eq(user) - end -end diff --git a/spec/graphql/mutations/container_repositories/destroy_spec.rb b/spec/graphql/mutations/container_repositories/destroy_spec.rb index 50e83ccdd30..85e0ac96e55 100644 --- a/spec/graphql/mutations/container_repositories/destroy_spec.rb +++ b/spec/graphql/mutations/container_repositories/destroy_spec.rb @@ -25,7 +25,7 @@ RSpec.describe Mutations::ContainerRepositories::Destroy do .to receive(:new).with(nil, user, event_name: :delete_repository, scope: :container).and_call_original expect(DeleteContainerRepositoryWorker).not_to receive(:perform_async) - expect { subject }.to change { ::Packages::Event.count }.by(1) + subject expect(container_repository.reload.delete_scheduled?).to be true end end diff --git a/spec/graphql/mutations/container_repositories/destroy_tags_spec.rb b/spec/graphql/mutations/container_repositories/destroy_tags_spec.rb index 3e5f28ee244..96dd1754155 100644 --- a/spec/graphql/mutations/container_repositories/destroy_tags_spec.rb +++ b/spec/graphql/mutations/container_repositories/destroy_tags_spec.rb @@ -39,7 +39,7 @@ RSpec.describe Mutations::ContainerRepositories::DestroyTags do it 'creates a package event' do expect(::Packages::CreateEventService) .to receive(:new).with(nil, user, event_name: :delete_tag_bulk, scope: :tag).and_call_original - expect { subject }.to change { ::Packages::Event.count }.by(1) + subject end end @@ -87,7 +87,7 @@ RSpec.describe Mutations::ContainerRepositories::DestroyTags do it 'does not create a package event' do expect(::Packages::CreateEventService).not_to receive(:new) - expect { subject }.not_to change { ::Packages::Event.count } + subject end end end diff --git a/spec/graphql/mutations/work_items/update_spec.rb b/spec/graphql/mutations/work_items/update_spec.rb new file mode 100644 index 00000000000..3acb06346a4 --- /dev/null +++ b/spec/graphql/mutations/work_items/update_spec.rb @@ -0,0 +1,21 @@ +# frozen_string_literal: true + 
+require 'spec_helper' + +RSpec.describe Mutations::WorkItems::Update, feature_category: :portfolio_management do + let_it_be(:project) { create(:project) } + let_it_be(:developer) { create(:user).tap { |user| project.add_developer(user) } } + let_it_be(:current_work_item) { create(:work_item, :task, project: project) } + let_it_be(:parent_work_item) { create(:work_item, project: project) } + + subject(:mutation) { described_class.new(object: nil, context: { current_user: current_user }, field: nil) } + + describe '#ready?' do + let(:current_user) { developer } + let(:current_gid) { current_work_item.to_gid.to_s } + let(:parent_gid) { parent_work_item.to_gid.to_s } + let(:valid_arguments) { { id: current_gid, parent_id: parent_gid } } + + it { is_expected.to be_ready(**valid_arguments) } + end +end diff --git a/spec/graphql/resolvers/achievements/achievements_resolver_spec.rb b/spec/graphql/resolvers/achievements/achievements_resolver_spec.rb index 666610dca33..a70c89aa7c7 100644 --- a/spec/graphql/resolvers/achievements/achievements_resolver_spec.rb +++ b/spec/graphql/resolvers/achievements/achievements_resolver_spec.rb @@ -6,15 +6,25 @@ RSpec.describe Resolvers::Achievements::AchievementsResolver, feature_category: include GraphqlHelpers let_it_be(:group) { create(:group, :public) } - let_it_be(:achievement) { create(:achievement, namespace: group) } + let_it_be(:achievements) { create_list(:achievement, 3, namespace: group) } + + let(:args) { {} } specify do expect(described_class).to have_nullable_graphql_type(Types::Achievements::AchievementType.connection_type) end describe '#resolve' do - it 'is not empty' do - expect(resolve_achievements).not_to be_empty + it 'returns all achievements' do + expect(resolve_achievements.items).to match_array(achievements) + end + + context 'with ids argument' do + let(:args) { { ids: [achievements[0].to_global_id, achievements[1].to_global_id] } } + + it 'returns the specified achievement' do + 
expect(resolve_achievements.items).to contain_exactly(achievements[0], achievements[1]) + end end context 'when `achievements` feature flag is diabled' do @@ -29,6 +39,6 @@ RSpec.describe Resolvers::Achievements::AchievementsResolver, feature_category: end def resolve_achievements - resolve(described_class, obj: group) + resolve(described_class, args: args, obj: group) end end diff --git a/spec/graphql/resolvers/blobs_resolver_spec.rb b/spec/graphql/resolvers/blobs_resolver_spec.rb index a666ed2a9fc..26eb6dc0abe 100644 --- a/spec/graphql/resolvers/blobs_resolver_spec.rb +++ b/spec/graphql/resolvers/blobs_resolver_spec.rb @@ -71,6 +71,14 @@ RSpec.describe Resolvers::BlobsResolver do end end + context 'when specifying HEAD ref' do + let(:ref) { 'HEAD' } + + it 'returns the specified blobs for HEAD' do + is_expected.to contain_exactly(have_attributes(path: 'README.md')) + end + end + context 'when specifying an invalid ref' do let(:ref) { 'ma:in' } diff --git a/spec/graphql/resolvers/ci/all_jobs_resolver_spec.rb b/spec/graphql/resolvers/ci/all_jobs_resolver_spec.rb index 5c632ed3443..fddc73fadfe 100644 --- a/spec/graphql/resolvers/ci/all_jobs_resolver_spec.rb +++ b/spec/graphql/resolvers/ci/all_jobs_resolver_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe Resolvers::Ci::AllJobsResolver do +RSpec.describe Resolvers::Ci::AllJobsResolver, feature_category: :continuous_integration do include GraphqlHelpers let_it_be(:successful_job) { create(:ci_build, :success, name: 'Job One') } diff --git a/spec/graphql/resolvers/ci/jobs_resolver_spec.rb b/spec/graphql/resolvers/ci/jobs_resolver_spec.rb index 581652a8cea..1e9559b738b 100644 --- a/spec/graphql/resolvers/ci/jobs_resolver_spec.rb +++ b/spec/graphql/resolvers/ci/jobs_resolver_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe Resolvers::Ci::JobsResolver do +RSpec.describe Resolvers::Ci::JobsResolver, feature_category: :continuous_integration do include GraphqlHelpers let_it_be(:project) { 
create(:project, :repository, :public) } diff --git a/spec/graphql/resolvers/ci/runner_projects_resolver_spec.rb b/spec/graphql/resolvers/ci/runner_projects_resolver_spec.rb index 6c69cdc19cc..44203fb2912 100644 --- a/spec/graphql/resolvers/ci/runner_projects_resolver_spec.rb +++ b/spec/graphql/resolvers/ci/runner_projects_resolver_spec.rb @@ -27,6 +27,28 @@ RSpec.describe Resolvers::Ci::RunnerProjectsResolver, feature_category: :runner_ end end + context 'with sort argument' do + let(:args) { { sort: sort } } + + context 'when :id_asc' do + let(:sort) { :id_asc } + + it 'returns a lazy value with projects sorted by :id_asc' do + expect(subject).to be_a(GraphQL::Execution::Lazy) + expect(subject.value.items).to eq([project1, project2, project3]) + end + end + + context 'when :id_desc' do + let(:sort) { :id_desc } + + it 'returns a lazy value with projects sorted by :id_desc' do + expect(subject).to be_a(GraphQL::Execution::Lazy) + expect(subject.value.items).to eq([project3, project2, project1]) + end + end + end + context 'with supported arguments' do let(:args) { { membership: true, search_namespaces: true, topics: %w[xyz] } } @@ -47,9 +69,9 @@ RSpec.describe Resolvers::Ci::RunnerProjectsResolver, feature_category: :runner_ end context 'without arguments' do - it 'returns a lazy value with all projects' do + it 'returns a lazy value with all projects sorted by :id_asc' do expect(subject).to be_a(GraphQL::Execution::Lazy) - expect(subject.value).to contain_exactly(project1, project2, project3) + expect(subject.value.items).to eq([project1, project2, project3]) end end end diff --git a/spec/graphql/resolvers/ci/runner_status_resolver_spec.rb b/spec/graphql/resolvers/ci/runner_status_resolver_spec.rb index 2bea256856d..49163d9fa80 100644 --- a/spec/graphql/resolvers/ci/runner_status_resolver_spec.rb +++ b/spec/graphql/resolvers/ci/runner_status_resolver_spec.rb @@ -17,10 +17,22 @@ RSpec.describe Resolvers::Ci::RunnerStatusResolver, feature_category: :runner_fl { 
legacy_mode: '14.5' } end - it 'calls runner.status with specified legacy_mode' do - expect(runner).to receive(:status).with('14.5').once.and_return(:online) + it 'calls runner.status with nil' do + expect(runner).to receive(:status).with(nil).once.and_return(:stale) + + expect(resolve_subject).to eq(:stale) + end + + context 'when disable_runner_graphql_legacy_mode feature is disabled' do + before do + stub_feature_flags(disable_runner_graphql_legacy_mode: false) + end + + it 'calls runner.status with specified legacy_mode' do + expect(runner).to receive(:status).with('14.5').once.and_return(:online) - expect(resolve_subject).to eq(:online) + expect(resolve_subject).to eq(:online) + end end end @@ -29,7 +41,7 @@ RSpec.describe Resolvers::Ci::RunnerStatusResolver, feature_category: :runner_fl { legacy_mode: nil } end - it 'calls runner.status with specified legacy_mode' do + it 'calls runner.status with nil' do expect(runner).to receive(:status).with(nil).once.and_return(:stale) expect(resolve_subject).to eq(:stale) diff --git a/spec/graphql/resolvers/ci/variables_resolver_spec.rb b/spec/graphql/resolvers/ci/variables_resolver_spec.rb index 1bfc63df71d..42227df1fe5 100644 --- a/spec/graphql/resolvers/ci/variables_resolver_spec.rb +++ b/spec/graphql/resolvers/ci/variables_resolver_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe Resolvers::Ci::VariablesResolver, feature_category: :pipeline_composition do +RSpec.describe Resolvers::Ci::VariablesResolver, feature_category: :secrets_management do include GraphqlHelpers describe '#resolve' do diff --git a/spec/graphql/resolvers/data_transfer/group_data_transfer_resolver_spec.rb b/spec/graphql/resolvers/data_transfer/group_data_transfer_resolver_spec.rb new file mode 100644 index 00000000000..4ea3d287454 --- /dev/null +++ b/spec/graphql/resolvers/data_transfer/group_data_transfer_resolver_spec.rb @@ -0,0 +1,65 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe 
Resolvers::DataTransfer::GroupDataTransferResolver, feature_category: :source_code_management do + include GraphqlHelpers + + let_it_be(:group) { create(:group) } + let_it_be(:current_user) { create(:user) } + + let(:from) { Date.new(2022, 1, 1) } + let(:to) { Date.new(2023, 1, 1) } + let(:finder_results) do + [ + build(:project_data_transfer, date: to, repository_egress: 250000) + ] + end + + context 'with anonymous access' do + let_it_be(:current_user) { nil } + + it 'does not raise an error and returns no data' do + expect { resolve_egress }.not_to raise_error + expect(resolve_egress).to be_nil + end + end + + context 'with authorized user but without enough permissions' do + it 'does not raise an error and returns no data' do + group.add_developer(current_user) + + expect { resolve_egress }.not_to raise_error + expect(resolve_egress).to be_nil + end + end + + context 'when user has permissions to see data transfer' do + before do + group.add_owner(current_user) + end + + include_examples 'Data transfer resolver' + + context 'when data_transfer_monitoring_mock_data is disabled' do + let(:finder) { instance_double(::DataTransfer::GroupDataTransferFinder) } + + before do + stub_feature_flags(data_transfer_monitoring_mock_data: false) + end + + it 'calls GroupDataTransferFinder with expected arguments' do + expect(::DataTransfer::GroupDataTransferFinder).to receive(:new).with( + group: group, from: from, to: to, user: current_user + ).once.and_return(finder) + allow(finder).to receive(:execute).once.and_return(finder_results) + + expect(resolve_egress).to eq({ egress_nodes: finder_results.map(&:attributes) }) + end + end + end + + def resolve_egress + resolve(described_class, obj: group, args: { from: from, to: to }, ctx: { current_user: current_user }) + end +end diff --git a/spec/graphql/resolvers/data_transfer/project_data_transfer_resolver_spec.rb b/spec/graphql/resolvers/data_transfer/project_data_transfer_resolver_spec.rb new file mode 100644 index 
00000000000..7307c1a54a9 --- /dev/null +++ b/spec/graphql/resolvers/data_transfer/project_data_transfer_resolver_spec.rb @@ -0,0 +1,68 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe Resolvers::DataTransfer::ProjectDataTransferResolver, feature_category: :source_code_management do + include GraphqlHelpers + + let_it_be(:project) { create(:project) } + let_it_be(:current_user) { create(:user) } + + let(:from) { Date.new(2022, 1, 1) } + let(:to) { Date.new(2023, 1, 1) } + let(:finder_results) do + [ + { + date: to, + repository_egress: 250000 + } + ] + end + + context 'with anonymous access' do + let_it_be(:current_user) { nil } + + it 'does not raise an error and returns no data' do + expect { resolve_egress }.not_to raise_error + expect(resolve_egress).to be_nil + end + end + + context 'with authorized user but without enough permissions' do + it 'does not raise an error and returns no data' do + project.add_developer(current_user) + + expect { resolve_egress }.not_to raise_error + expect(resolve_egress).to be_nil + end + end + + context 'when user has permissions to see data transfer' do + before do + project.add_owner(current_user) + end + + include_examples 'Data transfer resolver' + + context 'when data_transfer_monitoring_mock_data is disabled' do + let(:finder) { instance_double(::DataTransfer::ProjectDataTransferFinder) } + + before do + stub_feature_flags(data_transfer_monitoring_mock_data: false) + end + + it 'calls ProjectDataTransferFinder with expected arguments' do + expect(::DataTransfer::ProjectDataTransferFinder).to receive(:new).with( + project: project, from: from, to: to, user: current_user + ).once.and_return(finder) + allow(finder).to receive(:execute).once.and_return(finder_results) + + expect(resolve_egress).to eq({ egress_nodes: finder_results }) + end + end + end + + def resolve_egress + resolve(described_class, obj: project, args: { from: from, to: to }, ctx: { current_user: current_user }) + end +end diff 
--git a/spec/graphql/resolvers/data_transfer_resolver_spec.rb b/spec/graphql/resolvers/data_transfer_resolver_spec.rb deleted file mode 100644 index f5a088dc1c3..00000000000 --- a/spec/graphql/resolvers/data_transfer_resolver_spec.rb +++ /dev/null @@ -1,31 +0,0 @@ -# frozen_string_literal: true - -require 'spec_helper' - -RSpec.describe Resolvers::DataTransferResolver, feature_category: :source_code_management do - include GraphqlHelpers - - describe '.source' do - context 'with base DataTransferResolver' do - it 'raises NotImplementedError' do - expect { described_class.source }.to raise_error ::NotImplementedError - end - end - - context 'with projects DataTransferResolver' do - let(:source) { described_class.project.source } - - it 'outputs "Project"' do - expect(source).to eq 'Project' - end - end - - context 'with groups DataTransferResolver' do - let(:source) { described_class.group.source } - - it 'outputs "Group"' do - expect(source).to eq 'Group' - end - end - end -end diff --git a/spec/graphql/resolvers/group_labels_resolver_spec.rb b/spec/graphql/resolvers/group_labels_resolver_spec.rb index 71290885e6b..b0129cc3d98 100644 --- a/spec/graphql/resolvers/group_labels_resolver_spec.rb +++ b/spec/graphql/resolvers/group_labels_resolver_spec.rb @@ -48,6 +48,67 @@ RSpec.describe Resolvers::GroupLabelsResolver do end end + describe 'association preloading', :saas do + let(:params) do + { + include_ancestor_groups: true, + include_descendant_groups: true, + only_group_labels: false + } + end + + before do + group.add_developer(current_user) + + stub_feature_flags(preload_max_access_levels_for_labels_finder: flag_enabled) + + # warmup + resolve_labels(group, params).to_a + end + + context 'when the preload_max_access_levels_for_labels_finder FF is on' do + let(:flag_enabled) { true } + + it 'prevents N+1 queries' do + control = Gitlab::WithRequestStore.with_request_store do + ActiveRecord::QueryRecorder.new { resolve_labels(group, params).to_a } + end + + 
another_project = create(:project, :private, group: sub_subgroup) + another_subgroup = create(:group, :private, parent: group) + create(:label, project: another_project, name: 'another project feature') + create(:group_label, group: another_subgroup, name: 'another group feature') + + expect do + Gitlab::WithRequestStore.with_request_store do + resolve_labels(group, params).to_a + end + end.not_to exceed_query_limit(control.count) + end + end + + context 'when the preload_max_access_levels_for_labels_finder FF is off' do + let(:flag_enabled) { false } + + it 'creates N+1 queries' do + control = Gitlab::WithRequestStore.with_request_store do + ActiveRecord::QueryRecorder.new { resolve_labels(group, params).to_a } + end + + another_project = create(:project, :private, group: sub_subgroup) + another_subgroup = create(:group, :private, parent: group) + create(:label, project: another_project, name: 'another project feature') + create(:group_label, group: another_subgroup, name: 'another group feature') + + expect do + Gitlab::WithRequestStore.with_request_store do + resolve_labels(group, params).to_a + end + end.to exceed_query_limit(control.count) + end + end + end + context 'at group level' do before_all do group.add_developer(current_user) diff --git a/spec/graphql/resolvers/labels_resolver_spec.rb b/spec/graphql/resolvers/labels_resolver_spec.rb index efd2596b9eb..99955bda405 100644 --- a/spec/graphql/resolvers/labels_resolver_spec.rb +++ b/spec/graphql/resolvers/labels_resolver_spec.rb @@ -48,6 +48,66 @@ RSpec.describe Resolvers::LabelsResolver do end end + describe 'association preloading' do + let_it_be(:project) { create(:project, :private, group: sub_subgroup) } + + let(:params) do + { + include_ancestor_groups: true + } + end + + before do + group.add_developer(current_user) + + stub_feature_flags(preload_max_access_levels_for_labels_finder: flag_enabled) + + # warmup + resolve_labels(project, params).to_a + end + + context 'when the 
preload_max_access_levels_for_labels_finder FF is on' do + let(:flag_enabled) { true } + + it 'prevents N+1 queries' do + control = Gitlab::WithRequestStore.with_request_store do + ActiveRecord::QueryRecorder.new { resolve_labels(project, params).to_a } + end + + another_project = create(:project, :private, group: subgroup) + another_subgroup = create(:group, :private, parent: group) + create(:label, project: another_project, name: 'another project feature') + create(:group_label, group: another_subgroup, name: 'another group feature') + + expect do + Gitlab::WithRequestStore.with_request_store do + resolve_labels(project, params).to_a + end + end.not_to exceed_query_limit(control.count) + end + end + + context 'when the preload_max_access_levels_for_labels_finder FF is off' do + let(:flag_enabled) { false } + + it 'creates N+1 queries' do + control = Gitlab::WithRequestStore.with_request_store do + ActiveRecord::QueryRecorder.new { resolve_labels(project, params).to_a } + end + + another_project = create(:project, :private, group: subgroup) + create(:label, project: another_project, name: 'another project feature') + create(:group_label, group: subgroup, name: 'another group feature') + + expect do + Gitlab::WithRequestStore.with_request_store do + resolve_labels(project, params).to_a + end + end.to exceed_query_limit(control.count) + end + end + end + context 'with a parent project' do before_all do group.add_developer(current_user) diff --git a/spec/graphql/resolvers/paginated_tree_resolver_spec.rb b/spec/graphql/resolvers/paginated_tree_resolver_spec.rb index 9a04b716001..931d4ba132c 100644 --- a/spec/graphql/resolvers/paginated_tree_resolver_spec.rb +++ b/spec/graphql/resolvers/paginated_tree_resolver_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe Resolvers::PaginatedTreeResolver do +RSpec.describe Resolvers::PaginatedTreeResolver, feature_category: :source_code_management do include GraphqlHelpers let_it_be(:project) { create(:project, 
:repository) } @@ -61,6 +61,16 @@ RSpec.describe Resolvers::PaginatedTreeResolver do end end + context 'when repository is empty' do + before do + allow(repository).to receive(:empty?).and_return(true) + end + + it 'returns nil' do + is_expected.to be(nil) + end + end + describe 'Cursor pagination' do context 'when cursor is invalid' do let(:args) { super().merge(after: 'invalid') } diff --git a/spec/graphql/resolvers/timelog_resolver_spec.rb b/spec/graphql/resolvers/timelog_resolver_spec.rb index cd52308d895..5177873321c 100644 --- a/spec/graphql/resolvers/timelog_resolver_spec.rb +++ b/spec/graphql/resolvers/timelog_resolver_spec.rb @@ -214,7 +214,11 @@ RSpec.describe Resolvers::TimelogResolver, feature_category: :team_planning do let_it_be(:timelog3) { create(:merge_request_timelog, merge_request: merge_request, user: current_user) } it 'blah' do - expect(timelogs).to contain_exactly(timelog1, timelog3) + if user_found + expect(timelogs).to contain_exactly(timelog1, timelog3) + else + expect(timelogs).to be_empty + end end end @@ -250,16 +254,28 @@ RSpec.describe Resolvers::TimelogResolver, feature_category: :team_planning do let(:object) { current_user } let(:extra_args) { {} } let(:args) { {} } + let(:user_found) { true } it_behaves_like 'with a user' end context 'with a user filter' do let(:object) { nil } - let(:extra_args) { { username: current_user.username } } let(:args) { {} } - it_behaves_like 'with a user' + context 'when the user has timelogs' do + let(:extra_args) { { username: current_user.username } } + let(:user_found) { true } + + it_behaves_like 'with a user' + end + + context 'when the user doest not have timelogs' do + let(:extra_args) { { username: 'not_existing_user' } } + let(:user_found) { false } + + it_behaves_like 'with a user' + end end context 'when no object or arguments provided' do diff --git a/spec/graphql/types/achievements/user_achievement_type_spec.rb b/spec/graphql/types/achievements/user_achievement_type_spec.rb index 
6b1512ff841..b7fe4d815f7 100644 --- a/spec/graphql/types/achievements/user_achievement_type_spec.rb +++ b/spec/graphql/types/achievements/user_achievement_type_spec.rb @@ -20,5 +20,5 @@ RSpec.describe GitlabSchema.types['UserAchievement'], feature_category: :user_pr it { expect(described_class.graphql_name).to eq('UserAchievement') } it { expect(described_class).to have_graphql_fields(fields) } - it { expect(described_class).to require_graphql_authorizations(:read_achievement) } + it { expect(described_class).to require_graphql_authorizations(:read_user_achievement) } end diff --git a/spec/graphql/types/ci/catalog/resource_type_spec.rb b/spec/graphql/types/ci/catalog/resource_type_spec.rb new file mode 100644 index 00000000000..d0bb45a4f1d --- /dev/null +++ b/spec/graphql/types/ci/catalog/resource_type_spec.rb @@ -0,0 +1,18 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe Types::Ci::Catalog::ResourceType, feature_category: :pipeline_composition do + specify { expect(described_class.graphql_name).to eq('CiCatalogResource') } + + it 'exposes the expected fields' do + expected_fields = %i[ + id + name + description + icon + ] + + expect(described_class).to have_graphql_fields(*expected_fields) + end +end diff --git a/spec/graphql/types/ci/config/include_type_enum_spec.rb b/spec/graphql/types/ci/config/include_type_enum_spec.rb index a88316ae6f2..a75b9018a2e 100644 --- a/spec/graphql/types/ci/config/include_type_enum_spec.rb +++ b/spec/graphql/types/ci/config/include_type_enum_spec.rb @@ -6,6 +6,6 @@ RSpec.describe GitlabSchema.types['CiConfigIncludeType'] do it { expect(described_class.graphql_name).to eq('CiConfigIncludeType') } it 'exposes all the existing include types' do - expect(described_class.values.keys).to match_array(%w[remote local file template]) + expect(described_class.values.keys).to match_array(%w[remote local file template component]) end end diff --git a/spec/graphql/types/ci/job_trace_type_spec.rb 
b/spec/graphql/types/ci/job_trace_type_spec.rb new file mode 100644 index 00000000000..71803aa9ece --- /dev/null +++ b/spec/graphql/types/ci/job_trace_type_spec.rb @@ -0,0 +1,27 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe GitlabSchema.types['CiJobTrace'], feature_category: :continuous_integration do + include GraphqlHelpers + + let_it_be(:job) { create(:ci_build) } + + it 'has the correct fields' do + expected_fields = [:html_summary] + + expect(described_class).to have_graphql_fields(*expected_fields) + end + + it 'shows the correct trace contents' do + job.trace.set('BUILD TRACE') + + expect_next_instance_of(Gitlab::Ci::Trace) do |trace| + expect(trace).to receive(:html).with(last_lines: 10).and_call_original + end + + resolved_field = resolve_field(:html_summary, job.trace) + + expect(resolved_field).to eq("BUILD TRACE") + end +end diff --git a/spec/graphql/types/ci/job_type_spec.rb b/spec/graphql/types/ci/job_type_spec.rb index a761a256899..7715ccdd075 100644 --- a/spec/graphql/types/ci/job_type_spec.rb +++ b/spec/graphql/types/ci/job_type_spec.rb @@ -40,7 +40,7 @@ RSpec.describe Types::Ci::JobType, feature_category: :continuous_integration do refPath retryable retried - runnerMachine + runnerManager scheduledAt schedulingType shortSha @@ -55,6 +55,7 @@ RSpec.describe Types::Ci::JobType, feature_category: :continuous_integration do playPath canPlayJob scheduled + trace ] expect(described_class).to have_graphql_fields(*expected_fields) diff --git a/spec/graphql/types/ci/runner_machine_type_spec.rb b/spec/graphql/types/ci/runner_machine_type_spec.rb deleted file mode 100644 index 289cc52e27b..00000000000 --- a/spec/graphql/types/ci/runner_machine_type_spec.rb +++ /dev/null @@ -1,18 +0,0 @@ -# frozen_string_literal: true - -require 'spec_helper' - -RSpec.describe GitlabSchema.types['CiRunnerMachine'], feature_category: :runner_fleet do - specify { expect(described_class.graphql_name).to eq('CiRunnerMachine') } - - specify { 
expect(described_class).to require_graphql_authorizations(:read_runner_machine) } - - it 'contains attributes related to a runner machine' do - expected_fields = %w[ - architecture_name contacted_at created_at executor_name id ip_address platform_name revision - runner status system_id version - ] - - expect(described_class).to have_graphql_fields(*expected_fields) - end -end diff --git a/spec/graphql/types/ci/runner_manager_type_spec.rb b/spec/graphql/types/ci/runner_manager_type_spec.rb new file mode 100644 index 00000000000..240e1edbf78 --- /dev/null +++ b/spec/graphql/types/ci/runner_manager_type_spec.rb @@ -0,0 +1,18 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe GitlabSchema.types['CiRunnerManager'], feature_category: :runner_fleet do + specify { expect(described_class.graphql_name).to eq('CiRunnerManager') } + + specify { expect(described_class).to require_graphql_authorizations(:read_runner_manager) } + + it 'contains attributes related to a runner manager' do + expected_fields = %w[ + architecture_name contacted_at created_at executor_name id ip_address platform_name revision + runner status system_id version + ] + + expect(described_class).to have_graphql_fields(*expected_fields) + end +end diff --git a/spec/graphql/types/ci/runner_type_spec.rb b/spec/graphql/types/ci/runner_type_spec.rb index 9e360f44a4f..dc664f281b7 100644 --- a/spec/graphql/types/ci/runner_type_spec.rb +++ b/spec/graphql/types/ci/runner_type_spec.rb @@ -9,11 +9,11 @@ RSpec.describe GitlabSchema.types['CiRunner'], feature_category: :runner do it 'contains attributes related to a runner' do expected_fields = %w[ - id description created_by created_at contacted_at machines maximum_timeout access_level active paused status + id description created_by created_at contacted_at managers maximum_timeout access_level active paused status version short_sha revision locked run_untagged ip_address runner_type tag_list project_count job_count admin_url edit_admin_url 
register_admin_url user_permissions executor_name architecture_name platform_name maintenance_note maintenance_note_html groups projects jobs token_expires_at - owner_project job_execution_status ephemeral_authentication_token + owner_project job_execution_status ephemeral_authentication_token ephemeral_register_url ] expect(described_class).to include_graphql_fields(*expected_fields) diff --git a/spec/graphql/types/ci/variable_sort_enum_spec.rb b/spec/graphql/types/ci/variable_sort_enum_spec.rb index 0a86597b70d..8bfe6dde915 100644 --- a/spec/graphql/types/ci/variable_sort_enum_spec.rb +++ b/spec/graphql/types/ci/variable_sort_enum_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe Types::Ci::VariableSortEnum, feature_category: :pipeline_composition do +RSpec.describe Types::Ci::VariableSortEnum, feature_category: :secrets_management do it 'exposes the available order methods' do expect(described_class.values).to match( 'KEY_ASC' => have_attributes(value: :key_asc), diff --git a/spec/graphql/types/clusters/agent_activity_event_type_spec.rb b/spec/graphql/types/clusters/agent_activity_event_type_spec.rb index cae75485846..f89bd877920 100644 --- a/spec/graphql/types/clusters/agent_activity_event_type_spec.rb +++ b/spec/graphql/types/clusters/agent_activity_event_type_spec.rb @@ -6,6 +6,6 @@ RSpec.describe GitlabSchema.types['ClusterAgentActivityEvent'] do let(:fields) { %i[recorded_at kind level user agent_token] } it { expect(described_class.graphql_name).to eq('ClusterAgentActivityEvent') } - it { expect(described_class).to require_graphql_authorizations(:read_cluster) } + it { expect(described_class).to require_graphql_authorizations(:read_cluster_agent) } it { expect(described_class).to have_graphql_fields(fields) } end diff --git a/spec/graphql/types/clusters/agent_token_type_spec.rb b/spec/graphql/types/clusters/agent_token_type_spec.rb index 1ca6d690c80..e04b33f92f8 100644 --- a/spec/graphql/types/clusters/agent_token_type_spec.rb +++ 
b/spec/graphql/types/clusters/agent_token_type_spec.rb @@ -7,7 +7,7 @@ RSpec.describe GitlabSchema.types['ClusterAgentToken'] do it { expect(described_class.graphql_name).to eq('ClusterAgentToken') } - it { expect(described_class).to require_graphql_authorizations(:read_cluster) } + it { expect(described_class).to require_graphql_authorizations(:read_cluster_agent) } it { expect(described_class).to have_graphql_fields(fields) } end diff --git a/spec/graphql/types/clusters/agent_type_spec.rb b/spec/graphql/types/clusters/agent_type_spec.rb index bb1006c55c0..4bae0ea5602 100644 --- a/spec/graphql/types/clusters/agent_type_spec.rb +++ b/spec/graphql/types/clusters/agent_type_spec.rb @@ -7,7 +7,7 @@ RSpec.describe GitlabSchema.types['ClusterAgent'] do it { expect(described_class.graphql_name).to eq('ClusterAgent') } - it { expect(described_class).to require_graphql_authorizations(:read_cluster) } + it { expect(described_class).to require_graphql_authorizations(:read_cluster_agent) } it { expect(described_class).to include_graphql_fields(*fields) } end diff --git a/spec/graphql/types/data_transfer/project_data_transfer_type_spec.rb b/spec/graphql/types/data_transfer/project_data_transfer_type_spec.rb new file mode 100644 index 00000000000..a93da279b7f --- /dev/null +++ b/spec/graphql/types/data_transfer/project_data_transfer_type_spec.rb @@ -0,0 +1,38 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe GitlabSchema.types['ProjectDataTransfer'], feature_category: :source_code_management do + include GraphqlHelpers + + it 'includes the specific fields' do + expect(described_class).to have_graphql_fields( + :total_egress, :egress_nodes) + end + + describe '#total_egress' do + let_it_be(:project) { create(:project) } + let(:from) { Date.new(2022, 1, 1) } + let(:to) { Date.new(2023, 1, 1) } + let(:finder_result) { 40_000_000 } + + it 'returns mock data' do + expect(resolve_field(:total_egress, { from: from, to: to }, extras: { parent: project }, + 
arg_style: :internal)).to eq(finder_result) + end + + context 'when data_transfer_monitoring_mock_data is disabled' do + let(:relation) { instance_double(ActiveRecord::Relation) } + + before do + allow(relation).to receive(:sum).and_return(10) + stub_feature_flags(data_transfer_monitoring_mock_data: false) + end + + it 'calls sum on active record relation' do + expect(resolve_field(:total_egress, { egress_nodes: relation }, extras: { parent: project }, + arg_style: :internal)).to eq(10) + end + end + end +end diff --git a/spec/graphql/types/issue_type_spec.rb b/spec/graphql/types/issue_type_spec.rb index 7c6cf137a1e..87f99878a4d 100644 --- a/spec/graphql/types/issue_type_spec.rb +++ b/spec/graphql/types/issue_type_spec.rb @@ -265,7 +265,10 @@ RSpec.describe GitlabSchema.types['Issue'] do context 'for an incident' do before do - issue.update!(issue_type: Issue.issue_types[:incident]) + issue.update!( + issue_type: Issue.issue_types[:incident], + work_item_type: WorkItems::Type.default_by_type(:incident) + ) end it { is_expected.to be_nil } diff --git a/spec/graphql/types/merge_request_type_spec.rb b/spec/graphql/types/merge_request_type_spec.rb index 8a4c89fc340..4d4c4d3cade 100644 --- a/spec/graphql/types/merge_request_type_spec.rb +++ b/spec/graphql/types/merge_request_type_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe GitlabSchema.types['MergeRequest'] do +RSpec.describe GitlabSchema.types['MergeRequest'], feature_category: :code_review_workflow do include GraphqlHelpers specify { expect(described_class).to expose_permissions_using(Types::PermissionTypes::MergeRequest) } @@ -36,7 +36,7 @@ RSpec.describe GitlabSchema.types['MergeRequest'] do commit_count current_user_todos conflicts auto_merge_enabled approved_by source_branch_protected squash_on_merge available_auto_merge_strategies has_ci mergeable commits committers commits_without_merge_commits squash security_auto_fix default_squash_commit_message - auto_merge_strategy merge_user + 
auto_merge_strategy merge_user award_emoji ] expect(described_class).to have_graphql_fields(*expected_fields).at_least diff --git a/spec/graphql/types/permission_types/work_item_spec.rb b/spec/graphql/types/permission_types/work_item_spec.rb index db6d78b1538..72e9dad3bea 100644 --- a/spec/graphql/types/permission_types/work_item_spec.rb +++ b/spec/graphql/types/permission_types/work_item_spec.rb @@ -5,7 +5,8 @@ require 'spec_helper' RSpec.describe Types::PermissionTypes::WorkItem do it do expected_permissions = [ - :read_work_item, :update_work_item, :delete_work_item, :admin_work_item + :read_work_item, :update_work_item, :delete_work_item, :admin_work_item, + :admin_parent_link ] expected_permissions.each do |permission| diff --git a/spec/graphql/types/project_type_spec.rb b/spec/graphql/types/project_type_spec.rb index 0bfca9a290b..80cb0ea67da 100644 --- a/spec/graphql/types/project_type_spec.rb +++ b/spec/graphql/types/project_type_spec.rb @@ -333,6 +333,7 @@ RSpec.describe GitlabSchema.types['Project'] do :target_branches, :state, :draft, + :approved, :labels, :before, :after, @@ -676,8 +677,8 @@ RSpec.describe GitlabSchema.types['Project'] do subject { GitlabSchema.execute(query, context: { current_user: user }).as_json } before do - allow(::Gitlab::ServiceDeskEmail).to receive(:enabled?) { true } - allow(::Gitlab::ServiceDeskEmail).to receive(:address_for_key) { 'address-suffix@example.com' } + allow(::Gitlab::Email::ServiceDeskEmail).to receive(:enabled?) 
{ true } + allow(::Gitlab::Email::ServiceDeskEmail).to receive(:address_for_key) { 'address-suffix@example.com' } end context 'when a user can admin issues' do diff --git a/spec/graphql/types/timelog_type_spec.rb b/spec/graphql/types/timelog_type_spec.rb index 59a0e373c5d..aa05c5ffd94 100644 --- a/spec/graphql/types/timelog_type_spec.rb +++ b/spec/graphql/types/timelog_type_spec.rb @@ -3,7 +3,7 @@ require 'spec_helper' RSpec.describe GitlabSchema.types['Timelog'], feature_category: :team_planning do - let_it_be(:fields) { %i[id spent_at time_spent user issue merge_request note summary userPermissions] } + let_it_be(:fields) { %i[id spent_at time_spent user issue merge_request note summary userPermissions project] } it { expect(described_class.graphql_name).to eq('Timelog') } it { expect(described_class).to have_graphql_fields(fields) } diff --git a/spec/graphql/types/work_item_type_spec.rb b/spec/graphql/types/work_item_type_spec.rb index 42d56598944..ef7f9c88445 100644 --- a/spec/graphql/types/work_item_type_spec.rb +++ b/spec/graphql/types/work_item_type_spec.rb @@ -18,6 +18,7 @@ RSpec.describe GitlabSchema.types['WorkItem'] do id iid lock_version + namespace project state title title_html diff --git a/spec/graphql/types/work_items/available_export_fields_enum_spec.rb b/spec/graphql/types/work_items/available_export_fields_enum_spec.rb index 5aa51160880..9010aabe3cc 100644 --- a/spec/graphql/types/work_items/available_export_fields_enum_spec.rb +++ b/spec/graphql/types/work_items/available_export_fields_enum_spec.rb @@ -12,6 +12,7 @@ RSpec.describe GitlabSchema.types['AvailableExportFields'], feature_category: :t 'ID' | 'id' 'TYPE' | 'type' 'TITLE' | 'title' + 'DESCRIPTION' | 'description' 'AUTHOR' | 'author' 'AUTHOR_USERNAME' | 'author username' 'CREATED_AT' | 'created_at' diff --git a/spec/graphql/types/work_items/widget_interface_spec.rb b/spec/graphql/types/work_items/widget_interface_spec.rb index d1dcfb961cb..d955ec5023e 100644 --- 
a/spec/graphql/types/work_items/widget_interface_spec.rb +++ b/spec/graphql/types/work_items/widget_interface_spec.rb @@ -15,12 +15,14 @@ RSpec.describe Types::WorkItems::WidgetInterface do using RSpec::Parameterized::TableSyntax where(:widget_class, :widget_type_name) do - WorkItems::Widgets::Description | Types::WorkItems::Widgets::DescriptionType - WorkItems::Widgets::Hierarchy | Types::WorkItems::Widgets::HierarchyType - WorkItems::Widgets::Assignees | Types::WorkItems::Widgets::AssigneesType - WorkItems::Widgets::Labels | Types::WorkItems::Widgets::LabelsType - WorkItems::Widgets::Notes | Types::WorkItems::Widgets::NotesType - WorkItems::Widgets::Notifications | Types::WorkItems::Widgets::NotificationsType + WorkItems::Widgets::Description | Types::WorkItems::Widgets::DescriptionType + WorkItems::Widgets::Hierarchy | Types::WorkItems::Widgets::HierarchyType + WorkItems::Widgets::Assignees | Types::WorkItems::Widgets::AssigneesType + WorkItems::Widgets::Labels | Types::WorkItems::Widgets::LabelsType + WorkItems::Widgets::Notes | Types::WorkItems::Widgets::NotesType + WorkItems::Widgets::Notifications | Types::WorkItems::Widgets::NotificationsType + WorkItems::Widgets::CurrentUserTodos | Types::WorkItems::Widgets::CurrentUserTodosType + WorkItems::Widgets::AwardEmoji | Types::WorkItems::Widgets::AwardEmojiType end with_them do diff --git a/spec/graphql/types/work_items/widgets/award_emoji_type_spec.rb b/spec/graphql/types/work_items/widgets/award_emoji_type_spec.rb new file mode 100644 index 00000000000..493e628ac83 --- /dev/null +++ b/spec/graphql/types/work_items/widgets/award_emoji_type_spec.rb @@ -0,0 +1,12 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe Types::WorkItems::Widgets::AwardEmojiType, feature_category: :team_planning do + it 'exposes the expected fields' do + expected_fields = %i[award_emoji downvotes upvotes type] + + expect(described_class.graphql_name).to eq('WorkItemWidgetAwardEmoji') + expect(described_class).to 
have_graphql_fields(*expected_fields) + end +end diff --git a/spec/graphql/types/work_items/widgets/current_user_todos_input_type_spec.rb b/spec/graphql/types/work_items/widgets/current_user_todos_input_type_spec.rb new file mode 100644 index 00000000000..0ae660ffac0 --- /dev/null +++ b/spec/graphql/types/work_items/widgets/current_user_todos_input_type_spec.rb @@ -0,0 +1,9 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe ::Types::WorkItems::Widgets::CurrentUserTodosInputType, feature_category: :team_planning do + it { expect(described_class.graphql_name).to eq('WorkItemWidgetCurrentUserTodosInput') } + + it { expect(described_class.arguments.keys).to match_array(%w[action todoId]) } +end diff --git a/spec/graphql/types/work_items/widgets/current_user_todos_type_spec.rb b/spec/graphql/types/work_items/widgets/current_user_todos_type_spec.rb new file mode 100644 index 00000000000..b39adefbd87 --- /dev/null +++ b/spec/graphql/types/work_items/widgets/current_user_todos_type_spec.rb @@ -0,0 +1,11 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe Types::WorkItems::Widgets::CurrentUserTodosType, feature_category: :team_planning do + it 'exposes the expected fields' do + expected_fields = %i[current_user_todos type] + + expect(described_class).to have_graphql_fields(*expected_fields) + end +end diff --git a/spec/graphql/types/work_items/widgets/hierarchy_update_input_type_spec.rb b/spec/graphql/types/work_items/widgets/hierarchy_update_input_type_spec.rb index 6221580605e..0d4d31faee1 100644 --- a/spec/graphql/types/work_items/widgets/hierarchy_update_input_type_spec.rb +++ b/spec/graphql/types/work_items/widgets/hierarchy_update_input_type_spec.rb @@ -5,5 +5,11 @@ require 'spec_helper' RSpec.describe ::Types::WorkItems::Widgets::HierarchyUpdateInputType do it { expect(described_class.graphql_name).to eq('WorkItemWidgetHierarchyUpdateInput') } - it { expect(described_class.arguments.keys).to match_array(%w[parentId 
childrenIds]) } + it 'accepts documented arguments' do + expect(described_class.arguments.keys).to match_array(%w[parentId childrenIds adjacentWorkItemId relativePosition]) + end + + it 'sets the type of relative_position argument to RelativePositionTypeEnum' do + expect(described_class.arguments['relativePosition'].type).to eq(Types::RelativePositionTypeEnum) + end end diff --git a/spec/haml_lint/linter/no_plain_nodes_spec.rb b/spec/haml_lint/linter/no_plain_nodes_spec.rb index eeb0e4ea96f..235e742bc54 100644 --- a/spec/haml_lint/linter/no_plain_nodes_spec.rb +++ b/spec/haml_lint/linter/no_plain_nodes_spec.rb @@ -6,7 +6,7 @@ require 'haml_lint/spec' require_relative '../../../haml_lint/linter/no_plain_nodes' -RSpec.describe HamlLint::Linter::NoPlainNodes do +RSpec.describe HamlLint::Linter::NoPlainNodes, feature_category: :tooling do include_context 'linter' context 'reports when a tag has an inline plain node' do @@ -68,27 +68,27 @@ RSpec.describe HamlLint::Linter::NoPlainNodes do end context 'does not report multiline when one or more html entities' do - %w( > © »).each do |elem| - let(:haml) { <<-HAML } - %tag - #{elem} - HAML - - it elem do - is_expected.not_to report_lint + %w[ > © »].each do |elem| + context "with #{elem}" do + let(:haml) { <<-HAML } + %tag + #{elem} + HAML + + it { is_expected.not_to report_lint } end end end context 'does report multiline when one or more html entities amidst plain text' do - %w( Test Test> ©Hello  Hello»).each do |elem| - let(:haml) { <<-HAML } - %tag - #{elem} - HAML - - it elem do - is_expected.to report_lint + %w[ Test Test> ©Hello  Hello»].each do |elem| + context "with #{elem}" do + let(:haml) { <<-HAML } + %tag + #{elem} + HAML + + it { is_expected.to report_lint } end end end diff --git a/spec/helpers/abuse_reports_helper_spec.rb b/spec/helpers/abuse_reports_helper_spec.rb new file mode 100644 index 00000000000..6d381b7eb56 --- /dev/null +++ b/spec/helpers/abuse_reports_helper_spec.rb @@ -0,0 +1,13 @@ +# 
frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe AbuseReportsHelper, feature_category: :insider_threat do + describe '#valid_image_mimetypes' do + subject(:valid_image_mimetypes) { helper.valid_image_mimetypes } + + it { + is_expected.to eq('image/png, image/jpg, image/jpeg, image/gif, image/bmp, image/tiff, image/ico or image/webp') + } + end +end diff --git a/spec/helpers/access_tokens_helper_spec.rb b/spec/helpers/access_tokens_helper_spec.rb index d34251d03db..a466b2a0d3b 100644 --- a/spec/helpers/access_tokens_helper_spec.rb +++ b/spec/helpers/access_tokens_helper_spec.rb @@ -37,7 +37,7 @@ RSpec.describe AccessTokensHelper do disable_feed_token: false, static_objects_external_storage_enabled?: true ) - allow(Gitlab::IncomingEmail).to receive(:supports_issue_creation?).and_return(true) + allow(Gitlab::Email::IncomingEmail).to receive(:supports_issue_creation?).and_return(true) allow(helper).to receive_messages( current_user: user, reset_feed_token_profile_path: feed_token_reset_path, diff --git a/spec/helpers/application_helper_spec.rb b/spec/helpers/application_helper_spec.rb index bb1a4d57cc0..ae84331dcdb 100644 --- a/spec/helpers/application_helper_spec.rb +++ b/spec/helpers/application_helper_spec.rb @@ -696,14 +696,45 @@ RSpec.describe ApplicationHelper do end describe 'stylesheet_link_tag_defer' do - it 'uses print stylesheet by default' do + it 'uses print stylesheet when feature flag disabled' do + stub_feature_flags(remove_startup_css: false) + expect(helper.stylesheet_link_tag_defer('test')).to eq( '') end + it 'uses regular stylesheet when feature flag disabled' do + stub_feature_flags(remove_startup_css: true) + + expect(helper.stylesheet_link_tag_defer('test')).to eq( '') + end + it 'uses regular stylesheet when no_startup_css param present' do allow(helper.controller).to receive(:params).and_return({ no_startup_css: '' }) expect(helper.stylesheet_link_tag_defer('test')).to eq( '') end end + + describe 'sign_in_with_redirect?' 
do + context 'when on the sign-in page that redirects afterwards' do + before do + allow(helper).to receive(:current_page?).and_return(true) + session[:user_return_to] = true + end + + it 'returns true' do + expect(helper.sign_in_with_redirect?).to be_truthy + end + end + + context 'when on a non sign-in page' do + before do + allow(helper).to receive(:current_page?).and_return(false) + end + + it 'returns false' do + expect(helper.sign_in_with_redirect?).to be_falsey + end + end + end end diff --git a/spec/helpers/avatars_helper_spec.rb b/spec/helpers/avatars_helper_spec.rb index cef72d24c43..6eb97a99264 100644 --- a/spec/helpers/avatars_helper_spec.rb +++ b/spec/helpers/avatars_helper_spec.rb @@ -297,22 +297,26 @@ RSpec.describe AvatarsHelper do subject { helper.user_avatar_without_link(options) } it 'displays user avatar' do - is_expected.to eq tag.img(alt: "#{user.name}'s avatar", - src: avatar_icon_for_user(user, 16), - data: { container: 'body' }, - class: 'avatar s16 has-tooltip', - title: user.name) + is_expected.to eq tag.img( + alt: "#{user.name}'s avatar", + src: avatar_icon_for_user(user, 16), + data: { container: 'body' }, + class: 'avatar s16 has-tooltip', + title: user.name + ) end context 'with css_class parameter' do let(:options) { { user: user, css_class: '.cat-pics' } } it 'uses provided css_class' do - is_expected.to eq tag.img(alt: "#{user.name}'s avatar", - src: avatar_icon_for_user(user, 16), - data: { container: 'body' }, - class: "avatar s16 #{options[:css_class]} has-tooltip", - title: user.name) + is_expected.to eq tag.img( + alt: "#{user.name}'s avatar", + src: avatar_icon_for_user(user, 16), + data: { container: 'body' }, + class: "avatar s16 #{options[:css_class]} has-tooltip", + title: user.name + ) end end @@ -320,11 +324,13 @@ RSpec.describe AvatarsHelper do let(:options) { { user: user, size: 99 } } it 'uses provided size' do - is_expected.to eq tag.img(alt: "#{user.name}'s avatar", - src: avatar_icon_for_user(user, 
options[:size]), - data: { container: 'body' }, - class: "avatar s#{options[:size]} has-tooltip", - title: user.name) + is_expected.to eq tag.img( + alt: "#{user.name}'s avatar", + src: avatar_icon_for_user(user, options[:size]), + data: { container: 'body' }, + class: "avatar s#{options[:size]} has-tooltip", + title: user.name + ) end end @@ -332,11 +338,13 @@ RSpec.describe AvatarsHelper do let(:options) { { user: user, url: '/over/the/rainbow.png' } } it 'uses provided url' do - is_expected.to eq tag.img(alt: "#{user.name}'s avatar", - src: options[:url], - data: { container: 'body' }, - class: "avatar s16 has-tooltip", - title: user.name) + is_expected.to eq tag.img( + alt: "#{user.name}'s avatar", + src: options[:url], + data: { container: 'body' }, + class: "avatar s16 has-tooltip", + title: user.name + ) end end @@ -344,11 +352,13 @@ RSpec.describe AvatarsHelper do let(:options) { { user: user, lazy: true } } it 'adds `lazy` class to class list, sets `data-src` with avatar URL and `src` with placeholder image' do - is_expected.to eq tag.img(alt: "#{user.name}'s avatar", - src: LazyImageTagHelper.placeholder_image, - data: { container: 'body', src: avatar_icon_for_user(user, 16) }, - class: "avatar s16 has-tooltip lazy", - title: user.name) + is_expected.to eq tag.img( + alt: "#{user.name}'s avatar", + src: LazyImageTagHelper.placeholder_image, + data: { container: 'body', src: avatar_icon_for_user(user, 16) }, + class: "avatar s16 has-tooltip lazy", + title: user.name + ) end end @@ -357,11 +367,13 @@ RSpec.describe AvatarsHelper do let(:options) { { user: user, has_tooltip: true } } it 'adds has-tooltip' do - is_expected.to eq tag.img(alt: "#{user.name}'s avatar", - src: avatar_icon_for_user(user, 16), - data: { container: 'body' }, - class: "avatar s16 has-tooltip", - title: user.name) + is_expected.to eq tag.img( + alt: "#{user.name}'s avatar", + src: avatar_icon_for_user(user, 16), + data: { container: 'body' }, + class: "avatar s16 has-tooltip", + 
title: user.name + ) end end @@ -369,10 +381,12 @@ RSpec.describe AvatarsHelper do let(:options) { { user: user, has_tooltip: false } } it 'does not add has-tooltip or data container' do - is_expected.to eq tag.img(alt: "#{user.name}'s avatar", - src: avatar_icon_for_user(user, 16), - class: "avatar s16", - title: user.name) + is_expected.to eq tag.img( + alt: "#{user.name}'s avatar", + src: avatar_icon_for_user(user, 16), + class: "avatar s16", + title: user.name + ) end end end @@ -384,20 +398,24 @@ RSpec.describe AvatarsHelper do let(:options) { { user: user, user_name: 'Tinky Winky' } } it 'prefers user parameter' do - is_expected.to eq tag.img(alt: "#{user.name}'s avatar", - src: avatar_icon_for_user(user, 16), - data: { container: 'body' }, - class: "avatar s16 has-tooltip", - title: user.name) + is_expected.to eq tag.img( + alt: "#{user.name}'s avatar", + src: avatar_icon_for_user(user, 16), + data: { container: 'body' }, + class: "avatar s16 has-tooltip", + title: user.name + ) end end it 'uses user_name and user_email parameter if user is not present' do - is_expected.to eq tag.img(alt: "#{options[:user_name]}'s avatar", - src: helper.avatar_icon_for_email(options[:user_email], 16), - data: { container: 'body' }, - class: "avatar s16 has-tooltip", - title: options[:user_name]) + is_expected.to eq tag.img( + alt: "#{options[:user_name]}'s avatar", + src: helper.avatar_icon_for_email(options[:user_email], 16), + data: { container: 'body' }, + class: "avatar s16 has-tooltip", + title: options[:user_name] + ) end end @@ -408,11 +426,13 @@ RSpec.describe AvatarsHelper do let(:options) { { user: user_with_avatar, only_path: false } } it 'will return avatar with a full path' do - is_expected.to eq tag.img(alt: "#{user_with_avatar.name}'s avatar", - src: avatar_icon_for_user(user_with_avatar, 16, only_path: false), - data: { container: 'body' }, - class: "avatar s16 has-tooltip", - title: user_with_avatar.name) + is_expected.to eq tag.img( + alt: 
"#{user_with_avatar.name}'s avatar", + src: avatar_icon_for_user(user_with_avatar, 16, only_path: false), + data: { container: 'body' }, + class: "avatar s16 has-tooltip", + title: user_with_avatar.name + ) end end @@ -420,11 +440,13 @@ RSpec.describe AvatarsHelper do let(:options) { { user_email: user_with_avatar.email, user_name: user_with_avatar.username, only_path: false } } it 'will return avatar with a full path' do - is_expected.to eq tag.img(alt: "#{user_with_avatar.username}'s avatar", - src: helper.avatar_icon_for_email(user_with_avatar.email, 16, only_path: false), - data: { container: 'body' }, - class: "avatar s16 has-tooltip", - title: user_with_avatar.username) + is_expected.to eq tag.img( + alt: "#{user_with_avatar.username}'s avatar", + src: helper.avatar_icon_for_email(user_with_avatar.email, 16, only_path: false), + data: { container: 'body' }, + class: "avatar s16 has-tooltip", + title: user_with_avatar.username + ) end end end @@ -447,11 +469,13 @@ RSpec.describe AvatarsHelper do let(:resource) { user.namespace } it 'displays user avatar' do - is_expected.to eq tag.img(alt: "#{user.name}'s avatar", - src: avatar_icon_for_user(user, 32), - data: { container: 'body' }, - class: 'avatar s32 has-tooltip', - title: user.name) + is_expected.to eq tag.img( + alt: "#{user.name}'s avatar", + src: avatar_icon_for_user(user, 32), + data: { container: 'body' }, + class: 'avatar s32 has-tooltip', + title: user.name + ) end end diff --git a/spec/helpers/blob_helper_spec.rb b/spec/helpers/blob_helper_spec.rb index dac0d3fe182..1fd953d52d8 100644 --- a/spec/helpers/blob_helper_spec.rb +++ b/spec/helpers/blob_helper_spec.rb @@ -4,6 +4,7 @@ require 'spec_helper' RSpec.describe BlobHelper do include TreeHelper + include FakeBlobHelpers describe "#sanitize_svg_data" do let(:input_svg_path) { File.join(Rails.root, 'spec', 'fixtures', 'unsanitized.svg') } @@ -57,8 +58,6 @@ RSpec.describe BlobHelper do end describe "#relative_raw_path" do - include FakeBlobHelpers - 
let_it_be(:project) { create(:project) } before do @@ -82,8 +81,6 @@ RSpec.describe BlobHelper do end context 'viewer related' do - include FakeBlobHelpers - let_it_be(:project) { create(:project, lfs_enabled: true) } before do @@ -526,4 +523,25 @@ RSpec.describe BlobHelper do it { is_expected.to be_truthy } end end + + describe '#vue_blob_app_data' do + let(:blob) { fake_blob(path: 'file.md', size: 2.megabytes) } + let(:project) { build_stubbed(:project) } + let(:user) { build_stubbed(:user) } + let(:ref) { 'main' } + + it 'returns data related to blob app' do + allow(helper).to receive(:current_user).and_return(user) + assign(:ref, ref) + + expect(helper.vue_blob_app_data(project, blob, ref)).to include({ + blob_path: blob.path, + project_path: project.full_path, + resource_id: project.to_global_id, + user_id: user.to_global_id, + target_branch: ref, + original_branch: ref + }) + end + end end diff --git a/spec/helpers/ci/catalog/resources_helper_spec.rb b/spec/helpers/ci/catalog/resources_helper_spec.rb index c4abdebd12e..e873b9379fe 100644 --- a/spec/helpers/ci/catalog/resources_helper_spec.rb +++ b/spec/helpers/ci/catalog/resources_helper_spec.rb @@ -3,14 +3,15 @@ require 'spec_helper' RSpec.describe Ci::Catalog::ResourcesHelper, feature_category: :pipeline_composition do + include Devise::Test::ControllerHelpers + let_it_be(:project) { build(:project) } - describe 'can_view_private_catalog?' do - subject { helper.can_view_private_catalog?(project) } + describe '#can_view_namespace_catalog?' 
do + subject { helper.can_view_namespace_catalog?(project) } before do - allow(helper).to receive(:can_collaborate_with_project?).and_return(true) - stub_licensed_features(ci_private_catalog: false) + stub_licensed_features(ci_namespace_catalog: false) end it 'user cannot view the Catalog in CE regardless of permissions' do @@ -20,11 +21,12 @@ RSpec.describe Ci::Catalog::ResourcesHelper, feature_category: :pipeline_composi describe '#js_ci_catalog_data' do let(:project) { build(:project, :repository) } + let(:default_helper_data) do {} end - subject(:catalog_data) { helper.js_ci_catalog_data } + subject(:catalog_data) { helper.js_ci_catalog_data(project) } it 'returns catalog data' do expect(catalog_data).to eq(default_helper_data) diff --git a/spec/helpers/ci/pipelines_helper_spec.rb b/spec/helpers/ci/pipelines_helper_spec.rb index 535e8f3170e..6463da7c53f 100644 --- a/spec/helpers/ci/pipelines_helper_spec.rb +++ b/spec/helpers/ci/pipelines_helper_spec.rb @@ -121,35 +121,7 @@ RSpec.describe Ci::PipelinesHelper do :has_gitlab_ci, :pipeline_editor_path, :suggested_ci_templates, - :ci_runner_settings_path]) - end - - describe 'the `any_runners_available` attribute' do - subject { data[:any_runners_available] } - - context 'when the `runners_availability_section` experiment variant is control' do - before do - stub_experiments(runners_availability_section: :control) - end - - it { is_expected.to be_nil } - end - - context 'when the `runners_availability_section` experiment variant is candidate' do - before do - stub_experiments(runners_availability_section: :candidate) - end - - context 'when there are no runners' do - it { is_expected.to eq('false') } - end - - context 'when there are runners' do - let!(:runner) { create(:ci_runner, :project, projects: [project]) } - - it { is_expected.to eq('true') } - end - end + :full_path]) end describe 'when the project is eligible for the `ios_specific_templates` experiment' do diff --git 
a/spec/helpers/ci/variables_helper_spec.rb b/spec/helpers/ci/variables_helper_spec.rb index da727fd1b6b..9c3236ace72 100644 --- a/spec/helpers/ci/variables_helper_spec.rb +++ b/spec/helpers/ci/variables_helper_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe Ci::VariablesHelper, feature_category: :pipeline_composition do +RSpec.describe Ci::VariablesHelper, feature_category: :secrets_management do describe '#ci_variable_maskable_raw_regex' do it 'converts to a javascript regex' do expect(helper.ci_variable_maskable_raw_regex).to eq("^\\S{8,}$") diff --git a/spec/helpers/emoji_helper_spec.rb b/spec/helpers/emoji_helper_spec.rb index 6f4c962c0fb..e16c96c86ed 100644 --- a/spec/helpers/emoji_helper_spec.rb +++ b/spec/helpers/emoji_helper_spec.rb @@ -12,10 +12,12 @@ RSpec.describe EmojiHelper do subject { helper.emoji_icon(emoji_text, options) } it 'has no options' do - is_expected.to include(' 'false', 'use-new-web-ide' => 'false', 'user-preferences-path' => profile_preferences_path, + 'sign-in-path' => 'test-sign-in-path', 'project' => nil, 'preview-markdown-path' => nil } @@ -29,6 +30,7 @@ RSpec.describe IdeHelper, feature_category: :web_ide do before do allow(helper).to receive(:current_user).and_return(user) allow(helper).to receive(:content_security_policy_nonce).and_return('test-csp-nonce') + allow(helper).to receive(:new_session_path).and_return('test-sign-in-path') end it 'returns hash' do @@ -99,6 +101,7 @@ RSpec.describe IdeHelper, feature_category: :web_ide do 'can-use-new-web-ide' => 'true', 'use-new-web-ide' => 'true', 'user-preferences-path' => profile_preferences_path, + 'sign-in-path' => 'test-sign-in-path', 'new-web-ide-help-page-path' => help_page_path('user/project/web_ide/index.md', anchor: 'vscode-reimplementation'), 'csp-nonce' => 'test-csp-nonce', diff --git a/spec/helpers/integrations_helper_spec.rb b/spec/helpers/integrations_helper_spec.rb index 9822f9fac05..8be847e1c6c 100644 --- a/spec/helpers/integrations_helper_spec.rb +++ 
b/spec/helpers/integrations_helper_spec.rb @@ -165,7 +165,8 @@ RSpec.describe IntegrationsHelper do with_them do before do - issue.update!(issue_type: issue_type) + issue.assign_attributes(issue_type: issue_type, work_item_type: WorkItems::Type.default_by_type(issue_type)) + issue.save!(validate: false) end it "return the correct i18n issue type" do diff --git a/spec/helpers/issuables_helper_spec.rb b/spec/helpers/issuables_helper_spec.rb index fd10b204e50..960a7e03e49 100644 --- a/spec/helpers/issuables_helper_spec.rb +++ b/spec/helpers/issuables_helper_spec.rb @@ -416,7 +416,7 @@ RSpec.describe IssuablesHelper, feature_category: :team_planning do initialTitleText: issue.title, initialDescriptionHtml: '

    issue text

    ', initialDescriptionText: 'issue text', - initialTaskStatus: '0 of 0 checklist items completed', + initialTaskCompletionStatus: { completed_count: 0, count: 0 }, issueType: 'issue', iid: issue.iid.to_s, isHidden: false @@ -695,4 +695,37 @@ RSpec.describe IssuablesHelper, feature_category: :team_planning do end end end + + describe '#issuable_type_selector_data' do + using RSpec::Parameterized::TableSyntax + + let_it_be(:project) { create(:project) } + + where(:issuable_type, :issuable_display_type, :is_issue_allowed, :is_incident_allowed) do + :issue | 'issue' | true | false + :incident | 'incident' | false | true + end + + with_them do + let(:issuable) { build_stubbed(issuable_type) } + + before do + allow(helper).to receive(:create_issue_type_allowed?).with(project, :issue).and_return(is_issue_allowed) + allow(helper).to receive(:create_issue_type_allowed?).with(project, :incident).and_return(is_incident_allowed) + assign(:project, project) + end + + it 'returns the correct data for the issuable type selector' do + expected_data = { + selected_type: issuable_display_type, + is_issue_allowed: is_issue_allowed.to_s, + is_incident_allowed: is_incident_allowed.to_s, + issue_path: new_project_issue_path(project), + incident_path: new_project_issue_path(project, { issuable_template: 'incident', issue: { issue_type: 'incident' } }) + } + + expect(helper.issuable_type_selector_data(issuable)).to match(expected_data) + end + end + end end diff --git a/spec/helpers/issues_helper_spec.rb b/spec/helpers/issues_helper_spec.rb index 994a1ff4f75..d940c696fb3 100644 --- a/spec/helpers/issues_helper_spec.rb +++ b/spec/helpers/issues_helper_spec.rb @@ -6,18 +6,6 @@ RSpec.describe IssuesHelper do let_it_be(:project) { create(:project) } let_it_be_with_reload(:issue) { create(:issue, project: project) } - describe '#work_item_type_icon' do - it 'returns icon of all standard base types' do - WorkItems::Type.base_types.each do |type| - expect(work_item_type_icon(type[0])).to eq 
"issue-type-#{type[0].to_s.dasherize}" - end - end - - it 'defaults to issue icon if type is unknown' do - expect(work_item_type_icon('invalid')).to eq 'issue-type-issue' - end - end - describe '#award_user_list' do it 'returns a comma-separated list of the first X users' do user = build_stubbed(:user, name: 'Joe') @@ -228,8 +216,8 @@ RSpec.describe IssuesHelper do let!(:new_issue) { create(:issue, author: User.support_bot, project: project2) } before do - allow(Gitlab::IncomingEmail).to receive(:enabled?) { true } - allow(Gitlab::IncomingEmail).to receive(:supports_wildcard?) { true } + allow(Gitlab::Email::IncomingEmail).to receive(:enabled?) { true } + allow(Gitlab::Email::IncomingEmail).to receive(:supports_wildcard?) { true } old_issue.update!(moved_to: new_issue) end diff --git a/spec/helpers/merge_requests_helper_spec.rb b/spec/helpers/merge_requests_helper_spec.rb index 6b43e97a0b4..3a06e7ad089 100644 --- a/spec/helpers/merge_requests_helper_spec.rb +++ b/spec/helpers/merge_requests_helper_spec.rb @@ -4,6 +4,7 @@ require 'spec_helper' RSpec.describe MergeRequestsHelper, feature_category: :code_review_workflow do include ProjectForksHelper + include IconsHelper describe '#format_mr_branch_names' do describe 'within the same project' do @@ -28,7 +29,7 @@ RSpec.describe MergeRequestsHelper, feature_category: :code_review_workflow do end describe '#merge_path_description' do - let(:project) { create(:project) } + let_it_be(:project) { create(:project) } let(:forked_project) { fork_project(project) } let(:merge_request_forked) { create(:merge_request, source_project: forked_project, target_project: project) } let(:merge_request) { create(:merge_request, source_project: project, target_project: project) } @@ -150,4 +151,27 @@ RSpec.describe MergeRequestsHelper, feature_category: :code_review_workflow do end end end + + describe '#merge_request_source_branch' do + let_it_be(:project) { create(:project) } + let(:forked_project) { fork_project(project) } + 
let(:merge_request_forked) { create(:merge_request, source_project: forked_project, target_project: project) } + let(:merge_request) { create(:merge_request, source_project: project, target_project: project) } + + context 'when merge request is a fork' do + subject { merge_request_source_branch(merge_request_forked) } + + it 'does show the fork icon' do + expect(subject).to match(/fork/) + end + end + + context 'when merge request is not a fork' do + subject { merge_request_source_branch(merge_request) } + + it 'does not show the fork icon' do + expect(subject).not_to match(/fork/) + end + end + end end diff --git a/spec/helpers/namespaces_helper_spec.rb b/spec/helpers/namespaces_helper_spec.rb index 3e6780d6831..e288a604be6 100644 --- a/spec/helpers/namespaces_helper_spec.rb +++ b/spec/helpers/namespaces_helper_spec.rb @@ -6,38 +6,35 @@ RSpec.describe NamespacesHelper do let!(:admin) { create(:admin) } let!(:admin_project_creation_level) { nil } let!(:admin_group) do - create(:group, - :private, - project_creation_level: admin_project_creation_level) + create(:group, :private, project_creation_level: admin_project_creation_level) end let!(:user) { create(:user) } let!(:user_project_creation_level) { nil } let!(:user_group) do - create(:group, - :private, - project_creation_level: user_project_creation_level) + create(:group, :private, project_creation_level: user_project_creation_level) end let!(:subgroup1) do - create(:group, - :private, - parent: admin_group, - project_creation_level: nil) + create(:group, :private, parent: admin_group, project_creation_level: nil) end let!(:subgroup2) do - create(:group, - :private, - parent: admin_group, - project_creation_level: ::Gitlab::Access::DEVELOPER_MAINTAINER_PROJECT_ACCESS) + create( + :group, + :private, + parent: admin_group, + project_creation_level: ::Gitlab::Access::DEVELOPER_MAINTAINER_PROJECT_ACCESS + ) end let!(:subgroup3) do - create(:group, - :private, - parent: admin_group, - project_creation_level: 
::Gitlab::Access::MAINTAINER_PROJECT_ACCESS) + create( + :group, + :private, + parent: admin_group, + project_creation_level: ::Gitlab::Access::MAINTAINER_PROJECT_ACCESS + ) end before do diff --git a/spec/helpers/notify_helper_spec.rb b/spec/helpers/notify_helper_spec.rb index 09da2b89dff..bc1b927cc93 100644 --- a/spec/helpers/notify_helper_spec.rb +++ b/spec/helpers/notify_helper_spec.rb @@ -64,10 +64,19 @@ RSpec.describe NotifyHelper do mr_link_style = "font-weight: 600;color:#3777b0;text-decoration:none" reviewer_avatar_style = "border-radius:12px;margin:-7px 0 -7px 3px;" mr_link = link_to(merge_request.to_reference, merge_request_url(merge_request), style: mr_link_style).html_safe - reviewer_avatar = content_tag(:img, nil, height: "24", src: avatar_icon_for_user, style: reviewer_avatar_style, \ - width: "24", alt: "Avatar", class: "avatar").html_safe - reviewer_link = link_to(reviewer.name, user_url(reviewer), style: "color:#333333;text-decoration:none;", \ - class: "muted").html_safe + reviewer_avatar = content_tag( + :img, + nil, + height: "24", + src: avatar_icon_for_user, + style: reviewer_avatar_style, + width: "24", + alt: "Avatar", + class: "avatar" + ).html_safe + reviewer_link = link_to( + reviewer.name, user_url(reviewer), style: "color:#333333;text-decoration:none;", class: "muted" + ).html_safe result = helper.merge_request_hash_param(merge_request, reviewer) expect(result[:mr_highlight]).to eq ''.html_safe expect(result[:highlight_end]).to eq ''.html_safe diff --git a/spec/helpers/packages_helper_spec.rb b/spec/helpers/packages_helper_spec.rb index b6546a2eaf3..dcc5e336253 100644 --- a/spec/helpers/packages_helper_spec.rb +++ b/spec/helpers/packages_helper_spec.rb @@ -4,6 +4,7 @@ require 'spec_helper' RSpec.describe PackagesHelper, feature_category: :package_registry do using RSpec::Parameterized::TableSyntax + include AdminModeHelper let_it_be_with_reload(:project) { create(:project) } let_it_be(:base_url) { "#{Gitlab.config.gitlab.url}/api/v4/" 
} @@ -127,4 +128,72 @@ RSpec.describe PackagesHelper, feature_category: :package_registry do it { is_expected.to eq(expected_result) } end end + + describe '#show_group_package_registry_settings' do + let_it_be(:group) { create(:group) } + let_it_be(:user) { create(:user) } + let_it_be(:admin) { create(:admin) } + + before do + allow(helper).to receive(:current_user) { user } + end + + subject { helper.show_group_package_registry_settings(group) } + + context 'with package registry config enabled' do + before do + stub_config(packages: { enabled: true }) + end + + context "with admin", :enable_admin_mode do + before do + allow(helper).to receive(:current_user) { admin } + end + + it { is_expected.to be(true) } + end + + context "with owner" do + before do + group.add_owner(user) + end + + it { is_expected.to be(true) } + end + + %i[maintainer developer reporter guest].each do |role| + context "with #{role}" do + before do + group.public_send("add_#{role}", user) + end + + it { is_expected.to be(false) } + end + end + end + + context 'with package registry config disabled' do + before do + stub_config(packages: { enabled: false }) + end + + context "with admin", :enable_admin_mode do + before do + allow(helper).to receive(:current_user) { admin } + end + + it { is_expected.to be(false) } + end + + %i[owner maintainer developer reporter guest].each do |role| + context "with #{role}" do + before do + group.public_send("add_#{role}", user) + end + + it { is_expected.to be(false) } + end + end + end + end end diff --git a/spec/helpers/page_layout_helper_spec.rb b/spec/helpers/page_layout_helper_spec.rb index eb42ce18da0..9694921e223 100644 --- a/spec/helpers/page_layout_helper_spec.rb +++ b/spec/helpers/page_layout_helper_spec.rb @@ -128,12 +128,14 @@ RSpec.describe PageLayoutHelper do describe 'a bare controller' do it 'returns an empty context' do - expect(search_context).to have_attributes(project: nil, - group: nil, - snippets: [], - project_metadata: {}, - 
group_metadata: {}, - search_url: '/search') + expect(search_context).to have_attributes( + project: nil, + group: nil, + snippets: [], + project_metadata: {}, + group_metadata: {}, + search_url: '/search' + ) end end end diff --git a/spec/helpers/projects/ml/experiments_helper_spec.rb b/spec/helpers/projects/ml/experiments_helper_spec.rb index 8ef81c49fa7..9b3c23e1f87 100644 --- a/spec/helpers/projects/ml/experiments_helper_spec.rb +++ b/spec/helpers/projects/ml/experiments_helper_spec.rb @@ -9,7 +9,7 @@ RSpec.describe Projects::Ml::ExperimentsHelper, feature_category: :mlops do let_it_be(:project) { create(:project, :private) } let_it_be(:experiment) { create(:ml_experiments, user_id: project.creator, project: project) } let_it_be(:candidate0) do - create(:ml_candidates, :with_artifact, experiment: experiment, user: project.creator).tap do |c| + create(:ml_candidates, :with_artifact, experiment: experiment, user: project.creator, project: project).tap do |c| c.params.build([{ name: 'param1', value: 'p1' }, { name: 'param2', value: 'p2' }]) c.metrics.create!( [{ name: 'metric1', value: 0.1 }, { name: 'metric2', value: 0.2 }, { name: 'metric3', value: 0.3 }] @@ -18,7 +18,8 @@ RSpec.describe Projects::Ml::ExperimentsHelper, feature_category: :mlops do end let_it_be(:candidate1) do - create(:ml_candidates, experiment: experiment, user: project.creator, name: 'candidate1').tap do |c| + create(:ml_candidates, experiment: experiment, user: project.creator, name: 'candidate1', + project: project).tap do |c| c.params.build([{ name: 'param2', value: 'p3' }, { name: 'param3', value: 'p4' }]) c.metrics.create!(name: 'metric3', value: 0.4) end @@ -77,6 +78,16 @@ RSpec.describe Projects::Ml::ExperimentsHelper, feature_category: :mlops do end end + describe '#experiment_as_data' do + subject { Gitlab::Json.parse(helper.experiment_as_data(experiment)) } + + it do + is_expected.to eq( + { 'name' => experiment.name, 'path' => 
"/#{project.full_path}/-/ml/experiments/#{experiment.iid}" } + ) + end + end + describe '#show_candidate_view_model' do let(:candidate) { candidate0 } @@ -103,7 +114,8 @@ RSpec.describe Projects::Ml::ExperimentsHelper, feature_category: :mlops do 'path_to_artifact' => "/#{project.full_path}/-/packages/#{candidate.artifact.id}", 'experiment_name' => candidate.experiment.name, 'path_to_experiment' => "/#{project.full_path}/-/ml/experiments/#{experiment.iid}", - 'status' => 'running' + 'status' => 'running', + 'path' => "/#{project.full_path}/-/ml/candidates/#{candidate.iid}" } expect(subject['info']).to include(expected_info) diff --git a/spec/helpers/projects/pipeline_helper_spec.rb b/spec/helpers/projects/pipeline_helper_spec.rb index 35045aaef2a..ef52c4dd425 100644 --- a/spec/helpers/projects/pipeline_helper_spec.rb +++ b/spec/helpers/projects/pipeline_helper_spec.rb @@ -21,7 +21,7 @@ RSpec.describe Projects::PipelineHelper do expect(pipeline_tabs_data).to include({ failed_jobs_count: pipeline.failed_builds.count, failed_jobs_summary: prepare_failed_jobs_summary_data(pipeline.failed_builds), - full_path: project.full_path, + project_path: project.full_path, graphql_resource_etag: graphql_etag_pipeline_path(pipeline), metrics_path: namespace_project_ci_prometheus_metrics_histograms_path(namespace_id: project.namespace, project_id: project, format: :json), pipeline_iid: pipeline.iid, diff --git a/spec/helpers/projects_helper_spec.rb b/spec/helpers/projects_helper_spec.rb index 93352715ff4..7cf2de820fc 100644 --- a/spec/helpers/projects_helper_spec.rb +++ b/spec/helpers/projects_helper_spec.rb @@ -1363,18 +1363,26 @@ RSpec.describe ProjectsHelper, feature_category: :source_code_management do source_project = project_with_repo allow(helper).to receive(:visible_fork_source).with(project).and_return(source_project) + allow(helper).to receive(:can_user_create_mr_in_fork).with(source_project).and_return(false) + allow(helper).to receive(:current_user).and_return(user) 
ahead_path = "/#{project.full_path}/-/compare/#{source_project.default_branch}...ref?from_project_id=#{source_project.id}" behind_path = "/#{source_project.full_path}/-/compare/ref...#{source_project.default_branch}?from_project_id=#{project.id}" + create_mr_path = "/#{project.full_path}/-/merge_requests/new?merge_request%5Bsource_branch%5D=ref&merge_request%5Btarget_branch%5D=#{source_project.default_branch}&merge_request%5Btarget_project_id%5D=#{source_project.id}" expect(helper.vue_fork_divergence_data(project, 'ref')).to eq({ + project_path: project.full_path, + selected_branch: 'ref', source_name: source_project.full_name, source_path: project_path(source_project), + can_sync_branch: 'false', ahead_compare_path: ahead_path, behind_compare_path: behind_path, - source_default_branch: source_project.default_branch + source_default_branch: source_project.default_branch, + create_mr_path: create_mr_path, + can_user_create_mr_in_fork: false }) end end diff --git a/spec/helpers/routing/pseudonymization_helper_spec.rb b/spec/helpers/routing/pseudonymization_helper_spec.rb index eb2cb548f35..784579dc895 100644 --- a/spec/helpers/routing/pseudonymization_helper_spec.rb +++ b/spec/helpers/routing/pseudonymization_helper_spec.rb @@ -26,17 +26,19 @@ RSpec.describe ::Routing::PseudonymizationHelper do context 'with controller for MR' do let(:masked_url) { "http://localhost/namespace#{group.id}/project#{project.id}/-/merge_requests/#{merge_request.id}" } let(:request) do - double(:Request, - path_parameters: { - controller: "projects/merge_requests", - action: "show", - namespace_id: group.name, - project_id: project.name, - id: merge_request.id.to_s - }, - protocol: 'http', - host: 'localhost', - query_string: '') + double( + :Request, + path_parameters: { + controller: "projects/merge_requests", + action: "show", + namespace_id: group.name, + project_id: project.name, + id: merge_request.id.to_s + }, + protocol: 'http', + host: 'localhost', + query_string: '' + ) end 
before do @@ -49,17 +51,19 @@ RSpec.describe ::Routing::PseudonymizationHelper do context 'with controller for issue' do let(:masked_url) { "http://localhost/namespace#{group.id}/project#{project.id}/-/issues/#{issue.id}" } let(:request) do - double(:Request, - path_parameters: { - controller: "projects/issues", - action: "show", - namespace_id: group.name, - project_id: project.name, - id: issue.id.to_s - }, - protocol: 'http', - host: 'localhost', - query_string: '') + double( + :Request, + path_parameters: { + controller: "projects/issues", + action: "show", + namespace_id: group.name, + project_id: project.name, + id: issue.id.to_s + }, + protocol: 'http', + host: 'localhost', + query_string: '' + ) end before do @@ -74,16 +78,18 @@ RSpec.describe ::Routing::PseudonymizationHelper do let(:group) { subgroup } let(:project) { subproject } let(:request) do - double(:Request, - path_parameters: { - controller: 'projects', - action: 'show', - namespace_id: subgroup.name, - id: subproject.name - }, - protocol: 'http', - host: 'localhost', - query_string: '') + double( + :Request, + path_parameters: { + controller: 'projects', + action: 'show', + namespace_id: subgroup.name, + id: subproject.name + }, + protocol: 'http', + host: 'localhost', + query_string: '' + ) end before do @@ -97,15 +103,17 @@ RSpec.describe ::Routing::PseudonymizationHelper do let(:masked_url) { "http://localhost/groups/namespace#{subgroup.id}/-/shared" } let(:group) { subgroup } let(:request) do - double(:Request, - path_parameters: { - controller: 'groups', - action: 'show', - id: subgroup.name - }, - protocol: 'http', - host: 'localhost', - query_string: '') + double( + :Request, + path_parameters: { + controller: 'groups', + action: 'show', + id: subgroup.name + }, + protocol: 'http', + host: 'localhost', + query_string: '' + ) end before do @@ -118,17 +126,19 @@ RSpec.describe ::Routing::PseudonymizationHelper do context 'with controller for blob with file path' do let(:masked_url) { 
"http://localhost/namespace#{group.id}/project#{project.id}/-/blob/:repository_path" } let(:request) do - double(:Request, - path_parameters: { - controller: 'projects/blob', - action: 'show', - namespace_id: group.name, - project_id: project.name, - id: 'master/README.md' - }, - protocol: 'http', - host: 'localhost', - query_string: '') + double( + :Request, + path_parameters: { + controller: 'projects/blob', + action: 'show', + namespace_id: group.name, + project_id: project.name, + id: 'master/README.md' + }, + protocol: 'http', + host: 'localhost', + query_string: '' + ) end before do @@ -141,14 +151,16 @@ RSpec.describe ::Routing::PseudonymizationHelper do context 'when assignee_username is present' do let(:masked_url) { "http://localhost/dashboard/issues?assignee_username=masked_assignee_username" } let(:request) do - double(:Request, - path_parameters: { - controller: 'dashboard', - action: 'issues' - }, - protocol: 'http', - host: 'localhost', - query_string: 'assignee_username=root') + double( + :Request, + path_parameters: { + controller: 'dashboard', + action: 'issues' + }, + protocol: 'http', + host: 'localhost', + query_string: 'assignee_username=root' + ) end before do @@ -161,14 +173,16 @@ RSpec.describe ::Routing::PseudonymizationHelper do context 'when author_username is present' do let(:masked_url) { "http://localhost/dashboard/issues?author_username=masked_author_username&scope=all&state=opened" } let(:request) do - double(:Request, - path_parameters: { - controller: 'dashboard', - action: 'issues' - }, - protocol: 'http', - host: 'localhost', - query_string: 'author_username=root&scope=all&state=opened') + double( + :Request, + path_parameters: { + controller: 'dashboard', + action: 'issues' + }, + protocol: 'http', + host: 'localhost', + query_string: 'author_username=root&scope=all&state=opened' + ) end before do @@ -181,14 +195,16 @@ RSpec.describe ::Routing::PseudonymizationHelper do context 'when some query params are not required to be 
masked' do let(:masked_url) { "http://localhost/dashboard/issues?author_username=masked_author_username&scope=all&state=masked_state&tab=2" } let(:request) do - double(:Request, - path_parameters: { - controller: 'dashboard', - action: 'issues' - }, - protocol: 'http', - host: 'localhost', - query_string: 'author_username=root&scope=all&state=opened&tab=2') + double( + :Request, + path_parameters: { + controller: 'dashboard', + action: 'issues' + }, + protocol: 'http', + host: 'localhost', + query_string: 'author_username=root&scope=all&state=opened&tab=2' + ) end before do @@ -202,14 +218,16 @@ RSpec.describe ::Routing::PseudonymizationHelper do context 'when query string has keys with the same names as path params' do let(:masked_url) { "http://localhost/dashboard/issues?action=masked_action&scope=all&state=opened" } let(:request) do - double(:Request, - path_parameters: { - controller: 'dashboard', - action: 'issues' - }, - protocol: 'http', - host: 'localhost', - query_string: 'action=foobar&scope=all&state=opened') + double( + :Request, + path_parameters: { + controller: 'dashboard', + action: 'issues' + }, + protocol: 'http', + host: 'localhost', + query_string: 'action=foobar&scope=all&state=opened' + ) end before do @@ -223,16 +241,18 @@ RSpec.describe ::Routing::PseudonymizationHelper do describe 'when url has no params to mask' do let(:original_url) { 'http://localhost/-/security/vulnerabilities' } let(:request) do - double(:Request, - path_parameters: { - controller: 'security/vulnerabilities', - action: 'index' - }, - protocol: 'http', - host: 'localhost', - query_string: '', - original_fullpath: '/-/security/vulnerabilities', - original_url: original_url) + double( + :Request, + path_parameters: { + controller: 'security/vulnerabilities', + action: 'index' + }, + protocol: 'http', + host: 'localhost', + query_string: '', + original_fullpath: '/-/security/vulnerabilities', + original_url: original_url + ) end before do @@ -247,15 +267,17 @@ 
RSpec.describe ::Routing::PseudonymizationHelper do describe 'when it raises exception' do context 'calls error tracking' do let(:request) do - double(:Request, - path_parameters: { - controller: 'dashboard', - action: 'issues' - }, - protocol: 'http', - host: 'localhost', - query_string: 'assignee_username=root', - original_fullpath: '/dashboard/issues?assignee_username=root') + double( + :Request, + path_parameters: { + controller: 'dashboard', + action: 'issues' + }, + protocol: 'http', + host: 'localhost', + query_string: 'assignee_username=root', + original_fullpath: '/dashboard/issues?assignee_username=root' + ) end before do diff --git a/spec/helpers/search_helper_spec.rb b/spec/helpers/search_helper_spec.rb index ba703914049..2cea577a852 100644 --- a/spec/helpers/search_helper_spec.rb +++ b/spec/helpers/search_helper_spec.rb @@ -306,6 +306,46 @@ RSpec.describe SearchHelper, feature_category: :global_search do end end + describe 'projects_autocomplete' do + let_it_be(:user) { create(:user, name: "madelein") } + let_it_be(:project_1) { create(:project, name: 'test 1') } + let_it_be(:project_2) { create(:project, name: 'test 2') } + let(:search_term) { 'test' } + + before do + allow(self).to receive(:current_user).and_return(user) + end + + context 'when the user does not have access to projects' do + it 'does not return any results' do + expect(projects_autocomplete(search_term)).to eq([]) + end + end + + context 'when the user has access to one project' do + before do + project_2.add_developer(user) + end + + it 'returns the project' do + expect(projects_autocomplete(search_term).pluck(:id)).to eq([project_2.id]) + end + + context 'when a project namespace matches the search term but the project does not' do + let_it_be(:group) { create(:group, name: 'test group') } + let_it_be(:project_3) { create(:project, name: 'nothing', namespace: group) } + + before do + group.add_owner(user) + end + + it 'returns all projects matching the term' do + 
expect(projects_autocomplete(search_term).pluck(:id)).to match_array([project_2.id, project_3.id]) + end + end + end + end + describe 'search_entries_info' do using RSpec::Parameterized::TableSyntax diff --git a/spec/helpers/sidebars_helper_spec.rb b/spec/helpers/sidebars_helper_spec.rb index dbb6f9bd9f3..5323b041a9e 100644 --- a/spec/helpers/sidebars_helper_spec.rb +++ b/spec/helpers/sidebars_helper_spec.rb @@ -65,14 +65,16 @@ RSpec.describe SidebarsHelper, feature_category: :navigation do let_it_be(:user) { build(:user) } let_it_be(:group) { build(:group) } let_it_be(:panel) { {} } + let_it_be(:panel_type) { 'project' } subject do - helper.super_sidebar_context(user, group: group, project: nil, panel: panel) + helper.super_sidebar_context(user, group: group, project: nil, panel: panel, panel_type: panel_type) end before do allow(helper).to receive(:current_user) { user } allow(helper).to receive(:can?).and_return(true) + allow(helper).to receive(:header_search_context).and_return({ some: "search data" }) allow(panel).to receive(:super_sidebar_menu_items).and_return(nil) allow(panel).to receive(:super_sidebar_context_header).and_return(nil) allow(user).to receive(:assigned_open_issues_count).and_return(1) @@ -80,6 +82,7 @@ RSpec.describe SidebarsHelper, feature_category: :navigation do allow(user).to receive(:review_requested_open_merge_requests_count).and_return(0) allow(user).to receive(:todos_pending_count).and_return(3) allow(user).to receive(:total_merge_requests_count).and_return(4) + allow(user).to receive(:pinned_nav_items).and_return({ panel_type => %w[foo bar], 'another_panel' => %w[baz] }) end it 'returns sidebar values from user', :use_clean_rails_memory_store_caching do @@ -111,12 +114,12 @@ RSpec.describe SidebarsHelper, feature_category: :navigation do }, can_sign_out: helper.current_user_menu?(:sign_out), sign_out_link: destroy_user_session_path, - assigned_open_issues_count: 1, + assigned_open_issues_count: "1", todos_pending_count: 3, 
issues_dashboard_path: issues_dashboard_path(assignee_username: user.username), - total_merge_requests_count: 4, - projects_path: projects_path, - groups_path: groups_path, + total_merge_requests_count: "4", + projects_path: dashboard_projects_path, + groups_path: dashboard_groups_path, support_path: helper.support_url, display_whats_new: helper.display_whats_new?, whats_new_most_recent_release_items_count: helper.whats_new_most_recent_release_items_count, @@ -126,7 +129,34 @@ RSpec.describe SidebarsHelper, feature_category: :navigation do gitlab_version_check: helper.gitlab_version_check, gitlab_com_but_not_canary: Gitlab.com_but_not_canary?, gitlab_com_and_canary: Gitlab.com_and_canary?, - canary_toggle_com_url: Gitlab::Saas.canary_toggle_com_url + canary_toggle_com_url: Gitlab::Saas.canary_toggle_com_url, + search: { + search_path: search_path, + issues_path: issues_dashboard_path, + mr_path: merge_requests_dashboard_path, + autocomplete_path: search_autocomplete_path, + search_context: helper.header_search_context + }, + pinned_items: %w[foo bar], + panel_type: panel_type, + update_pins_url: pins_url, + shortcut_links: [ + { + title: _('Milestones'), + href: dashboard_milestones_path, + css_class: 'dashboard-shortcuts-milestones' + }, + { + title: _('Snippets'), + href: dashboard_snippets_path, + css_class: 'dashboard-shortcuts-snippets' + }, + { + title: _('Activity'), + href: activity_dashboard_path, + css_class: 'dashboard-shortcuts-activity' + } + ] }) end @@ -138,12 +168,24 @@ RSpec.describe SidebarsHelper, feature_category: :navigation do { text: _('Assigned'), href: merge_requests_dashboard_path(assignee_username: user.username), - count: 4 + count: 4, + extraAttrs: { + 'data-track-action': 'click_link', + 'data-track-label': 'merge_requests_assigned', + 'data-track-property': 'nav_core_menu', + class: 'dashboard-shortcuts-merge_requests' + } }, { text: _('Review requests'), href: merge_requests_dashboard_path(reviewer_username: user.username), - count: 
0 + count: 0, + extraAttrs: { + 'data-track-action': 'click_link', + 'data-track-label': 'merge_requests_to_review', + 'data-track-property': 'nav_core_menu', + class: 'dashboard-shortcuts-review_requests' + } } ] } @@ -151,19 +193,42 @@ RSpec.describe SidebarsHelper, feature_category: :navigation do end it 'returns "Create new" menu groups without headers', :use_clean_rails_memory_store_caching do + extra_attrs = ->(id) { + { + "data-track-label": id, + "data-track-action": "click_link", + "data-track-property": "nav_create_menu", + "data-qa-selector": 'create_menu_item', + "data-qa-create-menu-item": id + } + } + expect(subject[:create_new_menu_groups]).to eq([ { name: "", items: [ - { href: "/projects/new", text: "New project/repository" }, - { href: "/groups/new", text: "New group" }, - { href: "/-/snippets/new", text: "New snippet" } + { href: "/projects/new", text: "New project/repository", + extraAttrs: extra_attrs.call("general_new_project") }, + { href: "/groups/new", text: "New group", + extraAttrs: extra_attrs.call("general_new_group") }, + { href: "/-/snippets/new", text: "New snippet", + extraAttrs: extra_attrs.call("general_new_snippet") } ] } ]) end it 'returns "Create new" menu groups with headers', :use_clean_rails_memory_store_caching do + extra_attrs = ->(id) { + { + "data-track-label": id, + "data-track-action": "click_link", + "data-track-property": "nav_create_menu", + "data-qa-selector": 'create_menu_item', + "data-qa-create-menu-item": id + } + } + allow(group).to receive(:persisted?).and_return(true) allow(helper).to receive(:can?).and_return(true) @@ -171,28 +236,49 @@ RSpec.describe SidebarsHelper, feature_category: :navigation do a_hash_including( name: "In this group", items: array_including( - { href: "/projects/new", text: "New project/repository" }, - { href: "/groups/new#create-group-pane", text: "New subgroup" }, - { href: '', text: "Invite members" } + { href: "/projects/new", text: "New project/repository", + extraAttrs: 
extra_attrs.call("new_project") }, + { href: "/groups/new#create-group-pane", text: "New subgroup", + extraAttrs: extra_attrs.call("new_subgroup") }, + { href: "", text: "Invite members", + extraAttrs: extra_attrs.call("invite") } ) ), a_hash_including( name: "In GitLab", items: array_including( - { href: "/projects/new", text: "New project/repository" }, - { href: "/groups/new", text: "New group" }, - { href: "/-/snippets/new", text: "New snippet" } + { href: "/projects/new", text: "New project/repository", + extraAttrs: extra_attrs.call("general_new_project") }, + { href: "/groups/new", text: "New group", + extraAttrs: extra_attrs.call("general_new_group") }, + { href: "/-/snippets/new", text: "New snippet", + extraAttrs: extra_attrs.call("general_new_snippet") } ) ) ) end + context 'when counts are high' do + before do + allow(user).to receive(:assigned_open_issues_count).and_return(1000) + allow(user).to receive(:assigned_open_merge_requests_count).and_return(50) + allow(user).to receive(:review_requested_open_merge_requests_count).and_return(50) + end + + it 'caps counts to USER_BAR_COUNT_LIMIT and appends a "+" to them' do + expect(subject).to include( + assigned_open_issues_count: "99+", + total_merge_requests_count: "99+" + ) + end + end + describe 'current context' do context 'when current context is a project' do let_it_be(:project) { build(:project) } subject do - helper.super_sidebar_context(user, group: nil, project: project, panel: panel) + helper.super_sidebar_context(user, group: nil, project: project, panel: panel, panel_type: panel_type) end before do @@ -215,7 +301,7 @@ RSpec.describe SidebarsHelper, feature_category: :navigation do context 'when current context is a group' do subject do - helper.super_sidebar_context(user, group: group, project: nil, panel: panel) + helper.super_sidebar_context(user, group: group, project: nil, panel: panel, panel_type: panel_type) end before do @@ -238,7 +324,7 @@ RSpec.describe SidebarsHelper, 
feature_category: :navigation do context 'when current context is not tracked' do subject do - helper.super_sidebar_context(user, group: nil, project: nil, panel: panel) + helper.super_sidebar_context(user, group: nil, project: nil, panel: panel, panel_type: panel_type) end it 'returns no context' do @@ -246,6 +332,55 @@ RSpec.describe SidebarsHelper, feature_category: :navigation do end end end + + describe 'context switcher persistent links' do + let_it_be(:public_link) do + [ + { title: s_('Navigation|Your work'), link: '/', icon: 'work' }, + { title: s_('Navigation|Explore'), link: '/explore', icon: 'compass' } + ] + end + + subject do + helper.super_sidebar_context(user, group: nil, project: nil, panel: panel, panel_type: panel_type) + end + + context 'when user is not an admin' do + it 'returns only the public links' do + expect(subject[:context_switcher_links]).to eq(public_link) + end + end + + context 'when user is an admin' do + before do + allow(user).to receive(:can_admin_all_resources?).and_return(true) + end + + it 'returns public links and admin area link' do + expect(subject[:context_switcher_links]).to eq([ + *public_link, + { title: s_('Navigation|Admin'), link: '/admin', icon: 'admin' } + ]) + end + end + end + + describe 'impersonation data' do + it 'sets is_impersonating to `false` when not impersonating' do + expect(subject[:is_impersonating]).to be(false) + end + + it 'passes the stop_impersonation_path property' do + expect(subject[:stop_impersonation_path]).to eq(admin_impersonation_path) + end + + describe 'when impersonating' do + it 'sets is_impersonating to `true`' do + expect(helper).to receive(:session).and_return({ impersonator_id: 1 }) + expect(subject[:is_impersonating]).to be(true) + end + end + end end describe '#super_sidebar_nav_panel' do @@ -256,7 +391,8 @@ RSpec.describe SidebarsHelper, feature_category: :navigation do before do allow(helper).to receive(:project_sidebar_context_data).and_return( { current_user: nil, 
container: project, can_view_pipeline_editor: false, learn_gitlab_enabled: false }) - allow(helper).to receive(:group_sidebar_context_data).and_return({ current_user: nil, container: group }) + allow(helper).to receive(:group_sidebar_context_data).and_return( + { current_user: nil, container: group, show_discover_group_security: false }) allow(group).to receive(:to_global_id).and_return(5) Rails.cache.write(['users', user.id, 'assigned_open_issues_count'], 1) @@ -282,10 +418,18 @@ RSpec.describe SidebarsHelper, feature_category: :navigation do expect(helper.super_sidebar_nav_panel(nav: 'user_profile')).to be_a(Sidebars::UserProfile::Panel) end + it 'returns Admin Panel for admin nav' do + expect(helper.super_sidebar_nav_panel(nav: 'admin')).to be_a(Sidebars::Admin::Panel) + end + it 'returns "Your Work" Panel for your_work nav', :use_clean_rails_memory_store_caching do expect(helper.super_sidebar_nav_panel(nav: 'your_work', user: user)).to be_a(Sidebars::YourWork::Panel) end + it 'returns Search Panel for search nav' do + expect(helper.super_sidebar_nav_panel(nav: 'search', user: user)).to be_a(Sidebars::Search::Panel) + end + it 'returns "Your Work" Panel as a fallback', :use_clean_rails_memory_store_caching do expect(helper.super_sidebar_nav_panel(user: user)).to be_a(Sidebars::YourWork::Panel) end diff --git a/spec/helpers/storage_helper_spec.rb b/spec/helpers/storage_helper_spec.rb index 6c0f1034d65..d62da2ca714 100644 --- a/spec/helpers/storage_helper_spec.rb +++ b/spec/helpers/storage_helper_spec.rb @@ -24,18 +24,22 @@ RSpec.describe StorageHelper do describe "#storage_counters_details" do let_it_be(:namespace) { create(:namespace) } let_it_be(:project) do - create(:project, - namespace: namespace, - statistics: build(:project_statistics, - namespace: namespace, - repository_size: 10.kilobytes, - wiki_size: 10.bytes, - lfs_objects_size: 20.gigabytes, - build_artifacts_size: 30.megabytes, - pipeline_artifacts_size: 11.megabytes, - snippets_size: 40.megabytes, 
- packages_size: 12.megabytes, - uploads_size: 15.megabytes)) + create( + :project, + namespace: namespace, + statistics: build( + :project_statistics, + namespace: namespace, + repository_size: 10.kilobytes, + wiki_size: 10.bytes, + lfs_objects_size: 20.gigabytes, + build_artifacts_size: 30.megabytes, + pipeline_artifacts_size: 11.megabytes, + snippets_size: 40.megabytes, + packages_size: 12.megabytes, + uploads_size: 15.megabytes + ) + ) end let(:message) { 'Repository: 10 KB / Wikis: 10 Bytes / Build Artifacts: 30 MB / Pipeline Artifacts: 11 MB / LFS: 20 GB / Snippets: 40 MB / Packages: 12 MB / Uploads: 15 MB' } diff --git a/spec/helpers/todos_helper_spec.rb b/spec/helpers/todos_helper_spec.rb index 8d24e9576e0..ea39147e3f6 100644 --- a/spec/helpers/todos_helper_spec.rb +++ b/spec/helpers/todos_helper_spec.rb @@ -9,20 +9,21 @@ RSpec.describe TodosHelper do let_it_be(:issue) { create(:issue, title: 'Issue 1', project: project) } let_it_be(:design) { create(:design, issue: issue) } let_it_be(:note) do - create(:note, - project: issue.project, - note: 'I am note, hear me roar') + create(:note, project: issue.project, note: 'I am note, hear me roar') end let_it_be(:group) { create(:group, :public, name: 'Group 1') } let_it_be(:design_todo) do - create(:todo, :mentioned, - user: user, - project: project, - target: design, - author: author, - note: note) + create( + :todo, + :mentioned, + user: user, + project: project, + target: design, + author: author, + note: note + ) end let_it_be(:alert_todo) do @@ -93,11 +94,14 @@ RSpec.describe TodosHelper do context 'when given a non-design todo' do let(:todo) do - build_stubbed(:todo, :assigned, - user: user, - project: issue.project, - target: issue, - author: author) + build_stubbed( + :todo, + :assigned, + user: user, + project: issue.project, + target: issue, + author: author + ) end it 'returns the title' do @@ -154,11 +158,13 @@ RSpec.describe TodosHelper do context 'when a user requests access to group' do 
let_it_be(:group_access_request_todo) do - create(:todo, - target_id: group.id, - target_type: group.class.polymorphic_name, - group: group, - action: Todo::MEMBER_ACCESS_REQUESTED) + create( + :todo, + target_id: group.id, + target_type: group.class.polymorphic_name, + group: group, + action: Todo::MEMBER_ACCESS_REQUESTED + ) end it 'responds with access requests tab' do @@ -295,7 +301,7 @@ RSpec.describe TodosHelper do end describe '#no_todos_messages' do - context 'when getting todos messsages' do + context 'when getting todos messages' do it 'return these sentences' do expected_sentences = [ s_('Todos|Good job! Looks like you don\'t have anything left on your To-Do List'), diff --git a/spec/helpers/tree_helper_spec.rb b/spec/helpers/tree_helper_spec.rb index c40284ee933..01dacf5fcad 100644 --- a/spec/helpers/tree_helper_spec.rb +++ b/spec/helpers/tree_helper_spec.rb @@ -3,6 +3,7 @@ require 'spec_helper' RSpec.describe TreeHelper do + include Devise::Test::ControllerHelpers let_it_be(:project) { create(:project, :repository) } let(:repository) { project.repository } let(:sha) { 'c1c67abbaf91f624347bb3ae96eabe3a1b742478' } diff --git a/spec/helpers/users/group_callouts_helper_spec.rb b/spec/helpers/users/group_callouts_helper_spec.rb index da67c4921b3..c6679069c49 100644 --- a/spec/helpers/users/group_callouts_helper_spec.rb +++ b/spec/helpers/users/group_callouts_helper_spec.rb @@ -70,10 +70,12 @@ RSpec.describe Users::GroupCalloutsHelper do context 'when the invite_members_banner has been dismissed' do before do - create(:group_callout, - user: user, - group: group, - feature_name: described_class::INVITE_MEMBERS_BANNER) + create( + :group_callout, + user: user, + group: group, + feature_name: described_class::INVITE_MEMBERS_BANNER + ) end it { is_expected.to eq(false) } diff --git a/spec/helpers/users_helper_spec.rb b/spec/helpers/users_helper_spec.rb index 2829236f7d1..e99bb4859d5 100644 --- a/spec/helpers/users_helper_spec.rb +++ 
b/spec/helpers/users_helper_spec.rb @@ -520,8 +520,59 @@ RSpec.describe UsersHelper do followees: 3, followers: 2, user_calendar_path: '/users/root/calendar.json', - utc_offset: 0 + utc_offset: 0, + user_id: user.id }) end end + + describe '#load_max_project_member_accesses' do + let_it_be(:projects) { create_list(:project, 3) } + + before(:all) do + projects.first.add_developer(user) + end + + context 'without current_user' do + before do + allow(helper).to receive(:current_user).and_return(nil) + end + + it 'executes no queries' do + sample = ActiveRecord::QueryRecorder.new do + helper.load_max_project_member_accesses(projects) + end + + expect(sample).not_to exceed_query_limit(0) + end + end + + context 'when current_user is present', :request_store do + before do + allow(helper).to receive(:current_user).and_return(user) + end + + it 'preloads ProjectPolicy#lookup_access_level! and UsersHelper#max_member_project_member_access for current_user in two queries', :aggregate_failures do + preload_queries = ActiveRecord::QueryRecorder.new do + helper.load_max_project_member_accesses(projects) + end + + helper_queries = ActiveRecord::QueryRecorder.new do + projects.each do |project| + helper.max_project_member_access(project) + end + end + + access_queries = ActiveRecord::QueryRecorder.new do + projects.each do |project| + user.can?(:read_code, project) + end + end + + expect(preload_queries).not_to exceed_query_limit(2) + expect(helper_queries).not_to exceed_query_limit(0) + expect(access_queries).not_to exceed_query_limit(0) + end + end + end end diff --git a/spec/helpers/visibility_level_helper_spec.rb b/spec/helpers/visibility_level_helper_spec.rb index 2aac0cae0c6..8f37bf29a4b 100644 --- a/spec/helpers/visibility_level_helper_spec.rb +++ b/spec/helpers/visibility_level_helper_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe VisibilityLevelHelper do +RSpec.describe VisibilityLevelHelper, feature_category: :system_access do include ProjectForksHelper 
let(:project) { build(:project) } @@ -78,6 +78,23 @@ RSpec.describe VisibilityLevelHelper do expect(descriptions.uniq.size).to eq(descriptions.size) expect(descriptions).to all match /group/i end + + it 'returns default description for public group' do + expect(descriptions[2]).to eq('The group and any public projects can be viewed without any authentication.') + end + + context 'when application setting `should_check_namespace_plan` is true', if: Gitlab.ee? do + let(:group) { create(:group) } + let(:public_option_description) { visibility_level_description(Gitlab::VisibilityLevel::PUBLIC, group) } + + before do + allow(Gitlab::CurrentSettings.current_application_settings).to receive(:should_check_namespace_plan?) { true } + end + + it 'returns updated description for public visibility option in group general settings' do + expect(public_option_description).to match /^The group, any public projects, and any of their members, issues, and merge requests can be viewed without authentication./ + end + end end end @@ -161,8 +178,10 @@ RSpec.describe VisibilityLevelHelper do end before do - stub_application_setting(restricted_visibility_levels: restricted_levels, - default_project_visibility: global_default_level) + stub_application_setting( + restricted_visibility_levels: restricted_levels, + default_project_visibility: global_default_level + ) end with_them do diff --git a/spec/initializers/check_forced_decomposition_spec.rb b/spec/initializers/check_forced_decomposition_spec.rb index 64cb1184e7a..23fa3de297a 100644 --- a/spec/initializers/check_forced_decomposition_spec.rb +++ b/spec/initializers/check_forced_decomposition_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe 'check_forced_decomposition initializer', feature_category: :pods do +RSpec.describe 'check_forced_decomposition initializer', feature_category: :cell do subject(:check_forced_decomposition) do load Rails.root.join('config/initializers/check_forced_decomposition.rb') end diff --git 
a/spec/initializers/doorkeeper_openid_connect_patch_spec.rb b/spec/initializers/doorkeeper_openid_connect_patch_spec.rb new file mode 100644 index 00000000000..c04d7d95de6 --- /dev/null +++ b/spec/initializers/doorkeeper_openid_connect_patch_spec.rb @@ -0,0 +1,74 @@ +# frozen_string_literal: true + +require 'spec_helper' +require_relative '../../config/initializers/doorkeeper_openid_connect_patch' + +RSpec.describe 'doorkeeper_openid_connect_patch', feature_category: :integrations do + describe '.signing_key' do + let(:config) { Doorkeeper::OpenidConnect::Config.new } + + before do + allow(config).to receive(:signing_key).and_return(key) + allow(config).to receive(:signing_algorithm).and_return(algorithm) + allow(Doorkeeper::OpenidConnect).to receive(:configuration).and_return(config) + end + + context 'with RS256 algorithm' do + let(:algorithm) { :RS256 } + # Taken from https://github.com/doorkeeper-gem/doorkeeper-openid_connect/blob/01903c81a2b6237a3bf576ed45864f69ef20184e/spec/dummy/config/initializers/doorkeeper_openid_connect.rb#L6-L34 + let(:key) do + <<~KEY + -----BEGIN RSA PRIVATE KEY----- + MIIEpgIBAAKCAQEAsjdnSA6UWUQQHf6BLIkIEUhMRNBJC1NN/pFt1EJmEiI88GS0 + ceROO5B5Ooo9Y3QOWJ/n+u1uwTHBz0HCTN4wgArWd1TcqB5GQzQRP4eYnWyPfi4C + feqAHzQp+v4VwbcK0LW4FqtW5D0dtrFtI281FDxLhARzkhU2y7fuYhL8fVw5rUhE + 8uwvHRZ5CEZyxf7BSHxIvOZAAymhuzNLATt2DGkDInU1BmF75tEtBJAVLzWG/j4L + PZh1EpSdfezqaXQlcy9PJi916UzTl0P7Yy+ulOdUsMlB6yo8qKTY1+AbZ5jzneHb + GDU/O8QjYvii1WDmJ60t0jXicmOkGrOhruOptwIDAQABAoIBAQChYNwMeu9IugJi + NsEf4+JDTBWMRpOuRrwcpfIvQAUPrKNEB90COPvCoju0j9OxCDmpdPtq1K/zD6xx + khlw485FVAsKufSp4+g6GJ75yT6gZtq1JtKo1L06BFFzb7uh069eeP7+wB6JxPHw + KlAqwxvsfADhxeolQUKCTMb3Vjv/Aw2cO/nn6RAOeftw2aDmFy8Xl+oTUtSxyib0 + YCdU9cK8MxsxDdmowwHp04xRTm/wfG5hLEn7HMz1PP86iP9BiFsCqTId9dxEUTS1 + K+VAt9FbxRAq5JlBocxUMHNxLigb94Ca2FOMR7F6l/tronLfHD801YoObF0fN9qW + Cgw4aTO5AoGBAOR79hiZVM7/l1cBid7hKSeMWKUZ/nrwJsVfNpu1H9xt9uDu+79U + mcGfM7pm7L2qCNGg7eeWBHq2CVg/XQacRNtcTlomFrw4tDXUkFN1hE56t1iaTs9m + 
dN9IDr6jFgf6UaoOxxoPT9Q1ZtO46l043Nzrkoz8cBEBaBY20bUDwCYjAoGBAMet + tt1ImGF1cx153KbOfjl8v54VYUVkmRNZTa1E821nL/EMpoONSqJmRVsX7grLyPL1 + QyZe245NOvn63YM0ng0rn2osoKsMVJwYBEYjHL61iF6dPtW5p8FIs7auRnC3NrG0 + XxHATZ4xhHD0iIn14iXh0XIhUVk+nGktHU1gbmVdAoGBANniwKdqqS6RHKBTDkgm + Dhnxw6MGa+CO3VpA1xGboxuRHeoY3KfzpIC5MhojBsZDvQ8zWUwMio7+w2CNZEfm + g99wYiOjyPCLXocrAssj+Rzh97AdzuQHf5Jh4/W2Dk9jTbdPSl02ltj2Z+2lnJFz + pWNjnqimHrSI09rDQi5NulJjAoGBAImquujVpDmNQFCSNA7NTzlTSMk09FtjgCZW + 67cKUsqa2fLXRfZs84gD+s1TMks/NMxNTH6n57e0h3TSAOb04AM0kDQjkKJdXfhA + lrHEg4z4m4yf3TJ9Tat09HJ+tRIBPzRFp0YVz23Btg4qifiUDdcQWdbWIb/l6vCY + qhsu4O4BAoGBANbceYSDYRdT7a5QjJGibkC90Z3vFe4rDTBgZWg7xG0cpSU4JNg7 + SFR3PjWQyCg7aGGXiooCM38YQruACTj0IFub24MFRA4ZTXvrACvpsVokJlQiG0Z4 + tuQKYki41JvYqPobcq/rLE/AM7PKJftW35nqFuj0MrsUwPacaVwKBf5J + -----END RSA PRIVATE KEY----- + KEY + end + + it 'returns the private key as JWK instance' do + expect(Doorkeeper::OpenidConnect.signing_key).to be_a ::JWT::JWK::KeyBase + expect(Doorkeeper::OpenidConnect.signing_key.kid).to eq 'IqYwZo2cE6hsyhs48cU8QHH4GanKIx0S4Dc99kgTIMA' + end + + it 'matches json-jwt implementation' do + json_jwt_key = OpenSSL::PKey::RSA.new(key).public_key.to_jwk.slice(:kty, :kid, :e, :n) + expect(Doorkeeper::OpenidConnect.signing_key.export.sort.to_json).to eq(json_jwt_key.sort.to_json) + end + end + + context 'with HS512 algorithm' do + let(:algorithm) { :HS512 } + let(:key) { 'the_greatest_secret_key' } + + it 'returns the HMAC public key parameters' do + expect(Doorkeeper::OpenidConnect.signing_key_normalized).to eq( + kty: 'oct', + kid: 'lyAW7LdxryFWQtLdgxZpOrI87APHrzJKgWLT0BkWVog' + ) + end + end + end +end diff --git a/spec/initializers/load_balancing_spec.rb b/spec/initializers/load_balancing_spec.rb index 66aaa52eef2..eddedcb2f38 100644 --- a/spec/initializers/load_balancing_spec.rb +++ b/spec/initializers/load_balancing_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe 'load_balancing', :delete, :reestablished_active_record_base, 
feature_category: :pods do +RSpec.describe 'load_balancing', :delete, :reestablished_active_record_base, feature_category: :cell do subject(:initialize_load_balancer) do load Rails.root.join('config/initializers/load_balancing.rb') end diff --git a/spec/initializers/net_http_patch_spec.rb b/spec/initializers/net_http_patch_spec.rb index d56730917f1..82f896e1fa7 100644 --- a/spec/initializers/net_http_patch_spec.rb +++ b/spec/initializers/net_http_patch_spec.rb @@ -8,6 +8,12 @@ require_relative '../../config/initializers/net_http_patch' RSpec.describe 'Net::HTTP patch proxy user and password encoding' do let(:net_http) { Net::HTTP.new('hostname.example') } + before do + # This file can be removed once Ruby 3.0 is no longer supported: + # https://gitlab.com/gitlab-org/gitlab/-/issues/396223 + skip if Gem::Version.new(RUBY_VERSION) >= Gem::Version.new(3.1) + end + describe '#proxy_user' do subject { net_http.proxy_user } diff --git a/spec/initializers/net_http_response_patch_spec.rb b/spec/initializers/net_http_response_patch_spec.rb index 3bd0d8c3907..eee0747a02a 100644 --- a/spec/initializers/net_http_response_patch_spec.rb +++ b/spec/initializers/net_http_response_patch_spec.rb @@ -2,15 +2,15 @@ require 'spec_helper' -RSpec.describe 'Net::HTTPResponse patch header read timeout' do +RSpec.describe 'Net::HTTPResponse patch header read timeout', feature_category: :integrations do describe '.each_response_header' do let(:server_response) do - <<~EOS + <<~HTTP Content-Type: text/html Header-Two: foo Hello World - EOS + HTTP end before do @@ -30,14 +30,12 @@ RSpec.describe 'Net::HTTPResponse patch header read timeout' do end context 'when the response contains many consecutive spaces' do - before do + it 'has no regex backtracking issues' do expect(socket).to receive(:readuntil).and_return( "a: #{' ' * 100_000} b", '' ) - end - it 'has no regex backtracking issues' do Timeout.timeout(1) do each_response_header end diff --git a/spec/lib/api/ci/helpers/runner_spec.rb 
b/spec/lib/api/ci/helpers/runner_spec.rb index 8264db8344d..06ec0396ab1 100644 --- a/spec/lib/api/ci/helpers/runner_spec.rb +++ b/spec/lib/api/ci/helpers/runner_spec.rb @@ -67,74 +67,44 @@ RSpec.describe API::Ci::Helpers::Runner do end end - describe '#current_runner_machine', :freeze_time, feature_category: :runner_fleet do + describe '#current_runner_manager', :freeze_time, feature_category: :runner_fleet do let(:runner) { create(:ci_runner, token: 'foo') } - let(:runner_machine) { create(:ci_runner_machine, runner: runner, system_xid: 'bar', contacted_at: 1.hour.ago) } + let(:runner_manager) { create(:ci_runner_machine, runner: runner, system_xid: 'bar', contacted_at: 1.hour.ago) } - subject(:current_runner_machine) { helper.current_runner_machine } + subject(:current_runner_manager) { helper.current_runner_manager } - context 'with create_runner_machine FF enabled' do + context 'when runner manager already exists' do before do - stub_feature_flags(create_runner_machine: true) + allow(helper).to receive(:params).and_return(token: runner.token, system_id: runner_manager.system_xid) end - context 'when runner machine already exists' do - before do - allow(helper).to receive(:params).and_return(token: runner.token, system_id: runner_machine.system_xid) - end + it { is_expected.to eq(runner_manager) } - it { is_expected.to eq(runner_machine) } - - it 'does not update the contacted_at field' do - expect(current_runner_machine.contacted_at).to eq 1.hour.ago - end - end - - context 'when runner machine cannot be found' do - it 'creates a new runner machine', :aggregate_failures do - allow(helper).to receive(:params).and_return(token: runner.token, system_id: 'new_system_id') - - expect { current_runner_machine }.to change { Ci::RunnerMachine.count }.by(1) - - expect(current_runner_machine).not_to be_nil - expect(current_runner_machine.system_xid).to eq('new_system_id') - expect(current_runner_machine.contacted_at).to eq(Time.current) - 
expect(current_runner_machine.runner).to eq(runner) - end - - it 'creates a new runner machine if system_id is not specified', :aggregate_failures do - allow(helper).to receive(:params).and_return(token: runner.token) - - expect { current_runner_machine }.to change { Ci::RunnerMachine.count }.by(1) - - expect(current_runner_machine).not_to be_nil - expect(current_runner_machine.system_xid).to eq(::API::Ci::Helpers::Runner::LEGACY_SYSTEM_XID) - expect(current_runner_machine.runner).to eq(runner) - end + it 'does not update the contacted_at field' do + expect(current_runner_manager.contacted_at).to eq 1.hour.ago end end - context 'with create_runner_machine FF disabled' do - before do - stub_feature_flags(create_runner_machine: false) - end + context 'when runner manager cannot be found' do + it 'creates a new runner manager', :aggregate_failures do + allow(helper).to receive(:params).and_return(token: runner.token, system_id: 'new_system_id') - it 'does not return runner machine if no system_id specified' do - allow(helper).to receive(:params).and_return(token: runner.token) + expect { current_runner_manager }.to change { Ci::RunnerManager.count }.by(1) - is_expected.to be_nil + expect(current_runner_manager).not_to be_nil + expect(current_runner_manager.system_xid).to eq('new_system_id') + expect(current_runner_manager.contacted_at).to eq(Time.current) + expect(current_runner_manager.runner).to eq(runner) end - context 'when runner machine can not be found' do - before do - allow(helper).to receive(:params).and_return(token: runner.token, system_id: 'new_system_id') - end + it 'creates a new runner manager if system_id is not specified', :aggregate_failures do + allow(helper).to receive(:params).and_return(token: runner.token) - it 'does not create a new runner machine', :aggregate_failures do - expect { current_runner_machine }.not_to change { Ci::RunnerMachine.count } + expect { current_runner_manager }.to change { Ci::RunnerManager.count }.by(1) - 
expect(current_runner_machine).to be_nil - end + expect(current_runner_manager).not_to be_nil + expect(current_runner_manager.system_xid).to eq(::API::Ci::Helpers::Runner::LEGACY_SYSTEM_XID) + expect(current_runner_manager.runner).to eq(runner) end end end diff --git a/spec/lib/api/entities/clusters/agent_authorization_spec.rb b/spec/lib/api/entities/clusters/agent_authorization_spec.rb deleted file mode 100644 index 3a1deb43bf8..00000000000 --- a/spec/lib/api/entities/clusters/agent_authorization_spec.rb +++ /dev/null @@ -1,36 +0,0 @@ -# frozen_string_literal: true - -require 'spec_helper' - -RSpec.describe API::Entities::Clusters::AgentAuthorization do - subject { described_class.new(authorization).as_json } - - shared_examples 'generic authorization' do - it 'includes shared fields' do - expect(subject).to include( - id: authorization.agent_id, - config_project: a_hash_including(id: authorization.agent.project_id), - configuration: authorization.config - ) - end - end - - context 'project authorization' do - let(:authorization) { create(:agent_project_authorization) } - - include_examples 'generic authorization' - end - - context 'group authorization' do - let(:authorization) { create(:agent_group_authorization) } - - include_examples 'generic authorization' - end - - context 'implicit authorization' do - let(:agent) { create(:cluster_agent) } - let(:authorization) { Clusters::Agents::ImplicitAuthorization.new(agent: agent) } - - include_examples 'generic authorization' - end -end diff --git a/spec/lib/api/entities/clusters/agents/authorizations/ci_access_spec.rb b/spec/lib/api/entities/clusters/agents/authorizations/ci_access_spec.rb new file mode 100644 index 00000000000..4dd20f26dc9 --- /dev/null +++ b/spec/lib/api/entities/clusters/agents/authorizations/ci_access_spec.rb @@ -0,0 +1,36 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe API::Entities::Clusters::Agents::Authorizations::CiAccess, feature_category: :deployment_management 
do + subject { described_class.new(authorization).as_json } + + shared_examples 'generic authorization' do + it 'includes shared fields' do + expect(subject).to include( + id: authorization.agent_id, + config_project: a_hash_including(id: authorization.agent.project_id), + configuration: authorization.config + ) + end + end + + context 'project authorization' do + let(:authorization) { create(:agent_ci_access_project_authorization) } + + include_examples 'generic authorization' + end + + context 'group authorization' do + let(:authorization) { create(:agent_ci_access_group_authorization) } + + include_examples 'generic authorization' + end + + context 'implicit authorization' do + let(:agent) { create(:cluster_agent) } + let(:authorization) { Clusters::Agents::Authorizations::CiAccess::ImplicitAuthorization.new(agent: agent) } + + include_examples 'generic authorization' + end +end diff --git a/spec/lib/api/entities/ml/mlflow/run_info_spec.rb b/spec/lib/api/entities/ml/mlflow/run_info_spec.rb index b64a1555332..28fef16a532 100644 --- a/spec/lib/api/entities/ml/mlflow/run_info_spec.rb +++ b/spec/lib/api/entities/ml/mlflow/run_info_spec.rb @@ -55,13 +55,13 @@ RSpec.describe API::Entities::Ml::Mlflow::RunInfo, feature_category: :mlops do describe 'run_id' do it 'is the iid as string' do - expect(subject[:run_id]).to eq(candidate.iid.to_s) + expect(subject[:run_id]).to eq(candidate.eid.to_s) end end describe 'run_uuid' do it 'is the iid as string' do - expect(subject[:run_uuid]).to eq(candidate.iid.to_s) + expect(subject[:run_uuid]).to eq(candidate.eid.to_s) end end diff --git a/spec/lib/api/entities/ml/mlflow/run_spec.rb b/spec/lib/api/entities/ml/mlflow/run_spec.rb index b8d38093681..a57f70f788b 100644 --- a/spec/lib/api/entities/ml/mlflow/run_spec.rb +++ b/spec/lib/api/entities/ml/mlflow/run_spec.rb @@ -12,7 +12,7 @@ RSpec.describe API::Entities::Ml::Mlflow::Run do end it 'has the id' do - expect(subject.dig(:run, :info, :run_id)).to eq(candidate.iid.to_s) + 
expect(subject.dig(:run, :info, :run_id)).to eq(candidate.eid.to_s) end it 'presents the metrics' do diff --git a/spec/lib/api/github/entities_spec.rb b/spec/lib/api/github/entities_spec.rb index 00ea60c5d65..63c54b259a2 100644 --- a/spec/lib/api/github/entities_spec.rb +++ b/spec/lib/api/github/entities_spec.rb @@ -12,7 +12,7 @@ RSpec.describe API::Github::Entities do subject { entity.as_json } - specify :aggregate_failure do + specify :aggregate_failures do expect(subject[:id]).to eq user.id expect(subject[:login]).to eq 'name_of_user' expect(subject[:url]).to eq expected_user_url diff --git a/spec/lib/api/helpers/members_helpers_spec.rb b/spec/lib/api/helpers/members_helpers_spec.rb index 987d5ba9f6c..ee1ae6b1781 100644 --- a/spec/lib/api/helpers/members_helpers_spec.rb +++ b/spec/lib/api/helpers/members_helpers_spec.rb @@ -22,15 +22,6 @@ RSpec.describe API::Helpers::MembersHelpers, feature_category: :subgroups do it_behaves_like 'returns all direct members' it_behaves_like 'query with source filters' - - context 'when project_members_index_by_project_namespace feature flag is disabled' do - before do - stub_feature_flags(project_members_index_by_project_namespace: false) - end - - it_behaves_like 'returns all direct members' - it_behaves_like 'query with source filters' - end end context 'for a project' do @@ -39,15 +30,6 @@ RSpec.describe API::Helpers::MembersHelpers, feature_category: :subgroups do it_behaves_like 'returns all direct members' it_behaves_like 'query without source filters' - - context 'when project_members_index_by_project_namespace feature flag is disabled' do - before do - stub_feature_flags(project_members_index_by_project_namespace: false) - end - - it_behaves_like 'returns all direct members' - it_behaves_like 'query with source filters' - end end end end diff --git a/spec/lib/atlassian/jira_connect/serializers/branch_entity_spec.rb b/spec/lib/atlassian/jira_connect/serializers/branch_entity_spec.rb index 86e48a4a0fd..230908ccea1 100644 
--- a/spec/lib/atlassian/jira_connect/serializers/branch_entity_spec.rb +++ b/spec/lib/atlassian/jira_connect/serializers/branch_entity_spec.rb @@ -3,7 +3,10 @@ require 'spec_helper' RSpec.describe Atlassian::JiraConnect::Serializers::BranchEntity, feature_category: :integrations do - let(:project) { create(:project, :repository) } + include AfterNextHelpers + + let_it_be(:project) { create(:project, :repository) } + let(:branch) { project.repository.find_branch('improve/awesome') } subject { described_class.represent(branch, project: project).as_json } @@ -11,4 +14,48 @@ RSpec.describe Atlassian::JiraConnect::Serializers::BranchEntity, feature_catego it 'sets the hash of the branch name as the id' do expect(subject[:id]).to eq('bbfba9b197ace5da93d03382a7ce50081ae89d99faac1f2326566941288871ce') end + + describe '#issue_keys' do + it 'calls Atlassian::JiraIssueKeyExtractors::Branch#issue_keys' do + expect_next(Atlassian::JiraIssueKeyExtractors::Branch) do |extractor| + expect(extractor).to receive(:issue_keys) + end + + subject + end + + it 'avoids N+1 queries when fetching merge requests for multiple branches' do + master_branch = project.repository.find_branch('master') + + create( + :merge_request, + source_project: project, + source_branch: 'improve/awesome', + title: 'OPEN_MR_TITLE-1', + description: 'OPEN_MR_DESC-1' + ) + + control = ActiveRecord::QueryRecorder.new(skip_cached: false) { subject } + + create( + :merge_request, + source_project: project, + source_branch: 'master', + title: 'MASTER_MR_TITLE-1', + description: 'MASTER_MR_DESC-1' + ) + + expect(subject).to include( + name: 'improve/awesome', + issueKeys: match_array(%w[OPEN_MR_TITLE-1 OPEN_MR_DESC-1]) + ) + + expect do + expect(described_class.represent([branch, master_branch], project: project).as_json).to contain_exactly( + hash_including(name: 'improve/awesome', issueKeys: match_array(%w[BRANCH-1 OPEN_MR_TITLE-1 OPEN_MR_DESC-1])), + hash_including(name: 'master', issueKeys: 
match_array(%w[MASTER_MR_TITLE-1 MASTER_MR_DESC-1])) + ) + end.not_to exceed_query_limit(control) + end + end end diff --git a/spec/lib/atlassian/jira_connect/serializers/build_entity_spec.rb b/spec/lib/atlassian/jira_connect/serializers/build_entity_spec.rb index f05adb49651..1f68b85c7ba 100644 --- a/spec/lib/atlassian/jira_connect/serializers/build_entity_spec.rb +++ b/spec/lib/atlassian/jira_connect/serializers/build_entity_spec.rb @@ -49,7 +49,7 @@ RSpec.describe Atlassian::JiraConnect::Serializers::BuildEntity, feature_categor end end - context 'in the pipeline\'s commit messsage' do + context 'in the pipeline\'s commit message' do let_it_be(:pipeline) { create(:ci_pipeline, project: project) } let(:commit_message) { "Merge branch 'staging' into 'master'\n\nFixes bug described in PROJ-1234" } diff --git a/spec/lib/atlassian/jira_connect/serializers/feature_flag_entity_spec.rb b/spec/lib/atlassian/jira_connect/serializers/feature_flag_entity_spec.rb index 3f84404f38d..bf855e98570 100644 --- a/spec/lib/atlassian/jira_connect/serializers/feature_flag_entity_spec.rb +++ b/spec/lib/atlassian/jira_connect/serializers/feature_flag_entity_spec.rb @@ -47,10 +47,12 @@ RSpec.describe Atlassian::JiraConnect::Serializers::FeatureFlagEntity, feature_c context 'it has a percentage strategy' do let!(:scopes) do - strat = create(:operations_strategy, - feature_flag: feature_flag, - name: ::Operations::FeatureFlags::Strategy::STRATEGY_GRADUALROLLOUTUSERID, - parameters: { 'percentage' => '50', 'groupId' => 'abcde' }) + strat = create( + :operations_strategy, + feature_flag: feature_flag, + name: ::Operations::FeatureFlags::Strategy::STRATEGY_GRADUALROLLOUTUSERID, + parameters: { 'percentage' => '50', 'groupId' => 'abcde' } + ) [ create(:operations_scope, strategy: strat, environment_scope: 'production in live'), diff --git a/spec/lib/atlassian/jira_issue_key_extractor_spec.rb b/spec/lib/atlassian/jira_issue_key_extractor_spec.rb index 42fc441b868..48339d46153 100644 --- 
a/spec/lib/atlassian/jira_issue_key_extractor_spec.rb +++ b/spec/lib/atlassian/jira_issue_key_extractor_spec.rb @@ -33,5 +33,13 @@ RSpec.describe Atlassian::JiraIssueKeyExtractor, feature_category: :integrations is_expected.to contain_exactly('TEST-01') end end + + context 'with custom_regex' do + subject { described_class.new('TEST-01 some A-100', custom_regex: /(?[B-Z]+-\d+)/).issue_keys } + + it 'returns all valid Jira issue keys' do + is_expected.to contain_exactly('TEST-01') + end + end end end diff --git a/spec/lib/atlassian/jira_issue_key_extractors/branch_spec.rb b/spec/lib/atlassian/jira_issue_key_extractors/branch_spec.rb new file mode 100644 index 00000000000..52b6fc39a3f --- /dev/null +++ b/spec/lib/atlassian/jira_issue_key_extractors/branch_spec.rb @@ -0,0 +1,57 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe Atlassian::JiraIssueKeyExtractors::Branch, feature_category: :integrations do + include AfterNextHelpers + + let_it_be(:project) { create(:project, :repository) } + + let(:branch) { project.repository.find_branch('improve/awesome') } + + describe '.has_keys?' do + it 'delegates to `#issue_keys?`' do + expect_next(described_class) do |instance| + expect(instance).to receive_message_chain(:issue_keys, :any?) 
+ end + + described_class.has_keys?(project, branch.name) + end + end + + describe '#issue_keys' do + subject { described_class.new(project, branch.name).issue_keys } + + context 'when branch name does not refer to an issue' do + it { is_expected.to eq([]) } + end + + context 'when branch name refers to an issue' do + before do + allow(branch).to receive(:name).and_return('BRANCH-1') + end + + it { is_expected.to eq(['BRANCH-1']) } + + context 'when there is a related open merge request, and related closed merge request' do + before_all do + create(:merge_request, + source_project: project, + source_branch: 'BRANCH-1', + title: 'OPEN_MR_TITLE-1', + description: 'OPEN_MR_DESC-1' + ) + + create(:merge_request, :closed, + source_project: project, + source_branch: 'BRANCH-1', + title: 'CLOSED_MR_TITLE-2', + description: 'CLOSED_MR_DESC-2' + ) + end + + it { is_expected.to eq(%w[BRANCH-1 OPEN_MR_TITLE-1 OPEN_MR_DESC-1]) } + end + end + end +end diff --git a/spec/lib/backup/database_spec.rb b/spec/lib/backup/database_spec.rb index c70d47e4940..f0cee8ce36a 100644 --- a/spec/lib/backup/database_spec.rb +++ b/spec/lib/backup/database_spec.rb @@ -11,12 +11,17 @@ end RSpec.describe Backup::Database, feature_category: :backup_restore do let(:progress) { StringIO.new } let(:output) { progress.string } - let(:one_db_configured?) { Gitlab::Database.database_base_models.one? } - let(:database_models_for_backup) { Gitlab::Database.database_base_models_with_gitlab_shared } + let(:one_database_configured?) { base_models_for_backup.one? 
} let(:timeout_service) do instance_double(Gitlab::Database::TransactionTimeoutSettings, restore_timeouts: nil, disable_timeouts: nil) end + let(:base_models_for_backup) do + Gitlab::Database.database_base_models_with_gitlab_shared.select do |database_name| + Gitlab::Database.has_database?(database_name) + end + end + before(:all) do Rake::Task.define_task(:environment) Rake.application.rake_require 'active_record/railties/databases' @@ -33,7 +38,7 @@ RSpec.describe Backup::Database, feature_category: :backup_restore do subject { described_class.new(progress, force: force) } before do - database_models_for_backup.each do |database_name, base_model| + base_models_for_backup.each do |_, base_model| base_model.connection.rollback_transaction unless base_model.connection.open_transactions.zero? allow(base_model.connection).to receive(:execute).and_call_original end @@ -43,7 +48,7 @@ RSpec.describe Backup::Database, feature_category: :backup_restore do Dir.mktmpdir do |dir| subject.dump(dir, backup_id) - database_models_for_backup.each_key do |database_name| + base_models_for_backup.each_key do |database_name| filename = database_name == 'main' ? 
'database.sql.gz' : "#{database_name}_database.sql.gz" expect(File.exist?(File.join(dir, filename))).to eq(true) end @@ -56,8 +61,8 @@ RSpec.describe Backup::Database, feature_category: :backup_restore do expect(base_model.connection).to receive(:begin_transaction).with( isolation: :repeatable_read ).and_call_original - expect(base_model.connection).to receive(:execute).with( - "SELECT pg_export_snapshot() as snapshot_id;" + expect(base_model.connection).to receive(:select_value).with( + "SELECT pg_export_snapshot()" ).and_call_original expect(base_model.connection).to receive(:rollback_transaction).and_call_original @@ -66,7 +71,7 @@ RSpec.describe Backup::Database, feature_category: :backup_restore do end it 'disables transaction time out' do - number_of_databases = Gitlab::Database.database_base_models_with_gitlab_shared.count + number_of_databases = base_models_for_backup.count expect(Gitlab::Database::TransactionTimeoutSettings) .to receive(:new).exactly(2 * number_of_databases).times.and_return(timeout_service) expect(timeout_service).to receive(:disable_timeouts).exactly(number_of_databases).times @@ -94,10 +99,10 @@ RSpec.describe Backup::Database, feature_category: :backup_restore do allow(Backup::Dump::Postgres).to receive(:new).and_return(dumper) allow(dumper).to receive(:dump).with(any_args).and_return(true) - database_models_for_backup.each do |database_name, base_model| - allow(base_model.connection).to receive(:execute).with( - "SELECT pg_export_snapshot() as snapshot_id;" - ).and_return(['snapshot_id' => snapshot_id]) + base_models_for_backup.each do |_, base_model| + allow(base_model.connection).to receive(:select_value).with( + "SELECT pg_export_snapshot()" + ).and_return(snapshot_id) end end @@ -134,7 +139,7 @@ RSpec.describe Backup::Database, feature_category: :backup_restore do it 'restores timeouts' do Dir.mktmpdir do |dir| - number_of_databases = Gitlab::Database.database_base_models_with_gitlab_shared.count + number_of_databases = 
base_models_for_backup.count expect(Gitlab::Database::TransactionTimeoutSettings) .to receive(:new).exactly(number_of_databases).times.and_return(timeout_service) expect(timeout_service).to receive(:restore_timeouts).exactly(number_of_databases).times @@ -165,7 +170,7 @@ RSpec.describe Backup::Database, feature_category: :backup_restore do it 'warns the user and waits' do expect(subject).to receive(:sleep) - if one_db_configured? + if one_database_configured? expect(Rake::Task['gitlab:db:drop_tables']).to receive(:invoke) else expect(Rake::Task['gitlab:db:drop_tables:main']).to receive(:invoke) @@ -183,7 +188,7 @@ RSpec.describe Backup::Database, feature_category: :backup_restore do context 'with an empty .gz file' do it 'returns successfully' do - if one_db_configured? + if one_database_configured? expect(Rake::Task['gitlab:db:drop_tables']).to receive(:invoke) else expect(Rake::Task['gitlab:db:drop_tables:main']).to receive(:invoke) @@ -203,7 +208,7 @@ RSpec.describe Backup::Database, feature_category: :backup_restore do end it 'raises a backup error' do - if one_db_configured? + if one_database_configured? expect(Rake::Task['gitlab:db:drop_tables']).to receive(:invoke) else expect(Rake::Task['gitlab:db:drop_tables:main']).to receive(:invoke) @@ -219,7 +224,7 @@ RSpec.describe Backup::Database, feature_category: :backup_restore do let(:cmd) { %W[#{Gem.ruby} -e $stderr.write("#{noise}#{visible_error}")] } it 'filters out noise from errors and has a post restore warning' do - if one_db_configured? + if one_database_configured? expect(Rake::Task['gitlab:db:drop_tables']).to receive(:invoke) else expect(Rake::Task['gitlab:db:drop_tables:main']).to receive(:invoke) @@ -246,7 +251,7 @@ RSpec.describe Backup::Database, feature_category: :backup_restore do end it 'overrides default config values' do - if one_db_configured? + if one_database_configured? 
expect(Rake::Task['gitlab:db:drop_tables']).to receive(:invoke) else expect(Rake::Task['gitlab:db:drop_tables:main']).to receive(:invoke) @@ -270,7 +275,7 @@ RSpec.describe Backup::Database, feature_category: :backup_restore do end it 'raises an error about missing source file' do - if one_db_configured? + if one_database_configured? expect(Rake::Task['gitlab:db:drop_tables']).not_to receive(:invoke) else expect(Rake::Task['gitlab:db:drop_tables:main']).not_to receive(:invoke) diff --git a/spec/lib/backup/gitaly_backup_spec.rb b/spec/lib/backup/gitaly_backup_spec.rb index ad0e5553fa1..172fc28dd3e 100644 --- a/spec/lib/backup/gitaly_backup_spec.rb +++ b/spec/lib/backup/gitaly_backup_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe Backup::GitalyBackup do +RSpec.describe Backup::GitalyBackup, feature_category: :backup_restore do let(:max_parallelism) { nil } let(:storage_parallelism) { nil } let(:destination) { File.join(Gitlab.config.backup.path, 'repositories') } @@ -181,6 +181,15 @@ RSpec.describe Backup::GitalyBackup do expect(collect_commit_shas.call(project_snippet.repository)).to match_array(['6e44ba56a4748be361a841e759c20e421a1651a1']) end + it 'clears specified storages when remove_all_repositories is set' do + expect(Open3).to receive(:popen2).with(expected_env, anything, 'restore', '-path', anything, '-layout', 'pointer', '-remove-all-repositories', 'default').and_call_original + + copy_bundle_to_backup_path('project_repo.bundle', project.disk_path + '.bundle') + subject.start(:restore, destination, backup_id: backup_id, remove_all_repositories: %w[default]) + subject.enqueue(project, Gitlab::GlRepository::PROJECT) + subject.finish! 
+ end + context 'parallel option set' do let(:max_parallelism) { 3 } diff --git a/spec/lib/backup/manager_spec.rb b/spec/lib/backup/manager_spec.rb index 02889c1535d..1733d21c23f 100644 --- a/spec/lib/backup/manager_spec.rb +++ b/spec/lib/backup/manager_spec.rb @@ -77,7 +77,9 @@ RSpec.describe Backup::Manager, feature_category: :backup_restore do end before do - allow(YAML).to receive(:load_file).with(File.join(Gitlab.config.backup.path, 'backup_information.yml')) + allow(YAML).to receive(:safe_load_file).with( + File.join(Gitlab.config.backup.path, 'backup_information.yml'), + permitted_classes: described_class::YAML_PERMITTED_CLASSES) .and_return(backup_information) end @@ -603,14 +605,16 @@ RSpec.describe Backup::Manager, feature_category: :backup_restore do end expect(Kernel).not_to have_received(:system).with(*pack_tar_cmdline) - expect(YAML.load_file(File.join(Gitlab.config.backup.path, 'backup_information.yml'))).to include( - backup_created_at: backup_time.localtime, - db_version: be_a(String), - gitlab_version: Gitlab::VERSION, - installation_type: Gitlab::INSTALLATION_TYPE, - skipped: 'tar', - tar_version: be_a(String) - ) + expect(YAML.safe_load_file( + File.join(Gitlab.config.backup.path, 'backup_information.yml'), + permitted_classes: described_class::YAML_PERMITTED_CLASSES)).to include( + backup_created_at: backup_time.localtime, + db_version: be_a(String), + gitlab_version: Gitlab::VERSION, + installation_type: Gitlab::INSTALLATION_TYPE, + skipped: 'tar', + tar_version: be_a(String) + ) expect(FileUtils).to have_received(:rm_rf).with(File.join(Gitlab.config.backup.path, 'tmp')) end end @@ -629,8 +633,10 @@ RSpec.describe Backup::Manager, feature_category: :backup_restore do end before do - allow(YAML).to receive(:load_file).and_call_original - allow(YAML).to receive(:load_file).with(File.join(Gitlab.config.backup.path, 'backup_information.yml')) + allow(YAML).to receive(:safe_load_file).and_call_original + allow(YAML).to 
receive(:safe_load_file).with( + File.join(Gitlab.config.backup.path, 'backup_information.yml'), + permitted_classes: described_class::YAML_PERMITTED_CLASSES) .and_return(backup_information) end @@ -658,8 +664,8 @@ RSpec.describe Backup::Manager, feature_category: :backup_restore do it 'prints the list of available backups' do expect { subject.create }.to raise_error SystemExit # rubocop:disable Rails/SaveBang - expect(progress).to have_received(:puts) - .with(a_string_matching('1451606400_2016_01_01_1.2.3\n 1451520000_2015_12_31')) + expect(progress).to have_received(:puts).with(a_string_matching('1451606400_2016_01_01_1.2.3')) + expect(progress).to have_received(:puts).with(a_string_matching('1451520000_2015_12_31')) end it 'fails the operation and prints an error' do @@ -892,12 +898,13 @@ RSpec.describe Backup::Manager, feature_category: :backup_restore do .with(a_string_matching('Non tarred backup found ')) expect(progress).to have_received(:puts) .with(a_string_matching("Backup #{backup_id} is done")) - expect(YAML.load_file(File.join(Gitlab.config.backup.path, 'backup_information.yml'))).to include( - backup_created_at: backup_time, - full_backup_id: full_backup_id, - gitlab_version: Gitlab::VERSION, - skipped: 'something,tar' - ) + expect(YAML.safe_load_file(File.join(Gitlab.config.backup.path, 'backup_information.yml'), + permitted_classes: described_class::YAML_PERMITTED_CLASSES)).to include( + backup_created_at: backup_time, + full_backup_id: full_backup_id, + gitlab_version: Gitlab::VERSION, + skipped: 'something,tar' + ) end context 'on version mismatch' do @@ -943,7 +950,8 @@ RSpec.describe Backup::Manager, feature_category: :backup_restore do allow(Gitlab::BackupLogger).to receive(:info) allow(task1).to receive(:restore).with(File.join(Gitlab.config.backup.path, 'task1.tar.gz')) allow(task2).to receive(:restore).with(File.join(Gitlab.config.backup.path, 'task2.tar.gz')) - allow(YAML).to receive(:load_file).with(File.join(Gitlab.config.backup.path, 
'backup_information.yml')) + allow(YAML).to receive(:safe_load_file).with(File.join(Gitlab.config.backup.path, 'backup_information.yml'), + permitted_classes: described_class::YAML_PERMITTED_CLASSES) .and_return(backup_information) allow(Rake::Task['gitlab:shell:setup']).to receive(:invoke) allow(Rake::Task['cache:clear']).to receive(:invoke) @@ -973,8 +981,8 @@ RSpec.describe Backup::Manager, feature_category: :backup_restore do it 'prints the list of available backups' do expect { subject.restore }.to raise_error SystemExit - expect(progress).to have_received(:puts) - .with(a_string_matching('1451606400_2016_01_01_1.2.3\n 1451520000_2015_12_31')) + expect(progress).to have_received(:puts).with(a_string_matching('1451606400_2016_01_01_1.2.3')) + expect(progress).to have_received(:puts).with(a_string_matching('1451520000_2015_12_31')) end it 'fails the operation and prints an error' do diff --git a/spec/lib/backup/repositories_spec.rb b/spec/lib/backup/repositories_spec.rb index 8bcf1e46c33..c75f6c2ac89 100644 --- a/spec/lib/backup/repositories_spec.rb +++ b/spec/lib/backup/repositories_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe Backup::Repositories do +RSpec.describe Backup::Repositories, feature_category: :backup_restore do let(:progress) { spy(:stdout) } let(:strategy) { spy(:strategy) } let(:storages) { [] } @@ -165,7 +165,7 @@ RSpec.describe Backup::Repositories do it 'calls enqueue for each repository type', :aggregate_failures do subject.restore(destination) - expect(strategy).to have_received(:start).with(:restore, destination) + expect(strategy).to have_received(:start).with(:restore, destination, remove_all_repositories: %w[default]) expect(strategy).to have_received(:enqueue).with(project, Gitlab::GlRepository::PROJECT) expect(strategy).to have_received(:enqueue).with(project, Gitlab::GlRepository::WIKI) expect(strategy).to have_received(:enqueue).with(project, Gitlab::GlRepository::DESIGN) @@ -246,7 +246,7 @@ RSpec.describe 
Backup::Repositories do subject.restore(destination) - expect(strategy).to have_received(:start).with(:restore, destination) + expect(strategy).to have_received(:start).with(:restore, destination, remove_all_repositories: %w[default]) expect(strategy).not_to have_received(:enqueue).with(excluded_project, Gitlab::GlRepository::PROJECT) expect(strategy).not_to have_received(:enqueue).with(excluded_project_snippet, Gitlab::GlRepository::SNIPPET) expect(strategy).not_to have_received(:enqueue).with(excluded_personal_snippet, Gitlab::GlRepository::SNIPPET) @@ -268,7 +268,7 @@ RSpec.describe Backup::Repositories do subject.restore(destination) - expect(strategy).to have_received(:start).with(:restore, destination) + expect(strategy).to have_received(:start).with(:restore, destination, remove_all_repositories: nil) expect(strategy).not_to have_received(:enqueue).with(excluded_project, Gitlab::GlRepository::PROJECT) expect(strategy).not_to have_received(:enqueue).with(excluded_project_snippet, Gitlab::GlRepository::SNIPPET) expect(strategy).not_to have_received(:enqueue).with(excluded_personal_snippet, Gitlab::GlRepository::SNIPPET) @@ -289,7 +289,7 @@ RSpec.describe Backup::Repositories do subject.restore(destination) - expect(strategy).to have_received(:start).with(:restore, destination) + expect(strategy).to have_received(:start).with(:restore, destination, remove_all_repositories: nil) expect(strategy).not_to have_received(:enqueue).with(excluded_project, Gitlab::GlRepository::PROJECT) expect(strategy).not_to have_received(:enqueue).with(excluded_project_snippet, Gitlab::GlRepository::SNIPPET) expect(strategy).not_to have_received(:enqueue).with(excluded_personal_snippet, Gitlab::GlRepository::SNIPPET) diff --git a/spec/lib/banzai/filter/external_link_filter_spec.rb b/spec/lib/banzai/filter/external_link_filter_spec.rb index 3f72896939d..de259342998 100644 --- a/spec/lib/banzai/filter/external_link_filter_spec.rb +++ 
b/spec/lib/banzai/filter/external_link_filter_spec.rb @@ -2,25 +2,25 @@ require 'spec_helper' -RSpec.shared_examples 'an external link with rel attribute', feature_category: :team_planning do - it 'adds rel="nofollow" to external links' do - expect(doc.at_css('a')).to have_attribute('rel') - expect(doc.at_css('a')['rel']).to include 'nofollow' - end +RSpec.describe Banzai::Filter::ExternalLinkFilter, feature_category: :team_planning do + include FilterSpecHelper - it 'adds rel="noreferrer" to external links' do - expect(doc.at_css('a')).to have_attribute('rel') - expect(doc.at_css('a')['rel']).to include 'noreferrer' - end + shared_examples 'an external link with rel attribute' do + it 'adds rel="nofollow" to external links' do + expect(doc.at_css('a')).to have_attribute('rel') + expect(doc.at_css('a')['rel']).to include 'nofollow' + end - it 'adds rel="noopener" to external links' do - expect(doc.at_css('a')).to have_attribute('rel') - expect(doc.at_css('a')['rel']).to include 'noopener' - end -end + it 'adds rel="noreferrer" to external links' do + expect(doc.at_css('a')).to have_attribute('rel') + expect(doc.at_css('a')['rel']).to include 'noreferrer' + end -RSpec.describe Banzai::Filter::ExternalLinkFilter do - include FilterSpecHelper + it 'adds rel="noopener" to external links' do + expect(doc.at_css('a')).to have_attribute('rel') + expect(doc.at_css('a')['rel']).to include 'noopener' + end + end it 'ignores elements without an href attribute' do exp = act = %q(Ignore Me) diff --git a/spec/lib/banzai/filter/issuable_reference_expansion_filter_spec.rb b/spec/lib/banzai/filter/issuable_reference_expansion_filter_spec.rb index 80061539a0b..2b86a4f8cfc 100644 --- a/spec/lib/banzai/filter/issuable_reference_expansion_filter_spec.rb +++ b/spec/lib/banzai/filter/issuable_reference_expansion_filter_spec.rb @@ -202,7 +202,7 @@ RSpec.describe Banzai::Filter::IssuableReferenceExpansionFilter, feature_categor filter(link, context) end.count - expect(control_count).to eq 
10 + expect(control_count).to eq 11 expect do filter("#{link} #{link2}", context) diff --git a/spec/lib/banzai/filter/kroki_filter_spec.rb b/spec/lib/banzai/filter/kroki_filter_spec.rb index 1cd11161439..ccb629d865d 100644 --- a/spec/lib/banzai/filter/kroki_filter_spec.rb +++ b/spec/lib/banzai/filter/kroki_filter_spec.rb @@ -13,10 +13,12 @@ RSpec.describe Banzai::Filter::KrokiFilter, feature_category: :team_planning do end it 'replaces nomnoml pre tag with img tag if both kroki and plantuml are enabled' do - stub_application_setting(kroki_enabled: true, - kroki_url: "http://localhost:8000", - plantuml_enabled: true, - plantuml_url: "http://localhost:8080") + stub_application_setting( + kroki_enabled: true, + kroki_url: "http://localhost:8000", + plantuml_enabled: true, + plantuml_url: "http://localhost:8080" + ) doc = filter("
    [Pirate|eyeCount: Int|raid();pillage()|\n  [beard]--[parrot]\n  [beard]-:>[foul mouth]\n]
    ") expect(doc.to_s).to eq '' @@ -30,10 +32,12 @@ RSpec.describe Banzai::Filter::KrokiFilter, feature_category: :team_planning do end it 'does not replace plantuml pre tag with img tag if both kroki and plantuml are enabled' do - stub_application_setting(kroki_enabled: true, - kroki_url: "http://localhost:8000", - plantuml_enabled: true, - plantuml_url: "http://localhost:8080") + stub_application_setting( + kroki_enabled: true, + kroki_url: "http://localhost:8000", + plantuml_enabled: true, + plantuml_url: "http://localhost:8080" + ) doc = filter("
    Bob->Alice : hello
    ") expect(doc.to_s).to eq '
    Bob->Alice : hello
    ' diff --git a/spec/lib/banzai/filter/markdown_engines/base_spec.rb b/spec/lib/banzai/filter/markdown_engines/base_spec.rb new file mode 100644 index 00000000000..e7b32876610 --- /dev/null +++ b/spec/lib/banzai/filter/markdown_engines/base_spec.rb @@ -0,0 +1,17 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe Banzai::Filter::MarkdownEngines::Base, feature_category: :team_planning do + it 'raise error if render not implemented' do + engine = described_class.new({}) + + expect { engine.render('# hi') }.to raise_error(NotImplementedError) + end + + it 'turns off sourcepos' do + engine = described_class.new({ no_sourcepos: true }) + + expect(engine.send(:sourcepos_disabled?)).to be_truthy + end +end diff --git a/spec/lib/banzai/filter/markdown_engines/common_mark_spec.rb b/spec/lib/banzai/filter/markdown_engines/common_mark_spec.rb new file mode 100644 index 00000000000..74fac75abe8 --- /dev/null +++ b/spec/lib/banzai/filter/markdown_engines/common_mark_spec.rb @@ -0,0 +1,17 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe Banzai::Filter::MarkdownEngines::CommonMark, feature_category: :team_planning do + it 'defaults to generating sourcepos' do + engine = described_class.new({}) + + expect(engine.render('# hi')).to eq %(

    hi

    \n) + end + + it 'turns off sourcepos' do + engine = described_class.new({ no_sourcepos: true }) + + expect(engine.render('# hi')).to eq %(

    hi

    \n) + end +end diff --git a/spec/lib/banzai/filter/markdown_filter_spec.rb b/spec/lib/banzai/filter/markdown_filter_spec.rb index c79cd58255d..64d65528426 100644 --- a/spec/lib/banzai/filter/markdown_filter_spec.rb +++ b/spec/lib/banzai/filter/markdown_filter_spec.rb @@ -6,20 +6,19 @@ RSpec.describe Banzai::Filter::MarkdownFilter, feature_category: :team_planning include FilterSpecHelper describe 'markdown engine from context' do - it 'defaults to CommonMark' do - expect_next_instance_of(Banzai::Filter::MarkdownEngines::CommonMark) do |instance| - expect(instance).to receive(:render).and_return('test') - end - - filter('test') + it 'finds the correct engine' do + expect(described_class.render_engine(:common_mark)).to eq Banzai::Filter::MarkdownEngines::CommonMark end - it 'uses CommonMark' do - expect_next_instance_of(Banzai::Filter::MarkdownEngines::CommonMark) do |instance| - expect(instance).to receive(:render).and_return('test') - end + it 'defaults to the DEFAULT_ENGINE' do + default_engine = Banzai::Filter::MarkdownFilter::DEFAULT_ENGINE.to_s.classify + default = "Banzai::Filter::MarkdownEngines::#{default_engine}".constantize + + expect(described_class.render_engine(nil)).to eq default + end - filter('test', { markdown_engine: :common_mark }) + it 'raise error for unrecognized engines' do + expect { described_class.render_engine(:foo_bar) }.to raise_error(NameError) end end diff --git a/spec/lib/banzai/filter/math_filter_spec.rb b/spec/lib/banzai/filter/math_filter_spec.rb index 374983e40a1..50784d3e423 100644 --- a/spec/lib/banzai/filter/math_filter_spec.rb +++ b/spec/lib/banzai/filter/math_filter_spec.rb @@ -101,6 +101,7 @@ RSpec.describe Banzai::Filter::MathFilter, feature_category: :team_planning do context 'with valid syntax' do where(:text, :result_template) do "$$\n2+2\n$$" | "2+2\n" + "$$ \n2+2\n$$" | "2+2\n" "$$\n2+2\n3+4\n$$" | "2+2\n3+4\n" end diff --git a/spec/lib/banzai/filter/references/design_reference_filter_spec.rb 
b/spec/lib/banzai/filter/references/design_reference_filter_spec.rb index 08de9700cad..d97067de155 100644 --- a/spec/lib/banzai/filter/references/design_reference_filter_spec.rb +++ b/spec/lib/banzai/filter/references/design_reference_filter_spec.rb @@ -128,10 +128,12 @@ RSpec.describe Banzai::Filter::References::DesignReferenceFilter, feature_catego let(:subject) { filter_instance.data_attributes_for(input_text, project, design) } specify do - is_expected.to include(issue: design.issue_id, - original: input_text, - project: project.id, - design: design.id) + is_expected.to include( + issue: design.issue_id, + original: input_text, + project: project.id, + design: design.id + ) end end diff --git a/spec/lib/banzai/filter/repository_link_filter_spec.rb b/spec/lib/banzai/filter/repository_link_filter_spec.rb index b2162ea2756..b6966709f5c 100644 --- a/spec/lib/banzai/filter/repository_link_filter_spec.rb +++ b/spec/lib/banzai/filter/repository_link_filter_spec.rb @@ -369,7 +369,18 @@ RSpec.describe Banzai::Filter::RepositoryLinkFilter, feature_category: :team_pla end end - context 'with a valid commit' do + context 'when public project repo with a valid commit' do + include_examples 'valid repository' + end + + context 'when private project repo with a valid commit' do + let_it_be(:project) { create(:project, :repository, :private) } + + before do + # user must have `read_code` ability + project.add_developer(user) + end + include_examples 'valid repository' end diff --git a/spec/lib/banzai/filter/syntax_highlight_filter_spec.rb b/spec/lib/banzai/filter/syntax_highlight_filter_spec.rb index 0d7f322d08f..394e6dcd7dc 100644 --- a/spec/lib/banzai/filter/syntax_highlight_filter_spec.rb +++ b/spec/lib/banzai/filter/syntax_highlight_filter_spec.rb @@ -116,7 +116,7 @@ RSpec.describe Banzai::Filter::SyntaxHighlightFilter, feature_category: :team_pl include_examples "XSS prevention", lang include_examples "XSS prevention", - "#{lang} 
data-meta=\"foo-bar-kux\"<script>alert(1)</script>" + "#{lang} data-meta=\"foo-bar-kux\"<script>alert(1)</script>" include_examples "XSS prevention", "#{lang} data-meta=\"foo-bar-kux\"" diff --git a/spec/lib/banzai/pipeline/plain_markdown_pipeline_spec.rb b/spec/lib/banzai/pipeline/plain_markdown_pipeline_spec.rb index e7c15ed9cf6..b8d2b6f7d7e 100644 --- a/spec/lib/banzai/pipeline/plain_markdown_pipeline_spec.rb +++ b/spec/lib/banzai/pipeline/plain_markdown_pipeline_spec.rb @@ -80,7 +80,7 @@ RSpec.describe Banzai::Pipeline::PlainMarkdownPipeline, feature_category: :team_ let(:markdown) { %Q(``` foo\\@bar\nfoo\n```) } it 'renders correct html' do - correct_html_included(markdown, %Q(
    foo\n
    )) + correct_html_included(markdown, %Q(
    foo\n
    )) end where(:markdown, :expected) do @@ -95,7 +95,7 @@ RSpec.describe Banzai::Pipeline::PlainMarkdownPipeline, feature_category: :team_ end def correct_html_included(markdown, expected) - result = described_class.call(markdown, {}) + result = described_class.call(markdown, { no_sourcepos: true }) expect(result[:output].to_html).to include(expected) diff --git a/spec/lib/banzai/reference_parser/commit_parser_spec.rb b/spec/lib/banzai/reference_parser/commit_parser_spec.rb index 081bfa26fb2..9ed6235b8dd 100644 --- a/spec/lib/banzai/reference_parser/commit_parser_spec.rb +++ b/spec/lib/banzai/reference_parser/commit_parser_spec.rb @@ -5,8 +5,9 @@ require 'spec_helper' RSpec.describe Banzai::ReferenceParser::CommitParser, feature_category: :source_code_management do include ReferenceParserHelpers - let(:project) { create(:project, :public) } - let(:user) { create(:user) } + let_it_be(:project) { create(:project, :public, :repository) } + let_it_be(:user) { create(:user) } + subject { described_class.new(Banzai::RenderContext.new(project, user)) } let(:link) { empty_html_link } @@ -130,20 +131,50 @@ RSpec.describe Banzai::ReferenceParser::CommitParser, feature_category: :source_ end describe '#find_commits' do - it 'returns an Array of commit objects' do - commit = double(:commit) + let_it_be(:ids) { project.repository.commits(project.default_branch, limit: 3).map(&:id) } + + it 'is empty when repo is invalid' do + allow(project).to receive(:valid_repo?).and_return(false) - expect(project).to receive(:commit).with('123').and_return(commit) - expect(project).to receive(:valid_repo?).and_return(true) + expect(subject.find_commits(project, ids)).to eq([]) + end - expect(subject.find_commits(project, %w{123})).to eq([commit]) + it 'returns commits by the specified ids' do + expect(subject.find_commits(project, ids).map(&:id)).to eq(%w[ + b83d6e391c22777fca1ed3012fce84f633d7fed0 + 498214de67004b1da3d820901307bed2a68a8ef6 + 1b12f15a11fc6e62177bef08f47bc7b5ce50b141 + ]) 
end - it 'skips commit IDs for which no commit could be found' do - expect(project).to receive(:commit).with('123').and_return(nil) - expect(project).to receive(:valid_repo?).and_return(true) + it 'is limited' do + stub_const("#{described_class}::COMMITS_LIMIT", 1) + + expect(subject.find_commits(project, ids).map(&:id)).to eq([ + "b83d6e391c22777fca1ed3012fce84f633d7fed0" + ]) + end + + context 'when limited_commit_parser feature flag disabled' do + before do + stub_feature_flags(limited_commit_parser: false) + end + + it 'returns an Array of commit objects' do + commit = double(:commit) - expect(subject.find_commits(project, %w{123})).to eq([]) + expect(project).to receive(:commit).with('123').and_return(commit) + expect(project).to receive(:valid_repo?).and_return(true) + + expect(subject.find_commits(project, %w{123})).to eq([commit]) + end + + it 'skips commit IDs for which no commit could be found' do + expect(project).to receive(:commit).with('123').and_return(nil) + expect(project).to receive(:valid_repo?).and_return(true) + + expect(subject.find_commits(project, %w{123})).to eq([]) + end end end diff --git a/spec/lib/banzai/reference_redactor_spec.rb b/spec/lib/banzai/reference_redactor_spec.rb index 8a8f3ce586a..21736903cbf 100644 --- a/spec/lib/banzai/reference_redactor_spec.rb +++ b/spec/lib/banzai/reference_redactor_spec.rb @@ -111,13 +111,16 @@ RSpec.describe Banzai::ReferenceRedactor, feature_category: :team_planning do def create_link(issuable) type = issuable.class.name.underscore.downcase - ActionController::Base.helpers.link_to(issuable.to_reference, '', - class: 'gfm has-tooltip', - title: issuable.title, - data: { - reference_type: type, - "#{type}": issuable.id - }) + ActionController::Base.helpers.link_to( + issuable.to_reference, + '', + class: 'gfm has-tooltip', + title: issuable.title, + data: { + reference_type: type, + "#{type}": issuable.id + } + ) end before do diff --git a/spec/lib/bulk_imports/clients/graphql_spec.rb 
b/spec/lib/bulk_imports/clients/graphql_spec.rb index 58e6992698c..9bb37a7c438 100644 --- a/spec/lib/bulk_imports/clients/graphql_spec.rb +++ b/spec/lib/bulk_imports/clients/graphql_spec.rb @@ -8,39 +8,8 @@ RSpec.describe BulkImports::Clients::Graphql, feature_category: :importers do subject { described_class.new(url: config.url, token: config.access_token) } describe '#execute' do - let(:query) { '{ metadata { version } }' } let(:graphql_client_double) { double } let(:response_double) { double } - let(:version) { '14.0.0' } - - before do - stub_const('BulkImports::MINIMUM_COMPATIBLE_MAJOR_VERSION', version) - end - - describe 'source instance validation' do - before do - allow(graphql_client_double).to receive(:execute) - allow(subject).to receive(:client).and_return(graphql_client_double) - allow(graphql_client_double).to receive(:execute).with(query).and_return(response_double) - allow(response_double).to receive_message_chain(:data, :metadata, :version).and_return(version) - end - - context 'when source instance is compatible' do - it 'marks source instance as compatible' do - subject.execute('test') - - expect(subject.instance_variable_get(:@compatible_instance_version)).to eq(true) - end - end - - context 'when source instance is incompatible' do - let(:version) { '13.0.0' } - - it 'raises an error' do - expect { subject.execute('test') }.to raise_error(::BulkImports::Error, "Unsupported GitLab version. 
Source instance must run GitLab version #{BulkImport::MIN_MAJOR_VERSION} or later.") - end - end - end describe 'network errors' do before do diff --git a/spec/lib/bulk_imports/clients/http_spec.rb b/spec/lib/bulk_imports/clients/http_spec.rb index 40261947750..aff049408e2 100644 --- a/spec/lib/bulk_imports/clients/http_spec.rb +++ b/spec/lib/bulk_imports/clients/http_spec.rb @@ -261,7 +261,7 @@ RSpec.describe BulkImports::Clients::HTTP, feature_category: :importers do .to_return(status: 401, body: "", headers: { 'Content-Type' => 'application/json' }) expect { subject.instance_version }.to raise_exception(BulkImports::Error, - "Import aborted as the provided personal access token does not have the required 'api' scope or " \ + "Personal access token does not have the required 'api' scope or " \ "is no longer valid.") end end @@ -273,7 +273,7 @@ RSpec.describe BulkImports::Clients::HTTP, feature_category: :importers do .to_return(status: 403, body: "", headers: { 'Content-Type' => 'application/json' }) expect { subject.instance_version }.to raise_exception(BulkImports::Error, - "Import aborted as the provided personal access token does not have the required 'api' scope or " \ + "Personal access token does not have the required 'api' scope or " \ "is no longer valid.") end end diff --git a/spec/lib/bulk_imports/groups/pipelines/project_entities_pipeline_spec.rb b/spec/lib/bulk_imports/groups/pipelines/project_entities_pipeline_spec.rb index 395f3568913..0155dc8053e 100644 --- a/spec/lib/bulk_imports/groups/pipelines/project_entities_pipeline_spec.rb +++ b/spec/lib/bulk_imports/groups/pipelines/project_entities_pipeline_spec.rb @@ -17,18 +17,18 @@ RSpec.describe BulkImports::Groups::Pipelines::ProjectEntitiesPipeline, feature_ let_it_be(:tracker) { create(:bulk_import_tracker, entity: entity) } let_it_be(:context) { BulkImports::Pipeline::Context.new(tracker) } - let(:extracted_data) do - BulkImports::Pipeline::ExtractedData.new(data: { - 'id' => 
'gid://gitlab/Project/1234567', - 'name' => 'My Project', - 'path' => 'my-project', - 'full_path' => 'group/my-project' - }) - end - subject { described_class.new(context) } describe '#run' do + let(:extracted_data) do + BulkImports::Pipeline::ExtractedData.new(data: { + 'id' => 'gid://gitlab/Project/1234567', + 'name' => 'My Project', + 'path' => 'my-project', + 'full_path' => 'group/my-project' + }) + end + before do allow_next_instance_of(BulkImports::Common::Extractors::GraphqlExtractor) do |extractor| allow(extractor).to receive(:extract).and_return(extracted_data) diff --git a/spec/lib/bulk_imports/groups/transformers/group_attributes_transformer_spec.rb b/spec/lib/bulk_imports/groups/transformers/group_attributes_transformer_spec.rb index 138a92a7e6b..9782f2aac27 100644 --- a/spec/lib/bulk_imports/groups/transformers/group_attributes_transformer_spec.rb +++ b/spec/lib/bulk_imports/groups/transformers/group_attributes_transformer_spec.rb @@ -85,6 +85,22 @@ RSpec.describe BulkImports::Groups::Transformers::GroupAttributesTransformer, fe end end + context 'when the destination_slug has invalid characters' do + let(:entity) do + build_stubbed( + :bulk_import_entity, + bulk_import: bulk_import, + source_full_path: 'source/full/path', + destination_slug: '____destination-_slug-path----__', + destination_namespace: destination_namespace + ) + end + + it 'normalizes the path' do + expect(transformed_data[:path]).to eq('destination-slug-path') + end + end + describe 'parent group transformation' do it 'sets parent id' do expect(transformed_data['parent_id']).to eq(destination_group.id) @@ -101,45 +117,62 @@ RSpec.describe BulkImports::Groups::Transformers::GroupAttributesTransformer, fe end end - describe 'group name transformation' do - context 'when destination namespace is empty' do - before do - entity.destination_namespace = '' - end + context 'when destination namespace is empty' do + before do + entity.destination_namespace = '' + end + + it 'does not 
transform name' do + expect(transformed_data['name']).to eq('Source Group Name') + end + end + context 'when destination namespace is present' do + context 'when destination namespace does not have a group or project with same path' do it 'does not transform name' do expect(transformed_data['name']).to eq('Source Group Name') end end - context 'when destination namespace is present' do - context 'when destination namespace does not have a group with same name' do - it 'does not transform name' do - expect(transformed_data['name']).to eq('Source Group Name') - end + context 'when destination namespace already has a group or project with the same name' do + before do + create(:project, group: destination_group, name: 'Source Project Name', path: 'project') + create(:group, parent: destination_group, name: 'Source Group Name', path: 'group') + create(:group, parent: destination_group, name: 'Source Group Name_1', path: 'group_1') + create(:group, parent: destination_group, name: 'Source Group Name_2', path: 'group_2') end - context 'when destination namespace already have a group with the same name' do - before do - create(:group, parent: destination_group, name: 'Source Group Name', path: 'group_1') - create(:group, parent: destination_group, name: 'Source Group Name(1)', path: 'group_2') - create(:group, parent: destination_group, name: 'Source Group Name(2)', path: 'group_3') - create(:group, parent: destination_group, name: 'Source Group Name(1)(1)', path: 'group_4') - end + it 'makes the name unique by appending a counter', :aggregate_failures do + transformed_data = described_class.new.transform(context, data.merge('name' => 'Source Group Name')) + expect(transformed_data['name']).to eq('Source Group Name_3') - it 'makes the name unique by appeding a counter', :aggregate_failures do - transformed_data = described_class.new.transform(context, data.merge('name' => 'Source Group Name')) - expect(transformed_data['name']).to eq('Source Group Name(3)') + 
transformed_data = described_class.new.transform(context, data.merge('name' => 'Source Group Name_1')) + expect(transformed_data['name']).to eq('Source Group Name_1_1') - transformed_data = described_class.new.transform(context, data.merge('name' => 'Source Group Name(2)')) - expect(transformed_data['name']).to eq('Source Group Name(2)(1)') + transformed_data = described_class.new.transform(context, data.merge('name' => 'Source Group Name_2')) + expect(transformed_data['name']).to eq('Source Group Name_2_1') - transformed_data = described_class.new.transform(context, data.merge('name' => 'Source Group Name(1)')) - expect(transformed_data['name']).to eq('Source Group Name(1)(2)') + transformed_data = described_class.new.transform(context, data.merge('name' => 'Source Project Name')) + expect(transformed_data['name']).to eq('Source Project Name_1') + end + end - transformed_data = described_class.new.transform(context, data.merge('name' => 'Source Group Name(1)(1)')) - expect(transformed_data['name']).to eq('Source Group Name(1)(1)(1)') - end + context 'when destination namespace already has a group or project with the same path' do + before do + create(:project, group: destination_group, name: 'Source Project Name', path: 'destination-slug-path') + create(:group, parent: destination_group, name: 'Source Group Name_4', path: 'destination-slug-path_4') + create(:group, parent: destination_group, name: 'Source Group Name_2', path: 'destination-slug-path_2') + create(:group, parent: destination_group, name: 'Source Group Name_3', path: 'destination-slug-path_3') + end + + it 'makes the path unique by appending a counter', :aggregate_failures do + transformed_data = described_class.new.transform(context, data) + expect(transformed_data['path']).to eq('destination-slug-path_1') + + create(:group, parent: destination_group, name: 'Source Group Name_1', path: 'destination-slug-path_1') + + transformed_data = described_class.new.transform(context, data) + 
expect(transformed_data['path']).to eq('destination-slug-path_5') end end end @@ -148,6 +181,49 @@ RSpec.describe BulkImports::Groups::Transformers::GroupAttributesTransformer, fe subject(:transformed_data) { described_class.new.transform(context, data) } include_examples 'visibility level settings' + + context 'when destination is blank' do + let(:destination_namespace) { '' } + + context 'when visibility level is public' do + let(:data) { { 'visibility' => 'public' } } + + it 'sets visibility level to public' do + expect(transformed_data[:visibility_level]).to eq(Gitlab::VisibilityLevel::PUBLIC) + end + end + + context 'when when visibility level is internal' do + let(:data) { { 'visibility' => 'internal' } } + + it 'sets visibility level to internal' do + expect(transformed_data[:visibility_level]).to eq(Gitlab::VisibilityLevel::INTERNAL) + end + end + + context 'when private' do + let(:data) { { 'visibility' => 'private' } } + + it 'sets visibility level to private' do + expect(transformed_data[:visibility_level]).to eq(Gitlab::VisibilityLevel::PRIVATE) + end + end + + context 'when visibility level is restricted' do + let(:data) { { 'visibility' => 'internal' } } + + it 'sets visibility level to private' do + stub_application_setting( + restricted_visibility_levels: [ + Gitlab::VisibilityLevel::INTERNAL, + Gitlab::VisibilityLevel::PUBLIC + ] + ) + + expect(transformed_data[:visibility_level]).to eq(Gitlab::VisibilityLevel::PRIVATE) + end + end + end end end end diff --git a/spec/lib/bulk_imports/projects/pipelines/project_pipeline_spec.rb b/spec/lib/bulk_imports/projects/pipelines/project_pipeline_spec.rb index 09385a261b6..82b8bb3958a 100644 --- a/spec/lib/bulk_imports/projects/pipelines/project_pipeline_spec.rb +++ b/spec/lib/bulk_imports/projects/pipelines/project_pipeline_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe BulkImports::Projects::Pipelines::ProjectPipeline do +RSpec.describe BulkImports::Projects::Pipelines::ProjectPipeline, 
feature_category: :importers do describe '#run' do let_it_be(:user) { create(:user) } let_it_be(:group) { create(:group) } diff --git a/spec/lib/bulk_imports/projects/pipelines/references_pipeline_spec.rb b/spec/lib/bulk_imports/projects/pipelines/references_pipeline_spec.rb index 895d37ea385..3a808851f81 100644 --- a/spec/lib/bulk_imports/projects/pipelines/references_pipeline_spec.rb +++ b/spec/lib/bulk_imports/projects/pipelines/references_pipeline_spec.rb @@ -81,6 +81,16 @@ RSpec.describe BulkImports::Projects::Pipelines::ReferencesPipeline, feature_cat .to include("class=\"gfm gfm-merge_request\">!#{mr.iid}

    ") .and include(project.full_path.to_s) end + + context 'when object body is nil' do + let(:issue) { create(:issue, project: project, description: nil) } + + it 'returns ExtractedData not containing the object' do + extracted_data = subject.extract(context) + + expect(extracted_data.data).to contain_exactly(issue_note, mr, mr_note) + end + end end describe '#transform' do diff --git a/spec/lib/bulk_imports/projects/transformers/project_attributes_transformer_spec.rb b/spec/lib/bulk_imports/projects/transformers/project_attributes_transformer_spec.rb index 36dc63a9331..0e3d8b36fb2 100644 --- a/spec/lib/bulk_imports/projects/transformers/project_attributes_transformer_spec.rb +++ b/spec/lib/bulk_imports/projects/transformers/project_attributes_transformer_spec.rb @@ -5,7 +5,6 @@ require 'spec_helper' RSpec.describe BulkImports::Projects::Transformers::ProjectAttributesTransformer, feature_category: :importers do describe '#transform' do let_it_be(:user) { create(:user) } - let_it_be(:project) { create(:project, name: 'My Source Project') } let_it_be(:bulk_import) { create(:bulk_import, user: user) } let(:entity) do @@ -25,6 +24,7 @@ RSpec.describe BulkImports::Projects::Transformers::ProjectAttributesTransformer let(:context) { BulkImports::Pipeline::Context.new(tracker) } let(:data) do { + 'name' => 'My Project', 'visibility' => 'private', 'created_at' => '2016-11-18T09:29:42.634Z' } @@ -32,12 +32,13 @@ RSpec.describe BulkImports::Projects::Transformers::ProjectAttributesTransformer subject(:transformed_data) { described_class.new.transform(context, data) } - it 'transforms name to destination slug' do - expect(transformed_data[:name]).to eq(entity.destination_slug) + it 'uniquifies project name' do + create(:project, group: destination_group, name: 'My Project') + expect(transformed_data[:name]).to eq('My Project_1') end - it 'adds path as parameterized name' do - expect(transformed_data[:path]).to eq(entity.destination_slug.parameterize) + it 'adds path as 
normalized name' do + expect(transformed_data[:path]).to eq(entity.destination_slug.downcase) end it 'adds import type' do @@ -45,27 +46,8 @@ RSpec.describe BulkImports::Projects::Transformers::ProjectAttributesTransformer end describe 'namespace_id' do - context 'when destination namespace is present' do - it 'adds namespace_id' do - expect(transformed_data[:namespace_id]).to eq(destination_group.id) - end - end - - context 'when destination namespace is blank' do - it 'does not add namespace_id key' do - entity = create( - :bulk_import_entity, - source_type: :project_entity, - bulk_import: bulk_import, - source_full_path: 'source/full/path', - destination_slug: 'Destination-Project-Name', - destination_namespace: '' - ) - - context = double(entity: entity) - - expect(described_class.new.transform(context, data)).not_to have_key(:namespace_id) - end + it 'adds namespace_id' do + expect(transformed_data[:namespace_id]).to eq(destination_group.id) end end @@ -86,6 +68,64 @@ RSpec.describe BulkImports::Projects::Transformers::ProjectAttributesTransformer end end + context 'when destination_slug has invalid characters' do + let(:entity) do + create( + :bulk_import_entity, + source_type: :project_entity, + bulk_import: bulk_import, + source_full_path: 'source/full/path', + destination_slug: '------------Destination_-Project-_Name------------', + destination_namespace: destination_namespace + ) + end + + it 'parameterizes the path' do + expect(transformed_data[:path]).to eq('destination-project-name') + end + end + + context 'when destination namespace already has a group or project with the same name' do + before do + create(:project, group: destination_group, name: 'Destination-Project-Name', path: 'project') + create(:project, group: destination_group, name: 'Destination-Project-Name_1', path: 'project_1') + end + + it 'makes the name unique by appending a counter' do + data = { + 'visibility' => 'private', + 'created_at' => '2016-11-18T09:29:42.634Z', + 'name' => 
'Destination-Project-Name' + } + + transformed_data = described_class.new.transform(context, data) + expect(transformed_data['name']).to eq('Destination-Project-Name_2') + end + end + + context 'when destination namespace already has a project with the same path' do + let(:entity) do + create( + :bulk_import_entity, + source_type: :project_entity, + bulk_import: bulk_import, + source_full_path: 'source/full/path', + destination_slug: 'destination-slug-path', + destination_namespace: destination_namespace + ) + end + + before do + create(:project, group: destination_group, name: 'Source Project Name', path: 'destination-slug-path') + create(:project, group: destination_group, name: 'Source Project Name_1', path: 'destination-slug-path_1') + end + + it 'makes the path unique by appending a counter' do + transformed_data = described_class.new.transform(context, data) + expect(transformed_data['path']).to eq('destination-slug-path_2') + end + end + describe 'visibility level' do include_examples 'visibility level settings' end diff --git a/spec/lib/feature_groups/gitlab_team_members_spec.rb b/spec/lib/feature_groups/gitlab_team_members_spec.rb deleted file mode 100644 index f4db02e6c58..00000000000 --- a/spec/lib/feature_groups/gitlab_team_members_spec.rb +++ /dev/null @@ -1,65 +0,0 @@ -# frozen_string_literal: true - -require 'spec_helper' - -RSpec.describe FeatureGroups::GitlabTeamMembers, feature_category: :shared do - let_it_be(:gitlab_com) { create(:group) } - let_it_be_with_reload(:member) { create(:user).tap { |user| gitlab_com.add_developer(user) } } - let_it_be_with_reload(:non_member) { create(:user) } - - before do - stub_const("#{described_class.name}::GITLAB_COM_GROUP_ID", gitlab_com.id) - end - - describe '#enabled?' 
do - context 'when not on gitlab.com' do - before do - allow(Gitlab).to receive(:com?).and_return(false) - end - - it 'returns false' do - expect(described_class.enabled?(member)).to eq(false) - end - end - - context 'when on gitlab.com' do - before do - allow(Gitlab).to receive(:com?).and_return(true) - end - - it 'returns true for gitlab-com group members' do - expect(described_class.enabled?(member)).to eq(true) - end - - it 'returns false for users not in gitlab-com' do - expect(described_class.enabled?(non_member)).to eq(false) - end - - it 'returns false when actor is not a user' do - expect(described_class.enabled?(gitlab_com)).to eq(false) - end - - it 'reloads members after 1 hour' do - expect(described_class.enabled?(non_member)).to eq(false) - - gitlab_com.add_developer(non_member) - - travel_to(2.hours.from_now) do - expect(described_class.enabled?(non_member)).to eq(true) - end - end - - it 'does not make queries on subsequent calls', :use_clean_rails_memory_store_caching do - described_class.enabled?(member) - non_member - - queries = ActiveRecord::QueryRecorder.new do - described_class.enabled?(member) - described_class.enabled?(non_member) - end - - expect(queries.count).to eq(0) - end - end - end -end diff --git a/spec/lib/feature_spec.rb b/spec/lib/feature_spec.rb index c86bc36057a..51f21e7f46e 100644 --- a/spec/lib/feature_spec.rb +++ b/spec/lib/feature_spec.rb @@ -154,17 +154,6 @@ RSpec.describe Feature, stub_feature_flags: false, feature_category: :shared do end end - describe '.register_feature_groups' do - before do - Flipper.unregister_groups - described_class.register_feature_groups - end - - it 'registers expected groups' do - expect(Flipper.groups).to include(an_object_having_attributes(name: :gitlab_team_members)) - end - end - describe '.enabled?' 
do before do allow(Feature).to receive(:log_feature_flag_states?).and_return(false) @@ -361,22 +350,6 @@ RSpec.describe Feature, stub_feature_flags: false, feature_category: :shared do end end - context 'with gitlab_team_members feature group' do - let(:actor) { build_stubbed(:user) } - - before do - Flipper.unregister_groups - described_class.register_feature_groups - described_class.enable(:enabled_feature_flag, :gitlab_team_members) - end - - it 'delegates check to FeatureGroups::GitlabTeamMembers' do - expect(FeatureGroups::GitlabTeamMembers).to receive(:enabled?).with(actor) - - described_class.enabled?(:enabled_feature_flag, actor) - end - end - context 'with an individual actor' do let(:actor) { stub_feature_flag_gate('CustomActor:5') } let(:another_actor) { stub_feature_flag_gate('CustomActor:10') } diff --git a/spec/lib/gitlab/analytics/cycle_analytics/request_params_spec.rb b/spec/lib/gitlab/analytics/cycle_analytics/request_params_spec.rb index 3c171d684d6..9b362debb10 100644 --- a/spec/lib/gitlab/analytics/cycle_analytics/request_params_spec.rb +++ b/spec/lib/gitlab/analytics/cycle_analytics/request_params_spec.rb @@ -17,10 +17,24 @@ RSpec.describe Gitlab::Analytics::CycleAnalytics::RequestParams, feature_categor expect(attributes).to match(hash_including({ namespace: { name: project.name, - full_path: project.full_path + full_path: project.full_path, + type: "Project" } })) end + + context 'with a subgroup project' do + let_it_be(:sub_group) { create(:group, parent: root_group) } + let_it_be_with_refind(:subgroup_project) { create(:project, group: sub_group) } + let(:namespace) { subgroup_project.project_namespace } + + it 'includes the correct group_path' do + expect(attributes).to match(hash_including({ + group_path: "groups/#{subgroup_project.namespace.full_path}", + full_path: subgroup_project.full_path + })) + end + end end end end diff --git a/spec/lib/gitlab/app_logger_spec.rb b/spec/lib/gitlab/app_logger_spec.rb index e3415f4ad8c..149c3d1f19f 
100644 --- a/spec/lib/gitlab/app_logger_spec.rb +++ b/spec/lib/gitlab/app_logger_spec.rb @@ -2,31 +2,12 @@ require 'spec_helper' -RSpec.describe Gitlab::AppLogger do +RSpec.describe Gitlab::AppLogger, feature_category: :shared do subject { described_class } - context 'when UNSTRUCTURED_RAILS_LOG is enabled' do - before do - stub_env('UNSTRUCTURED_RAILS_LOG', 'true') - end + specify { expect(described_class.primary_logger).to be Gitlab::AppJsonLogger } - it 'builds two Logger instances' do - expect(Gitlab::Logger).to receive(:new).and_call_original - expect(Gitlab::JsonLogger).to receive(:new).and_call_original - - subject.info('Hello World!') - end - - it 'logs info to AppLogger and AppJsonLogger' do - expect_any_instance_of(Gitlab::AppTextLogger).to receive(:info).and_call_original - expect_any_instance_of(Gitlab::AppJsonLogger).to receive(:info).and_call_original - - subject.info('Hello World!') - end - end - - it 'logs info to only the AppJsonLogger when unstructured logs are disabled' do - expect_any_instance_of(Gitlab::AppTextLogger).not_to receive(:info).and_call_original + it 'logs to AppJsonLogger' do expect_any_instance_of(Gitlab::AppJsonLogger).to receive(:info).and_call_original subject.info('Hello World!') diff --git a/spec/lib/gitlab/auth/o_auth/auth_hash_spec.rb b/spec/lib/gitlab/auth/o_auth/auth_hash_spec.rb index c94f962ee93..8c50b2acac6 100644 --- a/spec/lib/gitlab/auth/o_auth/auth_hash_spec.rb +++ b/spec/lib/gitlab/auth/o_auth/auth_hash_spec.rb @@ -2,14 +2,19 @@ require 'spec_helper' -RSpec.describe Gitlab::Auth::OAuth::AuthHash do +RSpec.describe Gitlab::Auth::OAuth::AuthHash, feature_category: :user_management do let(:provider) { 'ldap' } let(:auth_hash) do described_class.new( OmniAuth::AuthHash.new( provider: provider, uid: uid_ascii, - info: info_hash + info: info_hash, + extra: { + raw_info: { + 'https://example.com/claims/username': username_claim_utf8 + } + } ) ) end @@ -24,6 +29,7 @@ RSpec.describe Gitlab::Auth::OAuth::AuthHash do 
let(:first_name_raw) { +'Onur' } let(:last_name_raw) { +"K\xC3\xBC\xC3\xA7\xC3\xBCk" } let(:name_raw) { +"Onur K\xC3\xBC\xC3\xA7\xC3\xBCk" } + let(:username_claim_raw) { +'onur.partner' } let(:uid_ascii) { uid_raw.force_encoding(Encoding::ASCII_8BIT) } let(:email_ascii) { email_raw.force_encoding(Encoding::ASCII_8BIT) } @@ -37,6 +43,7 @@ RSpec.describe Gitlab::Auth::OAuth::AuthHash do let(:nickname_utf8) { nickname_ascii.force_encoding(Encoding::UTF_8) } let(:name_utf8) { name_ascii.force_encoding(Encoding::UTF_8) } let(:first_name_utf8) { first_name_ascii.force_encoding(Encoding::UTF_8) } + let(:username_claim_utf8) { username_claim_raw.force_encoding(Encoding::ASCII_8BIT) } let(:info_hash) do { @@ -98,10 +105,16 @@ RSpec.describe Gitlab::Auth::OAuth::AuthHash do allow(Gitlab::Auth::OAuth::Provider).to receive(:config_for).and_return(provider_config) end - it 'uses the custom field for the username' do + it 'uses the custom field for the username within info' do expect(auth_hash.username).to eql first_name_utf8 end + it 'uses the custom field for the username within extra.raw_info' do + provider_config['args']['gitlab_username_claim'] = 'https://example.com/claims/username' + + expect(auth_hash.username).to eql username_claim_utf8 + end + it 'uses the default claim for the username when the custom claim is not found' do provider_config['args']['gitlab_username_claim'] = 'nonexistent' @@ -146,4 +159,66 @@ RSpec.describe Gitlab::Auth::OAuth::AuthHash do expect(auth_hash.password.encoding).to eql Encoding::UTF_8 end end + + describe '#get_from_auth_hash_or_info' do + context 'for a key not within auth_hash' do + let(:auth_hash) do + described_class.new( + OmniAuth::AuthHash.new( + provider: provider, + uid: uid_ascii, + info: info_hash + ) + ) + end + + let(:info_hash) { { nickname: nickname_ascii } } + + it 'provides username from info_hash' do + expect(auth_hash.username).to eql nickname_utf8 + end + end + + context 'for a key within auth_hash' do + let(:auth_hash) 
do + described_class.new( + OmniAuth::AuthHash.new( + provider: provider, + uid: uid_ascii, + info: info_hash, + username: nickname_ascii + ) + ) + end + + let(:info_hash) { { something: nickname_ascii } } + + it 'provides username from auth_hash' do + expect(auth_hash.username).to eql nickname_utf8 + end + end + + context 'for a key within auth_hash extra' do + let(:auth_hash) do + described_class.new( + OmniAuth::AuthHash.new( + provider: provider, + uid: uid_ascii, + info: info_hash, + extra: { + raw_info: { + nickname: nickname_ascii + } + } + ) + ) + end + + let(:info_hash) { { something: nickname_ascii } } + + it 'provides username from auth_hash extra' do + expect(auth_hash.username).to eql nickname_utf8 + end + end + end end diff --git a/spec/lib/gitlab/auth/u2f_webauthn_converter_spec.rb b/spec/lib/gitlab/auth/u2f_webauthn_converter_spec.rb deleted file mode 100644 index deddc7f5294..00000000000 --- a/spec/lib/gitlab/auth/u2f_webauthn_converter_spec.rb +++ /dev/null @@ -1,29 +0,0 @@ -# frozen_string_literal: true - -require 'spec_helper' - -RSpec.describe Gitlab::Auth::U2fWebauthnConverter do - let_it_be(:u2f_registration) do - device = U2F::FakeU2F.new(FFaker::BaconIpsum.characters(5)) - create(:u2f_registration, name: 'u2f_device', - certificate: Base64.strict_encode64(device.cert_raw), - key_handle: U2F.urlsafe_encode64(device.key_handle_raw), - public_key: Base64.strict_encode64(device.origin_public_key_raw)) - end - - it 'converts u2f registration' do - webauthn_credential = WebAuthn::U2fMigrator.new( - app_id: Gitlab.config.gitlab.url, - certificate: u2f_registration.certificate, - key_handle: u2f_registration.key_handle, - public_key: u2f_registration.public_key, - counter: u2f_registration.counter - ).credential - - converted_webauthn = described_class.new(u2f_registration).convert - - expect(converted_webauthn).to( - include(user_id: u2f_registration.user_id, - credential_xid: Base64.strict_encode64(webauthn_credential.id))) - end -end diff --git 
a/spec/lib/gitlab/auth_spec.rb b/spec/lib/gitlab/auth_spec.rb index 11e9ecdb878..36c87fb4557 100644 --- a/spec/lib/gitlab/auth_spec.rb +++ b/spec/lib/gitlab/auth_spec.rb @@ -21,6 +21,10 @@ RSpec.describe Gitlab::Auth, :use_clean_rails_memory_store_caching, feature_cate expect(subject::REPOSITORY_SCOPES).to match_array %i[read_repository write_repository] end + it 'OBSERVABILITY_SCOPES contains all scopes for Observability access' do + expect(subject::OBSERVABILITY_SCOPES).to match_array %i[read_observability write_observability] + end + it 'OPENID_SCOPES contains all scopes for OpenID Connect' do expect(subject::OPENID_SCOPES).to match_array [:openid] end @@ -31,54 +35,103 @@ RSpec.describe Gitlab::Auth, :use_clean_rails_memory_store_caching, feature_cate end context 'available_scopes' do - it 'contains all non-default scopes' do + before do stub_container_registry_config(enabled: true) + end - expect(subject.all_available_scopes).to match_array %i[api read_user read_api read_repository write_repository read_registry write_registry sudo admin_mode] + it 'contains all non-default scopes' do + expect(subject.all_available_scopes).to match_array %i[api read_user read_api read_repository write_repository read_registry write_registry sudo admin_mode read_observability write_observability] end - it 'contains for non-admin user all non-default scopes without ADMIN access' do - stub_container_registry_config(enabled: true) - user = create(:user, admin: false) + it 'contains for non-admin user all non-default scopes without ADMIN access and without observability scopes' do + user = build_stubbed(:user, admin: false) expect(subject.available_scopes_for(user)).to match_array %i[api read_user read_api read_repository write_repository read_registry write_registry] end - it 'contains for admin user all non-default scopes with ADMIN access' do - stub_container_registry_config(enabled: true) - user = create(:user, admin: true) + it 'contains for admin user all non-default scopes 
with ADMIN access and without observability scopes' do + user = build_stubbed(:user, admin: true) expect(subject.available_scopes_for(user)).to match_array %i[api read_user read_api read_repository write_repository read_registry write_registry sudo admin_mode] end + it 'contains for project all resource bot scopes without observability scopes' do + expect(subject.available_scopes_for(project)).to match_array %i[api read_api read_repository write_repository read_registry write_registry] + end + + it 'contains for group all resource bot scopes' do + group = build_stubbed(:group) + + expect(subject.available_scopes_for(group)).to match_array %i[api read_api read_repository write_repository read_registry write_registry read_observability write_observability] + end + + it 'contains for unsupported type no scopes' do + expect(subject.available_scopes_for(:something)).to be_empty + end + it 'optional_scopes contains all non-default scopes' do - stub_container_registry_config(enabled: true) + expect(subject.optional_scopes).to match_array %i[read_user read_api read_repository write_repository read_registry write_registry sudo admin_mode openid profile email read_observability write_observability] + end + + context 'with observability_group_tab feature flag' do + context 'when disabled' do + before do + stub_feature_flags(observability_group_tab: false) + end + + it 'contains for group all resource bot scopes without observability scopes' do + group = build_stubbed(:group) - expect(subject.optional_scopes).to match_array %i[read_user read_api read_repository write_repository read_registry write_registry sudo admin_mode openid profile email] + expect(subject.available_scopes_for(group)).to match_array %i[api read_api read_repository write_repository read_registry write_registry] + end + end + + context 'when enabled for specific group' do + let(:group) { build_stubbed(:group) } + + before do + stub_feature_flags(observability_group_tab: group) + end + + it 'contains for 
other group all resource bot scopes including observability scopes' do + expect(subject.available_scopes_for(group)).to match_array %i[api read_api read_repository write_repository read_registry write_registry read_observability write_observability] + end + + it 'contains for admin user all non-default scopes with ADMIN access and without observability scopes' do + user = build_stubbed(:user, admin: true) + + expect(subject.available_scopes_for(user)).to match_array %i[api read_user read_api read_repository write_repository read_registry write_registry sudo admin_mode] + end + + it 'contains for project all resource bot scopes without observability scopes' do + expect(subject.available_scopes_for(project)).to match_array %i[api read_api read_repository write_repository read_registry write_registry] + end + + it 'contains for other group all resource bot scopes without observability scopes' do + other_group = build_stubbed(:group) + + expect(subject.available_scopes_for(other_group)).to match_array %i[api read_api read_repository write_repository read_registry write_registry] + end + end end - context 'with feature flag disabled' do + context 'with admin_mode_for_api feature flag disabled' do before do stub_feature_flags(admin_mode_for_api: false) end it 'contains all non-default scopes' do - stub_container_registry_config(enabled: true) - - expect(subject.all_available_scopes).to match_array %i[api read_user read_api read_repository write_repository read_registry write_registry sudo admin_mode] + expect(subject.all_available_scopes).to match_array %i[api read_user read_api read_repository write_repository read_registry write_registry sudo admin_mode read_observability write_observability] end - it 'contains for admin user all non-default scopes with ADMIN access' do - stub_container_registry_config(enabled: true) - user = create(:user, admin: true) + it 'contains for admin user all non-default scopes with ADMIN access and without observability scopes' do + user = 
build_stubbed(:user, admin: true) expect(subject.available_scopes_for(user)).to match_array %i[api read_user read_api read_repository write_repository read_registry write_registry sudo] end it 'optional_scopes contains all non-default scopes' do - stub_container_registry_config(enabled: true) - - expect(subject.optional_scopes).to match_array %i[read_user read_api read_repository write_repository read_registry write_registry sudo admin_mode openid profile email] + expect(subject.optional_scopes).to match_array %i[read_user read_api read_repository write_repository read_registry write_registry sudo admin_mode openid profile email read_observability write_observability] end end @@ -120,8 +173,8 @@ RSpec.describe Gitlab::Auth, :use_clean_rails_memory_store_caching, feature_cate end end - it 'raises an IpBlacklisted exception' do - expect { subject }.to raise_error(Gitlab::Auth::IpBlacklisted) + it 'raises an IpBlocked exception' do + expect { subject }.to raise_error(Gitlab::Auth::IpBlocked) end end @@ -314,15 +367,17 @@ RSpec.describe Gitlab::Auth, :use_clean_rails_memory_store_caching, feature_cate using RSpec::Parameterized::TableSyntax where(:scopes, :abilities) do - 'api' | described_class.full_authentication_abilities - 'read_api' | described_class.read_only_authentication_abilities - 'read_repository' | [:download_code] - 'write_repository' | [:download_code, :push_code] - 'read_user' | [] - 'sudo' | [] - 'openid' | [] - 'profile' | [] - 'email' | [] + 'api' | described_class.full_authentication_abilities + 'read_api' | described_class.read_only_authentication_abilities + 'read_repository' | [:download_code] + 'write_repository' | [:download_code, :push_code] + 'read_user' | [] + 'sudo' | [] + 'openid' | [] + 'profile' | [] + 'email' | [] + 'read_observability' | [] + 'write_observability' | [] end with_them do @@ -1024,6 +1079,7 @@ RSpec.describe Gitlab::Auth, :use_clean_rails_memory_store_caching, feature_cate it { is_expected.to 
include(*described_class::API_SCOPES - [:read_user]) } it { is_expected.to include(*described_class::REPOSITORY_SCOPES) } it { is_expected.to include(*described_class.registry_scopes) } + it { is_expected.to include(*described_class::OBSERVABILITY_SCOPES) } end private diff --git a/spec/lib/gitlab/background_migration/backfill_admin_mode_scope_for_personal_access_tokens_spec.rb b/spec/lib/gitlab/background_migration/backfill_admin_mode_scope_for_personal_access_tokens_spec.rb index d2da6867773..92fec48454c 100644 --- a/spec/lib/gitlab/background_migration/backfill_admin_mode_scope_for_personal_access_tokens_spec.rb +++ b/spec/lib/gitlab/background_migration/backfill_admin_mode_scope_for_personal_access_tokens_spec.rb @@ -24,8 +24,12 @@ RSpec.describe Gitlab::BackgroundMigration::BackfillAdminModeScopeForPersonalAcc personal_access_tokens.create!(name: 'admin 4', user_id: admin.id, scopes: "---\n- admin_mode\n") end - let!(:pat_admin_2) { personal_access_tokens.create!(name: 'admin 5', user_id: admin.id, scopes: "---\n- read_api\n") } - let!(:pat_not_in_range) { personal_access_tokens.create!(name: 'admin 6', user_id: admin.id, scopes: "---\n- api\n") } + let!(:pat_with_symbol_in_scopes) do + personal_access_tokens.create!(name: 'admin 5', user_id: admin.id, scopes: "---\n- :api\n") + end + + let!(:pat_admin_2) { personal_access_tokens.create!(name: 'admin 6', user_id: admin.id, scopes: "---\n- read_api\n") } + let!(:pat_not_in_range) { personal_access_tokens.create!(name: 'admin 7', user_id: admin.id, scopes: "---\n- api\n") } subject do described_class.new( @@ -47,6 +51,7 @@ RSpec.describe Gitlab::BackgroundMigration::BackfillAdminModeScopeForPersonalAcc expect(pat_revoked.reload.scopes).to eq("---\n- api\n") expect(pat_expired.reload.scopes).to eq("---\n- api\n") expect(pat_admin_mode.reload.scopes).to eq("---\n- admin_mode\n") + expect(pat_with_symbol_in_scopes.reload.scopes).to eq("---\n- api\n- admin_mode\n") expect(pat_admin_2.reload.scopes).to eq("---\n- 
read_api\n- admin_mode\n") expect(pat_not_in_range.reload.scopes).to eq("---\n- api\n") end diff --git a/spec/lib/gitlab/background_migration/backfill_ci_queuing_tables_spec.rb b/spec/lib/gitlab/background_migration/backfill_ci_queuing_tables_spec.rb deleted file mode 100644 index aaf8c124a83..00000000000 --- a/spec/lib/gitlab/background_migration/backfill_ci_queuing_tables_spec.rb +++ /dev/null @@ -1,245 +0,0 @@ -# frozen_string_literal: true - -require 'spec_helper' - -RSpec.describe Gitlab::BackgroundMigration::BackfillCiQueuingTables, :migration, - :suppress_gitlab_schemas_validate_connection, schema: 20220208115439 do - let(:namespaces) { table(:namespaces) } - let(:projects) { table(:projects) } - let(:ci_cd_settings) { table(:project_ci_cd_settings) } - let(:builds) { table(:ci_builds) } - let(:queuing_entries) { table(:ci_pending_builds) } - let(:tags) { table(:tags) } - let(:taggings) { table(:taggings) } - - subject { described_class.new } - - describe '#perform' do - let!(:namespace) do - namespaces.create!( - id: 10, - name: 'namespace10', - path: 'namespace10', - traversal_ids: [10]) - end - - let!(:other_namespace) do - namespaces.create!( - id: 11, - name: 'namespace11', - path: 'namespace11', - traversal_ids: [11]) - end - - let!(:project) do - projects.create!(id: 5, namespace_id: 10, name: 'test1', path: 'test1') - end - - let!(:ci_cd_setting) do - ci_cd_settings.create!(id: 5, project_id: 5, group_runners_enabled: true) - end - - let!(:other_project) do - projects.create!(id: 7, namespace_id: 11, name: 'test2', path: 'test2') - end - - let!(:other_ci_cd_setting) do - ci_cd_settings.create!(id: 7, project_id: 7, group_runners_enabled: false) - end - - let!(:another_project) do - projects.create!(id: 9, namespace_id: 10, name: 'test3', path: 'test3', shared_runners_enabled: false) - end - - let!(:ruby_tag) do - tags.create!(id: 22, name: 'ruby') - end - - let!(:postgres_tag) do - tags.create!(id: 23, name: 'postgres') - end - - it 'creates 
ci_pending_builds for all pending builds in range' do - builds.create!(id: 50, status: :pending, name: 'test1', project_id: 5, type: 'Ci::Build') - builds.create!(id: 51, status: :created, name: 'test2', project_id: 5, type: 'Ci::Build') - builds.create!(id: 52, status: :pending, name: 'test3', project_id: 5, protected: true, type: 'Ci::Build') - - taggings.create!(taggable_id: 52, taggable_type: 'CommitStatus', tag_id: 22) - taggings.create!(taggable_id: 52, taggable_type: 'CommitStatus', tag_id: 23) - - builds.create!(id: 60, status: :pending, name: 'test1', project_id: 7, type: 'Ci::Build') - builds.create!(id: 61, status: :running, name: 'test2', project_id: 7, protected: true, type: 'Ci::Build') - builds.create!(id: 62, status: :pending, name: 'test3', project_id: 7, type: 'Ci::Build') - - taggings.create!(taggable_id: 60, taggable_type: 'CommitStatus', tag_id: 23) - taggings.create!(taggable_id: 62, taggable_type: 'CommitStatus', tag_id: 22) - - builds.create!(id: 70, status: :pending, name: 'test1', project_id: 9, protected: true, type: 'Ci::Build') - builds.create!(id: 71, status: :failed, name: 'test2', project_id: 9, type: 'Ci::Build') - builds.create!(id: 72, status: :pending, name: 'test3', project_id: 9, type: 'Ci::Build') - - taggings.create!(taggable_id: 71, taggable_type: 'CommitStatus', tag_id: 22) - - subject.perform(1, 100) - - expect(queuing_entries.all).to contain_exactly( - an_object_having_attributes( - build_id: 50, - project_id: 5, - namespace_id: 10, - protected: false, - instance_runners_enabled: true, - minutes_exceeded: false, - tag_ids: [], - namespace_traversal_ids: [10]), - an_object_having_attributes( - build_id: 52, - project_id: 5, - namespace_id: 10, - protected: true, - instance_runners_enabled: true, - minutes_exceeded: false, - tag_ids: match_array([22, 23]), - namespace_traversal_ids: [10]), - an_object_having_attributes( - build_id: 60, - project_id: 7, - namespace_id: 11, - protected: false, - instance_runners_enabled: 
true, - minutes_exceeded: false, - tag_ids: [23], - namespace_traversal_ids: []), - an_object_having_attributes( - build_id: 62, - project_id: 7, - namespace_id: 11, - protected: false, - instance_runners_enabled: true, - minutes_exceeded: false, - tag_ids: [22], - namespace_traversal_ids: []), - an_object_having_attributes( - build_id: 70, - project_id: 9, - namespace_id: 10, - protected: true, - instance_runners_enabled: false, - minutes_exceeded: false, - tag_ids: [], - namespace_traversal_ids: []), - an_object_having_attributes( - build_id: 72, - project_id: 9, - namespace_id: 10, - protected: false, - instance_runners_enabled: false, - minutes_exceeded: false, - tag_ids: [], - namespace_traversal_ids: []) - ) - end - - it 'skips builds that already have ci_pending_builds' do - builds.create!(id: 50, status: :pending, name: 'test1', project_id: 5, type: 'Ci::Build') - builds.create!(id: 51, status: :created, name: 'test2', project_id: 5, type: 'Ci::Build') - builds.create!(id: 52, status: :pending, name: 'test3', project_id: 5, protected: true, type: 'Ci::Build') - - taggings.create!(taggable_id: 50, taggable_type: 'CommitStatus', tag_id: 22) - taggings.create!(taggable_id: 52, taggable_type: 'CommitStatus', tag_id: 23) - - queuing_entries.create!(build_id: 50, project_id: 5, namespace_id: 10) - - subject.perform(1, 100) - - expect(queuing_entries.all).to contain_exactly( - an_object_having_attributes( - build_id: 50, - project_id: 5, - namespace_id: 10, - protected: false, - instance_runners_enabled: false, - minutes_exceeded: false, - tag_ids: [], - namespace_traversal_ids: []), - an_object_having_attributes( - build_id: 52, - project_id: 5, - namespace_id: 10, - protected: true, - instance_runners_enabled: true, - minutes_exceeded: false, - tag_ids: [23], - namespace_traversal_ids: [10]) - ) - end - - it 'upserts values in case of conflicts' do - builds.create!(id: 50, status: :pending, name: 'test1', project_id: 5, type: 'Ci::Build') - 
queuing_entries.create!(build_id: 50, project_id: 5, namespace_id: 10) - - build = described_class::Ci::Build.find(50) - described_class::Ci::PendingBuild.upsert_from_build!(build) - - expect(queuing_entries.all).to contain_exactly( - an_object_having_attributes( - build_id: 50, - project_id: 5, - namespace_id: 10, - protected: false, - instance_runners_enabled: true, - minutes_exceeded: false, - tag_ids: [], - namespace_traversal_ids: [10]) - ) - end - end - - context 'Ci::Build' do - describe '.each_batch' do - let(:model) { described_class::Ci::Build } - - before do - builds.create!(id: 1, status: :pending, name: 'test1', project_id: 5, type: 'Ci::Build') - builds.create!(id: 2, status: :pending, name: 'test2', project_id: 5, type: 'Ci::Build') - builds.create!(id: 3, status: :pending, name: 'test3', project_id: 5, type: 'Ci::Build') - builds.create!(id: 4, status: :pending, name: 'test4', project_id: 5, type: 'Ci::Build') - builds.create!(id: 5, status: :pending, name: 'test5', project_id: 5, type: 'Ci::Build') - end - - it 'yields an ActiveRecord::Relation when a block is given' do - model.each_batch do |relation| - expect(relation).to be_a_kind_of(ActiveRecord::Relation) - end - end - - it 'yields a batch index as the second argument' do - model.each_batch do |_, index| - expect(index).to eq(1) - end - end - - it 'accepts a custom batch size' do - amount = 0 - - model.each_batch(of: 1) { amount += 1 } - - expect(amount).to eq(5) - end - - it 'does not include ORDER BYs in the yielded relations' do - model.each_batch do |relation| - expect(relation.to_sql).not_to include('ORDER BY') - end - end - - it 'orders ascending' do - ids = [] - - model.each_batch(of: 1) { |rel| ids.concat(rel.ids) } - - expect(ids).to eq(ids.sort) - end - end - end -end diff --git a/spec/lib/gitlab/background_migration/backfill_group_features_spec.rb b/spec/lib/gitlab/background_migration/backfill_group_features_spec.rb index e0be5a785b8..2c2740434de 100644 --- 
a/spec/lib/gitlab/background_migration/backfill_group_features_spec.rb +++ b/spec/lib/gitlab/background_migration/backfill_group_features_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe Gitlab::BackgroundMigration::BackfillGroupFeatures, :migration, schema: 20220302114046 do +RSpec.describe Gitlab::BackgroundMigration::BackfillGroupFeatures, :migration, schema: 20220314184009 do let(:group_features) { table(:group_features) } let(:namespaces) { table(:namespaces) } diff --git a/spec/lib/gitlab/background_migration/backfill_integrations_type_new_spec.rb b/spec/lib/gitlab/background_migration/backfill_integrations_type_new_spec.rb deleted file mode 100644 index e6588644b4f..00000000000 --- a/spec/lib/gitlab/background_migration/backfill_integrations_type_new_spec.rb +++ /dev/null @@ -1,67 +0,0 @@ -# frozen_string_literal: true - -require 'spec_helper' - -RSpec.describe Gitlab::BackgroundMigration::BackfillIntegrationsTypeNew, :migration, schema: 20220212120735 do - let(:migration) { described_class.new } - let(:integrations) { table(:integrations) } - - let(:namespaced_integrations) do - Set.new( - %w[ - Asana Assembla Bamboo Bugzilla Buildkite Campfire Confluence CustomIssueTracker Datadog - Discord DroneCi EmailsOnPush Ewm ExternalWiki Flowdock HangoutsChat Harbor Irker Jenkins Jira Mattermost - MattermostSlashCommands MicrosoftTeams MockCi MockMonitoring Packagist PipelinesEmail Pivotaltracker - Prometheus Pushover Redmine Shimo Slack SlackSlashCommands Teamcity UnifyCircuit WebexTeams Youtrack Zentao - Github GitlabSlackApplication - ]).freeze - end - - before do - integrations.connection.execute 'ALTER TABLE integrations DISABLE TRIGGER "trigger_type_new_on_insert"' - - namespaced_integrations.each_with_index do |type, i| - integrations.create!(id: i + 1, type: "#{type}Service") - end - - integrations.create!(id: namespaced_integrations.size + 1, type: 'LegacyService') - ensure - integrations.connection.execute 'ALTER TABLE integrations ENABLE 
TRIGGER "trigger_type_new_on_insert"' - end - - it 'backfills `type_new` for the selected records' do - # We don't want to mock `Kernel.sleep`, so instead we mock it on the migration - # class before it gets forwarded. - expect(migration).to receive(:sleep).with(0.05).exactly(5).times - - queries = ActiveRecord::QueryRecorder.new do - migration.perform(2, 10, :integrations, :id, 2, 50) - end - - expect(queries.count).to be(16) - expect(queries.log.grep(/^SELECT/).size).to be(11) - expect(queries.log.grep(/^UPDATE/).size).to be(5) - expect(queries.log.grep(/^UPDATE/).join.scan(/WHERE .*/)).to eq( - [ - 'WHERE integrations.id BETWEEN 2 AND 3', - 'WHERE integrations.id BETWEEN 4 AND 5', - 'WHERE integrations.id BETWEEN 6 AND 7', - 'WHERE integrations.id BETWEEN 8 AND 9', - 'WHERE integrations.id BETWEEN 10 AND 10' - ]) - - expect(integrations.where(id: 2..10).pluck(:type, :type_new)).to contain_exactly( - ['AssemblaService', 'Integrations::Assembla'], - ['BambooService', 'Integrations::Bamboo'], - ['BugzillaService', 'Integrations::Bugzilla'], - ['BuildkiteService', 'Integrations::Buildkite'], - ['CampfireService', 'Integrations::Campfire'], - ['ConfluenceService', 'Integrations::Confluence'], - ['CustomIssueTrackerService', 'Integrations::CustomIssueTracker'], - ['DatadogService', 'Integrations::Datadog'], - ['DiscordService', 'Integrations::Discord'] - ) - - expect(integrations.where.not(id: 2..10)).to all(have_attributes(type_new: nil)) - end -end diff --git a/spec/lib/gitlab/background_migration/backfill_member_namespace_for_group_members_spec.rb b/spec/lib/gitlab/background_migration/backfill_member_namespace_for_group_members_spec.rb index e1ef12a1479..ea07079f9ee 100644 --- a/spec/lib/gitlab/background_migration/backfill_member_namespace_for_group_members_spec.rb +++ b/spec/lib/gitlab/background_migration/backfill_member_namespace_for_group_members_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe 
Gitlab::BackgroundMigration::BackfillMemberNamespaceForGroupMembers, :migration, schema: 20220120211832 do +RSpec.describe Gitlab::BackgroundMigration::BackfillMemberNamespaceForGroupMembers, :migration, schema: 20220314184009 do let(:migration) { described_class.new } let(:members_table) { table(:members) } let(:namespaces_table) { table(:namespaces) } diff --git a/spec/lib/gitlab/background_migration/backfill_namespace_id_for_namespace_route_spec.rb b/spec/lib/gitlab/background_migration/backfill_namespace_id_for_namespace_route_spec.rb index b821efcadb0..f4e8fa1bbac 100644 --- a/spec/lib/gitlab/background_migration/backfill_namespace_id_for_namespace_route_spec.rb +++ b/spec/lib/gitlab/background_migration/backfill_namespace_id_for_namespace_route_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe Gitlab::BackgroundMigration::BackfillNamespaceIdForNamespaceRoute, :migration, schema: 20220120123800 do +RSpec.describe Gitlab::BackgroundMigration::BackfillNamespaceIdForNamespaceRoute, :migration, schema: 20220314184009 do let(:migration) { described_class.new } let(:namespaces_table) { table(:namespaces) } let(:projects_table) { table(:projects) } diff --git a/spec/lib/gitlab/background_migration/backfill_namespace_traversal_ids_children_spec.rb b/spec/lib/gitlab/background_migration/backfill_namespace_traversal_ids_children_spec.rb deleted file mode 100644 index 876eb070745..00000000000 --- a/spec/lib/gitlab/background_migration/backfill_namespace_traversal_ids_children_spec.rb +++ /dev/null @@ -1,21 +0,0 @@ -# frozen_string_literal: true - -require 'spec_helper' - -RSpec.describe Gitlab::BackgroundMigration::BackfillNamespaceTraversalIdsChildren, :migration, schema: 20210826171758 do - let(:namespaces_table) { table(:namespaces) } - - let!(:user_namespace) { namespaces_table.create!(id: 1, name: 'user', path: 'user', type: nil) } - let!(:root_group) { namespaces_table.create!(id: 2, name: 'group', path: 'group', type: 'Group', parent_id: nil) } - 
let!(:sub_group) { namespaces_table.create!(id: 3, name: 'subgroup', path: 'subgroup', type: 'Group', parent_id: 2) } - - describe '#perform' do - it 'backfills traversal_ids for child namespaces' do - described_class.new.perform(1, 3, 5) - - expect(user_namespace.reload.traversal_ids).to eq([]) - expect(root_group.reload.traversal_ids).to eq([]) - expect(sub_group.reload.traversal_ids).to eq([root_group.id, sub_group.id]) - end - end -end diff --git a/spec/lib/gitlab/background_migration/backfill_namespace_traversal_ids_roots_spec.rb b/spec/lib/gitlab/background_migration/backfill_namespace_traversal_ids_roots_spec.rb deleted file mode 100644 index ad9b54608c6..00000000000 --- a/spec/lib/gitlab/background_migration/backfill_namespace_traversal_ids_roots_spec.rb +++ /dev/null @@ -1,21 +0,0 @@ -# frozen_string_literal: true - -require 'spec_helper' - -RSpec.describe Gitlab::BackgroundMigration::BackfillNamespaceTraversalIdsRoots, :migration, schema: 20210826171758 do - let(:namespaces_table) { table(:namespaces) } - - let!(:user_namespace) { namespaces_table.create!(id: 1, name: 'user', path: 'user', type: nil) } - let!(:root_group) { namespaces_table.create!(id: 2, name: 'group', path: 'group', type: 'Group', parent_id: nil) } - let!(:sub_group) { namespaces_table.create!(id: 3, name: 'subgroup', path: 'subgroup', type: 'Group', parent_id: 2) } - - describe '#perform' do - it 'backfills traversal_ids for root namespaces' do - described_class.new.perform(1, 3, 5) - - expect(user_namespace.reload.traversal_ids).to eq([user_namespace.id]) - expect(root_group.reload.traversal_ids).to eq([root_group.id]) - expect(sub_group.reload.traversal_ids).to eq([]) - end - end -end diff --git a/spec/lib/gitlab/background_migration/backfill_partitioned_table_spec.rb b/spec/lib/gitlab/background_migration/backfill_partitioned_table_spec.rb new file mode 100644 index 00000000000..53216cc780b --- /dev/null +++ b/spec/lib/gitlab/background_migration/backfill_partitioned_table_spec.rb 
@@ -0,0 +1,140 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe Gitlab::BackgroundMigration::BackfillPartitionedTable, feature_category: :database do + subject(:backfill_job) do + described_class.new( + start_id: 1, + end_id: 3, + batch_table: source_table, + batch_column: :id, + sub_batch_size: 2, + pause_ms: 0, + job_arguments: [destination_table], + connection: connection + ) + end + + let(:connection) { ApplicationRecord.connection } + let(:source_table) { '_test_source_table' } + let(:destination_table) { "#{source_table}_partitioned" } + let(:source_model) { Class.new(ApplicationRecord) } + let(:destination_model) { Class.new(ApplicationRecord) } + + describe '#perform' do + context 'without the destination table' do + let(:expected_error_message) do + "exiting backfill migration because partitioned table #{destination_table} does not exist. " \ + "This could be due to rollback of the migration which created the partitioned table." + end + + it 'raises an exception' do + expect { backfill_job.perform }.to raise_error(expected_error_message) + end + end + + context 'with destination table being not partitioned' do + before do + connection.execute(<<~SQL) + CREATE TABLE #{destination_table} ( + id serial NOT NULL, + col1 int NOT NULL, + col2 text NOT NULL, + created_at timestamptz NOT NULL, + PRIMARY KEY (id, created_at) + ) + SQL + end + + after do + connection.drop_table destination_table + end + + let(:expected_error_message) do + "exiting backfill migration because the given destination table is not partitioned." 
+ end + + it 'raises an exception' do + expect { backfill_job.perform }.to raise_error(expected_error_message) + end + end + + context 'when the destination table exists' do + before do + connection.execute(<<~SQL) + CREATE TABLE #{source_table} ( + id serial NOT NULL PRIMARY KEY, + col1 int NOT NULL, + col2 text NOT NULL, + created_at timestamptz NOT NULL + ) + SQL + + connection.execute(<<~SQL) + CREATE TABLE #{destination_table} ( + id serial NOT NULL, + col1 int NOT NULL, + col2 text NOT NULL, + created_at timestamptz NOT NULL, + PRIMARY KEY (id, created_at) + ) PARTITION BY RANGE (created_at) + SQL + + connection.execute(<<~SQL) + CREATE TABLE #{destination_table}_202001 PARTITION OF #{destination_table} + FOR VALUES FROM ('2020-01-01') TO ('2020-02-01') + SQL + + connection.execute(<<~SQL) + CREATE TABLE #{destination_table}_202002 PARTITION OF #{destination_table} + FOR VALUES FROM ('2020-02-01') TO ('2020-03-01') + SQL + + source_model.table_name = source_table + destination_model.table_name = destination_table + end + + after do + connection.drop_table source_table + connection.drop_table destination_table + end + + let(:timestamp) { Time.utc(2020, 1, 2).round } + let!(:source1) { create_source_record(timestamp) } + let!(:source2) { create_source_record(timestamp + 1.day) } + let!(:source3) { create_source_record(timestamp + 1.month) } + + it 'copies data into the destination table idempotently' do + expect(destination_model.count).to eq(0) + + backfill_job.perform + + expect(destination_model.count).to eq(3) + + source_model.find_each do |source_record| + destination_record = destination_model.find_by_id(source_record.id) + + expect(destination_record.attributes).to eq(source_record.attributes) + end + + backfill_job.perform + + expect(destination_model.count).to eq(3) + end + + it 'breaks the assigned batch into smaller sub batches' do + expect_next_instance_of(Gitlab::Database::PartitioningMigrationHelpers::BulkCopy) do |bulk_copy| + 
expect(bulk_copy).to receive(:copy_between).with(source1.id, source2.id) + expect(bulk_copy).to receive(:copy_between).with(source3.id, source3.id) + end + + backfill_job.perform + end + end + end + + def create_source_record(timestamp) + source_model.create!(col1: 123, col2: 'original value', created_at: timestamp) + end +end diff --git a/spec/lib/gitlab/background_migration/backfill_prepared_at_merge_requests_spec.rb b/spec/lib/gitlab/background_migration/backfill_prepared_at_merge_requests_spec.rb index b33a1a31c40..28ecfae1bd4 100644 --- a/spec/lib/gitlab/background_migration/backfill_prepared_at_merge_requests_spec.rb +++ b/spec/lib/gitlab/background_migration/backfill_prepared_at_merge_requests_spec.rb @@ -14,18 +14,6 @@ RSpec.describe Gitlab::BackgroundMigration::BackfillPreparedAtMergeRequests, :mi projects.create!(name: 'proj1', path: 'proj1', namespace_id: namespace.id, project_namespace_id: proj_namespace.id) end - let(:test_worker) do - described_class.new( - start_id: 1, - end_id: 100, - batch_table: :merge_requests, - batch_column: :id, - sub_batch_size: 10, - pause_ms: 0, - connection: ApplicationRecord.connection - ) - end - it 'updates merge requests with prepared_at nil' do time = Time.current @@ -40,6 +28,16 @@ RSpec.describe Gitlab::BackgroundMigration::BackfillPreparedAtMergeRequests, :mi mr_5 = mr_table.create!(target_project_id: project.id, source_branch: 'master', target_branch: 'feature', prepared_at: time, merge_status: 'preparing') + test_worker = described_class.new( + start_id: mr_1.id, + end_id: [(mr_5.id + 1), 100].max, + batch_table: :merge_requests, + batch_column: :id, + sub_batch_size: 10, + pause_ms: 0, + connection: ApplicationRecord.connection + ) + expect(mr_1.prepared_at).to be_nil expect(mr_2.prepared_at).to be_nil expect(mr_3.prepared_at.to_i).to eq(time.to_i) diff --git a/spec/lib/gitlab/background_migration/backfill_snippet_repositories_spec.rb b/spec/lib/gitlab/background_migration/backfill_snippet_repositories_spec.rb 
index 80fd86e90bb..6f6ff9232e0 100644 --- a/spec/lib/gitlab/background_migration/backfill_snippet_repositories_spec.rb +++ b/spec/lib/gitlab/background_migration/backfill_snippet_repositories_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe Gitlab::BackgroundMigration::BackfillSnippetRepositories, :migration, schema: 20210826171758, +RSpec.describe Gitlab::BackgroundMigration::BackfillSnippetRepositories, :migration, schema: 20220314184009, feature_category: :source_code_management do let(:gitlab_shell) { Gitlab::Shell.new } let(:users) { table(:users) } diff --git a/spec/lib/gitlab/background_migration/backfill_upvotes_count_on_issues_spec.rb b/spec/lib/gitlab/background_migration/backfill_upvotes_count_on_issues_spec.rb deleted file mode 100644 index 7142aea3ab2..00000000000 --- a/spec/lib/gitlab/background_migration/backfill_upvotes_count_on_issues_spec.rb +++ /dev/null @@ -1,46 +0,0 @@ -# frozen_string_literal: true - -require 'spec_helper' - -RSpec.describe Gitlab::BackgroundMigration::BackfillUpvotesCountOnIssues, schema: 20210826171758 do - let(:award_emoji) { table(:award_emoji) } - - let!(:namespace) { table(:namespaces).create!(name: 'namespace', path: 'namespace') } - let!(:project1) { table(:projects).create!(namespace_id: namespace.id) } - let!(:project2) { table(:projects).create!(namespace_id: namespace.id) } - let!(:issue1) { table(:issues).create!(project_id: project1.id) } - let!(:issue2) { table(:issues).create!(project_id: project2.id) } - let!(:issue3) { table(:issues).create!(project_id: project2.id) } - let!(:issue4) { table(:issues).create!(project_id: project2.id) } - - describe '#perform' do - before do - add_upvotes(issue1, :thumbsdown, 1) - add_upvotes(issue2, :thumbsup, 2) - add_upvotes(issue2, :thumbsdown, 1) - add_upvotes(issue3, :thumbsup, 3) - add_upvotes(issue4, :thumbsup, 4) - end - - it 'updates upvotes_count' do - subject.perform(issue1.id, issue4.id) - - expect(issue1.reload.upvotes_count).to eq(0) - 
expect(issue2.reload.upvotes_count).to eq(2) - expect(issue3.reload.upvotes_count).to eq(3) - expect(issue4.reload.upvotes_count).to eq(4) - end - end - - private - - def add_upvotes(issue, name, count) - count.times do - award_emoji.create!( - name: name.to_s, - awardable_type: 'Issue', - awardable_id: issue.id - ) - end - end -end diff --git a/spec/lib/gitlab/background_migration/backfill_user_namespace_spec.rb b/spec/lib/gitlab/background_migration/backfill_user_namespace_spec.rb deleted file mode 100644 index 395248b786d..00000000000 --- a/spec/lib/gitlab/background_migration/backfill_user_namespace_spec.rb +++ /dev/null @@ -1,39 +0,0 @@ -# frozen_string_literal: true - -require 'spec_helper' - -RSpec.describe Gitlab::BackgroundMigration::BackfillUserNamespace, :migration, schema: 20210930211936 do - let(:migration) { described_class.new } - let(:namespaces_table) { table(:namespaces) } - - let(:table_name) { 'namespaces' } - let(:batch_column) { :id } - let(:sub_batch_size) { 100 } - let(:pause_ms) { 0 } - - subject(:perform_migration) { migration.perform(1, 10, table_name, batch_column, sub_batch_size, pause_ms) } - - before do - namespaces_table.create!(id: 1, name: 'test1', path: 'test1', type: nil) - namespaces_table.create!(id: 2, name: 'test2', path: 'test2', type: 'User') - namespaces_table.create!(id: 3, name: 'test3', path: 'test3', type: 'Group') - namespaces_table.create!(id: 4, name: 'test4', path: 'test4', type: nil) - namespaces_table.create!(id: 11, name: 'test11', path: 'test11', type: nil) - end - - it 'backfills `type` for the selected records', :aggregate_failures do - queries = ActiveRecord::QueryRecorder.new do - perform_migration - end - - expect(queries.count).to eq(3) - expect(namespaces_table.where(type: 'User').count).to eq 3 - expect(namespaces_table.where(type: 'User').pluck(:id)).to match_array([1, 2, 4]) - end - - it 'tracks timings of queries' do - expect(migration.batch_metrics.timings).to be_empty - - expect { perform_migration 
}.to change { migration.batch_metrics.timings } - end -end diff --git a/spec/lib/gitlab/background_migration/cleanup_orphaned_lfs_objects_projects_spec.rb b/spec/lib/gitlab/background_migration/cleanup_orphaned_lfs_objects_projects_spec.rb deleted file mode 100644 index 5ffe665f0ad..00000000000 --- a/spec/lib/gitlab/background_migration/cleanup_orphaned_lfs_objects_projects_spec.rb +++ /dev/null @@ -1,85 +0,0 @@ -# frozen_string_literal: true - -require 'spec_helper' - -RSpec.describe Gitlab::BackgroundMigration::CleanupOrphanedLfsObjectsProjects, schema: 20210826171758 do - let(:lfs_objects_projects) { table(:lfs_objects_projects) } - let(:lfs_objects) { table(:lfs_objects) } - let(:projects) { table(:projects) } - let(:namespaces) { table(:namespaces) } - - let(:namespace) { namespaces.create!(name: 'namespace', path: 'namespace') } - let(:project) { projects.create!(namespace_id: namespace.id) } - let(:another_project) { projects.create!(namespace_id: namespace.id) } - let(:lfs_object) { lfs_objects.create!(oid: 'abcdef', size: 1) } - let(:another_lfs_object) { lfs_objects.create!(oid: '1abcde', size: 2) } - - let!(:without_object1) { create_object(project_id: project.id) } - let!(:without_object2) { create_object(project_id: another_project.id) } - let!(:without_object3) { create_object(project_id: another_project.id) } - let!(:with_project_and_object1) { create_object(project_id: project.id, lfs_object_id: lfs_object.id) } - let!(:with_project_and_object2) { create_object(project_id: project.id, lfs_object_id: another_lfs_object.id) } - let!(:with_project_and_object3) { create_object(project_id: another_project.id, lfs_object_id: another_lfs_object.id) } - let!(:without_project1) { create_object(lfs_object_id: lfs_object.id) } - let!(:without_project2) { create_object(lfs_object_id: another_lfs_object.id) } - let!(:without_project_and_object) { create_object } - - def create_object(project_id: non_existing_record_id, lfs_object_id: non_existing_record_id) - 
lfs_objects_project = nil - - ActiveRecord::Base.connection.disable_referential_integrity do - lfs_objects_project = lfs_objects_projects.create!(project_id: project_id, lfs_object_id: lfs_object_id) - end - - lfs_objects_project - end - - subject { described_class.new } - - describe '#perform' do - it 'lfs_objects_projects without an existing lfs object or project are removed' do - subject.perform(without_object1.id, without_object3.id) - - expect(lfs_objects_projects.all).to match_array( - [ - with_project_and_object1, with_project_and_object2, with_project_and_object3, - without_project1, without_project2, without_project_and_object - ]) - - subject.perform(with_project_and_object1.id, with_project_and_object3.id) - - expect(lfs_objects_projects.all).to match_array( - [ - with_project_and_object1, with_project_and_object2, with_project_and_object3, - without_project1, without_project2, without_project_and_object - ]) - - subject.perform(without_project1.id, without_project_and_object.id) - - expect(lfs_objects_projects.all).to match_array( - [ - with_project_and_object1, with_project_and_object2, with_project_and_object3 - ]) - - expect(lfs_objects.ids).to contain_exactly(lfs_object.id, another_lfs_object.id) - expect(projects.ids).to contain_exactly(project.id, another_project.id) - end - - it 'cache for affected projects is being reset' do - expect(ProjectCacheWorker).to receive(:bulk_perform_in) do |delay, args| - expect(delay).to eq(1.minute) - expect(args).to match_array([[project.id, [], [:lfs_objects_size]], [another_project.id, [], [:lfs_objects_size]]]) - end - - subject.perform(without_object1.id, with_project_and_object1.id) - - expect(ProjectCacheWorker).not_to receive(:bulk_perform_in) - - subject.perform(with_project_and_object1.id, with_project_and_object3.id) - - expect(ProjectCacheWorker).not_to receive(:bulk_perform_in) - - subject.perform(without_project1.id, without_project_and_object.id) - end - end -end diff --git 
a/spec/lib/gitlab/background_migration/delete_orphaned_deployments_spec.rb b/spec/lib/gitlab/background_migration/delete_orphaned_deployments_spec.rb deleted file mode 100644 index 8f058c875a2..00000000000 --- a/spec/lib/gitlab/background_migration/delete_orphaned_deployments_spec.rb +++ /dev/null @@ -1,54 +0,0 @@ -# frozen_string_literal: true - -require 'spec_helper' - -RSpec.describe Gitlab::BackgroundMigration::DeleteOrphanedDeployments, :migration, schema: 20210826171758 do - let!(:namespace) { table(:namespaces).create!(name: 'user', path: 'user') } - let!(:project) { table(:projects).create!(namespace_id: namespace.id) } - let!(:environment) { table(:environments).create!(name: 'production', slug: 'production', project_id: project.id) } - let(:background_migration_jobs) { table(:background_migration_jobs) } - - before do - create_deployment!(environment.id, project.id) - end - - it 'deletes only orphaned deployments' do - expect(valid_deployments.pluck(:id)).not_to be_empty - - subject.perform(table(:deployments).minimum(:id), table(:deployments).maximum(:id)) - - expect(valid_deployments.pluck(:id)).not_to be_empty - end - - it 'marks jobs as done' do - first_job = background_migration_jobs.create!( - class_name: 'DeleteOrphanedDeployments', - arguments: [table(:deployments).minimum(:id), table(:deployments).minimum(:id)] - ) - - subject.perform(table(:deployments).minimum(:id), table(:deployments).minimum(:id)) - - expect(first_job.reload.status).to eq(Gitlab::Database::BackgroundMigrationJob.statuses[:succeeded]) - end - - private - - def valid_deployments - table(:deployments).where('EXISTS (SELECT 1 FROM environments WHERE deployments.environment_id = environments.id)') - end - - def orphaned_deployments - table(:deployments).where('NOT EXISTS (SELECT 1 FROM environments WHERE deployments.environment_id = environments.id)') - end - - def create_deployment!(environment_id, project_id) - table(:deployments).create!( - environment_id: environment_id, - 
project_id: project_id, - ref: 'master', - tag: false, - sha: 'x', - status: 1, - iid: table(:deployments).count + 1) - end -end diff --git a/spec/lib/gitlab/background_migration/disable_expiration_policies_linked_to_no_container_images_spec.rb b/spec/lib/gitlab/background_migration/disable_expiration_policies_linked_to_no_container_images_spec.rb deleted file mode 100644 index e7b0471810d..00000000000 --- a/spec/lib/gitlab/background_migration/disable_expiration_policies_linked_to_no_container_images_spec.rb +++ /dev/null @@ -1,142 +0,0 @@ -# frozen_string_literal: true - -require 'spec_helper' - -RSpec.describe Gitlab::BackgroundMigration::DisableExpirationPoliciesLinkedToNoContainerImages, :migration, schema: 20220326161803 do # rubocop:disable Layout/LineLength - let!(:projects) { table(:projects) } - let!(:container_expiration_policies) { table(:container_expiration_policies) } - let!(:container_repositories) { table(:container_repositories) } - let!(:namespaces) { table(:namespaces) } - - let!(:namespace) { namespaces.create!(name: 'test', path: 'test') } - - let!(:policy1) { create_expiration_policy(project_id: 1, enabled: true) } - let!(:policy2) { create_expiration_policy(project_id: 2, enabled: false) } - let!(:policy3) { create_expiration_policy(project_id: 3, enabled: false) } - let!(:policy4) { create_expiration_policy(project_id: 4, enabled: true, with_images: true) } - let!(:policy5) { create_expiration_policy(project_id: 5, enabled: false, with_images: true) } - let!(:policy6) { create_expiration_policy(project_id: 6, enabled: false) } - let!(:policy7) { create_expiration_policy(project_id: 7, enabled: true) } - let!(:policy8) { create_expiration_policy(project_id: 8, enabled: true, with_images: true) } - let!(:policy9) { create_expiration_policy(project_id: 9, enabled: true) } - - describe '#perform' do - subject { described_class.new.perform(from_id, to_id) } - - shared_examples 'disabling policies with no images' do - it 'disables the proper 
policies' do - subject - - rows = container_expiration_policies.order(:project_id).to_h do |row| - [row.project_id, row.enabled] - end - expect(rows).to eq(expected_rows) - end - end - - context 'the whole range' do - let(:from_id) { 1 } - let(:to_id) { 9 } - - it_behaves_like 'disabling policies with no images' do - let(:expected_rows) do - { - 1 => false, - 2 => false, - 3 => false, - 4 => true, - 5 => false, - 6 => false, - 7 => false, - 8 => true, - 9 => false - } - end - end - end - - context 'a range with no policies to disable' do - let(:from_id) { 2 } - let(:to_id) { 6 } - - it_behaves_like 'disabling policies with no images' do - let(:expected_rows) do - { - 1 => true, - 2 => false, - 3 => false, - 4 => true, - 5 => false, - 6 => false, - 7 => true, - 8 => true, - 9 => true - } - end - end - end - - context 'a range with only images' do - let(:from_id) { 4 } - let(:to_id) { 5 } - - it_behaves_like 'disabling policies with no images' do - let(:expected_rows) do - { - 1 => true, - 2 => false, - 3 => false, - 4 => true, - 5 => false, - 6 => false, - 7 => true, - 8 => true, - 9 => true - } - end - end - end - - context 'a range with a single element' do - let(:from_id) { 9 } - let(:to_id) { 9 } - - it_behaves_like 'disabling policies with no images' do - let(:expected_rows) do - { - 1 => true, - 2 => false, - 3 => false, - 4 => true, - 5 => false, - 6 => false, - 7 => true, - 8 => true, - 9 => false - } - end - end - end - end - - def create_expiration_policy(project_id:, enabled:, with_images: false) - projects.create!(id: project_id, namespace_id: namespace.id, name: "gitlab-#{project_id}") - - if with_images - container_repositories.create!(project_id: project_id, name: "image-#{project_id}") - end - - container_expiration_policies.create!( - enabled: enabled, - project_id: project_id - ) - end - - def enabled_policies - container_expiration_policies.where(enabled: true) - end - - def disabled_policies - container_expiration_policies.where(enabled: false) - 
end -end diff --git a/spec/lib/gitlab/background_migration/drop_invalid_security_findings_spec.rb b/spec/lib/gitlab/background_migration/drop_invalid_security_findings_spec.rb deleted file mode 100644 index 5fdd8683d06..00000000000 --- a/spec/lib/gitlab/background_migration/drop_invalid_security_findings_spec.rb +++ /dev/null @@ -1,57 +0,0 @@ -# frozen_string_literal: true -require 'spec_helper' - -RSpec.describe Gitlab::BackgroundMigration::DropInvalidSecurityFindings, :suppress_gitlab_schemas_validate_connection, - schema: 20211108211434 do - let(:namespace) { table(:namespaces).create!(name: 'user', path: 'user', type: Namespaces::UserNamespace.sti_name) } - let(:project) { table(:projects).create!(namespace_id: namespace.id) } - - let(:pipelines) { table(:ci_pipelines) } - let!(:pipeline) { pipelines.create!(project_id: project.id) } - - let(:ci_builds) { table(:ci_builds) } - let!(:ci_build) { ci_builds.create! } - - let(:security_scans) { table(:security_scans) } - let!(:security_scan) do - security_scans.create!( - scan_type: 1, - status: 1, - build_id: ci_build.id, - project_id: project.id, - pipeline_id: pipeline.id - ) - end - - let(:vulnerability_scanners) { table(:vulnerability_scanners) } - let!(:vulnerability_scanner) { vulnerability_scanners.create!(project_id: project.id, external_id: 'test 1', name: 'test scanner 1') } - - let(:security_findings) { table(:security_findings) } - let!(:security_finding_without_uuid) do - security_findings.create!( - severity: 1, - confidence: 1, - scan_id: security_scan.id, - scanner_id: vulnerability_scanner.id, - uuid: nil - ) - end - - let!(:security_finding_with_uuid) do - security_findings.create!( - severity: 1, - confidence: 1, - scan_id: security_scan.id, - scanner_id: vulnerability_scanner.id, - uuid: 'bd95c085-71aa-51d7-9bb6-08ae669c262e' - ) - end - - let(:sub_batch_size) { 10_000 } - - subject { described_class.new.perform(security_finding_without_uuid.id, security_finding_with_uuid.id, sub_batch_size) } 
- - it 'drops Security::Finding objects with no UUID' do - expect { subject }.to change(security_findings, :count).from(2).to(1) - end -end diff --git a/spec/lib/gitlab/background_migration/drop_invalid_vulnerabilities_spec.rb b/spec/lib/gitlab/background_migration/drop_invalid_vulnerabilities_spec.rb deleted file mode 100644 index 8f3ef44e00c..00000000000 --- a/spec/lib/gitlab/background_migration/drop_invalid_vulnerabilities_spec.rb +++ /dev/null @@ -1,126 +0,0 @@ -# frozen_string_literal: true - -require 'spec_helper' - -RSpec.describe Gitlab::BackgroundMigration::DropInvalidVulnerabilities, schema: 20210826171758 do - let!(:background_migration_jobs) { table(:background_migration_jobs) } - let!(:namespace) { table(:namespaces).create!(name: 'user', path: 'user') } - let!(:users) { table(:users) } - let!(:user) { create_user! } - let!(:project) { table(:projects).create!(id: 123, namespace_id: namespace.id) } - - let!(:scanners) { table(:vulnerability_scanners) } - let!(:scanner) { scanners.create!(project_id: project.id, external_id: 'test 1', name: 'test scanner 1') } - let!(:different_scanner) { scanners.create!(project_id: project.id, external_id: 'test 2', name: 'test scanner 2') } - - let!(:vulnerabilities) { table(:vulnerabilities) } - let!(:vulnerability_with_finding) do - create_vulnerability!( - project_id: project.id, - author_id: user.id - ) - end - - let!(:vulnerability_without_finding) do - create_vulnerability!( - project_id: project.id, - author_id: user.id - ) - end - - let!(:vulnerability_identifiers) { table(:vulnerability_identifiers) } - let!(:primary_identifier) do - vulnerability_identifiers.create!( - project_id: project.id, - external_type: 'uuid-v5', - external_id: 'uuid-v5', - fingerprint: '7e394d1b1eb461a7406d7b1e08f057a1cf11287a', - name: 'Identifier for UUIDv5') - end - - let!(:vulnerabilities_findings) { table(:vulnerability_occurrences) } - let!(:finding) do - create_finding!( - vulnerability_id: vulnerability_with_finding.id, - 
project_id: project.id, - scanner_id: scanner.id, - primary_identifier_id: primary_identifier.id - ) - end - - let(:succeeded_status) { 1 } - let(:pending_status) { 0 } - - it 'drops Vulnerabilities without any Findings' do - expect(vulnerabilities.pluck(:id)).to eq([vulnerability_with_finding.id, vulnerability_without_finding.id]) - - expect { subject.perform(vulnerability_with_finding.id, vulnerability_without_finding.id) }.to change(vulnerabilities, :count).by(-1) - - expect(vulnerabilities.pluck(:id)).to eq([vulnerability_with_finding.id]) - end - - it 'marks jobs as done' do - background_migration_jobs.create!( - class_name: 'DropInvalidVulnerabilities', - arguments: [vulnerability_with_finding.id, vulnerability_with_finding.id] - ) - - background_migration_jobs.create!( - class_name: 'DropInvalidVulnerabilities', - arguments: [vulnerability_without_finding.id, vulnerability_without_finding.id] - ) - - subject.perform(vulnerability_with_finding.id, vulnerability_with_finding.id) - - expect(background_migration_jobs.first.status).to eq(succeeded_status) - expect(background_migration_jobs.second.status).to eq(pending_status) - end - - private - - def create_vulnerability!(project_id:, author_id:, title: 'test', severity: 7, confidence: 7, report_type: 0) - vulnerabilities.create!( - project_id: project_id, - author_id: author_id, - title: title, - severity: severity, - confidence: confidence, - report_type: report_type - ) - end - - # rubocop:disable Metrics/ParameterLists - def create_finding!( - vulnerability_id:, project_id:, scanner_id:, primary_identifier_id:, - name: "test", severity: 7, confidence: 7, report_type: 0, - project_fingerprint: '123qweasdzxc', location_fingerprint: 'test', - metadata_version: 'test', raw_metadata: 'test', uuid: SecureRandom.uuid) - vulnerabilities_findings.create!( - vulnerability_id: vulnerability_id, - project_id: project_id, - name: name, - severity: severity, - confidence: confidence, - report_type: report_type, - 
project_fingerprint: project_fingerprint, - scanner_id: scanner_id, - primary_identifier_id: primary_identifier_id, - location_fingerprint: location_fingerprint, - metadata_version: metadata_version, - raw_metadata: raw_metadata, - uuid: uuid - ) - end - # rubocop:enable Metrics/ParameterLists - - def create_user!(name: "Example User", email: "user@example.com", user_type: nil) - users.create!( - name: name, - email: email, - username: name, - projects_limit: 0, - user_type: user_type, - confirmed_at: Time.current - ) - end -end diff --git a/spec/lib/gitlab/background_migration/encrypt_ci_trigger_token_spec.rb b/spec/lib/gitlab/background_migration/encrypt_ci_trigger_token_spec.rb index b52f30a5e21..dd3e7877f8a 100644 --- a/spec/lib/gitlab/background_migration/encrypt_ci_trigger_token_spec.rb +++ b/spec/lib/gitlab/background_migration/encrypt_ci_trigger_token_spec.rb @@ -10,8 +10,7 @@ RSpec.describe Gitlab::BackgroundMigration::EncryptCiTriggerToken, feature_categ mode: :per_attribute_iv, key: ::Settings.attr_encrypted_db_key_base_32, algorithm: 'aes-256-gcm', - encode: false, - encode_iv: false + encode: false end end @@ -52,6 +51,7 @@ RSpec.describe Gitlab::BackgroundMigration::EncryptCiTriggerToken, feature_categ already_encrypted_token = Ci::Trigger.find(with_encryption.id) expect(already_encrypted_token.encrypted_token).to eq(with_encryption.encrypted_token) expect(already_encrypted_token.encrypted_token_iv).to eq(with_encryption.encrypted_token_iv) + expect(already_encrypted_token.token).to eq(already_encrypted_token.encrypted_token_tmp) expect(with_encryption.token).to eq(with_encryption.encrypted_token_tmp) end end diff --git a/spec/lib/gitlab/background_migration/encrypt_integration_properties_spec.rb b/spec/lib/gitlab/background_migration/encrypt_integration_properties_spec.rb deleted file mode 100644 index c788b701d79..00000000000 --- a/spec/lib/gitlab/background_migration/encrypt_integration_properties_spec.rb +++ /dev/null @@ -1,63 +0,0 @@ -# 
frozen_string_literal: true -require 'spec_helper' - -RSpec.describe Gitlab::BackgroundMigration::EncryptIntegrationProperties, schema: 20220415124804 do - let(:integrations) do - table(:integrations) do |integrations| - integrations.send :attr_encrypted, :encrypted_properties_tmp, - attribute: :encrypted_properties, - mode: :per_attribute_iv, - key: ::Settings.attr_encrypted_db_key_base_32, - algorithm: 'aes-256-gcm', - marshal: true, - marshaler: ::Gitlab::Json, - encode: false, - encode_iv: false - end - end - - let!(:no_properties) { integrations.create! } - let!(:with_plaintext_1) { integrations.create!(properties: json_props(1)) } - let!(:with_plaintext_2) { integrations.create!(properties: json_props(2)) } - let!(:with_encrypted) do - x = integrations.new - x.properties = nil - x.encrypted_properties_tmp = some_props(3) - x.save! - x - end - - let(:start_id) { integrations.minimum(:id) } - let(:end_id) { integrations.maximum(:id) } - - it 'ensures all properties are encrypted', :aggregate_failures do - described_class.new.perform(start_id, end_id) - - props = integrations.all.to_h do |record| - [record.id, [Gitlab::Json.parse(record.properties), record.encrypted_properties_tmp]] - end - - expect(integrations.count).to eq(4) - - expect(props).to match( - no_properties.id => both(be_nil), - with_plaintext_1.id => both(eq some_props(1)), - with_plaintext_2.id => both(eq some_props(2)), - with_encrypted.id => match([be_nil, eq(some_props(3))]) - ) - end - - private - - def both(obj) - match [obj, obj] - end - - def some_props(id) - HashWithIndifferentAccess.new({ id: id, foo: 1, bar: true, baz: %w[a string array] }) - end - - def json_props(id) - some_props(id).to_json - end -end diff --git a/spec/lib/gitlab/background_migration/encrypt_static_object_token_spec.rb b/spec/lib/gitlab/background_migration/encrypt_static_object_token_spec.rb deleted file mode 100644 index 4e7b97d33f6..00000000000 --- 
a/spec/lib/gitlab/background_migration/encrypt_static_object_token_spec.rb +++ /dev/null @@ -1,64 +0,0 @@ -# frozen_string_literal: true -require 'spec_helper' - -RSpec.describe Gitlab::BackgroundMigration::EncryptStaticObjectToken do - let(:users) { table(:users) } - let!(:user_without_tokens) { create_user!(name: 'notoken') } - let!(:user_with_plaintext_token_1) { create_user!(name: 'plaintext_1', token: 'token') } - let!(:user_with_plaintext_token_2) { create_user!(name: 'plaintext_2', token: 'TOKEN') } - let!(:user_with_plaintext_empty_token) { create_user!(name: 'plaintext_3', token: '') } - let!(:user_with_encrypted_token) { create_user!(name: 'encrypted', encrypted_token: 'encrypted') } - let!(:user_with_both_tokens) { create_user!(name: 'both', token: 'token2', encrypted_token: 'encrypted2') } - - before do - allow(Gitlab::CryptoHelper).to receive(:aes256_gcm_encrypt).and_call_original - allow(Gitlab::CryptoHelper).to receive(:aes256_gcm_encrypt).with('token') { 'secure_token' } - allow(Gitlab::CryptoHelper).to receive(:aes256_gcm_encrypt).with('TOKEN') { 'SECURE_TOKEN' } - end - - subject { described_class.new.perform(start_id, end_id) } - - let(:start_id) { users.minimum(:id) } - let(:end_id) { users.maximum(:id) } - - it 'backfills encrypted tokens to users with plaintext token only', :aggregate_failures do - subject - - new_state = users.pluck(:id, :static_object_token, :static_object_token_encrypted).to_h do |row| - [row[0], [row[1], row[2]]] - end - - expect(new_state.count).to eq(6) - - expect(new_state[user_with_plaintext_token_1.id]).to match_array(%w[token secure_token]) - expect(new_state[user_with_plaintext_token_2.id]).to match_array(%w[TOKEN SECURE_TOKEN]) - - expect(new_state[user_with_plaintext_empty_token.id]).to match_array(['', nil]) - expect(new_state[user_without_tokens.id]).to match_array([nil, nil]) - expect(new_state[user_with_both_tokens.id]).to match_array(%w[token2 encrypted2]) - expect(new_state[user_with_encrypted_token.id]).to 
match_array([nil, 'encrypted']) - end - - context 'when id range does not include existing user ids' do - let(:arguments) { [non_existing_record_id, non_existing_record_id.succ] } - - it_behaves_like 'marks background migration job records' do - subject { described_class.new } - end - end - - private - - def create_user!(name:, token: nil, encrypted_token: nil) - email = "#{name}@example.com" - - table(:users).create!( - name: name, - email: email, - username: name, - projects_limit: 0, - static_object_token: token, - static_object_token_encrypted: encrypted_token - ) - end -end diff --git a/spec/lib/gitlab/background_migration/extract_project_topics_into_separate_table_spec.rb b/spec/lib/gitlab/background_migration/extract_project_topics_into_separate_table_spec.rb deleted file mode 100644 index 586e75ffb37..00000000000 --- a/spec/lib/gitlab/background_migration/extract_project_topics_into_separate_table_spec.rb +++ /dev/null @@ -1,46 +0,0 @@ -# frozen_string_literal: true - -require 'spec_helper' - -RSpec.describe Gitlab::BackgroundMigration::ExtractProjectTopicsIntoSeparateTable, - :suppress_gitlab_schemas_validate_connection, schema: 20210826171758 do - it 'correctly extracts project topics into separate table' do - namespaces = table(:namespaces) - projects = table(:projects) - taggings = table(:taggings) - tags = table(:tags) - project_topics = table(:project_topics) - topics = table(:topics) - - namespace = namespaces.create!(name: 'foo', path: 'foo') - project = projects.create!(namespace_id: namespace.id) - tag_1 = tags.create!(name: 'Topic1') - tag_2 = tags.create!(name: 'Topic2') - tag_3 = tags.create!(name: 'Topic3') - topic_3 = topics.create!(name: 'Topic3') - tagging_1 = taggings.create!(taggable_type: 'Project', taggable_id: project.id, context: 'topics', tag_id: tag_1.id) - tagging_2 = taggings.create!(taggable_type: 'Project', taggable_id: project.id, context: 'topics', tag_id: tag_2.id) - other_tagging = taggings.create!(taggable_type: 'Other', 
taggable_id: project.id, context: 'topics', tag_id: tag_1.id) - tagging_3 = taggings.create!(taggable_type: 'Project', taggable_id: project.id, context: 'topics', tag_id: tag_3.id) - tagging_4 = taggings.create!(taggable_type: 'Project', taggable_id: -1, context: 'topics', tag_id: tag_1.id) - tagging_5 = taggings.create!(taggable_type: 'Project', taggable_id: project.id, context: 'topics', tag_id: -1) - - subject.perform(tagging_1.id, tagging_5.id) - - # Tagging records - expect { tagging_1.reload }.to raise_error(ActiveRecord::RecordNotFound) - expect { tagging_2.reload }.to raise_error(ActiveRecord::RecordNotFound) - expect { other_tagging.reload }.not_to raise_error - expect { tagging_3.reload }.to raise_error(ActiveRecord::RecordNotFound) - expect { tagging_4.reload }.to raise_error(ActiveRecord::RecordNotFound) - expect { tagging_5.reload }.to raise_error(ActiveRecord::RecordNotFound) - - # Topic records - topic_1 = topics.find_by(name: 'Topic1') - topic_2 = topics.find_by(name: 'Topic2') - expect(topics.all).to contain_exactly(topic_1, topic_2, topic_3) - - # ProjectTopic records - expect(project_topics.all.map(&:topic_id)).to contain_exactly(topic_1.id, topic_2.id, topic_3.id) - end -end diff --git a/spec/lib/gitlab/background_migration/fix_first_mentioned_in_commit_at_spec.rb b/spec/lib/gitlab/background_migration/fix_first_mentioned_in_commit_at_spec.rb deleted file mode 100644 index 7f15aceca42..00000000000 --- a/spec/lib/gitlab/background_migration/fix_first_mentioned_in_commit_at_spec.rb +++ /dev/null @@ -1,166 +0,0 @@ -# frozen_string_literal: true - -require 'spec_helper' -require Rails.root.join('db', 'post_migrate', '20211004110500_add_temporary_index_to_issue_metrics.rb') - -RSpec.describe Gitlab::BackgroundMigration::FixFirstMentionedInCommitAt, :migration, schema: 20211004110500 do - let(:namespaces) { table(:namespaces) } - let(:projects) { table(:projects) } - let(:users) { table(:users) } - let(:merge_requests) { table(:merge_requests) } - 
let(:issues) { table(:issues) } - let(:issue_metrics) { table(:issue_metrics) } - let(:merge_requests_closing_issues) { table(:merge_requests_closing_issues) } - let(:diffs) { table(:merge_request_diffs) } - let(:ten_days_ago) { 10.days.ago } - let(:commits) do - table(:merge_request_diff_commits).tap do |t| - t.extend(SuppressCompositePrimaryKeyWarning) - end - end - - let(:namespace) { namespaces.create!(name: 'ns', path: 'ns') } - let(:project) { projects.create!(namespace_id: namespace.id) } - - let!(:issue1) do - issues.create!( - title: 'issue', - description: 'description', - project_id: project.id - ) - end - - let!(:issue2) do - issues.create!( - title: 'issue', - description: 'description', - project_id: project.id - ) - end - - let!(:merge_request1) do - merge_requests.create!( - source_branch: 'a', - target_branch: 'master', - target_project_id: project.id - ) - end - - let!(:merge_request2) do - merge_requests.create!( - source_branch: 'b', - target_branch: 'master', - target_project_id: project.id - ) - end - - let!(:merge_request_closing_issue1) do - merge_requests_closing_issues.create!(issue_id: issue1.id, merge_request_id: merge_request1.id) - end - - let!(:merge_request_closing_issue2) do - merge_requests_closing_issues.create!(issue_id: issue2.id, merge_request_id: merge_request2.id) - end - - let!(:diff1) { diffs.create!(merge_request_id: merge_request1.id) } - let!(:diff2) { diffs.create!(merge_request_id: merge_request1.id) } - - let!(:other_diff) { diffs.create!(merge_request_id: merge_request2.id) } - - let!(:commit1) do - commits.create!( - merge_request_diff_id: diff2.id, - relative_order: 0, - sha: Gitlab::Database::ShaAttribute.serialize('aaa'), - authored_date: 5.days.ago - ) - end - - let!(:commit2) do - commits.create!( - merge_request_diff_id: diff2.id, - relative_order: 1, - sha: Gitlab::Database::ShaAttribute.serialize('aaa'), - authored_date: 10.days.ago - ) - end - - let!(:commit3) do - commits.create!( - merge_request_diff_id: 
other_diff.id, - relative_order: 1, - sha: Gitlab::Database::ShaAttribute.serialize('aaa'), - authored_date: 5.days.ago - ) - end - - def run_migration - described_class - .new - .perform(issue_metrics.minimum(:issue_id), issue_metrics.maximum(:issue_id)) - end - - shared_examples 'fixes first_mentioned_in_commit_at' do - it "marks successful slices as completed" do - min_issue_id = issue_metrics.minimum(:issue_id) - max_issue_id = issue_metrics.maximum(:issue_id) - - expect(subject).to receive(:mark_job_as_succeeded).with(min_issue_id, max_issue_id) - - subject.perform(min_issue_id, max_issue_id) - end - - context 'when the persisted first_mentioned_in_commit_at is later than the first commit authored_date' do - it 'updates the issue_metrics record' do - record1 = issue_metrics.create!(issue_id: issue1.id, first_mentioned_in_commit_at: Time.current) - record2 = issue_metrics.create!(issue_id: issue2.id, first_mentioned_in_commit_at: Time.current) - - run_migration - record1.reload - record2.reload - - expect(record1.first_mentioned_in_commit_at).to be_within(2.seconds).of(commit2.authored_date) - expect(record2.first_mentioned_in_commit_at).to be_within(2.seconds).of(commit3.authored_date) - end - end - - context 'when the persisted first_mentioned_in_commit_at is earlier than the first commit authored_date' do - it 'does not update the issue_metrics record' do - record = issue_metrics.create!(issue_id: issue1.id, first_mentioned_in_commit_at: 20.days.ago) - - expect { run_migration }.not_to change { record.reload.first_mentioned_in_commit_at } - end - end - - context 'when the first_mentioned_in_commit_at is null' do - it 'does nothing' do - record = issue_metrics.create!(issue_id: issue1.id, first_mentioned_in_commit_at: nil) - - expect { run_migration }.not_to change { record.reload.first_mentioned_in_commit_at } - end - end - end - - describe 'running the migration when first_mentioned_in_commit_at is timestamp without time zone' do - it_behaves_like 'fixes 
first_mentioned_in_commit_at' - end - - describe 'running the migration when first_mentioned_in_commit_at is timestamp with time zone' do - around do |example| - AddTemporaryIndexToIssueMetrics.new.down - - ActiveRecord::Base.connection.execute "ALTER TABLE issue_metrics ALTER first_mentioned_in_commit_at type timestamp with time zone" - Gitlab::BackgroundMigration::FixFirstMentionedInCommitAt::TmpIssueMetrics.reset_column_information - AddTemporaryIndexToIssueMetrics.new.up - - example.run - - AddTemporaryIndexToIssueMetrics.new.down - ActiveRecord::Base.connection.execute "ALTER TABLE issue_metrics ALTER first_mentioned_in_commit_at type timestamp without time zone" - Gitlab::BackgroundMigration::FixFirstMentionedInCommitAt::TmpIssueMetrics.reset_column_information - AddTemporaryIndexToIssueMetrics.new.up - end - - it_behaves_like 'fixes first_mentioned_in_commit_at' - end -end diff --git a/spec/lib/gitlab/background_migration/fix_merge_request_diff_commit_users_spec.rb b/spec/lib/gitlab/background_migration/fix_merge_request_diff_commit_users_spec.rb deleted file mode 100644 index 99df21562b0..00000000000 --- a/spec/lib/gitlab/background_migration/fix_merge_request_diff_commit_users_spec.rb +++ /dev/null @@ -1,25 +0,0 @@ -# frozen_string_literal: true - -require 'spec_helper' - -# rubocop: disable RSpec/FactoriesInMigrationSpecs -RSpec.describe Gitlab::BackgroundMigration::FixMergeRequestDiffCommitUsers do - let(:migration) { described_class.new } - - describe '#perform' do - context 'when the project exists' do - it 'does nothing' do - project = create(:project) - - expect { migration.perform(project.id) }.not_to raise_error - end - end - - context 'when the project does not exist' do - it 'does nothing' do - expect { migration.perform(-1) }.not_to raise_error - end - end - end -end -# rubocop: enable RSpec/FactoriesInMigrationSpecs diff --git a/spec/lib/gitlab/background_migration/fix_vulnerability_occurrences_with_hashes_as_raw_metadata_spec.rb 
b/spec/lib/gitlab/background_migration/fix_vulnerability_occurrences_with_hashes_as_raw_metadata_spec.rb index af551861d47..3cbc05b762a 100644 --- a/spec/lib/gitlab/background_migration/fix_vulnerability_occurrences_with_hashes_as_raw_metadata_spec.rb +++ b/spec/lib/gitlab/background_migration/fix_vulnerability_occurrences_with_hashes_as_raw_metadata_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe Gitlab::BackgroundMigration::FixVulnerabilityOccurrencesWithHashesAsRawMetadata, schema: 20211209203821 do +RSpec.describe Gitlab::BackgroundMigration::FixVulnerabilityOccurrencesWithHashesAsRawMetadata, schema: 20220314184009 do let(:users) { table(:users) } let(:namespaces) { table(:namespaces) } let(:projects) { table(:projects) } diff --git a/spec/lib/gitlab/background_migration/merge_topics_with_same_name_spec.rb b/spec/lib/gitlab/background_migration/merge_topics_with_same_name_spec.rb deleted file mode 100644 index 2c2c048992f..00000000000 --- a/spec/lib/gitlab/background_migration/merge_topics_with_same_name_spec.rb +++ /dev/null @@ -1,148 +0,0 @@ -# frozen_string_literal: true - -require 'spec_helper' - -RSpec.describe Gitlab::BackgroundMigration::MergeTopicsWithSameName, schema: 20220331133802 do - def set_avatar(topic_id, avatar) - topic = ::Projects::Topic.find(topic_id) - topic.avatar = avatar - topic.save! 
- topic.avatar.absolute_path - end - - it 'merges project topics with same case insensitive name' do - namespaces = table(:namespaces) - projects = table(:projects) - topics = table(:topics) - project_topics = table(:project_topics) - - group_1 = namespaces.create!(name: 'space1', type: 'Group', path: 'space1') - group_2 = namespaces.create!(name: 'space2', type: 'Group', path: 'space2') - group_3 = namespaces.create!(name: 'space3', type: 'Group', path: 'space3') - proj_space_1 = namespaces.create!(name: 'proj1', path: 'proj1', type: 'Project', parent_id: group_1.id) - proj_space_2 = namespaces.create!(name: 'proj2', path: 'proj2', type: 'Project', parent_id: group_2.id) - proj_space_3 = namespaces.create!(name: 'proj3', path: 'proj3', type: 'Project', parent_id: group_3.id) - project_1 = projects.create!(namespace_id: group_1.id, project_namespace_id: proj_space_1.id, visibility_level: 20) - project_2 = projects.create!(namespace_id: group_2.id, project_namespace_id: proj_space_2.id, visibility_level: 10) - project_3 = projects.create!(namespace_id: group_3.id, project_namespace_id: proj_space_3.id, visibility_level: 0) - topic_1_keep = topics.create!( - name: 'topic1', - title: 'Topic 1', - description: 'description 1 to keep', - total_projects_count: 2, - non_private_projects_count: 2 - ) - topic_1_remove = topics.create!( - name: 'TOPIC1', - title: 'Topic 1', - description: 'description 1 to remove', - total_projects_count: 2, - non_private_projects_count: 1 - ) - topic_2_remove = topics.create!( - name: 'topic2', - title: 'Topic 2', - total_projects_count: 0 - ) - topic_2_keep = topics.create!( - name: 'TOPIC2', - title: 'Topic 2', - description: 'description 2 to keep', - total_projects_count: 1 - ) - topic_3_remove_1 = topics.create!( - name: 'topic3', - title: 'Topic 3', - total_projects_count: 2, - non_private_projects_count: 1 - ) - topic_3_keep = topics.create!( - name: 'Topic3', - title: 'Topic 3', - total_projects_count: 2, - 
non_private_projects_count: 2 - ) - topic_3_remove_2 = topics.create!( - name: 'TOPIC3', - title: 'Topic 3', - description: 'description 3 to keep', - total_projects_count: 2, - non_private_projects_count: 1 - ) - topic_4_keep = topics.create!( - name: 'topic4', - title: 'Topic 4' - ) - - project_topics_1 = [] - project_topics_3 = [] - project_topics_removed = [] - - project_topics_1 << project_topics.create!(topic_id: topic_1_keep.id, project_id: project_1.id) - project_topics_1 << project_topics.create!(topic_id: topic_1_keep.id, project_id: project_2.id) - project_topics_removed << project_topics.create!(topic_id: topic_1_remove.id, project_id: project_2.id) - project_topics_1 << project_topics.create!(topic_id: topic_1_remove.id, project_id: project_3.id) - - project_topics_3 << project_topics.create!(topic_id: topic_3_keep.id, project_id: project_1.id) - project_topics_3 << project_topics.create!(topic_id: topic_3_keep.id, project_id: project_2.id) - project_topics_removed << project_topics.create!(topic_id: topic_3_remove_1.id, project_id: project_1.id) - project_topics_3 << project_topics.create!(topic_id: topic_3_remove_1.id, project_id: project_3.id) - project_topics_removed << project_topics.create!(topic_id: topic_3_remove_2.id, project_id: project_1.id) - project_topics_removed << project_topics.create!(topic_id: topic_3_remove_2.id, project_id: project_3.id) - - avatar_paths = { - topic_1_keep: set_avatar(topic_1_keep.id, fixture_file_upload('spec/fixtures/avatars/avatar1.png')), - topic_1_remove: set_avatar(topic_1_remove.id, fixture_file_upload('spec/fixtures/avatars/avatar2.png')), - topic_2_remove: set_avatar(topic_2_remove.id, fixture_file_upload('spec/fixtures/avatars/avatar3.png')), - topic_3_remove_1: set_avatar(topic_3_remove_1.id, fixture_file_upload('spec/fixtures/avatars/avatar4.png')), - topic_3_remove_2: set_avatar(topic_3_remove_2.id, fixture_file_upload('spec/fixtures/avatars/avatar5.png')) - } - - subject.perform(%w[topic1 topic2 
topic3 topic4]) - - # Topics - [topic_1_keep, topic_2_keep, topic_3_keep, topic_4_keep].each(&:reload) - expect(topic_1_keep.name).to eq('topic1') - expect(topic_1_keep.description).to eq('description 1 to keep') - expect(topic_1_keep.total_projects_count).to eq(3) - expect(topic_1_keep.non_private_projects_count).to eq(2) - expect(topic_2_keep.name).to eq('TOPIC2') - expect(topic_2_keep.description).to eq('description 2 to keep') - expect(topic_2_keep.total_projects_count).to eq(0) - expect(topic_2_keep.non_private_projects_count).to eq(0) - expect(topic_3_keep.name).to eq('Topic3') - expect(topic_3_keep.description).to eq('description 3 to keep') - expect(topic_3_keep.total_projects_count).to eq(3) - expect(topic_3_keep.non_private_projects_count).to eq(2) - expect(topic_4_keep.reload.name).to eq('topic4') - - [topic_1_remove, topic_2_remove, topic_3_remove_1, topic_3_remove_2].each do |topic| - expect { topic.reload }.to raise_error(ActiveRecord::RecordNotFound) - end - - # Topic avatars - expect(topic_1_keep.avatar).to eq('avatar1.png') - expect(File.exist?(::Projects::Topic.find(topic_1_keep.id).avatar.absolute_path)).to be_truthy - expect(topic_2_keep.avatar).to eq('avatar3.png') - expect(File.exist?(::Projects::Topic.find(topic_2_keep.id).avatar.absolute_path)).to be_truthy - expect(topic_3_keep.avatar).to eq('avatar4.png') - expect(File.exist?(::Projects::Topic.find(topic_3_keep.id).avatar.absolute_path)).to be_truthy - - [:topic_1_remove, :topic_2_remove, :topic_3_remove_1, :topic_3_remove_2].each do |topic| - expect(File.exist?(avatar_paths[topic])).to be_falsey - end - - # Project Topic assignments - project_topics_1.each do |project_topic| - expect(project_topic.reload.topic_id).to eq(topic_1_keep.id) - end - - project_topics_3.each do |project_topic| - expect(project_topic.reload.topic_id).to eq(topic_3_keep.id) - end - - project_topics_removed.each do |project_topic| - expect { project_topic.reload }.to raise_error(ActiveRecord::RecordNotFound) - end 
- end -end diff --git a/spec/lib/gitlab/background_migration/migrate_evidences_for_vulnerability_findings_spec.rb b/spec/lib/gitlab/background_migration/migrate_evidences_for_vulnerability_findings_spec.rb index b70044ab2a4..28e16a5820d 100644 --- a/spec/lib/gitlab/background_migration/migrate_evidences_for_vulnerability_findings_spec.rb +++ b/spec/lib/gitlab/background_migration/migrate_evidences_for_vulnerability_findings_spec.rb @@ -38,8 +38,6 @@ RSpec.describe Gitlab::BackgroundMigration::MigrateEvidencesForVulnerabilityFind end it 'does not create any evidence' do - expect(Gitlab::AppLogger).not_to receive(:error) - expect { perform_migration }.not_to change { vulnerability_finding_evidences.count } end end @@ -50,8 +48,6 @@ RSpec.describe Gitlab::BackgroundMigration::MigrateEvidencesForVulnerabilityFind end it 'does not create any evidence' do - expect(Gitlab::AppLogger).not_to receive(:error) - expect { perform_migration }.not_to change { vulnerability_finding_evidences.count } end end @@ -61,32 +57,15 @@ RSpec.describe Gitlab::BackgroundMigration::MigrateEvidencesForVulnerabilityFind let!(:finding2) { create_finding!(project1.id, scanner1.id, { evidence: evidence_hash }) } it 'creates new evidence for each finding' do - expect(Gitlab::AppLogger).not_to receive(:error) - expect { perform_migration }.to change { vulnerability_finding_evidences.count }.by(2) end - context 'when create throws exception StandardError' do - before do - allow(migration).to receive(:create_evidences).and_raise(StandardError) - end - - it 'logs StandardError' do - expect(Gitlab::AppLogger).to receive(:error).with({ - class: described_class.name, message: StandardError.to_s - }) - expect { perform_migration }.not_to change { vulnerability_finding_evidences.count } - end - end - context 'when parse throws exception JSON::ParserError' do before do allow(Gitlab::Json).to receive(:parse).and_raise(JSON::ParserError) end - it 'does not log this error nor create new records' do - 
expect(Gitlab::AppLogger).not_to receive(:error) - + it 'does not create new records' do expect { perform_migration }.not_to change { vulnerability_finding_evidences.count } end end @@ -100,8 +79,6 @@ RSpec.describe Gitlab::BackgroundMigration::MigrateEvidencesForVulnerabilityFind end it 'does not create new evidence' do - expect(Gitlab::AppLogger).not_to receive(:error) - expect { perform_migration }.not_to change { vulnerability_finding_evidences.count } end @@ -109,8 +86,6 @@ RSpec.describe Gitlab::BackgroundMigration::MigrateEvidencesForVulnerabilityFind let!(:finding3) { create_finding!(project1.id, scanner1.id, { evidence: { url: 'http://secondary.com' } }) } it 'creates a new evidence only to the non-existing evidence' do - expect(Gitlab::AppLogger).not_to receive(:error) - expect { perform_migration }.to change { vulnerability_finding_evidences.count }.by(1) end end diff --git a/spec/lib/gitlab/background_migration/migrate_links_for_vulnerability_findings_spec.rb b/spec/lib/gitlab/background_migration/migrate_links_for_vulnerability_findings_spec.rb index fd2e3ffb670..9a90af968e2 100644 --- a/spec/lib/gitlab/background_migration/migrate_links_for_vulnerability_findings_spec.rb +++ b/spec/lib/gitlab/background_migration/migrate_links_for_vulnerability_findings_spec.rb @@ -56,6 +56,64 @@ RSpec.describe Gitlab::BackgroundMigration::MigrateLinksForVulnerabilityFindings end end + context 'with links equals to a string' do + before do + create_finding!(project1.id, scanner1.id, { links: "wrong format" }) + end + + it 'does not create any link' do + expect(Gitlab::AppLogger).not_to receive(:error) + + expect { perform_migration }.not_to change { vulnerability_finding_links.count } + end + end + + context 'with some elements which do not contain the key url' do + let!(:finding) do + create_finding!(project1.id, scanner1.id, { links: [link_hash, "wrong format", {}] }) + end + + it 'creates links only to valid elements' do + expect(Gitlab::AppLogger).not_to 
receive(:error) + + perform_migration + + expect(vulnerability_finding_links.all).to contain_exactly(have_attributes( + url: link_hash[:url], + vulnerability_occurrence_id: finding.id)) + end + end + + context 'when link name is too long' do + let!(:finding) do + create_finding!(project1.id, scanner1.id, { links: [{ name: 'A' * 300, url: 'https://foo' }] }) + end + + it 'skips creation of link and logs error' do + expect(Gitlab::AppLogger).to receive(:error).with({ + class: described_class.name, + message: /check_55f0a95439/, + model_id: finding.id + }) + expect { perform_migration }.not_to change { vulnerability_finding_links.count } + end + end + + context 'when link url is too long' do + let!(:finding) do + create_finding!(project1.id, scanner1.id, { links: [{ url: "https://f#{'o' * 2050}" }] }) + end + + it 'skips creation of link and logs error' do + expect(Gitlab::AppLogger).to receive(:error).with({ + class: described_class.name, + message: /check_b7fe886df6/, + model_id: finding.id + }) + expect { perform_migration }.not_to change { vulnerability_finding_links.count } + end + end + context 'with links equals to an array of duplicated elements' do let!(:finding) do create_finding!(project1.id, scanner1.id, { links: [link_hash, link_hash] }) @@ -64,7 +122,11 @@ RSpec.describe Gitlab::BackgroundMigration::MigrateLinksForVulnerabilityFindings it 'creates one new link' do expect(Gitlab::AppLogger).not_to receive(:error) - expect { perform_migration }.to change { vulnerability_finding_links.count }.by(1) + perform_migration + + expect(vulnerability_finding_links.all).to contain_exactly(have_attributes( + url: link_hash[:url], + vulnerability_occurrence_id: finding.id)) end end diff --git a/spec/lib/gitlab/background_migration/migrate_merge_request_diff_commit_users_spec.rb b/spec/lib/gitlab/background_migration/migrate_merge_request_diff_commit_users_spec.rb deleted file mode 100644 index c3ae2cc060c..00000000000 --- 
a/spec/lib/gitlab/background_migration/migrate_merge_request_diff_commit_users_spec.rb +++ /dev/null @@ -1,413 +0,0 @@ -# frozen_string_literal: true - -require 'spec_helper' - -RSpec.describe Gitlab::BackgroundMigration::MigrateMergeRequestDiffCommitUsers, schema: 20211012134316 do - let(:namespaces) { table(:namespaces) } - let(:projects) { table(:projects) } - let(:users) { table(:users) } - let(:merge_requests) { table(:merge_requests) } - let(:diffs) { table(:merge_request_diffs) } - let(:commits) do - table(:merge_request_diff_commits).tap do |t| - t.extend(SuppressCompositePrimaryKeyWarning) - end - end - - let(:commit_users) { described_class::MergeRequestDiffCommitUser } - - let(:namespace) { namespaces.create!(name: 'foo', path: 'foo') } - let(:project) { projects.create!(namespace_id: namespace.id) } - let(:merge_request) do - merge_requests.create!( - source_branch: 'x', - target_branch: 'master', - target_project_id: project.id - ) - end - - let(:diff) { diffs.create!(merge_request_id: merge_request.id) } - let(:migration) { described_class.new } - - describe 'MergeRequestDiffCommit' do - describe '.each_row_to_migrate' do - it 'yields the rows to migrate for a given range' do - commit1 = commits.create!( - merge_request_diff_id: diff.id, - relative_order: 0, - sha: Gitlab::Database::ShaAttribute.serialize('123abc'), - author_name: 'bob', - author_email: 'bob@example.com', - committer_name: 'bob', - committer_email: 'bob@example.com' - ) - - commit2 = commits.create!( - merge_request_diff_id: diff.id, - relative_order: 1, - sha: Gitlab::Database::ShaAttribute.serialize('123abc'), - author_name: 'Alice', - author_email: 'alice@example.com', - committer_name: 'Alice', - committer_email: 'alice@example.com' - ) - - # We stub this constant to make sure we run at least two pagination - # queries for getting the data. This way we can test if the pagination - # is actually working properly. 
- stub_const( - 'Gitlab::BackgroundMigration::MigrateMergeRequestDiffCommitUsers::COMMIT_ROWS_PER_QUERY', - 1 - ) - - rows = [] - - described_class::MergeRequestDiffCommit.each_row_to_migrate(diff.id, diff.id + 1) do |row| - rows << row - end - - expect(rows.length).to eq(2) - - expect(rows[0].author_name).to eq(commit1.author_name) - expect(rows[1].author_name).to eq(commit2.author_name) - end - end - end - - describe 'MergeRequestDiffCommitUser' do - describe '.union' do - it 'produces a union of the given queries' do - alice = commit_users.create!(name: 'Alice', email: 'alice@example.com') - bob = commit_users.create!(name: 'Bob', email: 'bob@example.com') - users = commit_users.union( - [ - commit_users.where(name: 'Alice').to_sql, - commit_users.where(name: 'Bob').to_sql - ]) - - expect(users).to include(alice) - expect(users).to include(bob) - end - end - end - - describe '#perform' do - it 'skips jobs that have already been completed' do - Gitlab::Database::BackgroundMigrationJob.create!( - class_name: 'MigrateMergeRequestDiffCommitUsers', - arguments: [1, 10], - status: :succeeded - ) - - expect(migration).not_to receive(:get_data_to_update) - - migration.perform(1, 10) - end - - it 'migrates the data in the range' do - commits.create!( - merge_request_diff_id: diff.id, - relative_order: 0, - sha: Gitlab::Database::ShaAttribute.serialize('123abc'), - author_name: 'bob', - author_email: 'bob@example.com', - committer_name: 'bob', - committer_email: 'bob@example.com' - ) - - migration.perform(diff.id, diff.id + 1) - - bob = commit_users.find_by(name: 'bob') - commit = commits.first - - expect(commit.commit_author_id).to eq(bob.id) - expect(commit.committer_id).to eq(bob.id) - end - - it 'treats empty names and Emails the same as NULL values' do - commits.create!( - merge_request_diff_id: diff.id, - relative_order: 0, - sha: Gitlab::Database::ShaAttribute.serialize('123abc'), - author_name: 'bob', - author_email: 'bob@example.com', - committer_name: '', - 
committer_email: '' - ) - - migration.perform(diff.id, diff.id + 1) - - bob = commit_users.find_by(name: 'bob') - commit = commits.first - - expect(commit.commit_author_id).to eq(bob.id) - expect(commit.committer_id).to be_nil - end - - it 'does not update rows without a committer and author' do - commits.create!( - merge_request_diff_id: diff.id, - relative_order: 0, - sha: Gitlab::Database::ShaAttribute.serialize('123abc') - ) - - migration.perform(diff.id, diff.id + 1) - - commit = commits.first - - expect(commit_users.count).to eq(0) - expect(commit.commit_author_id).to be_nil - expect(commit.committer_id).to be_nil - end - - it 'marks the background job as done' do - Gitlab::Database::BackgroundMigrationJob.create!( - class_name: 'MigrateMergeRequestDiffCommitUsers', - arguments: [diff.id, diff.id + 1] - ) - - migration.perform(diff.id, diff.id + 1) - - job = Gitlab::Database::BackgroundMigrationJob.first - - expect(job.status).to eq('succeeded') - end - end - - describe '#get_data_to_update' do - it 'returns the users and commit rows to update' do - commits.create!( - merge_request_diff_id: diff.id, - relative_order: 0, - sha: Gitlab::Database::ShaAttribute.serialize('123abc'), - author_name: 'bob' + ('a' * 510), - author_email: 'bob@example.com', - committer_name: 'bob' + ('a' * 510), - committer_email: 'bob@example.com' - ) - - commits.create!( - merge_request_diff_id: diff.id, - relative_order: 1, - sha: Gitlab::Database::ShaAttribute.serialize('456abc'), - author_name: 'alice', - author_email: 'alice@example.com', - committer_name: 'alice', - committer_email: 'alice@example.com' - ) - - users, to_update = migration.get_data_to_update(diff.id, diff.id + 1) - - bob_name = 'bob' + ('a' * 509) - - expect(users).to include(%w[alice alice@example.com]) - expect(users).to include([bob_name, 'bob@example.com']) - - expect(to_update[[diff.id, 0]]) - .to eq([[bob_name, 'bob@example.com'], [bob_name, 'bob@example.com']]) - - expect(to_update[[diff.id, 1]]) - .to 
eq([%w[alice alice@example.com], %w[alice alice@example.com]]) - end - - it 'does not include a user if both the name and Email are missing' do - commits.create!( - merge_request_diff_id: diff.id, - relative_order: 0, - sha: Gitlab::Database::ShaAttribute.serialize('123abc'), - author_name: nil, - author_email: nil, - committer_name: 'bob', - committer_email: 'bob@example.com' - ) - - users, _ = migration.get_data_to_update(diff.id, diff.id + 1) - - expect(users).to eq([%w[bob bob@example.com]].to_set) - end - end - - describe '#get_user_rows_in_batches' do - it 'retrieves all existing users' do - alice = commit_users.create!(name: 'alice', email: 'alice@example.com') - bob = commit_users.create!(name: 'bob', email: 'bob@example.com') - - users = [[alice.name, alice.email], [bob.name, bob.email]] - mapping = {} - - migration.get_user_rows_in_batches(users, mapping) - - expect(mapping[%w[alice alice@example.com]]).to eq(alice) - expect(mapping[%w[bob bob@example.com]]).to eq(bob) - end - end - - describe '#create_missing_users' do - it 'creates merge request diff commit users that are missing' do - alice = commit_users.create!(name: 'alice', email: 'alice@example.com') - users = [%w[alice alice@example.com], %w[bob bob@example.com]] - mapping = { %w[alice alice@example.com] => alice } - - migration.create_missing_users(users, mapping) - - expect(mapping[%w[alice alice@example.com]]).to eq(alice) - expect(mapping[%w[bob bob@example.com]].name).to eq('bob') - expect(mapping[%w[bob bob@example.com]].email).to eq('bob@example.com') - end - end - - describe '#update_commit_rows' do - it 'updates the merge request diff commit rows' do - to_update = { [42, 0] => [%w[alice alice@example.com], []] } - user_mapping = { %w[alice alice@example.com] => double(:user, id: 1) } - - expect(migration) - .to receive(:bulk_update_commit_rows) - .with({ [42, 0] => [1, nil] }) - - migration.update_commit_rows(to_update, user_mapping) - end - end - - describe '#bulk_update_commit_rows' do 
- context 'when there are no authors and committers' do - it 'does not update any rows' do - migration.bulk_update_commit_rows({ [1, 0] => [] }) - - expect(described_class::MergeRequestDiffCommit.connection) - .not_to receive(:execute) - end - end - - context 'when there are only authors' do - it 'only updates the author IDs' do - author = commit_users.create!(name: 'Alice', email: 'alice@example.com') - commit = commits.create!( - merge_request_diff_id: diff.id, - relative_order: 0, - sha: Gitlab::Database::ShaAttribute.serialize('123abc') - ) - - mapping = { - [commit.merge_request_diff_id, commit.relative_order] => - [author.id, nil] - } - - migration.bulk_update_commit_rows(mapping) - - commit = commits.first - - expect(commit.commit_author_id).to eq(author.id) - expect(commit.committer_id).to be_nil - end - end - - context 'when there are only committers' do - it 'only updates the committer IDs' do - committer = - commit_users.create!(name: 'Alice', email: 'alice@example.com') - - commit = commits.create!( - merge_request_diff_id: diff.id, - relative_order: 0, - sha: Gitlab::Database::ShaAttribute.serialize('123abc') - ) - - mapping = { - [commit.merge_request_diff_id, commit.relative_order] => - [nil, committer.id] - } - - migration.bulk_update_commit_rows(mapping) - - commit = commits.first - - expect(commit.committer_id).to eq(committer.id) - expect(commit.commit_author_id).to be_nil - end - end - - context 'when there are both authors and committers' do - it 'updates both the author and committer IDs' do - author = commit_users.create!(name: 'Bob', email: 'bob@example.com') - committer = - commit_users.create!(name: 'Alice', email: 'alice@example.com') - - commit = commits.create!( - merge_request_diff_id: diff.id, - relative_order: 0, - sha: Gitlab::Database::ShaAttribute.serialize('123abc') - ) - - mapping = { - [commit.merge_request_diff_id, commit.relative_order] => - [author.id, committer.id] - } - - migration.bulk_update_commit_rows(mapping) - - 
commit = commits.first - - expect(commit.commit_author_id).to eq(author.id) - expect(commit.committer_id).to eq(committer.id) - end - end - - context 'when there are multiple commit rows to update' do - it 'updates all the rows' do - author = commit_users.create!(name: 'Bob', email: 'bob@example.com') - committer = - commit_users.create!(name: 'Alice', email: 'alice@example.com') - - commit1 = commits.create!( - merge_request_diff_id: diff.id, - relative_order: 0, - sha: Gitlab::Database::ShaAttribute.serialize('123abc') - ) - - commit2 = commits.create!( - merge_request_diff_id: diff.id, - relative_order: 1, - sha: Gitlab::Database::ShaAttribute.serialize('456abc') - ) - - mapping = { - [commit1.merge_request_diff_id, commit1.relative_order] => - [author.id, committer.id], - - [commit2.merge_request_diff_id, commit2.relative_order] => - [author.id, nil] - } - - migration.bulk_update_commit_rows(mapping) - - commit1 = commits.find_by(relative_order: 0) - commit2 = commits.find_by(relative_order: 1) - - expect(commit1.commit_author_id).to eq(author.id) - expect(commit1.committer_id).to eq(committer.id) - - expect(commit2.commit_author_id).to eq(author.id) - expect(commit2.committer_id).to be_nil - end - end - end - - describe '#primary_key' do - it 'returns the primary key for the commits table' do - key = migration.primary_key - - expect(key.to_sql).to eq('("merge_request_diff_commits"."merge_request_diff_id", "merge_request_diff_commits"."relative_order")') - end - end - - describe '#prepare' do - it 'trims a value to at most 512 characters' do - expect(migration.prepare('€' * 1_000)).to eq('€' * 512) - end - - it 'returns nil if the value is an empty string' do - expect(migration.prepare('')).to be_nil - end - end -end diff --git a/spec/lib/gitlab/background_migration/migrate_personal_namespace_project_maintainer_to_owner_spec.rb b/spec/lib/gitlab/background_migration/migrate_personal_namespace_project_maintainer_to_owner_spec.rb index 07e77bdbc13..90d05ccbe1a 
100644 --- a/spec/lib/gitlab/background_migration/migrate_personal_namespace_project_maintainer_to_owner_spec.rb +++ b/spec/lib/gitlab/background_migration/migrate_personal_namespace_project_maintainer_to_owner_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe Gitlab::BackgroundMigration::MigratePersonalNamespaceProjectMaintainerToOwner, :migration, schema: 20220208080921 do +RSpec.describe Gitlab::BackgroundMigration::MigratePersonalNamespaceProjectMaintainerToOwner, :migration, schema: 20220314184009 do let(:migration) { described_class.new } let(:users_table) { table(:users) } let(:members_table) { table(:members) } diff --git a/spec/lib/gitlab/background_migration/migrate_project_taggings_context_from_tags_to_topics_spec.rb b/spec/lib/gitlab/background_migration/migrate_project_taggings_context_from_tags_to_topics_spec.rb deleted file mode 100644 index b252df4ecff..00000000000 --- a/spec/lib/gitlab/background_migration/migrate_project_taggings_context_from_tags_to_topics_spec.rb +++ /dev/null @@ -1,30 +0,0 @@ -# frozen_string_literal: true - -require 'spec_helper' - -RSpec.describe Gitlab::BackgroundMigration::MigrateProjectTaggingsContextFromTagsToTopics, - :suppress_gitlab_schemas_validate_connection, schema: 20210826171758 do - it 'correctly migrates project taggings context from tags to topics' do - taggings = table(:taggings) - - project_old_tagging_1 = taggings.create!(taggable_type: 'Project', context: 'tags') - project_new_tagging_1 = taggings.create!(taggable_type: 'Project', context: 'topics') - project_other_context_tagging_1 = taggings.create!(taggable_type: 'Project', context: 'other') - project_old_tagging_2 = taggings.create!(taggable_type: 'Project', context: 'tags') - project_old_tagging_3 = taggings.create!(taggable_type: 'Project', context: 'tags') - - subject.perform(project_old_tagging_1.id, project_old_tagging_2.id) - - project_old_tagging_1.reload - project_new_tagging_1.reload - project_other_context_tagging_1.reload - 
project_old_tagging_2.reload - project_old_tagging_3.reload - - expect(project_old_tagging_1.context).to eq('topics') - expect(project_new_tagging_1.context).to eq('topics') - expect(project_other_context_tagging_1.context).to eq('other') - expect(project_old_tagging_2.context).to eq('topics') - expect(project_old_tagging_3.context).to eq('tags') - end -end diff --git a/spec/lib/gitlab/background_migration/migrate_u2f_webauthn_spec.rb b/spec/lib/gitlab/background_migration/migrate_u2f_webauthn_spec.rb deleted file mode 100644 index 08fde0d0ff4..00000000000 --- a/spec/lib/gitlab/background_migration/migrate_u2f_webauthn_spec.rb +++ /dev/null @@ -1,67 +0,0 @@ -# frozen_string_literal: true - -require 'spec_helper' - -require 'webauthn/u2f_migrator' - -RSpec.describe Gitlab::BackgroundMigration::MigrateU2fWebauthn, :migration, schema: 20210826171758 do - let(:users) { table(:users) } - - let(:user) { users.create!(email: 'email@email.com', name: 'foo', username: 'foo', projects_limit: 0) } - - let(:u2f_registrations) { table(:u2f_registrations) } - let(:webauthn_registrations) { table(:webauthn_registrations) } - - let!(:u2f_registration_not_migrated) { create_u2f_registration(1, 'reg1') } - let!(:u2f_registration_not_migrated_no_name) { create_u2f_registration(2, nil, 2) } - let!(:u2f_registration_migrated) { create_u2f_registration(3, 'reg3') } - - subject { described_class.new.perform(1, 3) } - - before do - converted_credential = convert_credential_for(u2f_registration_migrated) - webauthn_registrations.create!(converted_credential) - end - - it 'migrates all records' do - expect { subject }.to change { webauthn_registrations.count }.from(1).to(3) - - all_webauthn_registrations = webauthn_registrations.all.map(&:attributes) - - [u2f_registration_not_migrated, u2f_registration_not_migrated_no_name].each do |u2f_registration| - expected_credential = convert_credential_for(u2f_registration).except(:created_at).stringify_keys - expect(all_webauthn_registrations).to 
include(a_hash_including(expected_credential)) - end - end - - def create_u2f_registration(id, name, counter = 5) - device = U2F::FakeU2F.new(FFaker::BaconIpsum.characters(5)) - u2f_registrations.create!({ id: id, - certificate: Base64.strict_encode64(device.cert_raw), - key_handle: U2F.urlsafe_encode64(device.key_handle_raw), - public_key: Base64.strict_encode64(device.origin_public_key_raw), - counter: counter, - name: name, - user_id: user.id }) - end - - def convert_credential_for(u2f_registration) - converted_credential = WebAuthn::U2fMigrator.new( - app_id: Gitlab.config.gitlab.url, - certificate: u2f_registration.certificate, - key_handle: u2f_registration.key_handle, - public_key: u2f_registration.public_key, - counter: u2f_registration.counter - ).credential - - { - credential_xid: Base64.strict_encode64(converted_credential.id), - public_key: Base64.strict_encode64(converted_credential.public_key), - counter: u2f_registration.counter, - name: u2f_registration.name || '', - user_id: u2f_registration.user_id, - u2f_registration_id: u2f_registration.id, - created_at: u2f_registration.created_at - } - end -end diff --git a/spec/lib/gitlab/background_migration/move_container_registry_enabled_to_project_feature_spec.rb b/spec/lib/gitlab/background_migration/move_container_registry_enabled_to_project_feature_spec.rb deleted file mode 100644 index 71cf58a933f..00000000000 --- a/spec/lib/gitlab/background_migration/move_container_registry_enabled_to_project_feature_spec.rb +++ /dev/null @@ -1,98 +0,0 @@ -# frozen_string_literal: true - -require 'spec_helper' - -RSpec.describe Gitlab::BackgroundMigration::MoveContainerRegistryEnabledToProjectFeature, :migration, schema: 20210826171758 do - let(:enabled) { 20 } - let(:disabled) { 0 } - - let(:namespaces) { table(:namespaces) } - let(:project_features) { table(:project_features) } - let(:projects) { table(:projects) } - - let(:namespace) { namespaces.create!(name: 'user', path: 'user') } - let!(:project1) { 
projects.create!(namespace_id: namespace.id) } - let!(:project2) { projects.create!(namespace_id: namespace.id) } - let!(:project3) { projects.create!(namespace_id: namespace.id) } - let!(:project4) { projects.create!(namespace_id: namespace.id) } - - # pages_access_level cannot be null. - let(:non_null_project_features) { { pages_access_level: enabled } } - let!(:project_feature1) { project_features.create!(project_id: project1.id, **non_null_project_features) } - let!(:project_feature2) { project_features.create!(project_id: project2.id, **non_null_project_features) } - let!(:project_feature3) { project_features.create!(project_id: project3.id, **non_null_project_features) } - - describe '#perform' do - before do - project1.update!(container_registry_enabled: true) - project2.update!(container_registry_enabled: false) - project3.update!(container_registry_enabled: nil) - project4.update!(container_registry_enabled: true) - end - - it 'copies values to project_features' do - table(:background_migration_jobs).create!( - class_name: 'MoveContainerRegistryEnabledToProjectFeature', - arguments: [project1.id, project4.id] - ) - table(:background_migration_jobs).create!( - class_name: 'MoveContainerRegistryEnabledToProjectFeature', - arguments: [-1, -3] - ) - - expect(project1.container_registry_enabled).to eq(true) - expect(project2.container_registry_enabled).to eq(false) - expect(project3.container_registry_enabled).to eq(nil) - expect(project4.container_registry_enabled).to eq(true) - - expect(project_feature1.container_registry_access_level).to eq(disabled) - expect(project_feature2.container_registry_access_level).to eq(disabled) - expect(project_feature3.container_registry_access_level).to eq(disabled) - - expect_next_instance_of(Gitlab::BackgroundMigration::Logger) do |logger| - expect(logger).to receive(:info) - .with(message: "#{described_class}: Copied container_registry_enabled values for projects with IDs between #{project1.id}..#{project4.id}") - - 
expect(logger).not_to receive(:info) - end - - subject.perform(project1.id, project4.id) - - expect(project1.reload.container_registry_enabled).to eq(true) - expect(project2.reload.container_registry_enabled).to eq(false) - expect(project3.reload.container_registry_enabled).to eq(nil) - expect(project4.container_registry_enabled).to eq(true) - - expect(project_feature1.reload.container_registry_access_level).to eq(enabled) - expect(project_feature2.reload.container_registry_access_level).to eq(disabled) - expect(project_feature3.reload.container_registry_access_level).to eq(disabled) - - expect(table(:background_migration_jobs).first.status).to eq(1) # succeeded - expect(table(:background_migration_jobs).second.status).to eq(0) # pending - end - - context 'when no projects exist in range' do - it 'does not fail' do - expect(project1.container_registry_enabled).to eq(true) - expect(project_feature1.container_registry_access_level).to eq(disabled) - - expect { subject.perform(-1, -2) }.not_to raise_error - - expect(project1.container_registry_enabled).to eq(true) - expect(project_feature1.container_registry_access_level).to eq(disabled) - end - end - - context 'when projects in range all have nil container_registry_enabled' do - it 'does not fail' do - expect(project3.container_registry_enabled).to eq(nil) - expect(project_feature3.container_registry_access_level).to eq(disabled) - - expect { subject.perform(project3.id, project3.id) }.not_to raise_error - - expect(project3.container_registry_enabled).to eq(nil) - expect(project_feature3.container_registry_access_level).to eq(disabled) - end - end - end -end diff --git a/spec/lib/gitlab/background_migration/nullify_orphan_runner_id_on_ci_builds_spec.rb b/spec/lib/gitlab/background_migration/nullify_orphan_runner_id_on_ci_builds_spec.rb index 2f0eef3c399..7c78350e697 100644 --- a/spec/lib/gitlab/background_migration/nullify_orphan_runner_id_on_ci_builds_spec.rb +++ 
b/spec/lib/gitlab/background_migration/nullify_orphan_runner_id_on_ci_builds_spec.rb @@ -3,7 +3,7 @@ require 'spec_helper' RSpec.describe Gitlab::BackgroundMigration::NullifyOrphanRunnerIdOnCiBuilds, - :suppress_gitlab_schemas_validate_connection, migration: :gitlab_ci, schema: 20220223112304 do + :suppress_gitlab_schemas_validate_connection, migration: :gitlab_ci, schema: 20220314184009 do let(:namespaces) { table(:namespaces) } let(:projects) { table(:projects) } let(:ci_runners) { table(:ci_runners) } diff --git a/spec/lib/gitlab/background_migration/populate_namespace_statistics_spec.rb b/spec/lib/gitlab/background_migration/populate_namespace_statistics_spec.rb deleted file mode 100644 index 4a7d52ee784..00000000000 --- a/spec/lib/gitlab/background_migration/populate_namespace_statistics_spec.rb +++ /dev/null @@ -1,71 +0,0 @@ -# frozen_string_literal: true - -require 'spec_helper' - -RSpec.describe Gitlab::BackgroundMigration::PopulateNamespaceStatistics do - let!(:namespaces) { table(:namespaces) } - let!(:namespace_statistics) { table(:namespace_statistics) } - let!(:dependency_proxy_manifests) { table(:dependency_proxy_manifests) } - let!(:dependency_proxy_blobs) { table(:dependency_proxy_blobs) } - - let!(:group1) { namespaces.create!(id: 10, type: 'Group', name: 'group1', path: 'group1') } - let!(:group2) { namespaces.create!(id: 20, type: 'Group', name: 'group2', path: 'group2') } - - let!(:group1_manifest) do - dependency_proxy_manifests.create!(group_id: 10, size: 20, file_name: 'test-file', file: 'test', digest: 'abc123') - end - - let!(:group2_manifest) do - dependency_proxy_manifests.create!(group_id: 20, size: 20, file_name: 'test-file', file: 'test', digest: 'abc123') - end - - let!(:group1_stats) { namespace_statistics.create!(id: 10, namespace_id: 10) } - - let(:ids) { namespaces.pluck(:id) } - let(:statistics) { [] } - - subject(:perform) { described_class.new.perform(ids, statistics) } - - it 'creates/updates all namespace_statistics and 
updates root storage statistics', :aggregate_failures do - expect(Namespaces::ScheduleAggregationWorker).to receive(:perform_async).with(group1.id) - expect(Namespaces::ScheduleAggregationWorker).to receive(:perform_async).with(group2.id) - - expect { perform }.to change(namespace_statistics, :count).from(1).to(2) - - namespace_statistics.all.each do |stat| - expect(stat.dependency_proxy_size).to eq 20 - expect(stat.storage_size).to eq 20 - end - end - - context 'when just a stat is passed' do - let(:statistics) { [:dependency_proxy_size] } - - it 'calls the statistics update service with just that stat' do - expect(Groups::UpdateStatisticsService) - .to receive(:new) - .with(anything, statistics: [:dependency_proxy_size]) - .twice.and_call_original - - perform - end - end - - context 'when a statistics update fails' do - before do - error_response = instance_double(ServiceResponse, message: 'an error', error?: true) - - allow_next_instance_of(Groups::UpdateStatisticsService) do |instance| - allow(instance).to receive(:execute).and_return(error_response) - end - end - - it 'logs an error' do - expect_next_instance_of(Gitlab::BackgroundMigration::Logger) do |instance| - expect(instance).to receive(:error).twice - end - - perform - end - end -end diff --git a/spec/lib/gitlab/background_migration/populate_topics_non_private_projects_count_spec.rb b/spec/lib/gitlab/background_migration/populate_topics_non_private_projects_count_spec.rb deleted file mode 100644 index e72e3392210..00000000000 --- a/spec/lib/gitlab/background_migration/populate_topics_non_private_projects_count_spec.rb +++ /dev/null @@ -1,50 +0,0 @@ -# frozen_string_literal: true - -require 'spec_helper' - -RSpec.describe Gitlab::BackgroundMigration::PopulateTopicsNonPrivateProjectsCount, schema: 20220125122640 do - it 'correctly populates the non private projects counters' do - namespaces = table(:namespaces) - projects = table(:projects) - topics = table(:topics) - project_topics = 
table(:project_topics) - - group = namespaces.create!(name: 'group', path: 'group') - project_public = projects.create!(namespace_id: group.id, visibility_level: Gitlab::VisibilityLevel::PUBLIC) - project_internal = projects.create!(namespace_id: group.id, visibility_level: Gitlab::VisibilityLevel::INTERNAL) - project_private = projects.create!(namespace_id: group.id, visibility_level: Gitlab::VisibilityLevel::PRIVATE) - topic_1 = topics.create!(name: 'Topic1') - topic_2 = topics.create!(name: 'Topic2') - topic_3 = topics.create!(name: 'Topic3') - topic_4 = topics.create!(name: 'Topic4') - topic_5 = topics.create!(name: 'Topic5') - topic_6 = topics.create!(name: 'Topic6') - topic_7 = topics.create!(name: 'Topic7') - topic_8 = topics.create!(name: 'Topic8') - - project_topics.create!(topic_id: topic_1.id, project_id: project_public.id) - project_topics.create!(topic_id: topic_2.id, project_id: project_internal.id) - project_topics.create!(topic_id: topic_3.id, project_id: project_private.id) - project_topics.create!(topic_id: topic_4.id, project_id: project_public.id) - project_topics.create!(topic_id: topic_4.id, project_id: project_internal.id) - project_topics.create!(topic_id: topic_5.id, project_id: project_public.id) - project_topics.create!(topic_id: topic_5.id, project_id: project_private.id) - project_topics.create!(topic_id: topic_6.id, project_id: project_internal.id) - project_topics.create!(topic_id: topic_6.id, project_id: project_private.id) - project_topics.create!(topic_id: topic_7.id, project_id: project_public.id) - project_topics.create!(topic_id: topic_7.id, project_id: project_internal.id) - project_topics.create!(topic_id: topic_7.id, project_id: project_private.id) - project_topics.create!(topic_id: topic_8.id, project_id: project_public.id) - - subject.perform(topic_1.id, topic_7.id) - - expect(topic_1.reload.non_private_projects_count).to eq(1) - expect(topic_2.reload.non_private_projects_count).to eq(1) - 
expect(topic_3.reload.non_private_projects_count).to eq(0) - expect(topic_4.reload.non_private_projects_count).to eq(2) - expect(topic_5.reload.non_private_projects_count).to eq(1) - expect(topic_6.reload.non_private_projects_count).to eq(1) - expect(topic_7.reload.non_private_projects_count).to eq(2) - expect(topic_8.reload.non_private_projects_count).to eq(0) - end -end diff --git a/spec/lib/gitlab/background_migration/populate_topics_total_projects_count_cache_spec.rb b/spec/lib/gitlab/background_migration/populate_topics_total_projects_count_cache_spec.rb deleted file mode 100644 index 8e07b43f5b9..00000000000 --- a/spec/lib/gitlab/background_migration/populate_topics_total_projects_count_cache_spec.rb +++ /dev/null @@ -1,35 +0,0 @@ -# frozen_string_literal: true - -require 'spec_helper' - -RSpec.describe Gitlab::BackgroundMigration::PopulateTopicsTotalProjectsCountCache, schema: 20211006060436 do - it 'correctly populates total projects count cache' do - namespaces = table(:namespaces) - projects = table(:projects) - topics = table(:topics) - project_topics = table(:project_topics) - - group = namespaces.create!(name: 'group', path: 'group') - project_1 = projects.create!(namespace_id: group.id) - project_2 = projects.create!(namespace_id: group.id) - project_3 = projects.create!(namespace_id: group.id) - topic_1 = topics.create!(name: 'Topic1') - topic_2 = topics.create!(name: 'Topic2') - topic_3 = topics.create!(name: 'Topic3') - topic_4 = topics.create!(name: 'Topic4') - - project_topics.create!(project_id: project_1.id, topic_id: topic_1.id) - project_topics.create!(project_id: project_1.id, topic_id: topic_3.id) - project_topics.create!(project_id: project_2.id, topic_id: topic_3.id) - project_topics.create!(project_id: project_1.id, topic_id: topic_4.id) - project_topics.create!(project_id: project_2.id, topic_id: topic_4.id) - project_topics.create!(project_id: project_3.id, topic_id: topic_4.id) - - subject.perform(topic_1.id, topic_4.id) - - 
expect(topic_1.reload.total_projects_count).to eq(1) - expect(topic_2.reload.total_projects_count).to eq(0) - expect(topic_3.reload.total_projects_count).to eq(2) - expect(topic_4.reload.total_projects_count).to eq(3) - end -end diff --git a/spec/lib/gitlab/background_migration/populate_vulnerability_reads_spec.rb b/spec/lib/gitlab/background_migration/populate_vulnerability_reads_spec.rb deleted file mode 100644 index c0470f26d9e..00000000000 --- a/spec/lib/gitlab/background_migration/populate_vulnerability_reads_spec.rb +++ /dev/null @@ -1,93 +0,0 @@ -# frozen_string_literal: true - -require 'spec_helper' - -RSpec.describe Gitlab::BackgroundMigration::PopulateVulnerabilityReads, :migration, schema: 20220326161803 do - let(:vulnerabilities) { table(:vulnerabilities) } - let(:vulnerability_reads) { table(:vulnerability_reads) } - let(:vulnerabilities_findings) { table(:vulnerability_occurrences) } - let(:vulnerability_issue_links) { table(:vulnerability_issue_links) } - let(:namespace) { table(:namespaces).create!(name: 'user', path: 'user') } - let(:user) { table(:users).create!(email: 'author@example.com', username: 'author', projects_limit: 10) } - let(:project) { table(:projects).create!(namespace_id: namespace.id) } - let(:scanner) { table(:vulnerability_scanners).create!(project_id: project.id, external_id: 'test 1', name: 'test scanner 1') } - let(:sub_batch_size) { 1000 } - - before do - vulnerabilities_findings.connection.execute 'ALTER TABLE vulnerability_occurrences DISABLE TRIGGER "trigger_insert_or_update_vulnerability_reads_from_occurrences"' - vulnerabilities.connection.execute 'ALTER TABLE vulnerabilities DISABLE TRIGGER "trigger_update_vulnerability_reads_on_vulnerability_update"' - vulnerability_issue_links.connection.execute 'ALTER TABLE vulnerability_issue_links DISABLE TRIGGER "trigger_update_has_issues_on_vulnerability_issue_links_update"' - - 10.times.each do |x| - vulnerability = create_vulnerability!( - project_id: project.id, - 
report_type: 7, - author_id: user.id - ) - identifier = table(:vulnerability_identifiers).create!( - project_id: project.id, - external_type: 'uuid-v5', - external_id: 'uuid-v5', - fingerprint: Digest::SHA1.hexdigest(vulnerability.id.to_s), - name: 'Identifier for UUIDv5') - - create_finding!( - vulnerability_id: vulnerability.id, - project_id: project.id, - scanner_id: scanner.id, - primary_identifier_id: identifier.id - ) - end - end - - it 'creates vulnerability_reads for the given records' do - described_class.new.perform(vulnerabilities.first.id, vulnerabilities.last.id, sub_batch_size) - - expect(vulnerability_reads.count).to eq(10) - end - - it 'does not create new records when records already exists' do - described_class.new.perform(vulnerabilities.first.id, vulnerabilities.last.id, sub_batch_size) - described_class.new.perform(vulnerabilities.first.id, vulnerabilities.last.id, sub_batch_size) - - expect(vulnerability_reads.count).to eq(10) - end - - private - - def create_vulnerability!(project_id:, author_id:, title: 'test', severity: 7, confidence: 7, report_type: 0) - vulnerabilities.create!( - project_id: project_id, - author_id: author_id, - title: title, - severity: severity, - confidence: confidence, - report_type: report_type - ) - end - - # rubocop:disable Metrics/ParameterLists - def create_finding!( - project_id:, scanner_id:, primary_identifier_id:, vulnerability_id: nil, - name: "test", severity: 7, confidence: 7, report_type: 0, - project_fingerprint: '123qweasdzxc', location: { "image" => "alpine:3.4" }, location_fingerprint: 'test', - metadata_version: 'test', raw_metadata: 'test', uuid: SecureRandom.uuid) - vulnerabilities_findings.create!( - vulnerability_id: vulnerability_id, - project_id: project_id, - name: name, - severity: severity, - confidence: confidence, - report_type: report_type, - project_fingerprint: project_fingerprint, - scanner_id: scanner_id, - primary_identifier_id: primary_identifier_id, - location: location, - 
location_fingerprint: location_fingerprint, - metadata_version: metadata_version, - raw_metadata: raw_metadata, - uuid: uuid - ) - end - # rubocop:enable Metrics/ParameterLists -end diff --git a/spec/lib/gitlab/background_migration/recalculate_vulnerabilities_occurrences_uuid_spec.rb b/spec/lib/gitlab/background_migration/recalculate_vulnerabilities_occurrences_uuid_spec.rb deleted file mode 100644 index 2271bbfb2f3..00000000000 --- a/spec/lib/gitlab/background_migration/recalculate_vulnerabilities_occurrences_uuid_spec.rb +++ /dev/null @@ -1,530 +0,0 @@ -# frozen_string_literal: true - -require 'spec_helper' - -def create_background_migration_job(ids, status) - proper_status = case status - when :pending - Gitlab::Database::BackgroundMigrationJob.statuses['pending'] - when :succeeded - Gitlab::Database::BackgroundMigrationJob.statuses['succeeded'] - else - raise ArgumentError - end - - background_migration_jobs.create!( - class_name: 'RecalculateVulnerabilitiesOccurrencesUuid', - arguments: Array(ids), - status: proper_status, - created_at: Time.now.utc - ) -end - -RSpec.describe Gitlab::BackgroundMigration::RecalculateVulnerabilitiesOccurrencesUuid, :suppress_gitlab_schemas_validate_connection, schema: 20211124132705 do - let(:background_migration_jobs) { table(:background_migration_jobs) } - let(:pending_jobs) { background_migration_jobs.where(status: Gitlab::Database::BackgroundMigrationJob.statuses['pending']) } - let(:succeeded_jobs) { background_migration_jobs.where(status: Gitlab::Database::BackgroundMigrationJob.statuses['succeeded']) } - let(:namespace) { table(:namespaces).create!(name: 'user', path: 'user') } - let(:users) { table(:users) } - let(:user) { create_user! 
} - let(:project) { table(:projects).create!(id: 123, namespace_id: namespace.id) } - let(:scanners) { table(:vulnerability_scanners) } - let(:scanner) { scanners.create!(project_id: project.id, external_id: 'test 1', name: 'test scanner 1') } - let(:scanner2) { scanners.create!(project_id: project.id, external_id: 'test 2', name: 'test scanner 2') } - let(:vulnerabilities) { table(:vulnerabilities) } - let(:vulnerability_findings) { table(:vulnerability_occurrences) } - let(:vulnerability_finding_pipelines) { table(:vulnerability_occurrence_pipelines) } - let(:vulnerability_finding_signatures) { table(:vulnerability_finding_signatures) } - let(:vulnerability_identifiers) { table(:vulnerability_identifiers) } - - let(:identifier_1) { 'identifier-1' } - let!(:vulnerability_identifier) do - vulnerability_identifiers.create!( - project_id: project.id, - external_type: identifier_1, - external_id: identifier_1, - fingerprint: Gitlab::Database::ShaAttribute.serialize('ff9ef548a6e30a0462795d916f3f00d1e2b082ca'), - name: 'Identifier 1') - end - - let(:identifier_2) { 'identifier-2' } - let!(:vulnerability_identfier2) do - vulnerability_identifiers.create!( - project_id: project.id, - external_type: identifier_2, - external_id: identifier_2, - fingerprint: Gitlab::Database::ShaAttribute.serialize('4299e8ddd819f9bde9cfacf45716724c17b5ddf7'), - name: 'Identifier 2') - end - - let(:identifier_3) { 'identifier-3' } - let!(:vulnerability_identifier3) do - vulnerability_identifiers.create!( - project_id: project.id, - external_type: identifier_3, - external_id: identifier_3, - fingerprint: Gitlab::Database::ShaAttribute.serialize('8e91632f9c6671e951834a723ee221c44cc0d844'), - name: 'Identifier 3') - end - - let(:known_uuid_v4) { "b3cc2518-5446-4dea-871c-89d5e999c1ac" } - let(:known_uuid_v5) { "05377088-dc26-5161-920e-52a7159fdaa1" } - let(:desired_uuid_v5) { "f3e9a23f-9181-54bf-a5ab-c5bc7a9b881a" } - - subject { described_class.new.perform(start_id, end_id) } - - context "when 
finding has a UUIDv4" do - before do - @uuid_v4 = create_finding!( - vulnerability_id: nil, - project_id: project.id, - scanner_id: scanner2.id, - primary_identifier_id: vulnerability_identfier2.id, - report_type: 0, # "sast" - location_fingerprint: Gitlab::Database::ShaAttribute.serialize("fa18f432f1d56675f4098d318739c3cd5b14eb3e"), - uuid: known_uuid_v4 - ) - end - - let(:start_id) { @uuid_v4.id } - let(:end_id) { @uuid_v4.id } - - it "replaces it with UUIDv5" do - expect(vulnerability_findings.pluck(:uuid)).to match_array([known_uuid_v4]) - - subject - - expect(vulnerability_findings.pluck(:uuid)).to match_array([desired_uuid_v5]) - end - - it 'logs recalculation' do - expect_next_instance_of(Gitlab::BackgroundMigration::Logger) do |instance| - expect(instance).to receive(:info).twice - end - - subject - end - end - - context "when finding has a UUIDv5" do - before do - @uuid_v5 = create_finding!( - vulnerability_id: nil, - project_id: project.id, - scanner_id: scanner.id, - primary_identifier_id: vulnerability_identifier.id, - report_type: 0, # "sast" - location_fingerprint: Gitlab::Database::ShaAttribute.serialize("838574be0210968bf6b9f569df9c2576242cbf0a"), - uuid: known_uuid_v5 - ) - end - - let(:start_id) { @uuid_v5.id } - let(:end_id) { @uuid_v5.id } - - it "stays the same" do - expect(vulnerability_findings.pluck(:uuid)).to match_array([known_uuid_v5]) - - subject - - expect(vulnerability_findings.pluck(:uuid)).to match_array([known_uuid_v5]) - end - end - - context 'if a duplicate UUID would be generated' do # rubocop: disable RSpec/MultipleMemoizedHelpers - let(:v1) do - create_vulnerability!( - project_id: project.id, - author_id: user.id - ) - end - - let!(:finding_with_incorrect_uuid) do - create_finding!( - vulnerability_id: v1.id, - project_id: project.id, - scanner_id: scanner.id, - primary_identifier_id: vulnerability_identifier.id, - report_type: 0, # "sast" - location_fingerprint: 
Gitlab::Database::ShaAttribute.serialize('ca41a2544e941a007a73a666cb0592b255316ab8'), # sha1('youshouldntusethis') - uuid: 'bd95c085-71aa-51d7-9bb6-08ae669c262e' - ) - end - - let(:v2) do - create_vulnerability!( - project_id: project.id, - author_id: user.id - ) - end - - let!(:finding_with_correct_uuid) do - create_finding!( - vulnerability_id: v2.id, - project_id: project.id, - primary_identifier_id: vulnerability_identifier.id, - scanner_id: scanner2.id, - report_type: 0, # "sast" - location_fingerprint: Gitlab::Database::ShaAttribute.serialize('ca41a2544e941a007a73a666cb0592b255316ab8'), # sha1('youshouldntusethis') - uuid: '91984483-5efe-5215-b471-d524ac5792b1' - ) - end - - let(:v3) do - create_vulnerability!( - project_id: project.id, - author_id: user.id - ) - end - - let!(:finding_with_incorrect_uuid2) do - create_finding!( - vulnerability_id: v3.id, - project_id: project.id, - scanner_id: scanner.id, - primary_identifier_id: vulnerability_identfier2.id, - report_type: 0, # "sast" - location_fingerprint: Gitlab::Database::ShaAttribute.serialize('ca41a2544e941a007a73a666cb0592b255316ab8'), # sha1('youshouldntusethis') - uuid: '00000000-1111-2222-3333-444444444444' - ) - end - - let(:v4) do - create_vulnerability!( - project_id: project.id, - author_id: user.id - ) - end - - let!(:finding_with_correct_uuid2) do - create_finding!( - vulnerability_id: v4.id, - project_id: project.id, - scanner_id: scanner2.id, - primary_identifier_id: vulnerability_identfier2.id, - report_type: 0, # "sast" - location_fingerprint: Gitlab::Database::ShaAttribute.serialize('ca41a2544e941a007a73a666cb0592b255316ab8'), # sha1('youshouldntusethis') - uuid: '1edd751e-ef9a-5391-94db-a832c8635bfc' - ) - end - - let!(:finding_with_incorrect_uuid3) do - create_finding!( - vulnerability_id: nil, - project_id: project.id, - scanner_id: scanner.id, - primary_identifier_id: vulnerability_identifier3.id, - report_type: 0, # "sast" - location_fingerprint: 
Gitlab::Database::ShaAttribute.serialize('ca41a2544e941a007a73a666cb0592b255316ab8'), # sha1('youshouldntusethis') - uuid: '22222222-3333-4444-5555-666666666666' - ) - end - - let!(:duplicate_not_in_the_same_batch) do - create_finding!( - id: 99999, - vulnerability_id: nil, - project_id: project.id, - scanner_id: scanner2.id, - primary_identifier_id: vulnerability_identifier3.id, - report_type: 0, # "sast" - location_fingerprint: Gitlab::Database::ShaAttribute.serialize('ca41a2544e941a007a73a666cb0592b255316ab8'), # sha1('youshouldntusethis') - uuid: '4564f9d5-3c6b-5cc3-af8c-7c25285362a7' - ) - end - - let(:start_id) { finding_with_incorrect_uuid.id } - let(:end_id) { finding_with_incorrect_uuid3.id } - - before do - 4.times do - create_finding_pipeline!(project_id: project.id, finding_id: finding_with_incorrect_uuid.id) - create_finding_pipeline!(project_id: project.id, finding_id: finding_with_correct_uuid.id) - create_finding_pipeline!(project_id: project.id, finding_id: finding_with_incorrect_uuid2.id) - create_finding_pipeline!(project_id: project.id, finding_id: finding_with_correct_uuid2.id) - end - end - - it 'drops duplicates and related records', :aggregate_failures do - expect(vulnerability_findings.pluck(:id)).to match_array( - [ - finding_with_correct_uuid.id, - finding_with_incorrect_uuid.id, - finding_with_correct_uuid2.id, - finding_with_incorrect_uuid2.id, - finding_with_incorrect_uuid3.id, - duplicate_not_in_the_same_batch.id - ]) - - expect { subject }.to change(vulnerability_finding_pipelines, :count).from(16).to(8) - .and change(vulnerability_findings, :count).from(6).to(3) - .and change(vulnerabilities, :count).from(4).to(2) - - expect(vulnerability_findings.pluck(:id)).to match_array([finding_with_incorrect_uuid.id, finding_with_incorrect_uuid2.id, finding_with_incorrect_uuid3.id]) - end - - context 'if there are conflicting UUID values within the batch' do # rubocop: disable RSpec/MultipleMemoizedHelpers - let(:end_id) { 
finding_with_broken_data_integrity.id } - let(:vulnerability_5) { create_vulnerability!(project_id: project.id, author_id: user.id) } - let(:different_project) { table(:projects).create!(namespace_id: namespace.id) } - let!(:identifier_with_broken_data_integrity) do - vulnerability_identifiers.create!( - project_id: different_project.id, - external_type: identifier_2, - external_id: identifier_2, - fingerprint: Gitlab::Database::ShaAttribute.serialize('4299e8ddd819f9bde9cfacf45716724c17b5ddf7'), - name: 'Identifier 2') - end - - let(:finding_with_broken_data_integrity) do - create_finding!( - vulnerability_id: vulnerability_5, - project_id: project.id, - scanner_id: scanner.id, - primary_identifier_id: identifier_with_broken_data_integrity.id, - report_type: 0, # "sast" - location_fingerprint: Gitlab::Database::ShaAttribute.serialize('ca41a2544e941a007a73a666cb0592b255316ab8'), # sha1('youshouldntusethis') - uuid: SecureRandom.uuid - ) - end - - it 'deletes the conflicting record' do - expect { subject }.to change { vulnerability_findings.find_by_id(finding_with_broken_data_integrity.id) }.to(nil) - end - end - - context 'if a conflicting UUID is found during the migration' do # rubocop:disable RSpec/MultipleMemoizedHelpers - let(:finding_class) { Gitlab::BackgroundMigration::RecalculateVulnerabilitiesOccurrencesUuid::VulnerabilitiesFinding } - let(:uuid) { '4564f9d5-3c6b-5cc3-af8c-7c25285362a7' } - - before do - exception = ActiveRecord::RecordNotUnique.new("(uuid)=(#{uuid})") - - call_count = 0 - allow(::Gitlab::Database::BulkUpdate).to receive(:execute) do - call_count += 1 - call_count.eql?(1) ? 
raise(exception) : {} - end - - allow(finding_class).to receive(:find_by).with(uuid: uuid).and_return(duplicate_not_in_the_same_batch) - end - - it 'retries the recalculation' do - subject - - expect(Gitlab::BackgroundMigration::RecalculateVulnerabilitiesOccurrencesUuid::VulnerabilitiesFinding) - .to have_received(:find_by).with(uuid: uuid).once - end - - it 'logs the conflict' do - expect_next_instance_of(Gitlab::BackgroundMigration::Logger) do |instance| - expect(instance).to receive(:info).exactly(6).times - end - - subject - end - - it 'marks the job as done' do - create_background_migration_job([start_id, end_id], :pending) - - subject - - expect(pending_jobs.count).to eq(0) - expect(succeeded_jobs.count).to eq(1) - end - end - - it 'logs an exception if a different uniquness problem was found' do - exception = ActiveRecord::RecordNotUnique.new("Totally not an UUID uniqueness problem") - allow(::Gitlab::Database::BulkUpdate).to receive(:execute).and_raise(exception) - allow(Gitlab::ErrorTracking).to receive(:track_and_raise_exception) - - subject - - expect(Gitlab::ErrorTracking).to have_received(:track_and_raise_exception).with(exception).once - end - - it 'logs a duplicate found message' do - expect_next_instance_of(Gitlab::BackgroundMigration::Logger) do |instance| - expect(instance).to receive(:info).exactly(3).times - end - - subject - end - end - - context 'when finding has a signature' do - before do - @f1 = create_finding!( - vulnerability_id: nil, - project_id: project.id, - scanner_id: scanner.id, - primary_identifier_id: vulnerability_identifier.id, - report_type: 0, # "sast" - location_fingerprint: Gitlab::Database::ShaAttribute.serialize('ca41a2544e941a007a73a666cb0592b255316ab8'), # sha1('youshouldntusethis') - uuid: 'd15d774d-e4b1-5a1b-929b-19f2a53e35ec' - ) - - vulnerability_finding_signatures.create!( - finding_id: @f1.id, - algorithm_type: 2, # location - signature_sha: 
Gitlab::Database::ShaAttribute.serialize('57d4e05205f6462a73f039a5b2751aa1ab344e6e') # sha1('youshouldusethis') - ) - - vulnerability_finding_signatures.create!( - finding_id: @f1.id, - algorithm_type: 1, # hash - signature_sha: Gitlab::Database::ShaAttribute.serialize('c554d8d8df1a7a14319eafdaae24af421bf5b587') # sha1('andnotthis') - ) - - @f2 = create_finding!( - vulnerability_id: nil, - project_id: project.id, - scanner_id: scanner.id, - primary_identifier_id: vulnerability_identfier2.id, - report_type: 0, # "sast" - location_fingerprint: Gitlab::Database::ShaAttribute.serialize('ca41a2544e941a007a73a666cb0592b255316ab8'), # sha1('youshouldntusethis') - uuid: '4be029b5-75e5-5ac0-81a2-50ab41726135' - ) - - vulnerability_finding_signatures.create!( - finding_id: @f2.id, - algorithm_type: 2, # location - signature_sha: Gitlab::Database::ShaAttribute.serialize('57d4e05205f6462a73f039a5b2751aa1ab344e6e') # sha1('youshouldusethis') - ) - - vulnerability_finding_signatures.create!( - finding_id: @f2.id, - algorithm_type: 1, # hash - signature_sha: Gitlab::Database::ShaAttribute.serialize('c554d8d8df1a7a14319eafdaae24af421bf5b587') # sha1('andnotthis') - ) - end - - let(:start_id) { @f1.id } - let(:end_id) { @f2.id } - - let(:uuids_before) { [@f1.uuid, @f2.uuid] } - let(:uuids_after) { %w[d3b60ddd-d312-5606-b4d3-ad058eebeacb 349d9bec-c677-5530-a8ac-5e58889c3b1a] } - - it 'is recalculated using signature' do - expect(vulnerability_findings.pluck(:uuid)).to match_array(uuids_before) - - subject - - expect(vulnerability_findings.pluck(:uuid)).to match_array(uuids_after) - end - end - - context 'if all records are removed before the job ran' do - let(:start_id) { 1 } - let(:end_id) { 9 } - - before do - create_background_migration_job([start_id, end_id], :pending) - end - - it 'does not error out' do - expect { subject }.not_to raise_error - end - - it 'marks the job as done' do - subject - - expect(pending_jobs.count).to eq(0) - expect(succeeded_jobs.count).to eq(1) - end 
- end - - context 'when recalculation fails' do - before do - @uuid_v4 = create_finding!( - vulnerability_id: nil, - project_id: project.id, - scanner_id: scanner2.id, - primary_identifier_id: vulnerability_identfier2.id, - report_type: 0, # "sast" - location_fingerprint: Gitlab::Database::ShaAttribute.serialize("fa18f432f1d56675f4098d318739c3cd5b14eb3e"), - uuid: known_uuid_v4 - ) - - allow(Gitlab::ErrorTracking).to receive(:track_and_raise_exception) - allow(::Gitlab::Database::BulkUpdate).to receive(:execute).and_raise(expected_error) - end - - let(:start_id) { @uuid_v4.id } - let(:end_id) { @uuid_v4.id } - let(:expected_error) { RuntimeError.new } - - it 'captures the errors and does not crash entirely' do - expect { subject }.not_to raise_error - - allow(Gitlab::ErrorTracking).to receive(:track_and_raise_exception) - expect(Gitlab::ErrorTracking).to have_received(:track_and_raise_exception).with(expected_error).once - end - - it_behaves_like 'marks background migration job records' do - let(:arguments) { [1, 4] } - subject { described_class.new } - end - end - - it_behaves_like 'marks background migration job records' do - let(:arguments) { [1, 4] } - subject { described_class.new } - end - - private - - def create_vulnerability!(project_id:, author_id:, title: 'test', severity: 7, confidence: 7, report_type: 0) - vulnerabilities.create!( - project_id: project_id, - author_id: author_id, - title: title, - severity: severity, - confidence: confidence, - report_type: report_type - ) - end - - # rubocop:disable Metrics/ParameterLists - def create_finding!( - vulnerability_id:, project_id:, scanner_id:, primary_identifier_id:, id: nil, - name: "test", severity: 7, confidence: 7, report_type: 0, - project_fingerprint: '123qweasdzxc', location_fingerprint: 'test', - metadata_version: 'test', raw_metadata: 'test', uuid: SecureRandom.uuid) - vulnerability_findings.create!({ - id: id, - vulnerability_id: vulnerability_id, - project_id: project_id, - name: name, - 
severity: severity, - confidence: confidence, - report_type: report_type, - project_fingerprint: project_fingerprint, - scanner_id: scanner_id, - primary_identifier_id: primary_identifier_id, - location_fingerprint: location_fingerprint, - metadata_version: metadata_version, - raw_metadata: raw_metadata, - uuid: uuid - }.compact - ) - end - # rubocop:enable Metrics/ParameterLists - - def create_user!(name: "Example User", email: "user@example.com", user_type: nil, created_at: Time.zone.now, confirmed_at: Time.zone.now) - users.create!( - name: name, - email: email, - username: name, - projects_limit: 0, - user_type: user_type, - confirmed_at: confirmed_at - ) - end - - def create_finding_pipeline!(project_id:, finding_id:) - pipeline = table(:ci_pipelines).create!(project_id: project_id) - vulnerability_finding_pipelines.create!(pipeline_id: pipeline.id, occurrence_id: finding_id) - end -end diff --git a/spec/lib/gitlab/background_migration/remove_all_trace_expiration_dates_spec.rb b/spec/lib/gitlab/background_migration/remove_all_trace_expiration_dates_spec.rb deleted file mode 100644 index eabc012f98b..00000000000 --- a/spec/lib/gitlab/background_migration/remove_all_trace_expiration_dates_spec.rb +++ /dev/null @@ -1,54 +0,0 @@ -# frozen_string_literal: true - -require 'spec_helper' - -RSpec.describe Gitlab::BackgroundMigration::RemoveAllTraceExpirationDates, :migration, - :suppress_gitlab_schemas_validate_connection, schema: 20220131000001 do - subject(:perform) { migration.perform(1, 99) } - - let(:migration) { described_class.new } - - let(:trace_in_range) { create_trace!(id: 10, created_at: Date.new(2020, 06, 20), expire_at: Date.new(2021, 01, 22)) } - let(:trace_outside_range) { create_trace!(id: 40, created_at: Date.new(2020, 06, 22), expire_at: Date.new(2021, 01, 22)) } - let(:trace_without_expiry) { create_trace!(id: 30, created_at: Date.new(2020, 06, 21), expire_at: nil) } - let(:archive_in_range) { create_archive!(id: 10, created_at: Date.new(2020, 06, 
20), expire_at: Date.new(2021, 01, 22)) } - let(:trace_outside_id_range) { create_trace!(id: 100, created_at: Date.new(2020, 06, 20), expire_at: Date.new(2021, 01, 22)) } - - before do - table(:namespaces).create!(id: 1, name: 'the-namespace', path: 'the-path') - table(:projects).create!(id: 1, name: 'the-project', namespace_id: 1) - table(:ci_builds).create!(id: 1, allow_failure: false) - end - - context 'for self-hosted instances' do - it 'sets expire_at for artifacts in range to nil' do - expect { perform }.not_to change { trace_in_range.reload.expire_at } - end - - it 'does not change expire_at timestamps that are not set to midnight' do - expect { perform }.not_to change { trace_outside_range.reload.expire_at } - end - - it 'does not change expire_at timestamps that are set to midnight on a day other than the 22nd' do - expect { perform }.not_to change { trace_without_expiry.reload.expire_at } - end - - it 'does not touch artifacts outside id range' do - expect { perform }.not_to change { archive_in_range.reload.expire_at } - end - - it 'does not touch artifacts outside date range' do - expect { perform }.not_to change { trace_outside_id_range.reload.expire_at } - end - end - - private - - def create_trace!(**args) - table(:ci_job_artifacts).create!(**args, project_id: 1, job_id: 1, file_type: 3) - end - - def create_archive!(**args) - table(:ci_job_artifacts).create!(**args, project_id: 1, job_id: 1, file_type: 1) - end -end diff --git a/spec/lib/gitlab/background_migration/remove_duplicate_vulnerabilities_findings_spec.rb b/spec/lib/gitlab/background_migration/remove_duplicate_vulnerabilities_findings_spec.rb deleted file mode 100644 index ed08ae22245..00000000000 --- a/spec/lib/gitlab/background_migration/remove_duplicate_vulnerabilities_findings_spec.rb +++ /dev/null @@ -1,171 +0,0 @@ -# frozen_string_literal: true -require 'spec_helper' - -RSpec.describe Gitlab::BackgroundMigration::RemoveDuplicateVulnerabilitiesFindings, :migration, schema: 
20220326161803 do - let(:namespace) { table(:namespaces).create!(name: 'user', path: 'user') } - let(:users) { table(:users) } - let(:user) { create_user! } - let(:project) { table(:projects).create!(id: 14219619, namespace_id: namespace.id) } - let(:scanners) { table(:vulnerability_scanners) } - let!(:scanner1) { scanners.create!(project_id: project.id, external_id: 'test 1', name: 'test scanner 1') } - let!(:scanner2) { scanners.create!(project_id: project.id, external_id: 'test 2', name: 'test scanner 2') } - let!(:scanner3) { scanners.create!(project_id: project.id, external_id: 'test 3', name: 'test scanner 3') } - let!(:unrelated_scanner) { scanners.create!(project_id: project.id, external_id: 'unreleated_scanner', name: 'unrelated scanner') } - let(:vulnerabilities) { table(:vulnerabilities) } - let(:vulnerability_findings) { table(:vulnerability_occurrences) } - let(:vulnerability_identifiers) { table(:vulnerability_identifiers) } - let(:vulnerability_identifier) do - vulnerability_identifiers.create!( - id: 1244459, - project_id: project.id, - external_type: 'vulnerability-identifier', - external_id: 'vulnerability-identifier', - fingerprint: '0a203e8cd5260a1948edbedc76c7cb91ad6a2e45', - name: 'vulnerability identifier') - end - - let!(:vulnerability_for_first_duplicate) do - create_vulnerability!( - project_id: project.id, - author_id: user.id - ) - end - - let!(:first_finding_duplicate) do - create_finding!( - id: 5606961, - uuid: "bd95c085-71aa-51d7-9bb6-08ae669c262e", - vulnerability_id: vulnerability_for_first_duplicate.id, - report_type: 0, - location_fingerprint: '00049d5119c2cb3bfb3d1ee1f6e031fe925aed75', - primary_identifier_id: vulnerability_identifier.id, - scanner_id: scanner1.id, - project_id: project.id - ) - end - - let!(:vulnerability_for_second_duplicate) do - create_vulnerability!( - project_id: project.id, - author_id: user.id - ) - end - - let!(:second_finding_duplicate) do - create_finding!( - id: 8765432, - uuid: 
"5b714f58-1176-5b26-8fd5-e11dfcb031b5", - vulnerability_id: vulnerability_for_second_duplicate.id, - report_type: 0, - location_fingerprint: '00049d5119c2cb3bfb3d1ee1f6e031fe925aed75', - primary_identifier_id: vulnerability_identifier.id, - scanner_id: scanner2.id, - project_id: project.id - ) - end - - let!(:vulnerability_for_third_duplicate) do - create_vulnerability!( - project_id: project.id, - author_id: user.id - ) - end - - let!(:third_finding_duplicate) do - create_finding!( - id: 8832995, - uuid: "cfe435fa-b25b-5199-a56d-7b007cc9e2d4", - vulnerability_id: vulnerability_for_third_duplicate.id, - report_type: 0, - location_fingerprint: '00049d5119c2cb3bfb3d1ee1f6e031fe925aed75', - primary_identifier_id: vulnerability_identifier.id, - scanner_id: scanner3.id, - project_id: project.id - ) - end - - let!(:unrelated_finding) do - create_finding!( - id: 9999999, - uuid: Gitlab::UUID.v5(SecureRandom.hex), - vulnerability_id: nil, - report_type: 1, - location_fingerprint: 'random_location_fingerprint', - primary_identifier_id: vulnerability_identifier.id, - scanner_id: unrelated_scanner.id, - project_id: project.id - ) - end - - subject { described_class.new.perform(first_finding_duplicate.id, unrelated_finding.id) } - - before do - stub_const("#{described_class}::DELETE_BATCH_SIZE", 1) - end - - it "removes entries which would result in duplicate UUIDv5" do - expect(vulnerability_findings.count).to eq(4) - - expect { subject }.to change { vulnerability_findings.count }.from(4).to(2) - - expect(vulnerability_findings.pluck(:id)).to match_array([third_finding_duplicate.id, unrelated_finding.id]) - end - - it "removes vulnerabilites without findings" do - expect(vulnerabilities.count).to eq(3) - - expect { subject }.to change { vulnerabilities.count }.from(3).to(1) - - expect(vulnerabilities.pluck(:id)).to match_array([vulnerability_for_third_duplicate.id]) - end - - private - - def create_vulnerability!(project_id:, author_id:, title: 'test', severity: 7, 
confidence: 7, report_type: 0) - vulnerabilities.create!( - project_id: project_id, - author_id: author_id, - title: title, - severity: severity, - confidence: confidence, - report_type: report_type - ) - end - - # rubocop:disable Metrics/ParameterLists - def create_finding!( - vulnerability_id:, project_id:, scanner_id:, primary_identifier_id:, id: nil, - name: "test", severity: 7, confidence: 7, report_type: 0, - project_fingerprint: '123qweasdzxc', location_fingerprint: 'test', - metadata_version: 'test', raw_metadata: 'test', uuid: SecureRandom.uuid) - params = { - vulnerability_id: vulnerability_id, - project_id: project_id, - name: name, - severity: severity, - confidence: confidence, - report_type: report_type, - project_fingerprint: project_fingerprint, - scanner_id: scanner_id, - primary_identifier_id: vulnerability_identifier.id, - location_fingerprint: location_fingerprint, - metadata_version: metadata_version, - raw_metadata: raw_metadata, - uuid: uuid - } - params[:id] = id unless id.nil? 
- vulnerability_findings.create!(params) - end - # rubocop:enable Metrics/ParameterLists - - def create_user!(name: "Example User", email: "user@example.com", user_type: nil, created_at: Time.zone.now, confirmed_at: Time.zone.now) - users.create!( - name: name, - email: email, - username: name, - projects_limit: 0, - user_type: user_type, - confirmed_at: confirmed_at - ) - end -end diff --git a/spec/lib/gitlab/background_migration/remove_project_group_link_with_missing_groups_spec.rb b/spec/lib/gitlab/background_migration/remove_project_group_link_with_missing_groups_spec.rb new file mode 100644 index 00000000000..c45c402ab9d --- /dev/null +++ b/spec/lib/gitlab/background_migration/remove_project_group_link_with_missing_groups_spec.rb @@ -0,0 +1,124 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe Gitlab::BackgroundMigration::RemoveProjectGroupLinkWithMissingGroups, :migration, + feature_category: :subgroups, schema: 20230206172702 do + let(:projects) { table(:projects) } + let(:namespaces) { table(:namespaces) } + let(:project_group_links) { table(:project_group_links) } + + let!(:group) do + namespaces.create!( + name: 'Group0', type: 'Group', path: 'space0' + ) + end + + let!(:group_1) do + namespaces.create!( + name: 'Group1', type: 'Group', path: 'space1' + ) + end + + let!(:group_2) do + namespaces.create!( + name: 'Group2', type: 'Group', path: 'space2' + ) + end + + let!(:group_3) do + namespaces.create!( + name: 'Group3', type: 'Group', path: 'space3' + ) + end + + let!(:project_namespace_1) do + namespaces.create!( + name: 'project_1', path: 'project_1', type: 'Project' + ) + end + + let!(:project_namespace_2) do + namespaces.create!( + name: 'project_2', path: 'project_2', type: 'Project' + ) + end + + let!(:project_namespace_3) do + namespaces.create!( + name: 'project_3', path: 'project_3', type: 'Project' + ) + end + + let!(:project_1) do + projects.create!( + name: 'project_1', path: 'project_1', namespace_id: group.id, 
project_namespace_id: project_namespace_1.id + ) + end + + let!(:project_2) do + projects.create!( + name: 'project_2', path: 'project_2', namespace_id: group.id, project_namespace_id: project_namespace_2.id + ) + end + + let!(:project_3) do + projects.create!( + name: 'project_3', path: 'project_3', namespace_id: group.id, project_namespace_id: project_namespace_3.id + ) + end + + let!(:project_group_link_1) do + project_group_links.create!( + project_id: project_1.id, group_id: group_1.id, group_access: Gitlab::Access::DEVELOPER + ) + end + + let!(:project_group_link_2) do + project_group_links.create!( + project_id: project_2.id, group_id: group_2.id, group_access: Gitlab::Access::DEVELOPER + ) + end + + let!(:project_group_link_3) do + project_group_links.create!( + project_id: project_3.id, group_id: group_3.id, group_access: Gitlab::Access::DEVELOPER + ) + end + + let!(:project_group_link_4) do + project_group_links.create!( + project_id: project_3.id, group_id: group_2.id, group_access: Gitlab::Access::DEVELOPER + ) + end + + subject do + described_class.new( + start_id: project_group_link_1.id, + end_id: project_group_link_4.id, + batch_table: :project_group_links, + batch_column: :id, + sub_batch_size: 1, + pause_ms: 0, + connection: ApplicationRecord.connection + ).perform + end + + it 'removes the `project_group_links` records whose associated group does not exist anymore' do + group_2.delete + + # Schema is fixed to `20230206172702` on this spec. + # This expectation is needed to make sure that the orphaned records are indeed deleted via the migration + # and not via the foreign_key relationship introduced after `20230206172702`, in `20230207002330` + expect(project_group_links.count).to eq(4) + + expect { subject } + .to change { project_group_links.count }.from(4).to(2) + .and change { + project_group_links.where(project_id: project_2.id, group_id: group_2.id).present? 
+ }.from(true).to(false) + .and change { + project_group_links.where(project_id: project_3.id, group_id: group_2.id).present? + }.from(true).to(false) + end +end diff --git a/spec/lib/gitlab/background_migration/remove_vulnerability_finding_links_spec.rb b/spec/lib/gitlab/background_migration/remove_vulnerability_finding_links_spec.rb index 918df8f4442..da14381aae2 100644 --- a/spec/lib/gitlab/background_migration/remove_vulnerability_finding_links_spec.rb +++ b/spec/lib/gitlab/background_migration/remove_vulnerability_finding_links_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe Gitlab::BackgroundMigration::RemoveVulnerabilityFindingLinks, :migration, schema: 20211104165220 do +RSpec.describe Gitlab::BackgroundMigration::RemoveVulnerabilityFindingLinks, :migration, schema: 20220314184009 do let(:vulnerability_findings) { table(:vulnerability_occurrences) } let(:finding_links) { table(:vulnerability_finding_links) } diff --git a/spec/lib/gitlab/background_migration/steal_migrate_merge_request_diff_commit_users_spec.rb b/spec/lib/gitlab/background_migration/steal_migrate_merge_request_diff_commit_users_spec.rb deleted file mode 100644 index 841a7f306d7..00000000000 --- a/spec/lib/gitlab/background_migration/steal_migrate_merge_request_diff_commit_users_spec.rb +++ /dev/null @@ -1,50 +0,0 @@ -# frozen_string_literal: true - -require 'spec_helper' - -RSpec.describe Gitlab::BackgroundMigration::StealMigrateMergeRequestDiffCommitUsers, schema: 20211012134316 do - let(:migration) { described_class.new } - - describe '#perform' do - it 'processes the background migration' do - spy = instance_spy( - Gitlab::BackgroundMigration::MigrateMergeRequestDiffCommitUsers - ) - - allow(Gitlab::BackgroundMigration::MigrateMergeRequestDiffCommitUsers) - .to receive(:new) - .and_return(spy) - - expect(spy).to receive(:perform).with(1, 4) - expect(migration).to receive(:schedule_next_job) - - migration.perform(1, 4) - end - end - - describe '#schedule_next_job' do - it 
'schedules the next job in ascending order' do - Gitlab::Database::BackgroundMigrationJob.create!( - class_name: 'MigrateMergeRequestDiffCommitUsers', - arguments: [10, 20] - ) - - Gitlab::Database::BackgroundMigrationJob.create!( - class_name: 'MigrateMergeRequestDiffCommitUsers', - arguments: [40, 50] - ) - - expect(BackgroundMigrationWorker) - .to receive(:perform_in) - .with(5.minutes, 'StealMigrateMergeRequestDiffCommitUsers', [10, 20]) - - migration.schedule_next_job - end - - it 'does not schedule any new jobs when there are none' do - expect(BackgroundMigrationWorker).not_to receive(:perform_in) - - migration.schedule_next_job - end - end -end diff --git a/spec/lib/gitlab/background_migration/update_timelogs_null_spent_at_spec.rb b/spec/lib/gitlab/background_migration/update_timelogs_null_spent_at_spec.rb deleted file mode 100644 index 908f11aabc3..00000000000 --- a/spec/lib/gitlab/background_migration/update_timelogs_null_spent_at_spec.rb +++ /dev/null @@ -1,40 +0,0 @@ -# frozen_string_literal: true - -require 'spec_helper' - -RSpec.describe Gitlab::BackgroundMigration::UpdateTimelogsNullSpentAt, schema: 20211215090620 do - let!(:previous_time) { 10.days.ago } - let!(:namespace) { table(:namespaces).create!(name: 'namespace', path: 'namespace') } - let!(:project) { table(:projects).create!(namespace_id: namespace.id) } - let!(:issue) { table(:issues).create!(project_id: project.id) } - let!(:merge_request) { table(:merge_requests).create!(target_project_id: project.id, source_branch: 'master', target_branch: 'feature') } - let!(:timelog1) { create_timelog!(issue_id: issue.id) } - let!(:timelog2) { create_timelog!(merge_request_id: merge_request.id) } - let!(:timelog3) { create_timelog!(issue_id: issue.id, spent_at: previous_time) } - let!(:timelog4) { create_timelog!(merge_request_id: merge_request.id, spent_at: previous_time) } - - subject(:background_migration) { described_class.new } - - before do - table(:timelogs).where.not(id: [timelog3.id, 
timelog4.id]).update_all(spent_at: nil) - end - - describe '#perform' do - it 'sets correct spent_at' do - background_migration.perform(timelog1.id, timelog4.id) - - expect(timelog1.reload.spent_at).to be_like_time(timelog1.created_at) - expect(timelog2.reload.spent_at).to be_like_time(timelog2.created_at) - expect(timelog3.reload.spent_at).to be_like_time(previous_time) - expect(timelog4.reload.spent_at).to be_like_time(previous_time) - expect(timelog3.reload.spent_at).not_to be_like_time(timelog3.created_at) - expect(timelog4.reload.spent_at).not_to be_like_time(timelog4.created_at) - end - end - - private - - def create_timelog!(**args) - table(:timelogs).create!(**args, time_spent: 1) - end -end diff --git a/spec/lib/gitlab/background_migration/update_timelogs_project_id_spec.rb b/spec/lib/gitlab/background_migration/update_timelogs_project_id_spec.rb deleted file mode 100644 index b8c3bf8f3ac..00000000000 --- a/spec/lib/gitlab/background_migration/update_timelogs_project_id_spec.rb +++ /dev/null @@ -1,52 +0,0 @@ -# frozen_string_literal: true - -require 'spec_helper' - -RSpec.describe Gitlab::BackgroundMigration::UpdateTimelogsProjectId, schema: 20210826171758 do - let!(:namespace) { table(:namespaces).create!(name: 'namespace', path: 'namespace') } - let!(:project1) { table(:projects).create!(namespace_id: namespace.id) } - let!(:project2) { table(:projects).create!(namespace_id: namespace.id) } - let!(:issue1) { table(:issues).create!(project_id: project1.id) } - let!(:issue2) { table(:issues).create!(project_id: project2.id) } - let!(:merge_request1) { table(:merge_requests).create!(target_project_id: project1.id, source_branch: 'master', target_branch: 'feature') } - let!(:merge_request2) { table(:merge_requests).create!(target_project_id: project2.id, source_branch: 'master', target_branch: 'feature') } - let!(:timelog1) { table(:timelogs).create!(issue_id: issue1.id, time_spent: 60) } - let!(:timelog2) { table(:timelogs).create!(issue_id: issue1.id, 
time_spent: 60) } - let!(:timelog3) { table(:timelogs).create!(issue_id: issue2.id, time_spent: 60) } - let!(:timelog4) { table(:timelogs).create!(merge_request_id: merge_request1.id, time_spent: 600) } - let!(:timelog5) { table(:timelogs).create!(merge_request_id: merge_request1.id, time_spent: 600) } - let!(:timelog6) { table(:timelogs).create!(merge_request_id: merge_request2.id, time_spent: 600) } - let!(:timelog7) { table(:timelogs).create!(issue_id: issue2.id, time_spent: 60, project_id: project1.id) } - let!(:timelog8) { table(:timelogs).create!(merge_request_id: merge_request2.id, time_spent: 600, project_id: project1.id) } - - describe '#perform' do - context 'when timelogs belong to issues' do - it 'sets correct project_id' do - subject.perform(timelog1.id, timelog3.id) - - expect(timelog1.reload.project_id).to eq(issue1.project_id) - expect(timelog2.reload.project_id).to eq(issue1.project_id) - expect(timelog3.reload.project_id).to eq(issue2.project_id) - end - end - - context 'when timelogs belong to merge requests' do - it 'sets correct project ids' do - subject.perform(timelog4.id, timelog6.id) - - expect(timelog4.reload.project_id).to eq(merge_request1.target_project_id) - expect(timelog5.reload.project_id).to eq(merge_request1.target_project_id) - expect(timelog6.reload.project_id).to eq(merge_request2.target_project_id) - end - end - - context 'when timelogs already belong to projects' do - it 'does not update the project id' do - subject.perform(timelog7.id, timelog8.id) - - expect(timelog7.reload.project_id).to eq(project1.id) - expect(timelog8.reload.project_id).to eq(project1.id) - end - end - end -end diff --git a/spec/lib/gitlab/background_migration/update_users_where_two_factor_auth_required_from_group_spec.rb b/spec/lib/gitlab/background_migration/update_users_where_two_factor_auth_required_from_group_spec.rb deleted file mode 100644 index f16ae489b78..00000000000 --- 
a/spec/lib/gitlab/background_migration/update_users_where_two_factor_auth_required_from_group_spec.rb +++ /dev/null @@ -1,84 +0,0 @@ -# frozen_string_literal: true - -require 'spec_helper' - -RSpec.describe Gitlab::BackgroundMigration::UpdateUsersWhereTwoFactorAuthRequiredFromGroup, :migration, schema: 20210826171758 do - include MigrationHelpers::NamespacesHelpers - - let(:group_with_2fa_parent) { create_namespace('parent', Gitlab::VisibilityLevel::PRIVATE, require_two_factor_authentication: true) } - let(:group_with_2fa_child) { create_namespace('child', Gitlab::VisibilityLevel::PRIVATE, parent_id: group_with_2fa_parent.id) } - let(:members_table) { table(:members) } - let(:users_table) { table(:users) } - - subject { described_class.new } - - describe '#perform' do - context 'with group members' do - let(:user_1) { create_user('user@example.com') } - let!(:member) { create_group_member(user_1, group_with_2fa_parent) } - let!(:user_without_group) { create_user('user_without@example.com') } - let(:user_other) { create_user('user_other@example.com') } - let!(:member_other) { create_group_member(user_other, group_with_2fa_parent) } - - it 'updates user when user should be required to establish two factor authentication' do - subject.perform(user_1.id, user_without_group.id) - - expect(user_1.reload.require_two_factor_authentication_from_group).to eq(true) - end - - it 'does not update user who is not in current batch' do - subject.perform(user_1.id, user_without_group.id) - - expect(user_other.reload.require_two_factor_authentication_from_group).to eq(false) - end - - it 'updates all users in current batch' do - subject.perform(user_1.id, user_other.id) - - expect(user_other.reload.require_two_factor_authentication_from_group).to eq(true) - end - - it 'updates user when user is member of group in which parent group requires two factor authentication' do - member.destroy! 
- - subgroup = create_namespace('subgroup', Gitlab::VisibilityLevel::PRIVATE, require_two_factor_authentication: false, parent_id: group_with_2fa_child.id) - create_group_member(user_1, subgroup) - - subject.perform(user_1.id, user_other.id) - - expect(user_1.reload.require_two_factor_authentication_from_group).to eq(true) - end - - it 'updates user when user is member of a group and the subgroup requires two factor authentication' do - member.destroy! - - parent = create_namespace('other_parent', Gitlab::VisibilityLevel::PRIVATE, require_two_factor_authentication: false) - create_namespace('other_subgroup', Gitlab::VisibilityLevel::PRIVATE, require_two_factor_authentication: true, parent_id: parent.id) - create_group_member(user_1, parent) - - subject.perform(user_1.id, user_other.id) - - expect(user_1.reload.require_two_factor_authentication_from_group).to eq(true) - end - - it 'does not update user when not a member of a group that requires two factor authentication' do - member_other.destroy! 
- - other_group = create_namespace('other_group', Gitlab::VisibilityLevel::PRIVATE, require_two_factor_authentication: false) - create_group_member(user_other, other_group) - - subject.perform(user_1.id, user_other.id) - - expect(user_other.reload.require_two_factor_authentication_from_group).to eq(false) - end - end - end - - def create_user(email, require_2fa: false) - users_table.create!(email: email, projects_limit: 10, require_two_factor_authentication_from_group: require_2fa) - end - - def create_group_member(user, group) - members_table.create!(user_id: user.id, source_id: group.id, access_level: GroupMember::MAINTAINER, source_type: "Namespace", type: "GroupMember", notification_level: 3) - end -end diff --git a/spec/lib/gitlab/bullet/exclusions_spec.rb b/spec/lib/gitlab/bullet/exclusions_spec.rb index 325b0167f58..ccedfee28c7 100644 --- a/spec/lib/gitlab/bullet/exclusions_spec.rb +++ b/spec/lib/gitlab/bullet/exclusions_spec.rb @@ -3,7 +3,7 @@ require 'fast_spec_helper' require 'tempfile' -RSpec.describe Gitlab::Bullet::Exclusions do +RSpec.describe Gitlab::Bullet::Exclusions, feature_category: :application_performance do let(:config_file) do file = Tempfile.new('bullet.yml') File.basename(file) @@ -78,6 +78,19 @@ RSpec.describe Gitlab::Bullet::Exclusions do expect(described_class.new('_some_bogus_file_').execute).to match([]) end end + + context 'with a Symbol' do + let(:exclude) { [] } + let(:config) { { exclusions: { abc: { exclude: exclude } } } } + + before do + File.write(config_file, YAML.dump(config)) + end + + it 'raises an exception' do + expect { executor }.to raise_error(Psych::DisallowedClass) + end + end end describe '#validate_paths!' 
do diff --git a/spec/lib/gitlab/cache/client_spec.rb b/spec/lib/gitlab/cache/client_spec.rb index ec22fcdee7e..638fed1a905 100644 --- a/spec/lib/gitlab/cache/client_spec.rb +++ b/spec/lib/gitlab/cache/client_spec.rb @@ -8,14 +8,12 @@ RSpec.describe Gitlab::Cache::Client, feature_category: :source_code_management let(:backend) { Rails.cache } let(:metadata) do Gitlab::Cache::Metadata.new( - caller_id: caller_id, cache_identifier: cache_identifier, feature_category: feature_category, backing_resource: backing_resource ) end - let(:caller_id) { 'caller-id' } let(:cache_identifier) { 'MyClass#cache' } let(:feature_category) { :source_code_management } let(:backing_resource) { :cpu } @@ -32,7 +30,6 @@ RSpec.describe Gitlab::Cache::Client, feature_category: :source_code_management describe '.build_with_metadata' do it 'builds a cache client with metrics support' do attributes = { - caller_id: caller_id, cache_identifier: cache_identifier, feature_category: feature_category, backing_resource: backing_resource diff --git a/spec/lib/gitlab/cache/metadata_spec.rb b/spec/lib/gitlab/cache/metadata_spec.rb index 2e8af7a9c44..d2b79fb8b08 100644 --- a/spec/lib/gitlab/cache/metadata_spec.rb +++ b/spec/lib/gitlab/cache/metadata_spec.rb @@ -5,24 +5,18 @@ require 'spec_helper' RSpec.describe Gitlab::Cache::Metadata, feature_category: :source_code_management do subject(:attributes) do described_class.new( - caller_id: caller_id, cache_identifier: cache_identifier, feature_category: feature_category, backing_resource: backing_resource ) end - let(:caller_id) { 'caller-id' } let(:cache_identifier) { 'ApplicationController#show' } let(:feature_category) { :source_code_management } let(:backing_resource) { :unknown } describe '#initialize' do context 'when optional arguments are not set' do - before do - Gitlab::ApplicationContext.push(caller_id: 'context-id') - end - it 'sets default value for them' do attributes = described_class.new( cache_identifier: cache_identifier, @@ -30,7 +24,6 
@@ RSpec.describe Gitlab::Cache::Metadata, feature_category: :source_code_managemen ) expect(attributes.backing_resource).to eq(:unknown) - expect(attributes.caller_id).to eq('context-id') end end @@ -68,12 +61,6 @@ RSpec.describe Gitlab::Cache::Metadata, feature_category: :source_code_managemen end end - describe '#caller_id' do - subject { attributes.caller_id } - - it { is_expected.to eq caller_id } - end - describe '#cache_identifier' do subject { attributes.cache_identifier } diff --git a/spec/lib/gitlab/cache/metrics_spec.rb b/spec/lib/gitlab/cache/metrics_spec.rb index 24b274f4209..76ec0dbfa0b 100644 --- a/spec/lib/gitlab/cache/metrics_spec.rb +++ b/spec/lib/gitlab/cache/metrics_spec.rb @@ -7,14 +7,12 @@ RSpec.describe Gitlab::Cache::Metrics do let(:metadata) do Gitlab::Cache::Metadata.new( - caller_id: caller_id, cache_identifier: cache_identifier, feature_category: feature_category, backing_resource: backing_resource ) end - let(:caller_id) { 'caller-id' } let(:cache_identifier) { 'ApplicationController#show' } let(:feature_category) { :source_code_management } let(:backing_resource) { :unknown } @@ -37,7 +35,6 @@ RSpec.describe Gitlab::Cache::Metrics do .to receive(:increment) .with( { - caller_id: caller_id, cache_identifier: cache_identifier, feature_category: feature_category, backing_resource: backing_resource, @@ -57,7 +54,6 @@ RSpec.describe Gitlab::Cache::Metrics do .to receive(:increment) .with( { - caller_id: caller_id, cache_identifier: cache_identifier, feature_category: feature_category, backing_resource: backing_resource, @@ -86,7 +82,6 @@ RSpec.describe Gitlab::Cache::Metrics do :redis_cache_generation_duration_seconds, 'Duration of Redis cache generation', { - caller_id: caller_id, cache_identifier: cache_identifier, feature_category: feature_category, backing_resource: backing_resource diff --git a/spec/lib/gitlab/ci/ansi2json/signed_state_spec.rb b/spec/lib/gitlab/ci/ansi2json/signed_state_spec.rb new file mode 100644 index 
00000000000..671efdf5256 --- /dev/null +++ b/spec/lib/gitlab/ci/ansi2json/signed_state_spec.rb @@ -0,0 +1,67 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe Gitlab::Ci::Ansi2json::SignedState, feature_category: :continuous_integration do + def build_state(state_class) + state_class.new('', 1000).tap do |state| + state.offset = 1 + state.new_line!(style: { fg: 'some-fg', bg: 'some-bg', mask: 1234 }) + state.set_last_line_offset + state.open_section('hello', 111, {}) + end + end + + let(:state) { build_state(described_class) } + + describe '#initialize' do + it 'restores valid prior state', :aggregate_failures do + new_state = described_class.new(state.encode, 1000) + + expect(new_state.offset).to eq(1) + expect(new_state.inherited_style).to eq({ + bg: 'some-bg', + fg: 'some-fg', + mask: 1234 + }) + expect(new_state.open_sections).to eq({ 'hello' => 111 }) + end + + it 'ignores unsigned prior state', :aggregate_failures do + unsigned = build_state(Gitlab::Ci::Ansi2json::State).encode + expect(::Gitlab::AppLogger).to( + receive(:warn).with( + message: a_string_matching(/signature missing or invalid/), + invalid_state: unsigned + ) + ) + + new_state = described_class.new(unsigned, 0) + + expect(new_state.offset).to eq(0) + expect(new_state.inherited_style).to eq({}) + expect(new_state.open_sections).to eq({}) + end + + it 'ignores bad input', :aggregate_failures do + expect(::Gitlab::AppLogger).to( + receive(:warn).with( + message: a_string_matching(/signature missing or invalid/), + invalid_state: 'abcd' + ) + ) + + new_state = described_class.new('abcd', 0) + + expect(new_state.offset).to eq(0) + expect(new_state.inherited_style).to eq({}) + expect(new_state.open_sections).to eq({}) + end + end + + describe '#encode' do + it 'deterministically signs the state' do + expect(state.encode).to eq state.encode + end + end +end diff --git a/spec/lib/gitlab/ci/ansi2json/state_spec.rb b/spec/lib/gitlab/ci/ansi2json/state_spec.rb new file mode 
100644 index 00000000000..9b14231f1be --- /dev/null +++ b/spec/lib/gitlab/ci/ansi2json/state_spec.rb @@ -0,0 +1,83 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe Gitlab::Ci::Ansi2json::State, feature_category: :continuous_integration do + def build_state(state_class) + state_class.new('', 1000).tap do |state| + state.offset = 1 + state.new_line!(style: { fg: 'some-fg', bg: 'some-bg', mask: 1234 }) + state.set_last_line_offset + state.open_section('hello', 111, {}) + end + end + + let(:state) { build_state(described_class) } + + describe '#initialize' do + it 'restores valid prior state', :aggregate_failures do + new_state = described_class.new(state.encode, 1000) + + expect(new_state.offset).to eq(1) + expect(new_state.inherited_style).to eq({ + bg: 'some-bg', + fg: 'some-fg', + mask: 1234 + }) + expect(new_state.open_sections).to eq({ 'hello' => 111 }) + end + + it 'ignores signed state' do + signed_state = Gitlab::Ci::Ansi2json::SignedState.new('', 1000) + signed_state.offset = 1 + signed_state.new_line!(style: { fg: 'some-fg', bg: 'some-bg', mask: 1234 }) + signed_state.set_last_line_offset + signed_state.open_section('hello', 111, {}) + + encoded = signed_state.encode + expect(::Gitlab::AppLogger).to( + receive(:warn).with( + message: a_string_matching(/decode error/), + invalid_state: encoded, + error: an_instance_of(JSON::ParserError) + ) + ) + new_state = described_class.new(encoded, 1000) + expect(new_state.offset).to eq(0) + expect(new_state.inherited_style).to eq({}) + expect(new_state.open_sections).to eq({}) + end + + it 'ignores invalid Base64 and logs a warning', :aggregate_failures do + expect(::Gitlab::AppLogger).to( + receive(:warn).with( + message: a_string_matching(/decode error/), + invalid_state: '.', + error: an_instance_of(ArgumentError) + ) + ) + + new_state = described_class.new('.', 0) + + expect(new_state.offset).to eq(0) + expect(new_state.inherited_style).to eq({}) + expect(new_state.open_sections).to 
eq({}) + end + + it 'ignores invalid JSON and logs a warning', :aggregate_failures do + encoded = Base64.urlsafe_encode64('.') + expect(::Gitlab::AppLogger).to( + receive(:warn).with( + message: a_string_matching(/decode error/), + invalid_state: encoded, + error: an_instance_of(JSON::ParserError) + ) + ) + + new_state = described_class.new(encoded, 0) + expect(new_state.offset).to eq(0) + expect(new_state.inherited_style).to eq({}) + expect(new_state.open_sections).to eq({}) + end + end +end diff --git a/spec/lib/gitlab/ci/ansi2json_spec.rb b/spec/lib/gitlab/ci/ansi2json_spec.rb index 0f8f3759834..12eeb8f6cac 100644 --- a/spec/lib/gitlab/ci/ansi2json_spec.rb +++ b/spec/lib/gitlab/ci/ansi2json_spec.rb @@ -2,10 +2,26 @@ require 'spec_helper' -RSpec.describe Gitlab::Ci::Ansi2json do +RSpec.describe Gitlab::Ci::Ansi2json, feature_category: :continuous_integration do subject { described_class } describe 'lines' do + describe 'verify_state' do + it 'uses SignedState when true' do + expect(Gitlab::Ci::Ansi2json::State).not_to receive(:new) + expect(Gitlab::Ci::Ansi2json::SignedState).to receive(:new).and_call_original + + described_class.convert(StringIO.new('data'), verify_state: true) + end + + it 'uses State when false' do + expect(Gitlab::Ci::Ansi2json::State).to receive(:new).and_call_original + expect(Gitlab::Ci::Ansi2json::SignedState).not_to receive(:new) + + described_class.convert(StringIO.new('data'), verify_state: false) + end + end + it 'prints non-ansi as-is' do expect(convert_json('Hello')).to eq([{ offset: 0, content: [{ text: 'Hello' }] }]) end diff --git a/spec/lib/gitlab/ci/build/cache_spec.rb b/spec/lib/gitlab/ci/build/cache_spec.rb index a8fa14b4b4c..bfb8fb7f21c 100644 --- a/spec/lib/gitlab/ci/build/cache_spec.rb +++ b/spec/lib/gitlab/ci/build/cache_spec.rb @@ -3,16 +3,21 @@ require 'spec_helper' RSpec.describe Gitlab::Ci::Build::Cache do + let(:cache_config) { [] } + let(:pipeline) { double(::Ci::Pipeline) } + let(:cache_seed_a) { 
double(Gitlab::Ci::Pipeline::Seed::Build::Cache) } + let(:cache_seed_b) { double(Gitlab::Ci::Pipeline::Seed::Build::Cache) } + + subject(:cache) { described_class.new(cache_config, pipeline) } + describe '.initialize' do context 'when the cache is an array' do + let(:cache_config) { [{ key: 'key-a' }, { key: 'key-b' }] } + it 'instantiates an array of cache seeds' do - cache_config = [{ key: 'key-a' }, { key: 'key-b' }] - pipeline = double(::Ci::Pipeline) - cache_seed_a = double(Gitlab::Ci::Pipeline::Seed::Build::Cache) - cache_seed_b = double(Gitlab::Ci::Pipeline::Seed::Build::Cache) allow(Gitlab::Ci::Pipeline::Seed::Build::Cache).to receive(:new).and_return(cache_seed_a, cache_seed_b) - cache = described_class.new(cache_config, pipeline) + cache expect(Gitlab::Ci::Pipeline::Seed::Build::Cache).to have_received(:new).with(pipeline, { key: 'key-a' }, 0) expect(Gitlab::Ci::Pipeline::Seed::Build::Cache).to have_received(:new).with(pipeline, { key: 'key-b' }, 1) @@ -21,16 +26,49 @@ RSpec.describe Gitlab::Ci::Build::Cache do end context 'when the cache is a hash' do + let(:cache_config) { { key: 'key-a' } } + it 'instantiates a cache seed' do - cache_config = { key: 'key-a' } - pipeline = double(::Ci::Pipeline) - cache_seed = double(Gitlab::Ci::Pipeline::Seed::Build::Cache) - allow(Gitlab::Ci::Pipeline::Seed::Build::Cache).to receive(:new).and_return(cache_seed) + allow(Gitlab::Ci::Pipeline::Seed::Build::Cache).to receive(:new).and_return(cache_seed_a) - cache = described_class.new(cache_config, pipeline) + cache expect(Gitlab::Ci::Pipeline::Seed::Build::Cache).to have_received(:new).with(pipeline, cache_config, 0) - expect(cache.instance_variable_get(:@cache)).to eq([cache_seed]) + expect(cache.instance_variable_get(:@cache)).to eq([cache_seed_a]) + end + end + + context 'when the cache is an array with files inside hashes' do + let(:cache_config) { [{ key: { files: ['file1.json'] } }, { key: { files: ['file1.json', 'file2.json'] } }] } + + context 'with 
ci_fix_for_runner_cache_prefix disabled' do + before do + stub_feature_flags(ci_fix_for_runner_cache_prefix: false) + end + + it 'instantiates a cache seed' do + allow(Gitlab::Ci::Pipeline::Seed::Build::Cache).to receive(:new).and_return(cache_seed_a, cache_seed_b) + + cache + + expect(Gitlab::Ci::Pipeline::Seed::Build::Cache).to have_received(:new) + .with(pipeline, cache_config.first, 0) + expect(Gitlab::Ci::Pipeline::Seed::Build::Cache).to have_received(:new) + .with(pipeline, cache_config.second, 1) + expect(cache.instance_variable_get(:@cache)).to match_array([cache_seed_a, cache_seed_b]) + end + end + + it 'instantiates a cache seed' do + allow(Gitlab::Ci::Pipeline::Seed::Build::Cache).to receive(:new).and_return(cache_seed_a, cache_seed_b) + + cache + + expect(Gitlab::Ci::Pipeline::Seed::Build::Cache).to have_received(:new) + .with(pipeline, cache_config.first, '0_file1') + expect(Gitlab::Ci::Pipeline::Seed::Build::Cache).to have_received(:new) + .with(pipeline, cache_config.second, '1_file1_file2') + expect(cache.instance_variable_get(:@cache)).to match_array([cache_seed_a, cache_seed_b]) end end end @@ -38,10 +76,6 @@ RSpec.describe Gitlab::Ci::Build::Cache do describe '#cache_attributes' do context 'when there are no caches' do it 'returns an empty hash' do - cache_config = [] - pipeline = double(::Ci::Pipeline) - cache = described_class.new(cache_config, pipeline) - attributes = cache.cache_attributes expect(attributes).to eq({}) @@ -51,7 +85,6 @@ RSpec.describe Gitlab::Ci::Build::Cache do context 'when there are caches' do it 'returns the structured attributes for the caches' do cache_config = [{ key: 'key-a' }, { key: 'key-b' }] - pipeline = double(::Ci::Pipeline) cache = described_class.new(cache_config, pipeline) attributes = cache.cache_attributes diff --git a/spec/lib/gitlab/ci/build/context/build_spec.rb b/spec/lib/gitlab/ci/build/context/build_spec.rb index 4fdeffb033a..d4a2af0015f 100644 --- a/spec/lib/gitlab/ci/build/context/build_spec.rb +++ 
b/spec/lib/gitlab/ci/build/context/build_spec.rb @@ -13,14 +13,29 @@ RSpec.describe Gitlab::Ci::Build::Context::Build, feature_category: :pipeline_co it { is_expected.to include('CI_PIPELINE_IID' => pipeline.iid.to_s) } it { is_expected.to include('CI_PROJECT_PATH' => pipeline.project.full_path) } it { is_expected.to include('CI_JOB_NAME' => 'some-job') } - it { is_expected.to include('CI_BUILD_REF_NAME' => 'master') } + + context 'when FF `ci_remove_legacy_predefined_variables` is disabled' do + before do + stub_feature_flags(ci_remove_legacy_predefined_variables: false) + end + + it { is_expected.to include('CI_BUILD_REF_NAME' => 'master') } + end context 'without passed build-specific attributes' do let(:context) { described_class.new(pipeline) } - it { is_expected.to include('CI_JOB_NAME' => nil) } - it { is_expected.to include('CI_BUILD_REF_NAME' => 'master') } - it { is_expected.to include('CI_PROJECT_PATH' => pipeline.project.full_path) } + it { is_expected.to include('CI_JOB_NAME' => nil) } + it { is_expected.to include('CI_COMMIT_REF_NAME' => 'master') } + it { is_expected.to include('CI_PROJECT_PATH' => pipeline.project.full_path) } + + context 'when FF `ci_remove_legacy_predefined_variables` is disabled' do + before do + stub_feature_flags(ci_remove_legacy_predefined_variables: false) + end + + it { is_expected.to include('CI_BUILD_REF_NAME' => 'master') } + end end context 'when environment:name is provided' do diff --git a/spec/lib/gitlab/ci/build/context/global_spec.rb b/spec/lib/gitlab/ci/build/context/global_spec.rb index d4141eb8389..328b5eb62fa 100644 --- a/spec/lib/gitlab/ci/build/context/global_spec.rb +++ b/spec/lib/gitlab/ci/build/context/global_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe Gitlab::Ci::Build::Context::Global do +RSpec.describe Gitlab::Ci::Build::Context::Global, feature_category: :pipeline_composition do let(:pipeline) { create(:ci_pipeline) } let(:yaml_variables) { {} } @@ -14,7 +14,14 @@ RSpec.describe 
Gitlab::Ci::Build::Context::Global do it { is_expected.to include('CI_PROJECT_PATH' => pipeline.project.full_path) } it { is_expected.not_to have_key('CI_JOB_NAME') } - it { is_expected.not_to have_key('CI_BUILD_REF_NAME') } + + context 'when FF `ci_remove_legacy_predefined_variables` is disabled' do + before do + stub_feature_flags(ci_remove_legacy_predefined_variables: false) + end + + it { is_expected.not_to have_key('CI_BUILD_REF_NAME') } + end context 'with passed yaml variables' do let(:yaml_variables) { [{ key: 'SUPPORTED', value: 'parsed', public: true }] } diff --git a/spec/lib/gitlab/ci/components/header_spec.rb b/spec/lib/gitlab/ci/components/header_spec.rb deleted file mode 100644 index b1af4ca9238..00000000000 --- a/spec/lib/gitlab/ci/components/header_spec.rb +++ /dev/null @@ -1,50 +0,0 @@ -# frozen_string_literal: true - -require 'fast_spec_helper' - -RSpec.describe Gitlab::Ci::Components::Header, feature_category: :pipeline_composition do - subject { described_class.new(spec) } - - context 'when spec is valid' do - let(:spec) do - { - spec: { - inputs: { - website: nil, - run: { - options: %w[opt1 opt2] - } - } - } - } - end - - it 'fabricates a spec from valid data' do - expect(subject).not_to be_empty - end - - describe '#inputs' do - it 'fabricates input data' do - input = subject.inputs({ website: 'https//gitlab.com', run: 'opt1' }) - - expect(input.count).to eq 2 - end - end - - describe '#context' do - it 'fabricates interpolation context' do - ctx = subject.context({ website: 'https//gitlab.com', run: 'opt1' }) - - expect(ctx).to be_valid - end - end - end - - context 'when spec is empty' do - let(:spec) { { spec: {} } } - - it 'returns an empty header' do - expect(subject).to be_empty - end - end -end diff --git a/spec/lib/gitlab/ci/components/instance_path_spec.rb b/spec/lib/gitlab/ci/components/instance_path_spec.rb index fbe5e0b9d42..e037c37c817 100644 --- a/spec/lib/gitlab/ci/components/instance_path_spec.rb +++ 
b/spec/lib/gitlab/ci/components/instance_path_spec.rb @@ -98,6 +98,37 @@ RSpec.describe Gitlab::Ci::Components::InstancePath, feature_category: :pipeline end end + context 'when version is `~latest`' do + let(:version) { '~latest' } + + context 'when project is a catalog resource' do + before do + create(:catalog_resource, project: existing_project) + end + + context 'when project has releases' do + let_it_be(:releases) do + [ + create(:release, project: existing_project, sha: 'sha-1', released_at: Time.zone.now - 1.day), + create(:release, project: existing_project, sha: 'sha-2', released_at: Time.zone.now) + ] + end + + it 'returns the sha of the latest release' do + expect(path.sha).to eq(releases.last.sha) + end + end + + context 'when project does not have releases' do + it { expect(path.sha).to be_nil } + end + end + + context 'when project is not a catalog resource' do + it { expect(path.sha).to be_nil } + end + end + context 'when project does not exist' do let(:project_path) { 'non-existent/project' } diff --git a/spec/lib/gitlab/ci/config/entry/job_spec.rb b/spec/lib/gitlab/ci/config/entry/job_spec.rb index c8b4a8b8a0e..39a88fc7721 100644 --- a/spec/lib/gitlab/ci/config/entry/job_spec.rb +++ b/spec/lib/gitlab/ci/config/entry/job_spec.rb @@ -595,6 +595,39 @@ RSpec.describe Gitlab::Ci::Config::Entry::Job, feature_category: :pipeline_compo end end end + + context 'when job is not a pages job' do + let(:name) { :rspec } + + context 'if the config contains a publish entry' do + let(:entry) { described_class.new({ script: 'echo', publish: 'foo' }, name: name) } + + it 'is invalid' do + expect(entry).not_to be_valid + expect(entry.errors).to include /job publish can only be used within a `pages` job/ + end + end + end + + context 'when job is a pages job' do + let(:name) { :pages } + + context 'when it does not have a publish entry' do + let(:entry) { described_class.new({ script: 'echo' }, name: name) } + + it 'is valid' do + expect(entry).to be_valid + end + 
end + + context 'when it has a publish entry' do + let(:entry) { described_class.new({ script: 'echo', publish: 'foo' }, name: name) } + + it 'is valid' do + expect(entry).to be_valid + end + end + end end describe '#relevant?' do diff --git a/spec/lib/gitlab/ci/config/entry/publish_spec.rb b/spec/lib/gitlab/ci/config/entry/publish_spec.rb new file mode 100644 index 00000000000..53ad868a05e --- /dev/null +++ b/spec/lib/gitlab/ci/config/entry/publish_spec.rb @@ -0,0 +1,40 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe Gitlab::Ci::Config::Entry::Publish, feature_category: :pages do + let(:publish) { described_class.new(config) } + + describe 'validations' do + context 'when publish config value is correct' do + let(:config) { 'dist/static' } + + describe '#config' do + it 'returns the publish directory' do + expect(publish.config).to eq config + end + end + + describe '#valid?' do + it 'is valid' do + expect(publish).to be_valid + end + end + end + + context 'when the value has a wrong type' do + let(:config) { { test: true } } + + it 'reports an error' do + expect(publish.errors) + .to include 'publish config should be a string' + end + end + end + + describe '.default' do + it 'returns the default value' do + expect(described_class.default).to eq 'public' + end + end +end diff --git a/spec/lib/gitlab/ci/config/external/file/artifact_spec.rb b/spec/lib/gitlab/ci/config/external/file/artifact_spec.rb index 52b8dcbcb44..ea1e42de901 100644 --- a/spec/lib/gitlab/ci/config/external/file/artifact_spec.rb +++ b/spec/lib/gitlab/ci/config/external/file/artifact_spec.rb @@ -4,9 +4,11 @@ require 'spec_helper' RSpec.describe Gitlab::Ci::Config::External::File::Artifact, feature_category: :pipeline_composition do let(:parent_pipeline) { create(:ci_pipeline) } + let(:project) { parent_pipeline.project } let(:variables) {} let(:context) do - Gitlab::Ci::Config::External::Context.new(variables: variables, parent_pipeline: parent_pipeline) + 
Gitlab::Ci::Config::External::Context + .new(variables: variables, parent_pipeline: parent_pipeline, project: project) end let(:external_file) { described_class.new(params, context) } @@ -43,7 +45,7 @@ RSpec.describe Gitlab::Ci::Config::External::File::Artifact, feature_category: : end describe 'when used in non child pipeline context' do - let(:parent_pipeline) { nil } + let(:context) { Gitlab::Ci::Config::External::Context.new } let(:params) { { artifact: 'generated.yml' } } let(:expected_error) do @@ -201,7 +203,7 @@ RSpec.describe Gitlab::Ci::Config::External::File::Artifact, feature_category: : it { is_expected.to eq( - context_project: nil, + context_project: project.full_path, context_sha: nil, type: :artifact, location: 'generated.yml', @@ -218,7 +220,7 @@ RSpec.describe Gitlab::Ci::Config::External::File::Artifact, feature_category: : it { is_expected.to eq( - context_project: nil, + context_project: project.full_path, context_sha: nil, type: :artifact, location: 'generated.yml', @@ -227,4 +229,35 @@ RSpec.describe Gitlab::Ci::Config::External::File::Artifact, feature_category: : } end end + + describe '#to_hash' do + context 'when interpolation is being used' do + let!(:job) { create(:ci_build, name: 'generator', pipeline: parent_pipeline) } + let!(:artifacts) { create(:ci_job_artifact, :archive, job: job) } + let!(:metadata) { create(:ci_job_artifact, :metadata, job: job) } + + before do + allow_next_instance_of(Gitlab::Ci::ArtifactFileReader) do |reader| + allow(reader).to receive(:read).and_return(template) + end + end + + let(:template) do + <<~YAML + spec: + inputs: + env: + --- + deploy: + script: deploy $[[ inputs.env ]] + YAML + end + + let(:params) { { artifact: 'generated.yml', job: 'generator', with: { env: 'production' } } } + + it 'correctly interpolates content' do + expect(external_file.to_hash).to eq({ deploy: { script: 'deploy production' } }) + end + end + end end diff --git a/spec/lib/gitlab/ci/config/external/file/base_spec.rb 
b/spec/lib/gitlab/ci/config/external/file/base_spec.rb index 959dcdf31af..1c5918f77ca 100644 --- a/spec/lib/gitlab/ci/config/external/file/base_spec.rb +++ b/spec/lib/gitlab/ci/config/external/file/base_spec.rb @@ -3,14 +3,15 @@ require 'spec_helper' RSpec.describe Gitlab::Ci::Config::External::File::Base, feature_category: :pipeline_composition do + let_it_be(:project) { create(:project) } let(:variables) {} - let(:context_params) { { sha: 'HEAD', variables: variables } } - let(:context) { Gitlab::Ci::Config::External::Context.new(**context_params) } + let(:context_params) { { sha: 'HEAD', variables: variables, project: project } } + let(:ctx) { Gitlab::Ci::Config::External::Context.new(**context_params) } let(:test_class) do Class.new(described_class) do - def initialize(params, context) - @location = params + def initialize(params, ctx) + @location = params[:location] super end @@ -18,15 +19,18 @@ RSpec.describe Gitlab::Ci::Config::External::File::Base, feature_category: :pipe def validate_context! # no-op end + + def content + params[:content] + end end end - subject(:file) { test_class.new(location, context) } + let(:content) { 'key: value' } - before do - allow_any_instance_of(test_class) - .to receive(:content).and_return('key: value') + subject(:file) { test_class.new({ location: location, content: content }, ctx) } + before do allow_any_instance_of(Gitlab::Ci::Config::External::Context) .to receive(:check_execution_time!) end @@ -51,7 +55,7 @@ RSpec.describe Gitlab::Ci::Config::External::File::Base, feature_category: :pipe describe '#valid?' do subject(:valid?) do - Gitlab::Ci::Config::External::Mapper::Verifier.new(context).process([file]) + Gitlab::Ci::Config::External::Mapper::Verifier.new(ctx).process([file]) file.valid? 
end @@ -87,7 +91,12 @@ RSpec.describe Gitlab::Ci::Config::External::File::Base, feature_category: :pipe context 'when there are YAML syntax errors' do let(:location) { 'some/file/secret_file_name.yml' } - let(:variables) { Gitlab::Ci::Variables::Collection.new([{ 'key' => 'GITLAB_TOKEN', 'value' => 'secret_file_name', 'masked' => true }]) } + + let(:variables) do + Gitlab::Ci::Variables::Collection.new( + [{ 'key' => 'GITLAB_TOKEN', 'value' => 'secret_file_name', 'masked' => true }] + ) + end before do allow_any_instance_of(test_class) @@ -96,15 +105,16 @@ RSpec.describe Gitlab::Ci::Config::External::File::Base, feature_category: :pipe it 'is not a valid file' do expect(valid?).to be_falsy - expect(file.error_message).to eq('Included file `some/file/xxxxxxxxxxxxxxxx.yml` does not have valid YAML syntax!') + expect(file.error_message) + .to eq('`some/file/xxxxxxxxxxxxxxxx.yml`: content does not have a valid YAML syntax') end end context 'when the class has no validate_context!' do let(:test_class) do Class.new(described_class) do - def initialize(params, context) - @location = params + def initialize(params, ctx) + @location = params[:location] super end @@ -117,6 +127,88 @@ RSpec.describe Gitlab::Ci::Config::External::File::Base, feature_category: :pipe expect { valid? 
}.to raise_error(NotImplementedError) end end + + context 'when interpolation is disabled but there is a spec header' do + before do + stub_feature_flags(ci_includable_files_interpolation: false) + end + + let(:location) { 'some-location.yml' } + + let(:content) do + <<~YAML + spec: + include: + website: + --- + run: + script: deploy $[[ inputs.website ]] + YAML + end + + it 'returns an error saying that interpolation is disabled' do + expect(valid?).to be_falsy + expect(file.errors) + .to include('`some-location.yml`: can not evaluate included file because interpolation is disabled') + end + end + + context 'when interpolation was unsuccessful' do + let(:location) { 'some-location.yml' } + + context 'when context key is missing' do + let(:content) do + <<~YAML + spec: + inputs: + --- + run: + script: deploy $[[ inputs.abcd ]] + YAML + end + + it 'surfaces interpolation errors' do + expect(valid?).to be_falsy + expect(file.errors) + .to include('`some-location.yml`: interpolation interrupted by errors, unknown interpolation key: `abcd`') + end + end + + context 'when header is invalid' do + let(:content) do + <<~YAML + spec: + a: abc + --- + run: + script: deploy $[[ inputs.abcd ]] + YAML + end + + it 'surfaces header errors' do + expect(valid?).to be_falsy + expect(file.errors) + .to include('`some-location.yml`: header:spec config contains unknown keys: a') + end + end + + context 'when header is not a hash' do + let(:content) do + <<~YAML + spec: abcd + --- + run: + script: deploy $[[ inputs.abcd ]] + YAML + end + + it 'surfaces header errors' do + expect(valid?).to be_falsy + expect(file.errors) + .to contain_exactly('`some-location.yml`: header:spec config should be a hash') + end + end + end end describe '#to_hash' do @@ -142,7 +234,7 @@ RSpec.describe Gitlab::Ci::Config::External::File::Base, feature_category: :pipe it { is_expected.to eq( - context_project: nil, + context_project: project.full_path, context_sha: 'HEAD' ) } @@ -154,13 +246,13 @@ 
RSpec.describe Gitlab::Ci::Config::External::File::Base, feature_category: :pipe subject(:eql) { file.eql?(other_file) } context 'when the other file has the same params' do - let(:other_file) { test_class.new(location, context) } + let(:other_file) { test_class.new({ location: location, content: content }, ctx) } it { is_expected.to eq(true) } end context 'when the other file has not the same params' do - let(:other_file) { test_class.new('some/other/file', context) } + let(:other_file) { test_class.new({ location: 'some/other/file', content: content }, ctx) } it { is_expected.to eq(false) } end @@ -172,14 +264,15 @@ RSpec.describe Gitlab::Ci::Config::External::File::Base, feature_category: :pipe subject(:filehash) { file.hash } context 'with a project' do - let(:project) { create(:project) } let(:context_params) { { project: project, sha: 'HEAD', variables: variables } } - it { is_expected.to eq([location, project.full_path, 'HEAD'].hash) } + it { is_expected.to eq([{ location: location, content: content }, project.full_path, 'HEAD'].hash) } end context 'without a project' do - it { is_expected.to eq([location, nil, 'HEAD'].hash) } + let(:context_params) { { sha: 'HEAD', variables: variables } } + + it { is_expected.to eq([{ location: location, content: content }, nil, 'HEAD'].hash) } end end end diff --git a/spec/lib/gitlab/ci/config/external/file/component_spec.rb b/spec/lib/gitlab/ci/config/external/file/component_spec.rb index 1562e571060..fe811bce9fe 100644 --- a/spec/lib/gitlab/ci/config/external/file/component_spec.rb +++ b/spec/lib/gitlab/ci/config/external/file/component_spec.rb @@ -121,7 +121,7 @@ RSpec.describe Gitlab::Ci::Config::External::File::Component, feature_category: it 'is invalid' do expect(subject).to be_falsy - expect(external_resource.error_message).to match(/does not have valid YAML syntax/) + expect(external_resource.error_message).to match(/does not have a valid YAML syntax/) end end end @@ -176,4 +176,35 @@ RSpec.describe 
Gitlab::Ci::Config::External::File::Component, feature_category: variables: context.variables) end end + + describe '#to_hash' do + context 'when interpolation is being used' do + let(:response) do + ServiceResponse.success(payload: { content: content, path: path }) + end + + let(:path) do + instance_double(::Gitlab::Ci::Components::InstancePath, project: project, sha: '12345') + end + + let(:content) do + <<~YAML + spec: + inputs: + env: + --- + deploy: + script: deploy $[[ inputs.env ]] + YAML + end + + let(:params) do + { component: 'gitlab.com/acme/components/my-component@1.0', with: { env: 'production' } } + end + + it 'correctly interpolates the content' do + expect(external_resource.to_hash).to eq({ deploy: { script: 'deploy production' } }) + end + end + end end diff --git a/spec/lib/gitlab/ci/config/external/file/local_spec.rb b/spec/lib/gitlab/ci/config/external/file/local_spec.rb index 2bac8a6968b..6c0242050a6 100644 --- a/spec/lib/gitlab/ci/config/external/file/local_spec.rb +++ b/spec/lib/gitlab/ci/config/external/file/local_spec.rb @@ -228,6 +228,34 @@ RSpec.describe Gitlab::Ci::Config::External::File::Local, feature_category: :pip expect(local_file.to_hash).to include(:rspec) end end + + context 'when interpolaton is being used' do + let(:local_file_content) do + <<~YAML + spec: + inputs: + website: + --- + test: + script: cap deploy $[[ inputs.website ]] + YAML + end + + let(:location) { '/lib/gitlab/ci/templates/existent-file.yml' } + let(:params) { { local: location, with: { website: 'gitlab.com' } } } + + before do + allow_any_instance_of(described_class) + .to receive(:fetch_local_content) + .and_return(local_file_content) + end + + it 'correctly interpolates the local template' do + expect(local_file).to be_valid + expect(local_file.to_hash) + .to eq({ test: { script: 'cap deploy gitlab.com' } }) + end + end end describe '#metadata' do diff --git a/spec/lib/gitlab/ci/config/external/file/project_spec.rb 
b/spec/lib/gitlab/ci/config/external/file/project_spec.rb index 0ef39a22932..59522e7ab7d 100644 --- a/spec/lib/gitlab/ci/config/external/file/project_spec.rb +++ b/spec/lib/gitlab/ci/config/external/file/project_spec.rb @@ -289,4 +289,37 @@ RSpec.describe Gitlab::Ci::Config::External::File::Project, feature_category: :p } end end + + describe '#to_hash' do + context 'when interpolation is being used' do + before do + project.repository.create_file( + user, + 'template-file.yml', + template, + message: 'Add template', + branch_name: 'master' + ) + end + + let(:template) do + <<~YAML + spec: + inputs: + name: + --- + rspec: + script: rspec --suite $[[ inputs.name ]] + YAML + end + + let(:params) do + { file: 'template-file.yml', ref: 'master', project: project.full_path, with: { name: 'abc' } } + end + + it 'correctly interpolates the content' do + expect(project_file.to_hash).to eq({ rspec: { script: 'rspec --suite abc' } }) + end + end + end end diff --git a/spec/lib/gitlab/ci/config/external/file/remote_spec.rb b/spec/lib/gitlab/ci/config/external/file/remote_spec.rb index f8986e8fa10..30a407d3a8f 100644 --- a/spec/lib/gitlab/ci/config/external/file/remote_spec.rb +++ b/spec/lib/gitlab/ci/config/external/file/remote_spec.rb @@ -234,15 +234,13 @@ RSpec.describe Gitlab::Ci::Config::External::File::Remote, feature_category: :pi end describe '#to_hash' do - subject(:to_hash) { remote_file.to_hash } - before do stub_full_request(location).to_return(body: remote_file_content) end context 'with a valid remote file' do it 'returns the content as a hash' do - expect(to_hash).to eql( + expect(remote_file.to_hash).to eql( before_script: ["apt-get update -qq && apt-get install -y -qq sqlite3 libsqlite3-dev nodejs", "ruby -v", "which ruby", @@ -262,7 +260,7 @@ RSpec.describe Gitlab::Ci::Config::External::File::Remote, feature_category: :pi end it 'returns the content as a hash' do - expect(to_hash).to eql( + expect(remote_file.to_hash).to eql( include: [ { local: 
'another-file.yml', rules: [{ exists: ['Dockerfile'] }] } @@ -270,5 +268,38 @@ RSpec.describe Gitlab::Ci::Config::External::File::Remote, feature_category: :pi ) end end + + context 'when interpolation has been used' do + let_it_be(:project) { create(:project) } + + let(:remote_file_content) do + <<~YAML + spec: + inputs: + include: + --- + include: + - local: $[[ inputs.include ]] + rules: + - exists: [Dockerfile] + YAML + end + + let(:params) { { remote: location, with: { include: 'some-file.yml' } } } + + let(:context_params) do + { sha: '12345', variables: variables, project: project } + end + + it 'returns the content as a hash' do + expect(remote_file).to be_valid + expect(remote_file.to_hash).to eql( + include: [ + { local: 'some-file.yml', + rules: [{ exists: ['Dockerfile'] }] } + ] + ) + end + end end end diff --git a/spec/lib/gitlab/ci/config/external/file/template_spec.rb b/spec/lib/gitlab/ci/config/external/file/template_spec.rb index 79fd4203c3e..89b8240ce9b 100644 --- a/spec/lib/gitlab/ci/config/external/file/template_spec.rb +++ b/spec/lib/gitlab/ci/config/external/file/template_spec.rb @@ -130,4 +130,37 @@ RSpec.describe Gitlab::Ci::Config::External::File::Template, feature_category: : ) } end + + describe '#to_hash' do + context 'when interpolation is being used' do + before do + allow(Gitlab::Template::GitlabCiYmlTemplate) + .to receive(:find) + .and_return(template_double) + end + + let(:template_double) do + instance_double(Gitlab::Template::GitlabCiYmlTemplate, content: template_content) + end + + let(:template_content) do + <<~YAML + spec: + inputs: + env: + --- + deploy: + script: deploy $[[ inputs.env ]] + YAML + end + + let(:params) do + { template: template, with: { env: 'production' } } + end + + it 'correctly interpolates the content' do + expect(template_file.to_hash).to eq({ deploy: { script: 'deploy production' } }) + end + end + end end diff --git a/spec/lib/gitlab/ci/config/external/interpolator_spec.rb 
b/spec/lib/gitlab/ci/config/external/interpolator_spec.rb new file mode 100644 index 00000000000..b274e5950e4 --- /dev/null +++ b/spec/lib/gitlab/ci/config/external/interpolator_spec.rb @@ -0,0 +1,312 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe Gitlab::Ci::Config::External::Interpolator, feature_category: :pipeline_composition do + let_it_be(:project) { create(:project) } + + let(:ctx) { instance_double(Gitlab::Ci::Config::External::Context, project: project) } + let(:result) { ::Gitlab::Ci::Config::Yaml::Result.new(config: [header, content]) } + + subject { described_class.new(result, arguments, ctx) } + + context 'when input data is valid' do + let(:header) do + { spec: { inputs: { website: nil } } } + end + + let(:content) do + { test: 'deploy $[[ inputs.website ]]' } + end + + let(:arguments) do + { website: 'gitlab.com' } + end + + it 'correctly interpolates the config' do + subject.interpolate! + + expect(subject).to be_valid + expect(subject.to_hash).to eq({ test: 'deploy gitlab.com' }) + end + end + + context 'when config has a syntax error' do + let(:result) { ::Gitlab::Ci::Config::Yaml::Result.new(error: ArgumentError.new) } + + let(:arguments) do + { website: 'gitlab.com' } + end + + it 'surfaces an error about invalid config' do + subject.interpolate! + + expect(subject).not_to be_valid + expect(subject.error_message).to eq subject.errors.first + expect(subject.errors).to include 'content does not have a valid YAML syntax' + end + end + + context 'when spec header is invalid' do + let(:header) do + { spec: { arguments: { website: nil } } } + end + + let(:content) do + { test: 'deploy $[[ inputs.website ]]' } + end + + let(:arguments) do + { website: 'gitlab.com' } + end + + it 'surfaces an error about invalid header' do + subject.interpolate! 
+ + expect(subject).not_to be_valid + expect(subject.error_message).to eq subject.errors.first + expect(subject.errors).to include('header:spec config contains unknown keys: arguments') + end + end + + context 'when interpolation block is invalid' do + let(:header) do + { spec: { inputs: { website: nil } } } + end + + let(:content) do + { test: 'deploy $[[ inputs.abc ]]' } + end + + let(:arguments) do + { website: 'gitlab.com' } + end + + it 'correctly interpolates the config' do + subject.interpolate! + + expect(subject).not_to be_valid + expect(subject.errors).to include 'unknown interpolation key: `abc`' + expect(subject.error_message).to eq 'interpolation interrupted by errors, unknown interpolation key: `abc`' + end + end + + context 'when provided interpolation argument is invalid' do + let(:header) do + { spec: { inputs: { website: nil } } } + end + + let(:content) do + { test: 'deploy $[[ inputs.website ]]' } + end + + let(:arguments) do + { website: ['gitlab.com'] } + end + + it 'correctly interpolates the config' do + subject.interpolate! + + expect(subject).not_to be_valid + expect(subject.error_message).to eq subject.errors.first + expect(subject.errors).to include 'unsupported value in input argument `website`' + end + end + + context 'when multiple interpolation blocks are invalid' do + let(:header) do + { spec: { inputs: { website: nil } } } + end + + let(:content) do + { test: 'deploy $[[ inputs.something.abc ]] $[[ inputs.cde ]] $[[ efg ]]' } + end + + let(:arguments) do + { website: 'gitlab.com' } + end + + it 'correctly interpolates the config' do + subject.interpolate! 
+ + expect(subject).not_to be_valid + expect(subject.error_message).to eq 'interpolation interrupted by errors, unknown interpolation key: `something`' + end + end + + describe '#to_hash' do + context 'when interpolation is disabled' do + before do + stub_feature_flags(ci_includable_files_interpolation: false) + end + + let(:header) do + { spec: { inputs: { website: nil } } } + end + + let(:content) do + { test: 'deploy $[[ inputs.website ]]' } + end + + let(:arguments) { {} } + + it 'returns an empty hash' do + subject.interpolate! + + expect(subject.to_hash).to be_empty + end + end + + context 'when interpolation is not used' do + let(:result) do + ::Gitlab::Ci::Config::Yaml::Result.new(config: content) + end + + let(:content) do + { test: 'deploy production' } + end + + let(:arguments) { nil } + + it 'returns original content' do + subject.interpolate! + + expect(subject.to_hash).to eq(content) + end + end + + context 'when interpolation is available' do + let(:header) do + { spec: { inputs: { website: nil } } } + end + + let(:content) do + { test: 'deploy $[[ inputs.website ]]' } + end + + let(:arguments) do + { website: 'gitlab.com' } + end + + it 'correctly interpolates content' do + subject.interpolate! + + expect(subject.to_hash).to eq({ test: 'deploy gitlab.com' }) + end + end + end + + describe '#ready?' do + let(:header) do + { spec: { inputs: { website: nil } } } + end + + let(:content) do + { test: 'deploy $[[ inputs.website ]]' } + end + + let(:arguments) do + { website: 'gitlab.com' } + end + + it 'returns false if interpolation has not been done yet' do + expect(subject).not_to be_ready + end + + it 'returns true if interpolation has been performed' do + subject.interpolate! 
+ + expect(subject).to be_ready + end + + context 'when interpolation can not be performed' do + let(:result) do + ::Gitlab::Ci::Config::Yaml::Result.new(error: ArgumentError.new) + end + + it 'returns true if interpolator has preliminary errors' do + expect(subject).to be_ready + end + + it 'returns true if interpolation has been attempted' do + subject.interpolate! + + expect(subject).to be_ready + end + end + end + + describe '#interpolate?' do + let(:header) do + { spec: { inputs: { website: nil } } } + end + + let(:content) do + { test: 'deploy $[[ inputs.something.abc ]] $[[ inputs.cde ]] $[[ efg ]]' } + end + + let(:arguments) do + { website: 'gitlab.com' } + end + + context 'when interpolation can be performed' do + it 'will perform interpolation' do + expect(subject.interpolate?).to eq true + end + end + + context 'when interpolation is disabled' do + before do + stub_feature_flags(ci_includable_files_interpolation: false) + end + + it 'will not perform interpolation' do + expect(subject.interpolate?).to eq false + end + end + + context 'when an interpolation header is missing' do + let(:header) { nil } + + it 'will not perform interpolation' do + expect(subject.interpolate?).to eq false + end + end + + context 'when interpolator has preliminary errors' do + let(:result) do + ::Gitlab::Ci::Config::Yaml::Result.new(error: ArgumentError.new) + end + + it 'will not perform interpolation' do + expect(subject.interpolate?).to eq false + end + end + end + + describe '#has_header?' 
do + let(:content) do + { test: 'deploy $[[ inputs.something.abc ]] $[[ inputs.cde ]] $[[ efg ]]' } + end + + let(:arguments) do + { website: 'gitlab.com' } + end + + context 'when header is an empty hash' do + let(:header) { {} } + + it 'does not have a header available' do + expect(subject).not_to have_header + end + end + + context 'when header is not specified' do + let(:header) { nil } + + it 'does not have a header available' do + expect(subject).not_to have_header + end + end + end +end diff --git a/spec/lib/gitlab/ci/config/external/mapper/matcher_spec.rb b/spec/lib/gitlab/ci/config/external/mapper/matcher_spec.rb index 6ca4fd24e61..719c75dca80 100644 --- a/spec/lib/gitlab/ci/config/external/mapper/matcher_spec.rb +++ b/spec/lib/gitlab/ci/config/external/mapper/matcher_spec.rb @@ -16,28 +16,56 @@ RSpec.describe Gitlab::Ci::Config::External::Mapper::Matcher, feature_category: subject(:matcher) { described_class.new(context) } describe '#process' do - let(:locations) do - [ - { local: 'file.yml' }, - { file: 'file.yml', project: 'namespace/project' }, - { component: 'gitlab.com/org/component@1.0' }, - { remote: 'https://example.com/.gitlab-ci.yml' }, - { template: 'file.yml' }, - { artifact: 'generated.yml', job: 'test' } - ] + subject(:process) { matcher.process(locations) } + + context 'with ci_include_components FF disabled' do + before do + stub_feature_flags(ci_include_components: false) + end + + let(:locations) do + [ + { local: 'file.yml' }, + { file: 'file.yml', project: 'namespace/project' }, + { remote: 'https://example.com/.gitlab-ci.yml' }, + { template: 'file.yml' }, + { artifact: 'generated.yml', job: 'test' } + ] + end + + it 'returns an array of file objects' do + is_expected.to contain_exactly( + an_instance_of(Gitlab::Ci::Config::External::File::Local), + an_instance_of(Gitlab::Ci::Config::External::File::Project), + an_instance_of(Gitlab::Ci::Config::External::File::Remote), + an_instance_of(Gitlab::Ci::Config::External::File::Template), + 
an_instance_of(Gitlab::Ci::Config::External::File::Artifact) + ) + end end - subject(:process) { matcher.process(locations) } + context 'with ci_include_components FF enabled' do + let(:locations) do + [ + { local: 'file.yml' }, + { file: 'file.yml', project: 'namespace/project' }, + { component: 'gitlab.com/org/component@1.0' }, + { remote: 'https://example.com/.gitlab-ci.yml' }, + { template: 'file.yml' }, + { artifact: 'generated.yml', job: 'test' } + ] + end - it 'returns an array of file objects' do - is_expected.to contain_exactly( - an_instance_of(Gitlab::Ci::Config::External::File::Local), - an_instance_of(Gitlab::Ci::Config::External::File::Project), - an_instance_of(Gitlab::Ci::Config::External::File::Component), - an_instance_of(Gitlab::Ci::Config::External::File::Remote), - an_instance_of(Gitlab::Ci::Config::External::File::Template), - an_instance_of(Gitlab::Ci::Config::External::File::Artifact) - ) + it 'returns an array of file objects' do + is_expected.to contain_exactly( + an_instance_of(Gitlab::Ci::Config::External::File::Local), + an_instance_of(Gitlab::Ci::Config::External::File::Project), + an_instance_of(Gitlab::Ci::Config::External::File::Component), + an_instance_of(Gitlab::Ci::Config::External::File::Remote), + an_instance_of(Gitlab::Ci::Config::External::File::Template), + an_instance_of(Gitlab::Ci::Config::External::File::Artifact) + ) + end end context 'when a location is not valid' do diff --git a/spec/lib/gitlab/ci/config/external/mapper/variables_expander_spec.rb b/spec/lib/gitlab/ci/config/external/mapper/variables_expander_spec.rb index e27e8034faa..5def516bb1e 100644 --- a/spec/lib/gitlab/ci/config/external/mapper/variables_expander_spec.rb +++ b/spec/lib/gitlab/ci/config/external/mapper/variables_expander_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe Gitlab::Ci::Config::External::Mapper::VariablesExpander, feature_category: :pipeline_composition do +RSpec.describe 
Gitlab::Ci::Config::External::Mapper::VariablesExpander, feature_category: :secrets_management do let_it_be(:variables) do Gitlab::Ci::Variables::Collection.new.tap do |variables| variables.append(key: 'VARIABLE1', value: 'hello') diff --git a/spec/lib/gitlab/ci/config/external/processor_spec.rb b/spec/lib/gitlab/ci/config/external/processor_spec.rb index 97f600baf25..74afb3b1e97 100644 --- a/spec/lib/gitlab/ci/config/external/processor_spec.rb +++ b/spec/lib/gitlab/ci/config/external/processor_spec.rb @@ -221,7 +221,7 @@ RSpec.describe Gitlab::Ci::Config::External::Processor, feature_category: :pipel it 'raises an error' do expect { processor.perform }.to raise_error( described_class::IncludeError, - "Included file `lib/gitlab/ci/templates/template.yml` does not have valid YAML syntax!" + '`lib/gitlab/ci/templates/template.yml`: content does not have a valid YAML syntax' ) end end diff --git a/spec/lib/gitlab/ci/config/header/spec_spec.rb b/spec/lib/gitlab/ci/config/header/spec_spec.rb index cb4237f84ce..74cfb39dfd5 100644 --- a/spec/lib/gitlab/ci/config/header/spec_spec.rb +++ b/spec/lib/gitlab/ci/config/header/spec_spec.rb @@ -28,6 +28,18 @@ RSpec.describe Gitlab::Ci::Config::Header::Spec, feature_category: :pipeline_com end end + context 'when spec contains a required value' do + let(:spec_hash) do + { inputs: { foo: nil } } + end + + it 'parses the config correctly' do + expect(config).to be_valid + expect(config.errors).to be_empty + expect(config.value).to eq({ inputs: { foo: {} } }) + end + end + context 'when spec contains unknown keywords' do let(:spec_hash) { { test: 123 } } let(:expected_errors) { ['spec config contains unknown keys: test'] } diff --git a/spec/lib/gitlab/ci/config/yaml/result_spec.rb b/spec/lib/gitlab/ci/config/yaml/result_spec.rb index eda15ee9eb2..72d96349668 100644 --- a/spec/lib/gitlab/ci/config/yaml/result_spec.rb +++ b/spec/lib/gitlab/ci/config/yaml/result_spec.rb @@ -9,11 +9,34 @@ RSpec.describe Gitlab::Ci::Config::Yaml::Result, 
feature_category: :pipeline_com expect(result).not_to have_header end - it 'has a header when config is an array of hashes' do - result = described_class.new(config: [{ a: 1 }, { b: 2 }]) + context 'when config is an array of hashes' do + context 'when first document matches the header schema' do + it 'has a header' do + result = described_class.new(config: [{ spec: { inputs: {} } }, { b: 2 }]) + + expect(result).to have_header + expect(result.header).to eq({ spec: { inputs: {} } }) + expect(result.content).to eq({ b: 2 }) + end + end + + context 'when first document does not match the header schema' do + it 'does not have header' do + result = described_class.new(config: [{ a: 1 }, { b: 2 }]) + + expect(result).not_to have_header + expect(result.content).to eq({ a: 1 }) + end + end + end + + context 'when the first document is undefined' do + it 'does not have header' do + result = described_class.new(config: [nil, { a: 1 }]) - expect(result).to have_header - expect(result.header).to eq({ a: 1 }) + expect(result).not_to have_header + expect(result.content).to be_nil + end end it 'raises an error when reading a header when there is none' do diff --git a/spec/lib/gitlab/ci/config/yaml_spec.rb b/spec/lib/gitlab/ci/config/yaml_spec.rb index f4b70069bbe..beb872071d2 100644 --- a/spec/lib/gitlab/ci/config/yaml_spec.rb +++ b/spec/lib/gitlab/ci/config/yaml_spec.rb @@ -113,18 +113,85 @@ RSpec.describe Gitlab::Ci::Config::Yaml, feature_category: :pipeline_composition end describe '.load_result!' 
do + let_it_be(:project) { create(:project) } + + subject(:result) { described_class.load_result!(yaml, project: project) } + context 'when syntax is invalid' do let(:yaml) { 'some: invalid: syntax' } it 'returns an invalid result object' do - result = described_class.load_result!(yaml) - expect(result).not_to be_valid expect(result.error).to be_a ::Gitlab::Config::Loader::FormatError end end - context 'when syntax is valid and contains a header document' do + context 'when the first document is a header' do + context 'with explicit document start marker' do + let(:yaml) do + <<~YAML + --- + spec: + --- + b: 2 + YAML + end + + it 'considers the first document as header and the second as content' do + expect(result).to be_valid + expect(result.error).to be_nil + expect(result.header).to eq({ spec: nil }) + expect(result.content).to eq({ b: 2 }) + end + end + end + + context 'when first document is empty' do + let(:yaml) do + <<~YAML + --- + --- + b: 2 + YAML + end + + it 'considers the first document as header and the second as content' do + expect(result).not_to have_header + end + end + + context 'when first document is an empty hash' do + let(:yaml) do + <<~YAML + {} + --- + b: 2 + YAML + end + + it 'returns second document as a content' do + expect(result).not_to have_header + expect(result.content).to eq({ b: 2 }) + end + end + + context 'when first an array' do + let(:yaml) do + <<~YAML + --- + - a + - b + --- + b: 2 + YAML + end + + it 'considers the first document as header and the second as content' do + expect(result).not_to have_header + end + end + + context 'when the first document is not a header' do let(:yaml) do <<~YAML a: 1 @@ -133,15 +200,62 @@ RSpec.describe Gitlab::Ci::Config::Yaml, feature_category: :pipeline_composition YAML end - let(:project) { create(:project) } + it 'considers the first document as content for backwards compatibility' do + expect(result).to be_valid + expect(result.error).to be_nil + expect(result).not_to have_header + 
expect(result.content).to eq({ a: 1 }) + end + + context 'with explicit document start marker' do + let(:yaml) do + <<~YAML + --- + a: 1 + --- + b: 2 + YAML + end + + it 'considers the first document as content for backwards compatibility' do + expect(result).to be_valid + expect(result.error).to be_nil + expect(result).not_to have_header + expect(result.content).to eq({ a: 1 }) + end + end + end - it 'returns a result object' do - result = described_class.load_result!(yaml, project: project) + context 'when the first document is not a header and second document is empty' do + let(:yaml) do + <<~YAML + a: 1 + --- + YAML + end + it 'considers the first document as content' do expect(result).to be_valid expect(result.error).to be_nil - expect(result.header).to eq({ a: 1 }) - expect(result.content).to eq({ b: 2 }) + expect(result).not_to have_header + expect(result.content).to eq({ a: 1 }) + end + + context 'with explicit document start marker' do + let(:yaml) do + <<~YAML + --- + a: 1 + --- + YAML + end + + it 'considers the first document as content' do + expect(result).to be_valid + expect(result.error).to be_nil + expect(result).not_to have_header + expect(result.content).to eq({ a: 1 }) + end end end end diff --git a/spec/lib/gitlab/ci/input/arguments/default_spec.rb b/spec/lib/gitlab/ci/input/arguments/default_spec.rb index 6b5dd441eb7..bc0cee6ac4e 100644 --- a/spec/lib/gitlab/ci/input/arguments/default_spec.rb +++ b/spec/lib/gitlab/ci/input/arguments/default_spec.rb @@ -27,6 +27,12 @@ RSpec.describe Gitlab::Ci::Input::Arguments::Default, feature_category: :pipelin expect(argument.to_hash).to eq({ website: 'https://gitlab.com' }) end + it 'returns an error if the default argument has not been recognized' do + argument = described_class.new(:website, { default: ['gitlab.com'] }, 'abc') + + expect(argument).not_to be_valid + end + it 'returns an error if the argument has not been fabricated correctly' do argument = described_class.new(:website, { required: 
'https://gitlab.com' }, 'https://example.gitlab.com') @@ -40,6 +46,8 @@ RSpec.describe Gitlab::Ci::Input::Arguments::Default, feature_category: :pipelin it 'does not match specs different configuration keyword' do expect(described_class.matches?({ options: %w[a b] })).to be false + expect(described_class.matches?('a b c')).to be false + expect(described_class.matches?(%w[default a])).to be false end end end diff --git a/spec/lib/gitlab/ci/input/arguments/options_spec.rb b/spec/lib/gitlab/ci/input/arguments/options_spec.rb index afa279ad48d..17e3469b294 100644 --- a/spec/lib/gitlab/ci/input/arguments/options_spec.rb +++ b/spec/lib/gitlab/ci/input/arguments/options_spec.rb @@ -29,7 +29,7 @@ RSpec.describe Gitlab::Ci::Input::Arguments::Options, feature_category: :pipelin argument = described_class.new(:website, { options: { a: 1 } }, 'opt1') expect(argument).not_to be_valid - expect(argument.errors.first).to eq '`website` input: argument value opt1 not allowlisted' + expect(argument.errors.first).to eq '`website` input: argument specification invalid' end it 'returns an empty value if it is allowlisted' do @@ -47,6 +47,8 @@ RSpec.describe Gitlab::Ci::Input::Arguments::Options, feature_category: :pipelin it 'does not match specs different configuration keyword' do expect(described_class.matches?({ default: 'abc' })).to be false + expect(described_class.matches?(['options'])).to be false + expect(described_class.matches?('options')).to be false end end end diff --git a/spec/lib/gitlab/ci/input/arguments/required_spec.rb b/spec/lib/gitlab/ci/input/arguments/required_spec.rb index 0c2ffc282ea..847272998c2 100644 --- a/spec/lib/gitlab/ci/input/arguments/required_spec.rb +++ b/spec/lib/gitlab/ci/input/arguments/required_spec.rb @@ -34,6 +34,10 @@ RSpec.describe Gitlab::Ci::Input::Arguments::Required, feature_category: :pipeli expect(described_class.matches?('')).to be true end + it 'matches specs with an empty hash configuration' do + expect(described_class.matches?({})).to 
be true + end + it 'does not match specs with configuration' do expect(described_class.matches?({ options: %w[a b] })).to be false end diff --git a/spec/lib/gitlab/ci/jwt_v2_spec.rb b/spec/lib/gitlab/ci/jwt_v2_spec.rb index 5eeab658a8e..21fd7e3adcf 100644 --- a/spec/lib/gitlab/ci/jwt_v2_spec.rb +++ b/spec/lib/gitlab/ci/jwt_v2_spec.rb @@ -5,7 +5,13 @@ require 'spec_helper' RSpec.describe Gitlab::Ci::JwtV2 do let(:namespace) { build_stubbed(:namespace) } let(:project) { build_stubbed(:project, namespace: namespace) } - let(:user) { build_stubbed(:user) } + let(:user) do + build_stubbed( + :user, + identities: [build_stubbed(:identity, extern_uid: '1', provider: 'github')] + ) + end + let(:pipeline) { build_stubbed(:ci_pipeline, ref: 'auto-deploy-2020-03-19') } let(:aud) { described_class::DEFAULT_AUD } @@ -33,6 +39,18 @@ RSpec.describe Gitlab::Ci::JwtV2 do end end + it 'includes user identities when enabled' do + expect(user).to receive(:pass_user_identities_to_ci_jwt).and_return(true) + identities = payload[:user_identities].map { |identity| identity.slice(:extern_uid, :provider) } + expect(identities).to eq([{ extern_uid: '1', provider: 'github' }]) + end + + it 'does not include user identities when disabled' do + expect(user).to receive(:pass_user_identities_to_ci_jwt).and_return(false) + + expect(payload).not_to include(:user_identities) + end + context 'when given an aud' do let(:aud) { 'AWS' } diff --git a/spec/lib/gitlab/ci/parsers/security/sast_spec.rb b/spec/lib/gitlab/ci/parsers/security/sast_spec.rb index f6113308201..d1ce6808d23 100644 --- a/spec/lib/gitlab/ci/parsers/security/sast_spec.rb +++ b/spec/lib/gitlab/ci/parsers/security/sast_spec.rb @@ -13,8 +13,8 @@ RSpec.describe Gitlab::Ci::Parsers::Security::Sast do context "when passing valid report" do # rubocop: disable Layout/LineLength where(:report_format, :report_version, :scanner_length, :finding_length, :identifier_length, :file_path, :start_line, :end_line, :primary_identifiers_length) do - :sast 
| '14.0.0' | 1 | 5 | 6 | 'groovy/src/main/java/com/gitlab/security_products/tests/App.groovy' | 47 | 47 | nil - :sast_semgrep_for_multiple_findings | '14.0.4' | 1 | 2 | 6 | 'app/app.py' | 39 | nil | 2 + :sast | '15.0.0' | 1 | 5 | 6 | 'groovy/src/main/java/com/gitlab/security_products/tests/App.groovy' | 47 | 47 | nil + :sast_semgrep_for_multiple_findings | '15.0.4' | 1 | 2 | 6 | 'app/app.py' | 39 | nil | 2 end # rubocop: enable Layout/LineLength diff --git a/spec/lib/gitlab/ci/parsers/security/secret_detection_spec.rb b/spec/lib/gitlab/ci/parsers/security/secret_detection_spec.rb index e8f1d617cb7..13999b2a9e5 100644 --- a/spec/lib/gitlab/ci/parsers/security/secret_detection_spec.rb +++ b/spec/lib/gitlab/ci/parsers/security/secret_detection_spec.rb @@ -39,7 +39,7 @@ RSpec.describe Gitlab::Ci::Parsers::Security::SecretDetection do end it "generates expected metadata_version" do - expect(report.findings.first.metadata_version).to eq('14.1.2') + expect(report.findings.first.metadata_version).to eq('15.0.0') end end end diff --git a/spec/lib/gitlab/ci/pipeline/seed/build/cache_spec.rb b/spec/lib/gitlab/ci/pipeline/seed/build/cache_spec.rb index c264ea3bece..e6ff82810ae 100644 --- a/spec/lib/gitlab/ci/pipeline/seed/build/cache_spec.rb +++ b/spec/lib/gitlab/ci/pipeline/seed/build/cache_spec.rb @@ -7,8 +7,9 @@ RSpec.describe Gitlab::Ci::Pipeline::Seed::Build::Cache do let_it_be(:head_sha) { project.repository.head_commit.id } let_it_be(:pipeline) { create(:ci_pipeline, project: project, sha: head_sha) } let(:index) { 1 } + let(:cache_prefix) { index } - let(:processor) { described_class.new(pipeline, config, index) } + let(:processor) { described_class.new(pipeline, config, cache_prefix) } describe '#attributes' do subject { processor.attributes } @@ -32,18 +33,38 @@ RSpec.describe Gitlab::Ci::Pipeline::Seed::Build::Cache do } end - it { is_expected.to include(config.merge(key: "a_key")) } + it { is_expected.to include(config.merge(key: 'a_key')) } end context 'with 
cache:key:files' do + context 'with ci_fix_for_runner_cache_prefix disabled' do + before do + stub_feature_flags(ci_fix_for_runner_cache_prefix: false) + end + + shared_examples 'default key' do + let(:config) do + { key: { files: files } } + end + + context 'without a prefix' do + it 'uses default key with an index as a prefix' do + expected = { key: '1-default' } + + is_expected.to include(expected) + end + end + end + end + shared_examples 'default key' do let(:config) do { key: { files: files } } end context 'without a prefix' do - it 'uses default key with an index as a prefix' do - expected = { key: '1-default' } + it 'uses default key with an index and file names as a prefix' do + expected = { key: "#{cache_prefix}-default" } is_expected.to include(expected) end @@ -61,9 +82,9 @@ RSpec.describe Gitlab::Ci::Pipeline::Seed::Build::Cache do end context 'without a prefix' do - it 'builds a string key with an index as a prefix' do + it 'builds a string key with an index and file names as a prefix' do expected = { - key: '1-703ecc8fef1635427a1f86a8a1a308831c122392', + key: "#{cache_prefix}-703ecc8fef1635427a1f86a8a1a308831c122392", paths: ['vendor/ruby'] } @@ -74,30 +95,41 @@ RSpec.describe Gitlab::Ci::Pipeline::Seed::Build::Cache do context 'with existing files' do let(:files) { ['VERSION', 'Gemfile.zip'] } + let(:cache_prefix) { '1_VERSION_Gemfile' } it_behaves_like 'version and gemfile files' end context 'with files starting with ./' do let(:files) { ['Gemfile.zip', './VERSION'] } + let(:cache_prefix) { '1_Gemfile_' } it_behaves_like 'version and gemfile files' end + context 'with no files' do + let(:files) { [] } + + it_behaves_like 'default key' + end + context 'with files ending with /' do let(:files) { ['Gemfile.zip/'] } + let(:cache_prefix) { '1_Gemfile' } it_behaves_like 'default key' end context 'with new line in filenames' do - let(:files) { ["Gemfile.zip\nVERSION"] } + let(:files) { ['Gemfile.zip\nVERSION'] } + let(:cache_prefix) { '1_Gemfile' } 
it_behaves_like 'default key' end context 'with missing files' do let(:files) { ['project-gemfile.lock', ''] } + let(:cache_prefix) { '1_project-gemfile_' } it_behaves_like 'default key' end @@ -113,8 +145,8 @@ RSpec.describe Gitlab::Ci::Pipeline::Seed::Build::Cache do end context 'without a prefix' do - it 'builds a string key with an index as a prefix' do - expected = { key: '1-74bf43fb1090f161bdd4e265802775dbda2f03d1' } + it 'builds a string key with an index and file names as a prefix' do + expected = { key: "#{cache_prefix}-74bf43fb1090f161bdd4e265802775dbda2f03d1" } is_expected.to include(expected) end @@ -123,18 +155,21 @@ RSpec.describe Gitlab::Ci::Pipeline::Seed::Build::Cache do context 'with directory' do let(:files) { ['foo/bar'] } + let(:cache_prefix) { '1_foo/bar' } it_behaves_like 'foo/bar directory key' end context 'with directory ending in slash' do let(:files) { ['foo/bar/'] } + let(:cache_prefix) { '1_foo/bar/' } it_behaves_like 'foo/bar directory key' end context 'with directories ending in slash star' do let(:files) { ['foo/bar/*'] } + let(:cache_prefix) { '1_foo/bar/*' } it_behaves_like 'foo/bar directory key' end diff --git a/spec/lib/gitlab/ci/pipeline/seed/build_spec.rb b/spec/lib/gitlab/ci/pipeline/seed/build_spec.rb index ce68e741d00..86a11111283 100644 --- a/spec/lib/gitlab/ci/pipeline/seed/build_spec.rb +++ b/spec/lib/gitlab/ci/pipeline/seed/build_spec.rb @@ -152,7 +152,7 @@ RSpec.describe Gitlab::Ci::Pipeline::Seed::Build, feature_category: :pipeline_co it 'includes cache options' do cache_options = { options: { - cache: [a_hash_including(key: '0-f155568ad0933d8358f66b846133614f76dd0ca4')] + cache: [a_hash_including(key: '0_VERSION-f155568ad0933d8358f66b846133614f76dd0ca4')] } } @@ -798,7 +798,7 @@ RSpec.describe Gitlab::Ci::Pipeline::Seed::Build, feature_category: :pipeline_co [ [[{ if: '$CI_JOB_NAME == "rspec" && $VAR == null', when: 'on_failure' }]], [[{ if: '$VARIABLE != null', when: 'delayed', start_in: '1 day' }, { if: 
'$CI_JOB_NAME == "rspec"', when: 'on_failure' }]], - [[{ if: '$VARIABLE == "the wrong value"', when: 'delayed', start_in: '1 day' }, { if: '$CI_BUILD_NAME == "rspec"', when: 'on_failure' }]] + [[{ if: '$VARIABLE == "the wrong value"', when: 'delayed', start_in: '1 day' }, { if: '$CI_JOB_NAME == "rspec"', when: 'on_failure' }]] ] end @@ -811,6 +811,30 @@ RSpec.describe Gitlab::Ci::Pipeline::Seed::Build, feature_category: :pipeline_co end end + context 'when FF `ci_remove_legacy_predefined_variables` is disabled' do + before do + stub_feature_flags(ci_remove_legacy_predefined_variables: false) + end + + context 'with an explicit `when: on_failure`' do + where(:rule_set) do + [ + [[{ if: '$CI_JOB_NAME == "rspec" && $VAR == null', when: 'on_failure' }]], + [[{ if: '$VARIABLE != null', when: 'delayed', start_in: '1 day' }, { if: '$CI_JOB_NAME == "rspec"', when: 'on_failure' }]], + [[{ if: '$VARIABLE == "the wrong value"', when: 'delayed', start_in: '1 day' }, { if: '$CI_BUILD_NAME == "rspec"', when: 'on_failure' }]] + ] + end + + with_them do + it { is_expected.to be_included } + + it 'correctly populates when:' do + expect(seed_build.attributes).to include(when: 'on_failure') + end + end + end + end + context 'with an explicit `when: delayed`' do where(:rule_set) do [ diff --git a/spec/lib/gitlab/ci/status/composite_spec.rb b/spec/lib/gitlab/ci/status/composite_spec.rb index cceabc35e85..cbf0976c976 100644 --- a/spec/lib/gitlab/ci/status/composite_spec.rb +++ b/spec/lib/gitlab/ci/status/composite_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe Gitlab::Ci::Status::Composite do +RSpec.describe Gitlab::Ci::Status::Composite, feature_category: :continuous_integration do let_it_be(:pipeline) { create(:ci_pipeline) } before_all do @@ -15,6 +15,18 @@ RSpec.describe Gitlab::Ci::Status::Composite do end end + describe '.initialize' do + subject(:composite_status) { described_class.new(all_statuses) } + + context 'when passing a single status' do + 
let(:all_statuses) { @statuses[:success] } + + it 'raises ArgumentError' do + expect { composite_status }.to raise_error(ArgumentError, 'all_jobs needs to respond to `.pluck`') + end + end + end + describe '#status' do using RSpec::Parameterized::TableSyntax @@ -51,8 +63,8 @@ RSpec.describe Gitlab::Ci::Status::Composite do %i(created success pending) | false | 'running' | false %i(skipped success failed) | false | 'failed' | false %i(skipped success failed) | true | 'skipped' | false - %i(success manual) | true | 'pending' | false - %i(success failed created) | true | 'pending' | false + %i(success manual) | true | 'manual' | false + %i(success failed created) | true | 'running' | false end with_them do diff --git a/spec/lib/gitlab/ci/status/processable/waiting_for_resource_spec.rb b/spec/lib/gitlab/ci/status/processable/waiting_for_resource_spec.rb index 26087fd771c..e1baa1097e4 100644 --- a/spec/lib/gitlab/ci/status/processable/waiting_for_resource_spec.rb +++ b/spec/lib/gitlab/ci/status/processable/waiting_for_resource_spec.rb @@ -2,12 +2,25 @@ require 'spec_helper' -RSpec.describe Gitlab::Ci::Status::Processable::WaitingForResource do +RSpec.describe Gitlab::Ci::Status::Processable::WaitingForResource, feature_category: :continuous_integration do let(:user) { create(:user) } + let(:processable) { create(:ci_build, :waiting_for_resource, :resource_group) } - subject do - processable = create(:ci_build, :waiting_for_resource, :resource_group) - described_class.new(Gitlab::Ci::Status::Core.new(processable, user)) + subject { described_class.new(Gitlab::Ci::Status::Core.new(processable, user)) } + + it 'fabricates status with correct details' do + expect(subject.has_action?).to eq false + end + + context 'when resource is retained by a build' do + before do + processable.resource_group.assign_resource_to(create(:ci_build)) + end + + it 'fabricates status with correct details' do + expect(subject.has_action?).to eq true + expect(subject.action_path).to include 
'jobs' + end end describe '#illustration' do diff --git a/spec/lib/gitlab/ci/trace/chunked_io_spec.rb b/spec/lib/gitlab/ci/trace/chunked_io_spec.rb index 63625244fe8..7a926a06f16 100644 --- a/spec/lib/gitlab/ci/trace/chunked_io_spec.rb +++ b/spec/lib/gitlab/ci/trace/chunked_io_spec.rb @@ -446,15 +446,5 @@ RSpec.describe Gitlab::Ci::Trace::ChunkedIO, :clean_gitlab_redis_cache do expect(Ci::BuildTraceChunk.where(build: build).count).to eq(0) end - - context 'when the job does not have archived trace' do - it 'leaves a message in sidekiq log' do - expect(Sidekiq.logger).to receive(:warn).with( - message: 'The job does not have archived trace but going to be destroyed.', - job_id: build.id).and_call_original - - subject - end - end end end diff --git a/spec/lib/gitlab/ci/variables/builder/pipeline_spec.rb b/spec/lib/gitlab/ci/variables/builder/pipeline_spec.rb index f8770457083..0a079a69682 100644 --- a/spec/lib/gitlab/ci/variables/builder/pipeline_spec.rb +++ b/spec/lib/gitlab/ci/variables/builder/pipeline_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe Gitlab::Ci::Variables::Builder::Pipeline, feature_category: :pipeline_composition do +RSpec.describe Gitlab::Ci::Variables::Builder::Pipeline, feature_category: :secrets_management do let_it_be(:project) { create_default(:project, :repository, create_tag: 'test').freeze } let_it_be(:user) { create(:user) } @@ -30,15 +30,13 @@ RSpec.describe Gitlab::Ci::Variables::Builder::Pipeline, feature_category: :pipe CI_COMMIT_REF_PROTECTED CI_COMMIT_TIMESTAMP CI_COMMIT_AUTHOR - CI_BUILD_REF - CI_BUILD_BEFORE_SHA - CI_BUILD_REF_NAME - CI_BUILD_REF_SLUG ]) end - context 'when the pipeline is running for a tag' do - let(:pipeline) { build(:ci_empty_pipeline, :created, project: project, ref: 'test', tag: true) } + context 'when FF `ci_remove_legacy_predefined_variables` is disabled' do + before do + stub_feature_flags(ci_remove_legacy_predefined_variables: false) + end it 'includes all predefined variables in a valid 
order' do keys = subject.pluck(:key) @@ -52,6 +50,7 @@ RSpec.describe Gitlab::Ci::Variables::Builder::Pipeline, feature_category: :pipe CI_COMMIT_BEFORE_SHA CI_COMMIT_REF_NAME CI_COMMIT_REF_SLUG + CI_COMMIT_BRANCH CI_COMMIT_MESSAGE CI_COMMIT_TITLE CI_COMMIT_DESCRIPTION @@ -62,11 +61,69 @@ RSpec.describe Gitlab::Ci::Variables::Builder::Pipeline, feature_category: :pipe CI_BUILD_BEFORE_SHA CI_BUILD_REF_NAME CI_BUILD_REF_SLUG + ]) + end + end + + context 'when the pipeline is running for a tag' do + let(:pipeline) { build(:ci_empty_pipeline, :created, project: project, ref: 'test', tag: true) } + + it 'includes all predefined variables in a valid order' do + keys = subject.pluck(:key) + + expect(keys).to contain_exactly(*%w[ + CI_PIPELINE_IID + CI_PIPELINE_SOURCE + CI_PIPELINE_CREATED_AT + CI_COMMIT_SHA + CI_COMMIT_SHORT_SHA + CI_COMMIT_BEFORE_SHA + CI_COMMIT_REF_NAME + CI_COMMIT_REF_SLUG + CI_COMMIT_MESSAGE + CI_COMMIT_TITLE + CI_COMMIT_DESCRIPTION + CI_COMMIT_REF_PROTECTED + CI_COMMIT_TIMESTAMP + CI_COMMIT_AUTHOR CI_COMMIT_TAG CI_COMMIT_TAG_MESSAGE - CI_BUILD_TAG ]) end + + context 'when FF `ci_remove_legacy_predefined_variables` is disabled' do + before do + stub_feature_flags(ci_remove_legacy_predefined_variables: false) + end + + it 'includes all predefined variables in a valid order' do + keys = subject.pluck(:key) + + expect(keys).to contain_exactly(*%w[ + CI_PIPELINE_IID + CI_PIPELINE_SOURCE + CI_PIPELINE_CREATED_AT + CI_COMMIT_SHA + CI_COMMIT_SHORT_SHA + CI_COMMIT_BEFORE_SHA + CI_COMMIT_REF_NAME + CI_COMMIT_REF_SLUG + CI_COMMIT_MESSAGE + CI_COMMIT_TITLE + CI_COMMIT_DESCRIPTION + CI_COMMIT_REF_PROTECTED + CI_COMMIT_TIMESTAMP + CI_COMMIT_AUTHOR + CI_BUILD_REF + CI_BUILD_BEFORE_SHA + CI_BUILD_REF_NAME + CI_BUILD_REF_SLUG + CI_COMMIT_TAG + CI_COMMIT_TAG_MESSAGE + CI_BUILD_TAG + ]) + end + end end context 'when merge request is present' do @@ -305,10 +362,24 @@ RSpec.describe Gitlab::Ci::Variables::Builder::Pipeline, feature_category: :pipe 
expect(subject.to_hash.keys) .not_to include( 'CI_COMMIT_TAG', - 'CI_COMMIT_TAG_MESSAGE', - 'CI_BUILD_TAG' + 'CI_COMMIT_TAG_MESSAGE' ) end + + context 'when FF `ci_remove_legacy_predefined_variables` is disabled' do + before do + stub_feature_flags(ci_remove_legacy_predefined_variables: false) + end + + it 'does not expose tag variables' do + expect(subject.to_hash.keys) + .not_to include( + 'CI_COMMIT_TAG', + 'CI_COMMIT_TAG_MESSAGE', + 'CI_BUILD_TAG' + ) + end + end end context 'without a commit' do diff --git a/spec/lib/gitlab/ci/variables/builder_spec.rb b/spec/lib/gitlab/ci/variables/builder_spec.rb index 215b18ea614..10974993fa4 100644 --- a/spec/lib/gitlab/ci/variables/builder_spec.rb +++ b/spec/lib/gitlab/ci/variables/builder_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe Gitlab::Ci::Variables::Builder, :clean_gitlab_redis_cache, feature_category: :pipeline_composition do +RSpec.describe Gitlab::Ci::Variables::Builder, :clean_gitlab_redis_cache, feature_category: :secrets_management do include Ci::TemplateHelpers let_it_be(:group) { create(:group) } let_it_be(:project) { create(:project, :repository, namespace: group) } @@ -35,10 +35,6 @@ RSpec.describe Gitlab::Ci::Variables::Builder, :clean_gitlab_redis_cache, featur value: '1' }, { key: 'CI_ENVIRONMENT_NAME', value: 'test' }, - { key: 'CI_BUILD_NAME', - value: 'rspec:test 1' }, - { key: 'CI_BUILD_STAGE', - value: job.stage_name }, { key: 'CI', value: 'true' }, { key: 'GITLAB_CI', @@ -51,6 +47,10 @@ RSpec.describe Gitlab::Ci::Variables::Builder, :clean_gitlab_redis_cache, featur value: Gitlab.config.gitlab.port.to_s }, { key: 'CI_SERVER_PROTOCOL', value: Gitlab.config.gitlab.protocol }, + { key: 'CI_SERVER_SHELL_SSH_HOST', + value: Gitlab.config.gitlab_shell.ssh_host.to_s }, + { key: 'CI_SERVER_SHELL_SSH_PORT', + value: Gitlab.config.gitlab_shell.ssh_port.to_s }, { key: 'CI_SERVER_NAME', value: 'GitLab' }, { key: 'CI_SERVER_VERSION', @@ -101,6 +101,8 @@ RSpec.describe 
Gitlab::Ci::Variables::Builder, :clean_gitlab_redis_cache, featur value: project.pages_url }, { key: 'CI_API_V4_URL', value: API::Helpers::Version.new('v4').root_url }, + { key: 'CI_API_GRAPHQL_URL', + value: Gitlab::Routing.url_helpers.api_graphql_url }, { key: 'CI_TEMPLATE_REGISTRY_HOST', value: template_registry_host }, { key: 'CI_PIPELINE_IID', @@ -133,14 +135,6 @@ RSpec.describe Gitlab::Ci::Variables::Builder, :clean_gitlab_redis_cache, featur value: pipeline.git_commit_timestamp }, { key: 'CI_COMMIT_AUTHOR', value: pipeline.git_author_full_text }, - { key: 'CI_BUILD_REF', - value: job.sha }, - { key: 'CI_BUILD_BEFORE_SHA', - value: job.before_sha }, - { key: 'CI_BUILD_REF_NAME', - value: job.ref }, - { key: 'CI_BUILD_REF_SLUG', - value: job.ref_slug }, { key: 'YAML_VARIABLE', value: 'value' }, { key: 'GITLAB_USER_ID', @@ -160,6 +154,151 @@ RSpec.describe Gitlab::Ci::Variables::Builder, :clean_gitlab_redis_cache, featur it { expect(subject.to_runner_variables).to eq(predefined_variables) } + context 'when FF `ci_remove_legacy_predefined_variables` is disabled' do + before do + stub_feature_flags(ci_remove_legacy_predefined_variables: false) + end + + let(:predefined_variables) do + [ + { key: 'CI_JOB_NAME', + value: 'rspec:test 1' }, + { key: 'CI_JOB_NAME_SLUG', + value: 'rspec-test-1' }, + { key: 'CI_JOB_STAGE', + value: job.stage_name }, + { key: 'CI_NODE_TOTAL', + value: '1' }, + { key: 'CI_ENVIRONMENT_NAME', + value: 'test' }, + { key: 'CI_BUILD_NAME', + value: 'rspec:test 1' }, + { key: 'CI_BUILD_STAGE', + value: job.stage_name }, + { key: 'CI', + value: 'true' }, + { key: 'GITLAB_CI', + value: 'true' }, + { key: 'CI_SERVER_URL', + value: Gitlab.config.gitlab.url }, + { key: 'CI_SERVER_HOST', + value: Gitlab.config.gitlab.host }, + { key: 'CI_SERVER_PORT', + value: Gitlab.config.gitlab.port.to_s }, + { key: 'CI_SERVER_PROTOCOL', + value: Gitlab.config.gitlab.protocol }, + { key: 'CI_SERVER_SHELL_SSH_HOST', + value: Gitlab.config.gitlab_shell.ssh_host.to_s 
}, + { key: 'CI_SERVER_SHELL_SSH_PORT', + value: Gitlab.config.gitlab_shell.ssh_port.to_s }, + { key: 'CI_SERVER_NAME', + value: 'GitLab' }, + { key: 'CI_SERVER_VERSION', + value: Gitlab::VERSION }, + { key: 'CI_SERVER_VERSION_MAJOR', + value: Gitlab.version_info.major.to_s }, + { key: 'CI_SERVER_VERSION_MINOR', + value: Gitlab.version_info.minor.to_s }, + { key: 'CI_SERVER_VERSION_PATCH', + value: Gitlab.version_info.patch.to_s }, + { key: 'CI_SERVER_REVISION', + value: Gitlab.revision }, + { key: 'GITLAB_FEATURES', + value: project.licensed_features.join(',') }, + { key: 'CI_PROJECT_ID', + value: project.id.to_s }, + { key: 'CI_PROJECT_NAME', + value: project.path }, + { key: 'CI_PROJECT_TITLE', + value: project.title }, + { key: 'CI_PROJECT_DESCRIPTION', + value: project.description }, + { key: 'CI_PROJECT_PATH', + value: project.full_path }, + { key: 'CI_PROJECT_PATH_SLUG', + value: project.full_path_slug }, + { key: 'CI_PROJECT_NAMESPACE', + value: project.namespace.full_path }, + { key: 'CI_PROJECT_NAMESPACE_ID', + value: project.namespace.id.to_s }, + { key: 'CI_PROJECT_ROOT_NAMESPACE', + value: project.namespace.root_ancestor.path }, + { key: 'CI_PROJECT_URL', + value: project.web_url }, + { key: 'CI_PROJECT_VISIBILITY', + value: "private" }, + { key: 'CI_PROJECT_REPOSITORY_LANGUAGES', + value: project.repository_languages.map(&:name).join(',').downcase }, + { key: 'CI_PROJECT_CLASSIFICATION_LABEL', + value: project.external_authorization_classification_label }, + { key: 'CI_DEFAULT_BRANCH', + value: project.default_branch }, + { key: 'CI_CONFIG_PATH', + value: project.ci_config_path_or_default }, + { key: 'CI_PAGES_DOMAIN', + value: Gitlab.config.pages.host }, + { key: 'CI_PAGES_URL', + value: project.pages_url }, + { key: 'CI_API_V4_URL', + value: API::Helpers::Version.new('v4').root_url }, + { key: 'CI_API_GRAPHQL_URL', + value: Gitlab::Routing.url_helpers.api_graphql_url }, + { key: 'CI_TEMPLATE_REGISTRY_HOST', + value: template_registry_host }, + { 
key: 'CI_PIPELINE_IID', + value: pipeline.iid.to_s }, + { key: 'CI_PIPELINE_SOURCE', + value: pipeline.source }, + { key: 'CI_PIPELINE_CREATED_AT', + value: pipeline.created_at.iso8601 }, + { key: 'CI_COMMIT_SHA', + value: job.sha }, + { key: 'CI_COMMIT_SHORT_SHA', + value: job.short_sha }, + { key: 'CI_COMMIT_BEFORE_SHA', + value: job.before_sha }, + { key: 'CI_COMMIT_REF_NAME', + value: job.ref }, + { key: 'CI_COMMIT_REF_SLUG', + value: job.ref_slug }, + { key: 'CI_COMMIT_BRANCH', + value: job.ref }, + { key: 'CI_COMMIT_MESSAGE', + value: pipeline.git_commit_message }, + { key: 'CI_COMMIT_TITLE', + value: pipeline.git_commit_title }, + { key: 'CI_COMMIT_DESCRIPTION', + value: pipeline.git_commit_description }, + { key: 'CI_COMMIT_REF_PROTECTED', + value: (!!pipeline.protected_ref?).to_s }, + { key: 'CI_COMMIT_TIMESTAMP', + value: pipeline.git_commit_timestamp }, + { key: 'CI_COMMIT_AUTHOR', + value: pipeline.git_author_full_text }, + { key: 'CI_BUILD_REF', + value: job.sha }, + { key: 'CI_BUILD_BEFORE_SHA', + value: job.before_sha }, + { key: 'CI_BUILD_REF_NAME', + value: job.ref }, + { key: 'CI_BUILD_REF_SLUG', + value: job.ref_slug }, + { key: 'YAML_VARIABLE', + value: 'value' }, + { key: 'GITLAB_USER_ID', + value: user.id.to_s }, + { key: 'GITLAB_USER_EMAIL', + value: user.email }, + { key: 'GITLAB_USER_LOGIN', + value: user.username }, + { key: 'GITLAB_USER_NAME', + value: user.name } + ].map { |var| var.merge(public: true, masked: false) } + end + + it { expect(subject.to_runner_variables).to eq(predefined_variables) } + end + context 'variables ordering' do def var(name, value) { key: name, value: value.to_s, public: true, masked: false } diff --git a/spec/lib/gitlab/ci/variables/collection_spec.rb b/spec/lib/gitlab/ci/variables/collection_spec.rb index 668f1173675..181e37de9b9 100644 --- a/spec/lib/gitlab/ci/variables/collection_spec.rb +++ b/spec/lib/gitlab/ci/variables/collection_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe 
Gitlab::Ci::Variables::Collection, feature_category: :pipeline_composition do +RSpec.describe Gitlab::Ci::Variables::Collection, feature_category: :secrets_management do describe '.new' do it 'can be initialized with an array' do variable = { key: 'VAR', value: 'value', public: true, masked: false } diff --git a/spec/lib/gitlab/ci/yaml_processor_spec.rb b/spec/lib/gitlab/ci/yaml_processor_spec.rb index b00d9b46bc7..d7dcfe64c74 100644 --- a/spec/lib/gitlab/ci/yaml_processor_spec.rb +++ b/spec/lib/gitlab/ci/yaml_processor_spec.rb @@ -2395,10 +2395,16 @@ module Gitlab end end - context 'undefined need' do + context 'when need is an undefined job' do let(:needs) { ['undefined'] } it_behaves_like 'returns errors', 'test1 job: undefined need: undefined' + + context 'when need is optional' do + let(:needs) { [{ job: 'undefined', optional: true }] } + + it { is_expected.to be_valid } + end end context 'needs to deploy' do diff --git a/spec/lib/gitlab/config/loader/multi_doc_yaml_spec.rb b/spec/lib/gitlab/config/loader/multi_doc_yaml_spec.rb index f63aacecce6..438f3e5b17a 100644 --- a/spec/lib/gitlab/config/loader/multi_doc_yaml_spec.rb +++ b/spec/lib/gitlab/config/loader/multi_doc_yaml_spec.rb @@ -3,7 +3,8 @@ require 'spec_helper' RSpec.describe Gitlab::Config::Loader::MultiDocYaml, feature_category: :pipeline_composition do - let(:loader) { described_class.new(yml, max_documents: 2) } + let(:loader) { described_class.new(yml, max_documents: 2, reject_empty: reject_empty) } + let(:reject_empty) { false } describe '#load!' do context 'when a simple single delimiter is being used' do @@ -141,6 +142,27 @@ RSpec.describe Gitlab::Config::Loader::MultiDocYaml, feature_category: :pipeline expect(loader.load!).to contain_exactly({ a: 1 }, { b: 2 }) end end + + context 'when the YAML contains empty documents' do + let(:yml) do + <<~YAML + a: 1 + --- + YAML + end + + it 'raises an error' do + expect { loader.load! 
}.to raise_error(::Gitlab::Config::Loader::Yaml::NotHashError) + end + + context 'when reject_empty: true' do + let(:reject_empty) { true } + + it 'loads only non empty documents' do + expect(loader.load!).to contain_exactly({ a: 1 }) + end + end + end end describe '#load_raw!' do diff --git a/spec/lib/gitlab/content_security_policy/config_loader_spec.rb b/spec/lib/gitlab/content_security_policy/config_loader_spec.rb index ffb651fe23c..b40829d72a0 100644 --- a/spec/lib/gitlab/content_security_policy/config_loader_spec.rb +++ b/spec/lib/gitlab/content_security_policy/config_loader_spec.rb @@ -178,53 +178,6 @@ RSpec.describe Gitlab::ContentSecurityPolicy::ConfigLoader do end end - context 'when KAS is configured' do - before do - stub_config_setting(host: 'gitlab.example.com') - allow(::Gitlab::Kas).to receive(:enabled?).and_return true - end - - context 'when user access feature flag is disabled' do - before do - stub_feature_flags(kas_user_access: false) - end - - it 'does not add KAS url to CSP' do - expect(directives['connect_src']).not_to eq("'self' ws://gitlab.example.com #{::Gitlab::Kas.tunnel_url}") - end - end - - context 'when user access feature flag is enabled' do - before do - stub_feature_flags(kas_user_access: true) - end - - context 'when KAS is on same domain as rails' do - let_it_be(:kas_tunnel_url) { "ws://gitlab.example.com/-/k8s-proxy/" } - - before do - allow(::Gitlab::Kas).to receive(:tunnel_url).and_return(kas_tunnel_url) - end - - it 'does not add KAS url to CSP' do - expect(directives['connect_src']).not_to eq("'self' ws://gitlab.example.com #{::Gitlab::Kas.tunnel_url}") - end - end - - context 'when KAS is on subdomain' do - let_it_be(:kas_tunnel_url) { "ws://kas.gitlab.example.com/k8s-proxy/" } - - before do - allow(::Gitlab::Kas).to receive(:tunnel_url).and_return(kas_tunnel_url) - end - - it 'does add KAS url to CSP' do - expect(directives['connect_src']).to eq("'self' ws://gitlab.example.com #{kas_tunnel_url}") - end - end - end - end - 
context 'when CUSTOMER_PORTAL_URL is set' do let(:customer_portal_url) { 'https://customers.example.com' } diff --git a/spec/lib/gitlab/database/async_indexes/migration_helpers_spec.rb b/spec/lib/gitlab/database/async_indexes/migration_helpers_spec.rb index 7c5c368fcb5..b2ba1a60fbb 100644 --- a/spec/lib/gitlab/database/async_indexes/migration_helpers_spec.rb +++ b/spec/lib/gitlab/database/async_indexes/migration_helpers_spec.rb @@ -143,6 +143,92 @@ RSpec.describe Gitlab::Database::AsyncIndexes::MigrationHelpers, feature_categor end end + describe '#prepare_async_index_from_sql' do + let(:index_definition) { "CREATE INDEX CONCURRENTLY #{index_name} ON #{table_name} USING btree(id)" } + + subject(:prepare_async_index_from_sql) do + migration.prepare_async_index_from_sql(index_definition) + end + + before do + connection.create_table(table_name) + + allow(Gitlab::Database::QueryAnalyzers::RestrictAllowedSchemas).to receive(:require_ddl_mode!).and_call_original + end + + it 'requires ddl mode' do + prepare_async_index_from_sql + + expect(Gitlab::Database::QueryAnalyzers::RestrictAllowedSchemas).to have_received(:require_ddl_mode!) 
+ end + + context 'when the given index is invalid' do + let(:index_definition) { "SELECT FROM users" } + + it 'raises a RuntimeError' do + expect { prepare_async_index_from_sql }.to raise_error(RuntimeError, 'Index statement not found!') + end + end + + context 'when the given index is valid' do + context 'when the index algorithm is not concurrent' do + let(:index_definition) { "CREATE INDEX #{index_name} ON #{table_name} USING btree(id)" } + + it 'raises a RuntimeError' do + expect { prepare_async_index_from_sql }.to raise_error(RuntimeError, 'Index must be created concurrently!') + end + end + + context 'when the index algorithm is concurrent' do + context 'when the statement tries to create an index for non-existing table' do + let(:index_definition) { "CREATE INDEX CONCURRENTLY #{index_name} ON foo_table USING btree(id)" } + + it 'raises a RuntimeError' do + expect { prepare_async_index_from_sql }.to raise_error(RuntimeError, 'Table does not exist!') + end + end + + context 'when the statement tries to create an index for an existing table' do + context 'when the async index creation is not available' do + before do + connection.drop_table(:postgres_async_indexes) + end + + it 'does not raise an error' do + expect { prepare_async_index_from_sql }.not_to raise_error + end + end + + context 'when the async index creation is available' do + context 'when there is already an index with the given name' do + before do + connection.add_index(table_name, 'id', name: index_name) + end + + it 'does not create the async index record' do + expect { prepare_async_index_from_sql }.not_to change { index_model.where(name: index_name).count } + end + end + + context 'when there is no index with the given name' do + let(:async_index) { index_model.find_by(name: index_name) } + + it 'creates the async index record' do + expect { prepare_async_index_from_sql }.to change { index_model.where(name: index_name).count }.by(1) + end + + it 'sets the async index attributes correctly' 
do + prepare_async_index_from_sql + + expect(async_index).to have_attributes(table_name: table_name, definition: index_definition) + end + end + end + end + end + end + end + describe '#prepare_async_index_removal' do before do connection.create_table(table_name) diff --git a/spec/lib/gitlab/database/background_migration/batched_job_spec.rb b/spec/lib/gitlab/database/background_migration/batched_job_spec.rb index 073a30e7839..d9b81a2be30 100644 --- a/spec/lib/gitlab/database/background_migration/batched_job_spec.rb +++ b/spec/lib/gitlab/database/background_migration/batched_job_spec.rb @@ -378,41 +378,27 @@ RSpec.describe Gitlab::Database::BackgroundMigration::BatchedJob, type: :model d let(:attempts) { 0 } let(:batch_size) { 10 } let(:sub_batch_size) { 6 } - let(:feature_flag) { :reduce_sub_batch_size_on_timeouts } let(:job) do create(:batched_background_migration_job, attempts: attempts, batch_size: batch_size, sub_batch_size: sub_batch_size) end - where(:feature_flag_state, :within_boundaries, :outside_boundaries, :limit_reached) do - [ - [true, true, false, false], - [false, false, false, false] - ] - end - - with_them do - before do - stub_feature_flags(feature_flag => feature_flag_state) - end + context 'when the number of attempts is lower than the limit and batch size are within boundaries' do + let(:attempts) { 1 } - context 'when the number of attempts is lower than the limit and batch size are within boundaries' do - let(:attempts) { 1 } - - it { expect(job.can_reduce_sub_batch_size?).to be(within_boundaries) } - end + it { expect(job.can_reduce_sub_batch_size?).to be(true) } + end - context 'when the number of attempts is lower than the limit and batch size are outside boundaries' do - let(:batch_size) { 1 } + context 'when the number of attempts is lower than the limit and batch size are outside boundaries' do + let(:batch_size) { 1 } - it { expect(job.can_reduce_sub_batch_size?).to be(outside_boundaries) } - end + it { 
expect(job.can_reduce_sub_batch_size?).to be(false) } + end - context 'when the number of attempts is greater than the limit and batch size are within boundaries' do - let(:attempts) { 3 } + context 'when the number of attempts is greater than the limit and batch size are within boundaries' do + let(:attempts) { 3 } - it { expect(job.can_reduce_sub_batch_size?).to be(limit_reached) } - end + it { expect(job.can_reduce_sub_batch_size?).to be(false) } end end diff --git a/spec/lib/gitlab/database/background_migration/batched_migration_spec.rb b/spec/lib/gitlab/database/background_migration/batched_migration_spec.rb index d132559acea..546f9353808 100644 --- a/spec/lib/gitlab/database/background_migration/batched_migration_spec.rb +++ b/spec/lib/gitlab/database/background_migration/batched_migration_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe Gitlab::Database::BackgroundMigration::BatchedMigration, type: :model do +RSpec.describe Gitlab::Database::BackgroundMigration::BatchedMigration, type: :model, feature_category: :database do it_behaves_like 'having unique enum values' it { is_expected.to be_a Gitlab::Database::SharedModel } @@ -328,6 +328,17 @@ RSpec.describe Gitlab::Database::BackgroundMigration::BatchedMigration, type: :m end end + describe '.finalizing' do + let!(:migration1) { create(:batched_background_migration, :active) } + let!(:migration2) { create(:batched_background_migration, :paused) } + let!(:migration3) { create(:batched_background_migration, :finalizing) } + let!(:migration4) { create(:batched_background_migration, :finished) } + + it 'returns only finalizing migrations' do + expect(described_class.finalizing).to contain_exactly(migration3) + end + end + describe '.successful_rows_counts' do let!(:migration1) { create(:batched_background_migration) } let!(:migration2) { create(:batched_background_migration) } diff --git a/spec/lib/gitlab/database/background_migration/health_status/indicators/patroni_apdex_spec.rb 
b/spec/lib/gitlab/database/background_migration/health_status/indicators/patroni_apdex_spec.rb new file mode 100644 index 00000000000..d3102a105ea --- /dev/null +++ b/spec/lib/gitlab/database/background_migration/health_status/indicators/patroni_apdex_spec.rb @@ -0,0 +1,148 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe Gitlab::Database::BackgroundMigration::HealthStatus::Indicators::PatroniApdex, :aggregate_failures, feature_category: :database do # rubocop:disable Layout/LineLength + let(:schema) { :main } + let(:connection) { Gitlab::Database.database_base_models[schema].connection } + + around do |example| + Gitlab::Database::SharedModel.using_connection(connection) do + example.run + end + end + + describe '#evaluate' do + let(:prometheus_url) { 'http://thanos:9090' } + let(:prometheus_config) { [prometheus_url, { allow_local_requests: true, verify: true }] } + + let(:prometheus_client) { instance_double(Gitlab::PrometheusClient) } + + let(:context) do + Gitlab::Database::BackgroundMigration::HealthStatus::Context + .new(connection, ['users'], gitlab_schema) + end + + let(:gitlab_schema) { "gitlab_#{schema}" } + let(:client_ready) { true } + let(:database_apdex_sli_query_main) { 'Apdex query for main' } + let(:database_apdex_sli_query_ci) { 'Apdex query for ci' } + let(:database_apdex_slo_main) { 0.99 } + let(:database_apdex_slo_ci) { 0.95 } + let(:database_apdex_settings) do + { + prometheus_api_url: prometheus_url, + apdex_sli_query: { + main: database_apdex_sli_query_main, + ci: database_apdex_sli_query_ci + }, + apdex_slo: { + main: database_apdex_slo_main, + ci: database_apdex_slo_ci + } + } + end + + subject(:evaluate) { described_class.new(context).evaluate } + + before do + stub_application_setting(database_apdex_settings: database_apdex_settings) + + allow(Gitlab::PrometheusClient).to receive(:new).with(*prometheus_config).and_return(prometheus_client) + allow(prometheus_client).to 
receive(:ready?).and_return(client_ready) + end + + shared_examples 'Patroni Apdex Evaluator' do |schema| + context "with #{schema} schema" do + let(:schema) { schema } + let(:apdex_slo_above_sli) { { main: 0.991, ci: 0.951 } } + let(:apdex_slo_below_sli) { { main: 0.989, ci: 0.949 } } + + it 'returns NoSignal signal in case the feature flag is disabled' do + stub_feature_flags(batched_migrations_health_status_patroni_apdex: false) + + expect(evaluate).to be_a(Gitlab::Database::BackgroundMigration::HealthStatus::Signals::NotAvailable) + expect(evaluate.reason).to include('indicator disabled') + end + + context 'without database_apdex_settings' do + let(:database_apdex_settings) { nil } + + it 'returns Unknown signal' do + expect(evaluate).to be_a(Gitlab::Database::BackgroundMigration::HealthStatus::Signals::Unknown) + expect(evaluate.reason).to include('Patroni Apdex Settings not configured') + end + end + + context 'when Prometheus client is not ready' do + let(:client_ready) { false } + + it 'returns Unknown signal' do + expect(evaluate).to be_a(Gitlab::Database::BackgroundMigration::HealthStatus::Signals::Unknown) + expect(evaluate.reason).to include('Prometheus client is not ready') + end + end + + context 'when apdex SLI query is not configured' do + let(:"database_apdex_sli_query_#{schema}") { nil } + + it 'returns Unknown signal' do + expect(evaluate).to be_a(Gitlab::Database::BackgroundMigration::HealthStatus::Signals::Unknown) + expect(evaluate.reason).to include('Apdex SLI query is not configured') + end + end + + context 'when slo is not configured' do + let(:"database_apdex_slo_#{schema}") { nil } + + it 'returns Unknown signal' do + expect(evaluate).to be_a(Gitlab::Database::BackgroundMigration::HealthStatus::Signals::Unknown) + expect(evaluate.reason).to include('Apdex SLO is not configured') + end + end + + it 'returns Normal signal when Patroni apdex SLI is above SLO' do + expect(prometheus_client).to receive(:query) + 
.with(send("database_apdex_sli_query_#{schema}")) + .and_return([{ "value" => [1662423310.878, apdex_slo_above_sli[schema]] }]) + expect(evaluate).to be_a(Gitlab::Database::BackgroundMigration::HealthStatus::Signals::Normal) + expect(evaluate.reason).to include('Patroni service apdex is above SLO') + end + + it 'returns Stop signal when Patroni apdex is below SLO' do + expect(prometheus_client).to receive(:query) + .with(send("database_apdex_sli_query_#{schema}")) + .and_return([{ "value" => [1662423310.878, apdex_slo_below_sli[schema]] }]) + expect(evaluate).to be_a(Gitlab::Database::BackgroundMigration::HealthStatus::Signals::Stop) + expect(evaluate.reason).to include('Patroni service apdex is below SLO') + end + + context 'when Patroni apdex can not be calculated' do + where(:result) do + [ + nil, + [], + [{}], + [{ 'value' => 1 }], + [{ 'value' => [1] }] + ] + end + + with_them do + it 'returns Unknown signal' do + expect(prometheus_client).to receive(:query).and_return(result) + expect(evaluate).to be_a(Gitlab::Database::BackgroundMigration::HealthStatus::Signals::Unknown) + expect(evaluate.reason).to include('Patroni service apdex can not be calculated') + end + end + end + end + end + + Gitlab::Database.database_base_models.each do |database_base_model, connection| + next unless connection.present? 
+ + it_behaves_like 'Patroni Apdex Evaluator', database_base_model.to_sym + end + end +end diff --git a/spec/lib/gitlab/database/background_migration/health_status_spec.rb b/spec/lib/gitlab/database/background_migration/health_status_spec.rb index 8bc04d80fa1..e14440f1fb4 100644 --- a/spec/lib/gitlab/database/background_migration/health_status_spec.rb +++ b/spec/lib/gitlab/database/background_migration/health_status_spec.rb @@ -19,8 +19,10 @@ RSpec.describe Gitlab::Database::BackgroundMigration::HealthStatus do let(:health_status) { Gitlab::Database::BackgroundMigration::HealthStatus } let(:autovacuum_indicator_class) { health_status::Indicators::AutovacuumActiveOnTable } let(:wal_indicator_class) { health_status::Indicators::WriteAheadLog } + let(:patroni_apdex_indicator_class) { health_status::Indicators::PatroniApdex } let(:autovacuum_indicator) { instance_double(autovacuum_indicator_class) } let(:wal_indicator) { instance_double(wal_indicator_class) } + let(:patroni_apdex_indicator) { instance_double(patroni_apdex_indicator_class) } before do allow(autovacuum_indicator_class).to receive(:new).with(migration.health_context).and_return(autovacuum_indicator) @@ -36,8 +38,11 @@ RSpec.describe Gitlab::Database::BackgroundMigration::HealthStatus do expect(autovacuum_indicator).to receive(:evaluate).and_return(normal_signal) expect(wal_indicator_class).to receive(:new).with(migration.health_context).and_return(wal_indicator) expect(wal_indicator).to receive(:evaluate).and_return(not_available_signal) + expect(patroni_apdex_indicator_class).to receive(:new).with(migration.health_context) + .and_return(patroni_apdex_indicator) + expect(patroni_apdex_indicator).to receive(:evaluate).and_return(not_available_signal) - expect(evaluate).to contain_exactly(normal_signal, not_available_signal) + expect(evaluate).to contain_exactly(normal_signal, not_available_signal, not_available_signal) end end diff --git a/spec/lib/gitlab/database/consistency_checker_spec.rb 
b/spec/lib/gitlab/database/consistency_checker_spec.rb index c0f0c349ddd..be03bd00619 100644 --- a/spec/lib/gitlab/database/consistency_checker_spec.rb +++ b/spec/lib/gitlab/database/consistency_checker_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe Gitlab::Database::ConsistencyChecker, feature_category: :pods do +RSpec.describe Gitlab::Database::ConsistencyChecker, feature_category: :cell do let(:batch_size) { 10 } let(:max_batches) { 4 } let(:max_runtime) { described_class::MAX_RUNTIME } diff --git a/spec/lib/gitlab/database/load_balancing/action_cable_callbacks_spec.rb b/spec/lib/gitlab/database/load_balancing/action_cable_callbacks_spec.rb index 768855464c1..a57f02b22df 100644 --- a/spec/lib/gitlab/database/load_balancing/action_cable_callbacks_spec.rb +++ b/spec/lib/gitlab/database/load_balancing/action_cable_callbacks_spec.rb @@ -2,18 +2,13 @@ require 'spec_helper' -RSpec.describe Gitlab::Database::LoadBalancing::ActionCableCallbacks, :request_store do +RSpec.describe Gitlab::Database::LoadBalancing::ActionCableCallbacks, :request_store, feature_category: :shared do describe '.wrapper' do - it 'uses primary and then releases the connection and clears the session' do + it 'releases the connection and clears the session' do expect(Gitlab::Database::LoadBalancing).to receive(:release_hosts) expect(Gitlab::Database::LoadBalancing::Session).to receive(:clear_session) - described_class.wrapper.call( - nil, - lambda do - expect(Gitlab::Database::LoadBalancing::Session.current.use_primary?).to eq(true) - end - ) + described_class.wrapper.call(nil, lambda {}) end context 'with an exception' do diff --git a/spec/lib/gitlab/database/load_balancing/sidekiq_client_middleware_spec.rb b/spec/lib/gitlab/database/load_balancing/sidekiq_client_middleware_spec.rb index 7eb20f77417..83fc614bde3 100644 --- a/spec/lib/gitlab/database/load_balancing/sidekiq_client_middleware_spec.rb +++ b/spec/lib/gitlab/database/load_balancing/sidekiq_client_middleware_spec.rb @@ 
-67,16 +67,6 @@ RSpec.describe Gitlab::Database::LoadBalancing::SidekiqClientMiddleware, feature let(:location) { '0/D525E3A8' } - context 'when feature flag is disabled' do - let(:expected_consistency) { :always } - - before do - stub_feature_flags(load_balancing_for_test_data_consistency_worker: false) - end - - include_examples 'does not pass database locations' - end - context 'when write was not performed' do before do allow(Gitlab::Database::LoadBalancing::Session.current).to receive(:use_primary?).and_return(false) @@ -106,7 +96,7 @@ RSpec.describe Gitlab::Database::LoadBalancing::SidekiqClientMiddleware, feature expected_location = {} Gitlab::Database::LoadBalancing.each_load_balancer do |lb| - expect(lb).to receive(:host).and_return(nil) + expect(lb).to receive(:host).at_least(:once).and_return(nil) expect(lb).to receive(:primary_write_location).and_return(location) expected_location[lb.name] = location diff --git a/spec/lib/gitlab/database/load_balancing/sidekiq_server_middleware_spec.rb b/spec/lib/gitlab/database/load_balancing/sidekiq_server_middleware_spec.rb index abf10456d0a..7ad0ddbca8e 100644 --- a/spec/lib/gitlab/database/load_balancing/sidekiq_server_middleware_spec.rb +++ b/spec/lib/gitlab/database/load_balancing/sidekiq_server_middleware_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe Gitlab::Database::LoadBalancing::SidekiqServerMiddleware, :clean_gitlab_redis_queues do +RSpec.describe Gitlab::Database::LoadBalancing::SidekiqServerMiddleware, :clean_gitlab_redis_queues, feature_category: :scalability do let(:middleware) { described_class.new } let(:worker) { worker_class.new } let(:location) { '0/D525E3A8' } @@ -15,6 +15,8 @@ RSpec.describe Gitlab::Database::LoadBalancing::SidekiqServerMiddleware, :clean_ replication_lag!(false) Gitlab::Database::LoadBalancing::Session.clear_session + + stub_const("#{described_class.name}::REPLICA_WAIT_SLEEP_SECONDS", 0.0) end after do @@ -76,14 +78,6 @@ RSpec.describe 
Gitlab::Database::LoadBalancing::SidekiqServerMiddleware, :clean_ end shared_examples_for 'sticks based on data consistency' do - context 'when load_balancing_for_test_data_consistency_worker is disabled' do - before do - stub_feature_flags(load_balancing_for_test_data_consistency_worker: false) - end - - include_examples 'stick to the primary', 'primary' - end - context 'when database wal location is set' do let(:job) { { 'job_id' => 'a180b47c-3fd6-41b8-81e9-34da61c3400e', 'wal_locations' => wal_locations } } @@ -119,9 +113,9 @@ RSpec.describe Gitlab::Database::LoadBalancing::SidekiqServerMiddleware, :clean_ end end - shared_examples_for 'sleeps when necessary' do + shared_examples_for 'essential sleep' do context 'when WAL locations are blank', :freeze_time do - let(:job) { { "retry" => 3, "job_id" => "a180b47c-3fd6-41b8-81e9-34da61c3400e", "wal_locations" => {}, "created_at" => Time.current.to_f - (described_class::MINIMUM_DELAY_INTERVAL_SECONDS - 0.3) } } + let(:job) { { "retry" => 3, "job_id" => "a180b47c-3fd6-41b8-81e9-34da61c3400e", "wal_locations" => {}, "created_at" => Time.current.to_f - (described_class::REPLICA_WAIT_SLEEP_SECONDS + 0.2) } } it 'does not sleep' do expect(middleware).not_to receive(:sleep) @@ -134,7 +128,7 @@ RSpec.describe Gitlab::Database::LoadBalancing::SidekiqServerMiddleware, :clean_ let(:job) { { "retry" => 3, "job_id" => "a180b47c-3fd6-41b8-81e9-34da61c3400e", 'wal_locations' => wal_locations, "created_at" => Time.current.to_f - elapsed_time } } context 'when delay interval has not elapsed' do - let(:elapsed_time) { described_class::MINIMUM_DELAY_INTERVAL_SECONDS - 0.3 } + let(:elapsed_time) { described_class::REPLICA_WAIT_SLEEP_SECONDS + 0.2 } context 'when replica is up to date' do before do @@ -158,30 +152,24 @@ RSpec.describe Gitlab::Database::LoadBalancing::SidekiqServerMiddleware, :clean_ end it 'sleeps until the minimum delay is reached' do - expect(middleware).to 
receive(:sleep).with(be_within(0.01).of(described_class::MINIMUM_DELAY_INTERVAL_SECONDS - elapsed_time)) + expect(middleware).to receive(:sleep).with(described_class::REPLICA_WAIT_SLEEP_SECONDS) run_middleware end end - end - - context 'when delay interval has elapsed' do - let(:elapsed_time) { described_class::MINIMUM_DELAY_INTERVAL_SECONDS + 0.3 } - - it 'does not sleep' do - expect(middleware).not_to receive(:sleep) - run_middleware - end - end - - context 'when created_at is in the future' do - let(:elapsed_time) { -5 } + context 'when replica is never not up to date' do + before do + Gitlab::Database::LoadBalancing.each_load_balancer do |lb| + allow(lb).to receive(:select_up_to_date_host).and_return(false, false) + end + end - it 'does not sleep' do - expect(middleware).not_to receive(:sleep) + it 'sleeps until the maximum delay is reached' do + expect(middleware).to receive(:sleep).exactly(3).times.with(described_class::REPLICA_WAIT_SLEEP_SECONDS) - run_middleware + run_middleware + end end end end @@ -200,7 +188,7 @@ RSpec.describe Gitlab::Database::LoadBalancing::SidekiqServerMiddleware, :clean_ context 'when delay interval has not elapsed', :freeze_time do let(:job) { { "retry" => 3, "job_id" => "a180b47c-3fd6-41b8-81e9-34da61c3400e", 'wal_locations' => wal_locations, "created_at" => Time.current.to_f - elapsed_time } } - let(:elapsed_time) { described_class::MINIMUM_DELAY_INTERVAL_SECONDS - 0.3 } + let(:elapsed_time) { described_class::REPLICA_WAIT_SLEEP_SECONDS + 0.2 } it 'does not sleep' do expect(middleware).not_to receive(:sleep) @@ -214,7 +202,7 @@ RSpec.describe Gitlab::Database::LoadBalancing::SidekiqServerMiddleware, :clean_ include_context 'data consistency worker class', :delayed, :load_balancing_for_test_data_consistency_worker include_examples 'sticks based on data consistency' - include_examples 'sleeps when necessary' + include_examples 'essential sleep' context 'when replica is not up to date' do before do @@ -263,7 +251,7 @@ RSpec.describe 
Gitlab::Database::LoadBalancing::SidekiqServerMiddleware, :clean_ include_context 'data consistency worker class', :sticky, :load_balancing_for_test_data_consistency_worker include_examples 'sticks based on data consistency' - include_examples 'sleeps when necessary' + include_examples 'essential sleep' context 'when replica is not up to date' do before do diff --git a/spec/lib/gitlab/database/load_balancing_spec.rb b/spec/lib/gitlab/database/load_balancing_spec.rb index 59e16e6ca8b..7196b4bc337 100644 --- a/spec/lib/gitlab/database/load_balancing_spec.rb +++ b/spec/lib/gitlab/database/load_balancing_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe Gitlab::Database::LoadBalancing, :suppress_gitlab_schemas_validate_connection, feature_category: :pods do +RSpec.describe Gitlab::Database::LoadBalancing, :suppress_gitlab_schemas_validate_connection, feature_category: :cell do describe '.base_models' do it 'returns the models to apply load balancing to' do models = described_class.base_models @@ -497,14 +497,14 @@ RSpec.describe Gitlab::Database::LoadBalancing, :suppress_gitlab_schemas_validat where(:queries, :expected_role) do [ # Reload cache. The schema loading queries should be handled by - # primary. + # replica. [ -> { model.connection.clear_cache! 
model.connection.schema_cache.add('users') model.connection.pool.release_connection }, - :primary + :replica ], # Call model's connection method diff --git a/spec/lib/gitlab/database/lock_writes_manager_spec.rb b/spec/lib/gitlab/database/lock_writes_manager_spec.rb index c06c463d918..2aa95372338 100644 --- a/spec/lib/gitlab/database/lock_writes_manager_spec.rb +++ b/spec/lib/gitlab/database/lock_writes_manager_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe Gitlab::Database::LockWritesManager, :delete, feature_category: :pods do +RSpec.describe Gitlab::Database::LockWritesManager, :delete, feature_category: :cell do let(:connection) { ApplicationRecord.connection } let(:test_table) { '_test_table' } let(:logger) { instance_double(Logger) } @@ -122,6 +122,13 @@ RSpec.describe Gitlab::Database::LockWritesManager, :delete, feature_category: : } end + it 'returns result hash with action skipped' do + subject.lock_writes + + expect(subject.lock_writes).to eq({ action: "skipped", database: "main", dry_run: false, +table: test_table }) + end + context 'when running in dry_run mode' do let(:dry_run) { true } @@ -146,6 +153,11 @@ RSpec.describe Gitlab::Database::LockWritesManager, :delete, feature_category: : connection.execute("truncate #{test_table}") end.not_to raise_error end + + it 'returns result hash with action locked' do + expect(subject.lock_writes).to eq({ action: "locked", database: "main", dry_run: dry_run, +table: test_table }) + end end end @@ -186,6 +198,11 @@ RSpec.describe Gitlab::Database::LockWritesManager, :delete, feature_category: : subject.unlock_writes end + it 'returns result hash with action unlocked' do + expect(subject.unlock_writes).to eq({ action: "unlocked", database: "main", dry_run: dry_run, +table: test_table }) + end + context 'when running in dry_run mode' do let(:dry_run) { true } @@ -206,6 +223,11 @@ RSpec.describe Gitlab::Database::LockWritesManager, :delete, feature_category: : connection.execute("delete from 
#{test_table}") end.to raise_error(ActiveRecord::StatementInvalid, /Table: "#{test_table}" is write protected/) end + + it 'returns result hash with dry_run true' do + expect(subject.unlock_writes).to eq({ action: "unlocked", database: "main", dry_run: dry_run, +table: test_table }) + end end end diff --git a/spec/lib/gitlab/database/loose_foreign_keys_spec.rb b/spec/lib/gitlab/database/loose_foreign_keys_spec.rb index 3c2d9ca82f2..552df64096a 100644 --- a/spec/lib/gitlab/database/loose_foreign_keys_spec.rb +++ b/spec/lib/gitlab/database/loose_foreign_keys_spec.rb @@ -85,31 +85,40 @@ RSpec.describe Gitlab::Database::LooseForeignKeys do end end - describe '.definitions' do - subject(:definitions) { described_class.definitions } - - it 'contains at least all parent tables that have triggers' do - all_definition_parent_tables = definitions.map { |d| d.to_table }.to_set + context 'all tables have correct triggers installed' do + let(:all_tables_from_yaml) { described_class.definitions.pluck(:to_table).uniq } + let(:all_tables_with_triggers) do triggers_query = <<~SQL - SELECT event_object_table, trigger_name - FROM information_schema.triggers + SELECT event_object_table FROM information_schema.triggers WHERE trigger_name LIKE '%_loose_fk_trigger' - GROUP BY event_object_table, trigger_name SQL - all_triggers = ApplicationRecord.connection.execute(triggers_query) - - all_triggers.each do |trigger| - table = trigger['event_object_table'] - trigger_name = trigger['trigger_name'] - error_message = <<~END - Missing a loose foreign key definition for parent table: #{table} with trigger: #{trigger_name}. - Loose foreign key definitions must be added before triggers are added and triggers must be removed before removing the loose foreign key definition. - Read more at https://docs.gitlab.com/ee/development/database/loose_foreign_keys.html ." 
- END - expect(all_definition_parent_tables).to include(table), error_message - end + ApplicationRecord.connection.execute(triggers_query) + .pluck('event_object_table').uniq + end + + it 'all YAML tables do have `track_record_deletions` installed' do + missing_trigger_tables = all_tables_from_yaml - all_tables_with_triggers + + expect(missing_trigger_tables).to be_empty, <<~END + The loose foreign keys definitions require using `track_record_deletions` + for the following tables: #{missing_trigger_tables}. + Read more at https://docs.gitlab.com/ee/development/database/loose_foreign_keys.html." + END + end + + it 'no extra tables have `track_record_deletions` installed' do + extra_trigger_tables = all_tables_with_triggers - all_tables_from_yaml + + pending 'This result of this test is informatory, and not critical' if extra_trigger_tables.any? + + expect(extra_trigger_tables).to be_empty, <<~END + The following tables have unused `track_record_deletions` triggers installed, + but they are not referenced by any of the loose foreign key definitions: #{extra_trigger_tables}. + You can remove them in one of the future releases as part of `db/post_migrate`. + Read more at https://docs.gitlab.com/ee/development/database/loose_foreign_keys.html." 
+ END end end diff --git a/spec/lib/gitlab/database/migration_helpers/automatic_lock_writes_on_tables_spec.rb b/spec/lib/gitlab/database/migration_helpers/automatic_lock_writes_on_tables_spec.rb index be9346e3829..090a9f53523 100644 --- a/spec/lib/gitlab/database/migration_helpers/automatic_lock_writes_on_tables_spec.rb +++ b/spec/lib/gitlab/database/migration_helpers/automatic_lock_writes_on_tables_spec.rb @@ -3,7 +3,7 @@ require 'spec_helper' RSpec.describe Gitlab::Database::MigrationHelpers::AutomaticLockWritesOnTables, - :reestablished_active_record_base, :delete, query_analyzers: false, feature_category: :pods do + :reestablished_active_record_base, :delete, query_analyzers: false, feature_category: :cell do using RSpec::Parameterized::TableSyntax let(:schema_class) { Class.new(Gitlab::Database::Migration[2.1]) } @@ -86,7 +86,7 @@ RSpec.describe Gitlab::Database::MigrationHelpers::AutomaticLockWritesOnTables, let(:create_gitlab_shared_table_migration_class) { create_table_migration(gitlab_shared_table_name) } before do - skip_if_multiple_databases_are_setup(:ci) + skip_if_database_exists(:ci) end it 'does not lock any newly created tables' do @@ -106,7 +106,7 @@ RSpec.describe Gitlab::Database::MigrationHelpers::AutomaticLockWritesOnTables, context 'when multiple databases' do before do - skip_if_multiple_databases_not_setup(:ci) + skip_if_shared_database(:ci) end let(:migration_class) { create_table_migration(table_name, skip_automatic_lock_on_writes) } @@ -238,7 +238,7 @@ RSpec.describe Gitlab::Database::MigrationHelpers::AutomaticLockWritesOnTables, context 'when renaming a table' do before do - skip_if_multiple_databases_not_setup(:ci) + skip_if_shared_database(:ci) create_table_migration(old_table_name).migrate(:up) # create the table first before renaming it end @@ -277,7 +277,7 @@ RSpec.describe Gitlab::Database::MigrationHelpers::AutomaticLockWritesOnTables, let(:config_model) { Gitlab::Database.database_base_models[:main] } before do - 
skip_if_multiple_databases_are_setup(:ci) + skip_if_database_exists(:ci) end it 'does not lock any newly created tables' do @@ -305,7 +305,7 @@ RSpec.describe Gitlab::Database::MigrationHelpers::AutomaticLockWritesOnTables, context 'when multiple databases' do before do - skip_if_multiple_databases_not_setup(:ci) + skip_if_shared_database(:ci) migration_class.connection.execute("CREATE TABLE #{table_name}()") migration_class.migrate(:up) end diff --git a/spec/lib/gitlab/database/migration_helpers/convert_to_bigint_spec.rb b/spec/lib/gitlab/database/migration_helpers/convert_to_bigint_spec.rb index b1971977e7c..cee5f54bd6a 100644 --- a/spec/lib/gitlab/database/migration_helpers/convert_to_bigint_spec.rb +++ b/spec/lib/gitlab/database/migration_helpers/convert_to_bigint_spec.rb @@ -7,9 +7,9 @@ RSpec.describe Gitlab::Database::MigrationHelpers::ConvertToBigint, feature_cate using RSpec::Parameterized::TableSyntax where(:dot_com, :dev_or_test, :jh, :expectation) do - true | true | true | false + true | true | true | true true | false | true | false - false | true | true | false + false | true | true | true false | false | true | false true | true | false | true true | false | false | true diff --git a/spec/lib/gitlab/database/migration_helpers/loose_foreign_key_helpers_spec.rb b/spec/lib/gitlab/database/migration_helpers/loose_foreign_key_helpers_spec.rb index 25fc676d09e..2b58cdff931 100644 --- a/spec/lib/gitlab/database/migration_helpers/loose_foreign_key_helpers_spec.rb +++ b/spec/lib/gitlab/database/migration_helpers/loose_foreign_key_helpers_spec.rb @@ -7,20 +7,22 @@ RSpec.describe Gitlab::Database::MigrationHelpers::LooseForeignKeyHelpers do ActiveRecord::Migration.new.extend(described_class) end + let_it_be(:table_name) { :_test_loose_fk_test_table } + let(:model) do Class.new(ApplicationRecord) do - self.table_name = '_test_loose_fk_test_table' + self.table_name = :_test_loose_fk_test_table end end before(:all) do - migration.create_table 
:_test_loose_fk_test_table do |t| + migration.create_table table_name do |t| t.timestamps end end after(:all) do - migration.drop_table :_test_loose_fk_test_table + migration.drop_table table_name end before do @@ -33,11 +35,13 @@ RSpec.describe Gitlab::Database::MigrationHelpers::LooseForeignKeyHelpers do expect(LooseForeignKeys::DeletedRecord.count).to eq(0) end + + it { expect(migration.has_loose_foreign_key?(table_name)).to be_falsy } end context 'when the record deletion tracker trigger is installed' do before do - migration.track_record_deletions(:_test_loose_fk_test_table) + migration.track_record_deletions(table_name) end it 'stores the record deletion' do @@ -55,7 +59,7 @@ RSpec.describe Gitlab::Database::MigrationHelpers::LooseForeignKeyHelpers do .first expect(deleted_record.primary_key_value).to eq(record_to_be_deleted.id) - expect(deleted_record.fully_qualified_table_name).to eq('public._test_loose_fk_test_table') + expect(deleted_record.fully_qualified_table_name).to eq("public.#{table_name}") expect(deleted_record.partition_number).to eq(1) end @@ -64,5 +68,7 @@ RSpec.describe Gitlab::Database::MigrationHelpers::LooseForeignKeyHelpers do expect(LooseForeignKeys::DeletedRecord.count).to eq(3) end + + it { expect(migration.has_loose_foreign_key?(table_name)).to be_truthy } end end diff --git a/spec/lib/gitlab/database/migration_helpers/restrict_gitlab_schema_spec.rb b/spec/lib/gitlab/database/migration_helpers/restrict_gitlab_schema_spec.rb index 714fbab5aff..faf0447c054 100644 --- a/spec/lib/gitlab/database/migration_helpers/restrict_gitlab_schema_spec.rb +++ b/spec/lib/gitlab/database/migration_helpers/restrict_gitlab_schema_spec.rb @@ -3,7 +3,7 @@ require 'spec_helper' RSpec.describe Gitlab::Database::MigrationHelpers::RestrictGitlabSchema, query_analyzers: false, - stub_feature_flags: false, feature_category: :pods do + stub_feature_flags: false, feature_category: :cell do let(:schema_class) { 
Class.new(Gitlab::Database::Migration[1.0]).include(described_class) } # We keep only the GitlabSchemasValidateConnection analyzer running @@ -506,7 +506,7 @@ RSpec.describe Gitlab::Database::MigrationHelpers::RestrictGitlabSchema, query_a def down; end end, query_matcher: /FROM ci_builds/, - setup: -> (_) { skip_if_multiple_databases_not_setup }, + setup: -> (_) { skip_if_shared_database(:ci) }, expected: { no_gitlab_schema: { main: :cross_schema_error, diff --git a/spec/lib/gitlab/database/migration_helpers/wraparound_vacuum_helpers_spec.rb b/spec/lib/gitlab/database/migration_helpers/wraparound_vacuum_helpers_spec.rb new file mode 100644 index 00000000000..eb67e81f677 --- /dev/null +++ b/spec/lib/gitlab/database/migration_helpers/wraparound_vacuum_helpers_spec.rb @@ -0,0 +1,99 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe Gitlab::Database::MigrationHelpers::WraparoundVacuumHelpers, feature_category: :database do + include Database::DatabaseHelpers + + let(:table_name) { 'ci_builds' } + + describe '#check_if_wraparound_in_progress' do + let(:migration) do + ActiveRecord::Migration.new.extend(described_class) + end + + subject { migration.check_if_wraparound_in_progress(table_name) } + + it 'delegates to the wraparound class' do + expect(described_class::WraparoundCheck) + .to receive(:new) + .with(table_name, migration: migration) + .and_call_original + + expect { subject }.not_to raise_error + end + end + + describe described_class::WraparoundCheck do + let(:migration) do + ActiveRecord::Migration.new.extend(Gitlab::Database::MigrationHelpers::WraparoundVacuumHelpers) + end + + describe '#execute' do + subject do + described_class.new(table_name, migration: migration).execute + end + + context 'with wraparound vacuuum running' do + before do + swapout_view_for_table(:pg_stat_activity, connection: migration.connection) + + migration.connection.execute(<<~SQL.squish) + INSERT INTO pg_stat_activity ( + datid, datname, pid, 
backend_start, xact_start, query_start, + state_change, wait_event_type, wait_event, state, backend_xmin, + query, backend_type) + VALUES ( + 16401, 'gitlabhq_dblab', 178, '2023-03-30 08:10:50.851322+00', + '2023-03-30 08:10:50.890485+00', now() - '150 minutes'::interval, + '2023-03-30 08:10:50.890485+00', 'IO', 'DataFileRead', 'active','3214790381'::xid, + 'autovacuum: VACUUM public.ci_builds (to prevent wraparound)', 'autovacuum worker') + SQL + end + + it 'outputs a message related to autovacuum' do + expect { subject } + .to output(/Autovacuum with wraparound prevention mode is running on `ci_builds`/).to_stdout + end + + it { expect { subject }.to output(/autovacuum: VACUUM public.ci_builds \(to prevent wraparound\)/).to_stdout } + it { expect { subject }.to output(/Current duration: 2 hours, 30 minutes/).to_stdout } + it { expect { subject }.to output(/Process id: 178/).to_stdout } + it { expect { subject }.to output(/`select pg_cancel_backend\(178\);`/).to_stdout } + + context 'when GITLAB_MIGRATIONS_DISABLE_WRAPAROUND_CHECK is set' do + before do + stub_env('GITLAB_MIGRATIONS_DISABLE_WRAPAROUND_CHECK' => 'true') + end + + it { expect { subject }.not_to output(/autovacuum/i).to_stdout } + + it 'is disabled on .com' do + expect(Gitlab).to receive(:com?).and_return(true) + + expect { subject }.not_to raise_error + end + end + + context 'when executed by self-managed' do + before do + allow(Gitlab).to receive(:com?).and_return(false) + allow(Gitlab).to receive(:dev_or_test_env?).and_return(false) + end + + it { expect { subject }.not_to output(/autovacuum/i).to_stdout } + end + end + + context 'with wraparound vacuuum not running' do + it { expect { subject }.not_to output(/autovacuum/i).to_stdout } + end + + context 'when the table does not exist' do + let(:table_name) { :no_table } + + it { expect { subject }.to raise_error described_class::WraparoundError, /no_table/ } + end + end + end +end diff --git a/spec/lib/gitlab/database/migration_helpers_spec.rb 
b/spec/lib/gitlab/database/migration_helpers_spec.rb index 3f6528558b1..a3eab560c67 100644 --- a/spec/lib/gitlab/database/migration_helpers_spec.rb +++ b/spec/lib/gitlab/database/migration_helpers_spec.rb @@ -14,6 +14,8 @@ RSpec.describe Gitlab::Database::MigrationHelpers, feature_category: :database d allow(model).to receive(:puts) end + it { expect(model.singleton_class.ancestors).to include(described_class::WraparoundVacuumHelpers) } + describe 'overridden dynamic model helpers' do let(:test_table) { '_test_batching_table' } @@ -120,157 +122,6 @@ RSpec.describe Gitlab::Database::MigrationHelpers, feature_category: :database d end end - describe '#create_table_with_constraints' do - let(:table_name) { :test_table } - let(:column_attributes) do - [ - { name: 'id', sql_type: 'bigint', null: false, default: nil }, - { name: 'created_at', sql_type: 'timestamp with time zone', null: false, default: nil }, - { name: 'updated_at', sql_type: 'timestamp with time zone', null: false, default: nil }, - { name: 'some_id', sql_type: 'integer', null: false, default: nil }, - { name: 'active', sql_type: 'boolean', null: false, default: 'true' }, - { name: 'name', sql_type: 'text', null: true, default: nil } - ] - end - - before do - allow(model).to receive(:transaction_open?).and_return(true) - end - - context 'when no check constraints are defined' do - it 'creates the table as expected' do - model.create_table_with_constraints table_name do |t| - t.timestamps_with_timezone - t.integer :some_id, null: false - t.boolean :active, null: false, default: true - t.text :name - end - - expect_table_columns_to_match(column_attributes, table_name) - end - end - - context 'when check constraints are defined' do - context 'when the text_limit is explicity named' do - it 'creates the table as expected' do - model.create_table_with_constraints table_name do |t| - t.timestamps_with_timezone - t.integer :some_id, null: false - t.boolean :active, null: false, default: true - t.text :name - - 
t.text_limit :name, 255, name: 'check_name_length' - t.check_constraint :some_id_is_positive, 'some_id > 0' - end - - expect_table_columns_to_match(column_attributes, table_name) - - expect_check_constraint(table_name, 'check_name_length', 'char_length(name) <= 255') - expect_check_constraint(table_name, 'some_id_is_positive', 'some_id > 0') - end - end - - context 'when the text_limit is not named' do - it 'creates the table as expected, naming the text limit' do - model.create_table_with_constraints table_name do |t| - t.timestamps_with_timezone - t.integer :some_id, null: false - t.boolean :active, null: false, default: true - t.text :name - - t.text_limit :name, 255 - t.check_constraint :some_id_is_positive, 'some_id > 0' - end - - expect_table_columns_to_match(column_attributes, table_name) - - expect_check_constraint(table_name, 'check_cda6f69506', 'char_length(name) <= 255') - expect_check_constraint(table_name, 'some_id_is_positive', 'some_id > 0') - end - end - - it 'runs the change within a with_lock_retries' do - expect(model).to receive(:with_lock_retries).ordered.and_yield - expect(model).to receive(:create_table).ordered.and_call_original - expect(model).to receive(:execute).with(<<~SQL).ordered - ALTER TABLE "#{table_name}"\nADD CONSTRAINT "check_cda6f69506" CHECK (char_length("name") <= 255) - SQL - - model.create_table_with_constraints table_name do |t| - t.text :name - t.text_limit :name, 255 - end - end - - context 'when with_lock_retries re-runs the block' do - it 'only creates constraint for unique definitions' do - expected_sql = <<~SQL - ALTER TABLE "#{table_name}"\nADD CONSTRAINT "check_cda6f69506" CHECK (char_length("name") <= 255) - SQL - - expect(model).to receive(:create_table).twice.and_call_original - - expect(model).to receive(:execute).with(expected_sql).and_raise(ActiveRecord::LockWaitTimeout) - expect(model).to receive(:execute).with(expected_sql).and_call_original - - model.create_table_with_constraints table_name do |t| - 
t.timestamps_with_timezone - t.integer :some_id, null: false - t.boolean :active, null: false, default: true - t.text :name - - t.text_limit :name, 255 - end - - expect_table_columns_to_match(column_attributes, table_name) - - expect_check_constraint(table_name, 'check_cda6f69506', 'char_length(name) <= 255') - end - end - - context 'when constraints are given invalid names' do - let(:expected_max_length) { described_class::MAX_IDENTIFIER_NAME_LENGTH } - let(:expected_error_message) { "The maximum allowed constraint name is #{expected_max_length} characters" } - - context 'when the explicit text limit name is not valid' do - it 'raises an error' do - too_long_length = expected_max_length + 1 - - expect do - model.create_table_with_constraints table_name do |t| - t.timestamps_with_timezone - t.integer :some_id, null: false - t.boolean :active, null: false, default: true - t.text :name - - t.text_limit :name, 255, name: ('a' * too_long_length) - t.check_constraint :some_id_is_positive, 'some_id > 0' - end - end.to raise_error(expected_error_message) - end - end - - context 'when a check constraint name is not valid' do - it 'raises an error' do - too_long_length = expected_max_length + 1 - - expect do - model.create_table_with_constraints table_name do |t| - t.timestamps_with_timezone - t.integer :some_id, null: false - t.boolean :active, null: false, default: true - t.text :name - - t.text_limit :name, 255 - t.check_constraint ('a' * too_long_length), 'some_id > 0' - end - end.to raise_error(expected_error_message) - end - end - end - end - end - describe '#add_concurrent_index' do context 'outside a transaction' do before do @@ -1199,6 +1050,38 @@ RSpec.describe Gitlab::Database::MigrationHelpers, feature_category: :database d it_behaves_like 'foreign key checks' end + context 'if the schema cache does not include the constrained_columns column' do + let(:target_table) { nil } + + around do |ex| + model.transaction do + 
require_migration!('add_columns_to_postgres_foreign_keys') + AddColumnsToPostgresForeignKeys.new.down + Gitlab::Database::PostgresForeignKey.reset_column_information + Gitlab::Database::PostgresForeignKey.columns_hash # Force populate the column hash in the old schema + AddColumnsToPostgresForeignKeys.new.up + + # Rolling back reverts the schema cache information, so we need to run the example here before the rollback. + ex.run + + raise ActiveRecord::Rollback + end + + # make sure that we're resetting the schema cache here so that we don't leak the change to other tests. + Gitlab::Database::PostgresForeignKey.reset_column_information + # Double-check that the column information is back to normal + expect(Gitlab::Database::PostgresForeignKey.columns_hash.keys).to include('constrained_columns') + end + + # This test verifies that the situation we're trying to set up for the shared examples is actually being + # set up correctly + it 'correctly sets up the test without the column in the columns_hash' do + expect(Gitlab::Database::PostgresForeignKey.columns_hash.keys).not_to include('constrained_columns') + end + + it_behaves_like 'foreign key checks' + end + it 'compares by target table if no column given' do expect(model.foreign_key_exists?(:projects, :other_table)).to be_falsey end diff --git a/spec/lib/gitlab/database/migrations/pg_backend_pid_spec.rb b/spec/lib/gitlab/database/migrations/pg_backend_pid_spec.rb new file mode 100644 index 00000000000..515f59345ee --- /dev/null +++ b/spec/lib/gitlab/database/migrations/pg_backend_pid_spec.rb @@ -0,0 +1,44 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe Gitlab::Database::Migrations::PgBackendPid, feature_category: :database do + describe Gitlab::Database::Migrations::PgBackendPid::MigratorPgBackendPid do + let(:klass) do + Class.new do + def with_advisory_lock_connection + yield :conn + end + end + end + + it 're-yields with same arguments and wraps it with calls to .say' do + 
patched_instance = klass.prepend(described_class).new + expect(Gitlab::Database::Migrations::PgBackendPid).to receive(:say).twice + + expect { |b| patched_instance.with_advisory_lock_connection(&b) }.to yield_with_args(:conn) + end + end + + describe '.patch!' do + it 'patches ActiveRecord::Migrator' do + expect(ActiveRecord::Migrator).to receive(:prepend).with(described_class::MigratorPgBackendPid) + + described_class.patch! + end + end + + describe '.say' do + it 'outputs the connection information' do + conn = ActiveRecord::Base.connection + + expect(conn).to receive(:object_id).and_return(9876) + expect(conn).to receive(:select_value).with('SELECT pg_backend_pid()').and_return(12345) + expect(Gitlab::Database).to receive(:db_config_name).with(conn).and_return('main') + + expected_output = "main: == [advisory_lock_connection] object_id: 9876, pg_backend_pid: 12345\n" + + expect { described_class.say(conn) }.to output(expected_output).to_stdout + end + end +end diff --git a/spec/lib/gitlab/database/migrations/runner_spec.rb b/spec/lib/gitlab/database/migrations/runner_spec.rb index 66eb5a5de51..7c71076e8f3 100644 --- a/spec/lib/gitlab/database/migrations/runner_spec.rb +++ b/spec/lib/gitlab/database/migrations/runner_spec.rb @@ -65,7 +65,7 @@ RSpec.describe Gitlab::Database::Migrations::Runner, :reestablished_active_recor end before do - skip_if_multiple_databases_not_setup unless database == :main + skip_if_shared_database(database) stub_const('Gitlab::Database::Migrations::Runner::BASE_RESULT_DIR', base_result_dir) allow(ActiveRecord::Migrator).to receive(:new) do |dir, _all_migrations, _schema_migration_class, version_to_migrate| diff --git a/spec/lib/gitlab/database/partitioning/convert_table_to_first_list_partition_spec.rb b/spec/lib/gitlab/database/partitioning/convert_table_to_first_list_partition_spec.rb index cd3a94f5737..f4b13033270 100644 --- a/spec/lib/gitlab/database/partitioning/convert_table_to_first_list_partition_spec.rb +++ 
b/spec/lib/gitlab/database/partitioning/convert_table_to_first_list_partition_spec.rb @@ -2,11 +2,15 @@ require 'spec_helper' -RSpec.describe Gitlab::Database::Partitioning::ConvertTableToFirstListPartition do +RSpec.describe Gitlab::Database::Partitioning::ConvertTableToFirstListPartition, feature_category: :database do include Gitlab::Database::DynamicModelHelpers include Database::TableSchemaHelpers - let(:migration_context) { Gitlab::Database::Migration[2.0].new } + let(:migration_context) do + Gitlab::Database::Migration[2.0].new.tap do |migration| + migration.extend Gitlab::Database::PartitioningMigrationHelpers::TableManagementHelpers + end + end let(:connection) { migration_context.connection } let(:table_name) { '_test_table_to_partition' } @@ -73,7 +77,9 @@ RSpec.describe Gitlab::Database::Partitioning::ConvertTableToFirstListPartition end describe "#prepare_for_partitioning" do - subject(:prepare) { converter.prepare_for_partitioning } + subject(:prepare) { converter.prepare_for_partitioning(async: async) } + + let(:async) { false } it 'adds a check constraint' do expect { prepare }.to change { @@ -83,9 +89,100 @@ RSpec.describe Gitlab::Database::Partitioning::ConvertTableToFirstListPartition .count }.from(0).to(1) end + + context 'when it fails to add constraint' do + before do + allow(migration_context).to receive(:add_check_constraint) + end + + it 'raises UnableToPartition error' do + expect { prepare } + .to raise_error(described_class::UnableToPartition) + .and change { + Gitlab::Database::PostgresConstraint + .check_constraints + .by_table_identifier(table_identifier) + .count + }.by(0) + end + end + + context 'when async' do + let(:async) { true } + + it 'adds a NOT VALID check constraint' do + expect { prepare }.to change { + Gitlab::Database::PostgresConstraint + .check_constraints + .by_table_identifier(table_identifier) + .count + }.from(0).to(1) + + constraint = + Gitlab::Database::PostgresConstraint + .check_constraints + 
.by_table_identifier(table_identifier) + .last + + expect(constraint.definition).to end_with('NOT VALID') + end + + it 'adds a PostgresAsyncConstraintValidation record' do + expect { prepare }.to change { + Gitlab::Database::AsyncConstraints::PostgresAsyncConstraintValidation.count + }.from(0).to(1) + + record = Gitlab::Database::AsyncConstraints::PostgresAsyncConstraintValidation.last + expect(record.name).to eq described_class::PARTITIONING_CONSTRAINT_NAME + expect(record).to be_check_constraint + end + + context 'when constraint exists but is not valid' do + before do + converter.prepare_for_partitioning(async: true) + end + + it 'validates the check constraint' do + expect { prepare }.to change { + Gitlab::Database::PostgresConstraint + .check_constraints + .by_table_identifier(table_identifier).first.constraint_valid? + }.from(false).to(true) + end + + context 'when it fails to validate constraint' do + before do + allow(migration_context).to receive(:validate_check_constraint) + end + + it 'raises UnableToPartition error' do + expect { prepare } + .to raise_error(described_class::UnableToPartition, + starting_with('Error validating partitioning constraint')) + .and change { + Gitlab::Database::PostgresConstraint + .check_constraints + .by_table_identifier(table_identifier) + .count + }.by(0) + end + end + end + + context 'when constraint exists and is valid' do + before do + converter.prepare_for_partitioning(async: false) + end + + it 'raises UnableToPartition error' do + expect(Gitlab::AppLogger).to receive(:info).with(starting_with('Nothing to do')) + prepare + end + end + end end - describe '#revert_prepare_for_partitioning' do + describe '#revert_preparation_for_partitioning' do before do converter.prepare_for_partitioning end @@ -102,11 +199,13 @@ RSpec.describe Gitlab::Database::Partitioning::ConvertTableToFirstListPartition end end - describe "#convert_to_zero_partition" do + describe "#partition" do subject(:partition) { converter.partition } + 
let(:async) { false } + before do - converter.prepare_for_partitioning + converter.prepare_for_partitioning(async: async) end context 'when the primary key is incorrect' do @@ -130,7 +229,15 @@ RSpec.describe Gitlab::Database::Partitioning::ConvertTableToFirstListPartition end it 'throws a reasonable error message' do - expect { partition }.to raise_error(described_class::UnableToPartition, /constraint /) + expect { partition }.to raise_error(described_class::UnableToPartition, /is not ready for partitioning./) + end + end + + context 'when supporting check constraint is not valid' do + let(:async) { true } + + it 'throws a reasonable error message' do + expect { partition }.to raise_error(described_class::UnableToPartition, /is not ready for partitioning./) end end @@ -203,7 +310,7 @@ RSpec.describe Gitlab::Database::Partitioning::ConvertTableToFirstListPartition proc do allow(migration_context.connection).to receive(:add_foreign_key).and_call_original expect(migration_context.connection).to receive(:add_foreign_key).with(from_table, to_table, any_args) - .and_wrap_original(&fail_first_time) + .and_wrap_original(&fail_first_time) end end @@ -231,9 +338,24 @@ RSpec.describe Gitlab::Database::Partitioning::ConvertTableToFirstListPartition end end end + + context 'when table has LFK triggers' do + before do + migration_context.track_record_deletions(table_name) + end + + it 'moves the trigger on the parent table', :aggregate_failures do + expect(migration_context.has_loose_foreign_key?(table_name)).to be_truthy + + expect { partition }.not_to raise_error + + expect(migration_context.has_loose_foreign_key?(table_name)).to be_truthy + expect(migration_context.has_loose_foreign_key?(parent_table_name)).to be_truthy + end + end end - describe '#revert_conversion_to_zero_partition' do + describe '#revert_partitioning' do before do converter.prepare_for_partitioning converter.partition @@ -269,5 +391,21 @@ RSpec.describe 
Gitlab::Database::Partitioning::ConvertTableToFirstListPartition expect { revert_conversion }.to change { converter.send(:sequences_owned_by, table_name).count }.from(0) .and change { converter.send(:sequences_owned_by, parent_table_name).count }.to(0) end + + context 'when table has LFK triggers' do + before do + migration_context.track_record_deletions(parent_table_name) + migration_context.track_record_deletions(table_name) + end + + it 'restores the trigger on the partition', :aggregate_failures do + expect(migration_context.has_loose_foreign_key?(table_name)).to be_truthy + expect(migration_context.has_loose_foreign_key?(parent_table_name)).to be_truthy + + expect { revert_conversion }.not_to raise_error + + expect(migration_context.has_loose_foreign_key?(table_name)).to be_truthy + end + end end end diff --git a/spec/lib/gitlab/database/partitioning_migration_helpers/backfill_partitioned_table_spec.rb b/spec/lib/gitlab/database/partitioning_migration_helpers/backfill_partitioned_table_spec.rb index 1885e84ac4c..fc279051800 100644 --- a/spec/lib/gitlab/database/partitioning_migration_helpers/backfill_partitioned_table_spec.rb +++ b/spec/lib/gitlab/database/partitioning_migration_helpers/backfill_partitioned_table_spec.rb @@ -54,6 +54,11 @@ RSpec.describe Gitlab::Database::PartitioningMigrationHelpers::BackfillPartition allow(backfill_job).to receive(:sleep) end + after do + connection.drop_table source_table + connection.drop_table destination_table + end + let(:source_model) { Class.new(ActiveRecord::Base) } let(:destination_model) { Class.new(ActiveRecord::Base) } let(:timestamp) { Time.utc(2020, 1, 2).round } @@ -82,7 +87,7 @@ RSpec.describe Gitlab::Database::PartitioningMigrationHelpers::BackfillPartition end it 'breaks the assigned batch into smaller batches' do - expect_next_instance_of(described_class::BulkCopy) do |bulk_copy| + expect_next_instance_of(Gitlab::Database::PartitioningMigrationHelpers::BulkCopy) do |bulk_copy| expect(bulk_copy).to 
receive(:copy_between).with(source1.id, source2.id) expect(bulk_copy).to receive(:copy_between).with(source3.id, source3.id) end diff --git a/spec/lib/gitlab/database/partitioning_migration_helpers/table_management_helpers_spec.rb b/spec/lib/gitlab/database/partitioning_migration_helpers/table_management_helpers_spec.rb index e76b1da3834..d87ef7a0953 100644 --- a/spec/lib/gitlab/database/partitioning_migration_helpers/table_management_helpers_spec.rb +++ b/spec/lib/gitlab/database/partitioning_migration_helpers/table_management_helpers_spec.rb @@ -2,10 +2,11 @@ require 'spec_helper' -RSpec.describe Gitlab::Database::PartitioningMigrationHelpers::TableManagementHelpers do +RSpec.describe Gitlab::Database::PartitioningMigrationHelpers::TableManagementHelpers, feature_category: :database do include Database::PartitioningHelpers include Database::TriggerHelpers include Database::TableSchemaHelpers + include MigrationsHelpers let(:migration) do ActiveRecord::Migration.new.extend(described_class) @@ -98,7 +99,8 @@ RSpec.describe Gitlab::Database::PartitioningMigrationHelpers::TableManagementHe migration.prepare_constraint_for_list_partitioning(table_name: source_table, partitioning_column: partition_column, parent_table_name: partitioned_table, - initial_partitioning_value: min_date) + initial_partitioning_value: min_date, + async: false) end end end @@ -484,17 +486,15 @@ RSpec.describe Gitlab::Database::PartitioningMigrationHelpers::TableManagementHe end context 'when records exist in the source table' do - let(:migration_class) { '::Gitlab::Database::PartitioningMigrationHelpers::BackfillPartitionedTable' } + let(:migration_class) { described_class::MIGRATION } let(:sub_batch_size) { described_class::SUB_BATCH_SIZE } - let(:pause_seconds) { described_class::PAUSE_SECONDS } let!(:first_id) { source_model.create!(name: 'Bob', age: 20).id } let!(:second_id) { source_model.create!(name: 'Alice', age: 30).id } let!(:third_id) { source_model.create!(name: 'Sam', age: 40).id 
} before do stub_const("#{described_class.name}::BATCH_SIZE", 2) - - expect(migration).to receive(:queue_background_migration_jobs_by_range_at_intervals).and_call_original + stub_const("#{described_class.name}::SUB_BATCH_SIZE", 1) end it 'enqueues jobs to copy each batch of data' do @@ -503,13 +503,13 @@ RSpec.describe Gitlab::Database::PartitioningMigrationHelpers::TableManagementHe Sidekiq::Testing.fake! do migration.enqueue_partitioning_data_migration source_table - expect(BackgroundMigrationWorker.jobs.size).to eq(2) - - first_job_arguments = [first_id, second_id, source_table.to_s, partitioned_table, 'id'] - expect(BackgroundMigrationWorker.jobs[0]['args']).to eq([migration_class, first_job_arguments]) - - second_job_arguments = [third_id, third_id, source_table.to_s, partitioned_table, 'id'] - expect(BackgroundMigrationWorker.jobs[1]['args']).to eq([migration_class, second_job_arguments]) + expect(migration_class).to have_scheduled_batched_migration( + table_name: source_table, + column_name: :id, + job_arguments: [partitioned_table], + batch_size: described_class::BATCH_SIZE, + sub_batch_size: described_class::SUB_BATCH_SIZE + ) end end end diff --git a/spec/lib/gitlab/database/partitioning_spec.rb b/spec/lib/gitlab/database/partitioning_spec.rb index 4c0fde46b2f..4aa9d5f6df0 100644 --- a/spec/lib/gitlab/database/partitioning_spec.rb +++ b/spec/lib/gitlab/database/partitioning_spec.rb @@ -2,11 +2,11 @@ require 'spec_helper' -RSpec.describe Gitlab::Database::Partitioning do +RSpec.describe Gitlab::Database::Partitioning, feature_category: :database do include Database::PartitioningHelpers include Database::TableSchemaHelpers - let(:connection) { ApplicationRecord.connection } + let(:main_connection) { ApplicationRecord.connection } around do |example| previously_registered_models = described_class.registered_models.dup @@ -84,7 +84,7 @@ RSpec.describe Gitlab::Database::Partitioning do before do table_names.each do |table_name| - connection.execute(<<~SQL) + 
execute_on_each_database(<<~SQL) CREATE TABLE #{table_name} ( id serial not null, created_at timestamptz not null, @@ -101,32 +101,12 @@ RSpec.describe Gitlab::Database::Partitioning do end context 'with multiple databases' do - before do - table_names.each do |table_name| - ci_connection.execute("DROP TABLE IF EXISTS #{table_name}") - - ci_connection.execute(<<~SQL) - CREATE TABLE #{table_name} ( - id serial not null, - created_at timestamptz not null, - PRIMARY KEY (id, created_at)) - PARTITION BY RANGE (created_at); - SQL - end - end - - after do - table_names.each do |table_name| - ci_connection.execute("DROP TABLE IF EXISTS #{table_name}") - end - end - it 'creates partitions in each database' do - skip_if_multiple_databases_not_setup(:ci) + skip_if_shared_database(:ci) expect { described_class.sync_partitions(models) } - .to change { find_partitions(table_names.first, conn: connection).size }.from(0) - .and change { find_partitions(table_names.last, conn: connection).size }.from(0) + .to change { find_partitions(table_names.first, conn: main_connection).size }.from(0) + .and change { find_partitions(table_names.last, conn: main_connection).size }.from(0) .and change { find_partitions(table_names.first, conn: ci_connection).size }.from(0) .and change { find_partitions(table_names.last, conn: ci_connection).size }.from(0) end @@ -161,10 +141,12 @@ RSpec.describe Gitlab::Database::Partitioning do end before do + skip_if_shared_database(:ci) + (table_names + ['partitioning_test3']).each do |table_name| - ci_connection.execute("DROP TABLE IF EXISTS #{table_name}") + execute_on_each_database("DROP TABLE IF EXISTS #{table_name}") - ci_connection.execute(<<~SQL) + execute_on_each_database(<<~SQL) CREATE TABLE #{table_name} ( id serial not null, created_at timestamptz not null, @@ -181,14 +163,12 @@ RSpec.describe Gitlab::Database::Partitioning do end it 'manages partitions for models for the given database', :aggregate_failures do - 
skip_if_multiple_databases_not_setup(:ci) - expect { described_class.sync_partitions([models.first, ci_model], only_on: 'ci') } .to change { find_partitions(ci_model.table_name, conn: ci_connection).size }.from(0) - expect(find_partitions(models.first.table_name).size).to eq(0) + expect(find_partitions(models.first.table_name, conn: main_connection).size).to eq(0) expect(find_partitions(models.first.table_name, conn: ci_connection).size).to eq(0) - expect(find_partitions(ci_model.table_name).size).to eq(0) + expect(find_partitions(ci_model.table_name, conn: main_connection).size).to eq(0) end end end diff --git a/spec/lib/gitlab/database/postgres_foreign_key_spec.rb b/spec/lib/gitlab/database/postgres_foreign_key_spec.rb index c128c56c708..03343c134ae 100644 --- a/spec/lib/gitlab/database/postgres_foreign_key_spec.rb +++ b/spec/lib/gitlab/database/postgres_foreign_key_spec.rb @@ -203,7 +203,7 @@ RSpec.describe Gitlab::Database::PostgresForeignKey, type: :model, feature_categ end end - context 'when supporting foreign keys to inherited tables' do + context 'when supporting foreign keys on partitioned tables' do before do ApplicationRecord.connection.execute(<<~SQL) create table #{schema_table_name('parent')} ( @@ -246,6 +246,40 @@ RSpec.describe Gitlab::Database::PostgresForeignKey, type: :model, feature_categ end end + context 'with two tables both partitioned' do + before do + ApplicationRecord.connection.execute(<<~SQL) + create table #{table_name('parent')} ( + id bigserial primary key not null + ) partition by hash(id); + + create table #{table_name('child')} + partition of #{table_name('parent')} for values with (remainder 1, modulus 2); + + create table #{table_name('ref_parent')} ( + id bigserial primary key not null + ) partition by hash(id); + + create table #{table_name('ref_child_1')} + partition of #{table_name('ref_parent')} for values with (remainder 1, modulus 3); + + create table #{table_name('ref_child_2')} + partition of 
#{table_name('ref_parent')} for values with (remainder 2, modulus 3); + + alter table #{table_name('parent')} add constraint fk foreign key (id) references #{table_name('ref_parent')} (id); + SQL + end + + describe '#child_foreign_keys' do + it 'is the child foreign keys of the partitioned parent fk' do + fk = described_class.by_constrained_table_name(table_name('parent')).first + children = fk.child_foreign_keys + expect(children.count).to eq(1) + expect(children.first.constrained_table_name).to eq(table_name('child')) + end + end + end + def schema_table_name(name) "public.#{table_name(name)}" end diff --git a/spec/lib/gitlab/database/query_analyzers/gitlab_schemas_validate_connection_spec.rb b/spec/lib/gitlab/database/query_analyzers/gitlab_schemas_validate_connection_spec.rb index d31be6cb883..ed05d1ce169 100644 --- a/spec/lib/gitlab/database/query_analyzers/gitlab_schemas_validate_connection_spec.rb +++ b/spec/lib/gitlab/database/query_analyzers/gitlab_schemas_validate_connection_spec.rb @@ -3,7 +3,7 @@ require 'spec_helper' RSpec.describe Gitlab::Database::QueryAnalyzers::GitlabSchemasValidateConnection, query_analyzers: false, - feature_category: :pods do + feature_category: :cell do let(:analyzer) { described_class } # We keep only the GitlabSchemasValidateConnection analyzer running @@ -28,19 +28,19 @@ RSpec.describe Gitlab::Database::QueryAnalyzers::GitlabSchemasValidateConnection model: ApplicationRecord, sql: "SELECT 1 FROM projects LEFT JOIN ci_builds ON ci_builds.project_id=projects.id", expect_error: /The query tried to access \["projects", "ci_builds"\]/, - setup: -> (_) { skip_if_multiple_databases_not_setup(:ci) } + setup: -> (_) { skip_if_shared_database(:ci) } }, "for query accessing gitlab_ci and gitlab_main the gitlab_schemas is always ordered" => { model: ApplicationRecord, sql: "SELECT 1 FROM ci_builds LEFT JOIN projects ON ci_builds.project_id=projects.id", expect_error: /The query tried to access \["ci_builds", "projects"\]/, - setup: -> 
(_) { skip_if_multiple_databases_not_setup(:ci) } + setup: -> (_) { skip_if_shared_database(:ci) } }, "for query accessing main table from CI database" => { model: Ci::ApplicationRecord, sql: "SELECT 1 FROM projects", expect_error: /The query tried to access \["projects"\]/, - setup: -> (_) { skip_if_multiple_databases_not_setup(:ci) } + setup: -> (_) { skip_if_shared_database(:ci) } }, "for query accessing CI database" => { model: Ci::ApplicationRecord, @@ -51,13 +51,13 @@ RSpec.describe Gitlab::Database::QueryAnalyzers::GitlabSchemasValidateConnection model: ::ApplicationRecord, sql: "SELECT 1 FROM ci_builds", expect_error: /The query tried to access \["ci_builds"\]/, - setup: -> (_) { skip_if_multiple_databases_not_setup(:ci) } + setup: -> (_) { skip_if_shared_database(:ci) } }, "for query accessing unknown gitlab_schema" => { model: ::ApplicationRecord, sql: "SELECT 1 FROM new_table", expect_error: /The query tried to access \["new_table"\] \(of undefined_new_table\)/, - setup: -> (_) { skip_if_multiple_databases_not_setup(:ci) } + setup: -> (_) { skip_if_shared_database(:ci) } } } end @@ -77,7 +77,7 @@ RSpec.describe Gitlab::Database::QueryAnalyzers::GitlabSchemasValidateConnection context "when analyzer is enabled for tests", :query_analyzers do before do - skip_if_multiple_databases_not_setup(:ci) + skip_if_shared_database(:ci) end it "throws an error when trying to access a table that belongs to the gitlab_main schema from the ci database" do diff --git a/spec/lib/gitlab/database/query_analyzers/prevent_cross_database_modification_spec.rb b/spec/lib/gitlab/database/query_analyzers/prevent_cross_database_modification_spec.rb index a4322689bf9..887dd7c9838 100644 --- a/spec/lib/gitlab/database/query_analyzers/prevent_cross_database_modification_spec.rb +++ b/spec/lib/gitlab/database/query_analyzers/prevent_cross_database_modification_spec.rb @@ -3,7 +3,7 @@ require 'spec_helper' RSpec.describe 
Gitlab::Database::QueryAnalyzers::PreventCrossDatabaseModification, query_analyzers: false, - feature_category: :pods do + feature_category: :cell do let_it_be(:pipeline, refind: true) { create(:ci_pipeline) } let_it_be(:project, refind: true) { create(:project) } @@ -118,6 +118,18 @@ RSpec.describe Gitlab::Database::QueryAnalyzers::PreventCrossDatabaseModificatio end end + context 'when ci_pipelines are ignored for cross modification' do + it 'does not raise error' do + Project.transaction do + expect do + described_class.temporary_ignore_tables_in_transaction(%w[ci_pipelines], url: 'TODO') do + run_queries + end + end.not_to raise_error + end + end + end + context 'when data modification happens in nested transactions' do it 'raises error' do Project.transaction(requires_new: true) do diff --git a/spec/lib/gitlab/database/schema_validation/adapters/column_database_adapter_spec.rb b/spec/lib/gitlab/database/schema_validation/adapters/column_database_adapter_spec.rb new file mode 100644 index 00000000000..13c4bc0b054 --- /dev/null +++ b/spec/lib/gitlab/database/schema_validation/adapters/column_database_adapter_spec.rb @@ -0,0 +1,66 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe Gitlab::Database::SchemaValidation::Adapters::ColumnDatabaseAdapter, feature_category: :database do + subject(:adapter) { described_class.new(db_result) } + + let(:column_name) { 'email' } + let(:column_default) { "'no-reply@gitlab.com'::character varying" } + let(:not_null) { true } + let(:db_result) do + { + 'table_name' => 'projects', + 'column_name' => column_name, + 'data_type' => 'character varying', + 'column_default' => column_default, + 'not_null' => not_null + } + end + + describe '#name' do + it { expect(adapter.name).to eq('email') } + end + + describe '#table_name' do + it { expect(adapter.table_name).to eq('projects') } + end + + describe '#data_type' do + it { expect(adapter.data_type).to eq('character varying') } + end + + describe '#default' do 
+ context "when there's no default value in the column" do + let(:column_default) { nil } + + it { expect(adapter.default).to be_nil } + end + + context 'when the column name is id' do + let(:column_name) { 'id' } + + it { expect(adapter.default).to be_nil } + end + + context 'when the column default includes nextval' do + let(:column_default) { "nextval('my_seq'::regclass)" } + + it { expect(adapter.default).to be_nil } + end + + it { expect(adapter.default).to eq("DEFAULT 'no-reply@gitlab.com'::character varying") } + end + + describe '#nullable' do + context 'when column is not null' do + it { expect(adapter.nullable).to eq('NOT NULL') } + end + + context 'when column is nullable' do + let(:not_null) { false } + + it { expect(adapter.nullable).to be_nil } + end + end +end diff --git a/spec/lib/gitlab/database/schema_validation/adapters/column_structure_sql_adapter_spec.rb b/spec/lib/gitlab/database/schema_validation/adapters/column_structure_sql_adapter_spec.rb new file mode 100644 index 00000000000..d7e5c6e896e --- /dev/null +++ b/spec/lib/gitlab/database/schema_validation/adapters/column_structure_sql_adapter_spec.rb @@ -0,0 +1,69 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe Gitlab::Database::SchemaValidation::Adapters::ColumnStructureSqlAdapter, feature_category: :database do + subject(:adapter) { described_class.new(table_name, column_def) } + + let(:table_name) { 'my_table' } + let(:file_path) { Rails.root.join('spec/fixtures/structure.sql') } + let(:table_stmts) { PgQuery.parse(File.read(file_path)).tree.stmts.filter_map { |s| s.stmt.create_stmt } } + let(:column_stmts) { table_stmts.find { |table| table.relation.relname == 'test_table' }.table_elts } + let(:column_def) { column_stmts.find { |col| col.column_def.colname == column_name }.column_def } + + where(:column_name, :data_type, :default_value, :nullable) do + [ + ['id', 'bigint', nil, 'NOT NULL'], + ['integer_column', 'integer', nil, nil], + 
['integer_with_default_column', 'integer', 'DEFAULT 1', nil], + ['smallint_with_default_column', 'smallint', 'DEFAULT 0', 'NOT NULL'], + ['double_precision_with_default_column', 'double precision', 'DEFAULT 1.0', nil], + ['numeric_with_default_column', 'numeric', 'DEFAULT 1.0', 'NOT NULL'], + ['boolean_with_default_colum', 'boolean', 'DEFAULT true', 'NOT NULL'], + ['varying_with_default_column', 'character varying', "DEFAULT 'DEFAULT'::character varying", 'NOT NULL'], + ['varying_with_limit_and_default_column', 'character varying(255)', "DEFAULT 'DEFAULT'::character varying", nil], + ['text_with_default_column', 'text', "DEFAULT ''::text", 'NOT NULL'], + ['array_with_default_column', 'character varying(255)[]', "DEFAULT '{one,two}'::character varying[]", 'NOT NULL'], + ['jsonb_with_default_column', 'jsonb', "DEFAULT '[]'::jsonb", 'NOT NULL'], + ['timestamptz_with_default_column', 'timestamp(6) with time zone', "DEFAULT now()", nil], + ['timestamp_with_default_column', 'timestamp(6) without time zone', + "DEFAULT '2022-01-23 00:00:00+00'::timestamp without time zone", 'NOT NULL'], + ['date_with_default_column', 'date', 'DEFAULT 2023-04-05', nil], + ['inet_with_default_column', 'inet', "DEFAULT '0.0.0.0'::inet", 'NOT NULL'], + ['macaddr_with_default_column', 'macaddr', "DEFAULT '00-00-00-00-00-000'::macaddr", 'NOT NULL'], + ['uuid_with_default_column', 'uuid', "DEFAULT '00000000-0000-0000-0000-000000000000'::uuid", 'NOT NULL'], + ['bytea_with_default_column', 'bytea', "DEFAULT '\\xDEADBEEF'::bytea", nil] + ] + end + + with_them do + describe '#name' do + it { expect(adapter.name).to eq(column_name) } + end + + describe '#table_name' do + it { expect(adapter.table_name).to eq(table_name) } + end + + describe '#data_type' do + it { expect(adapter.data_type).to eq(data_type) } + end + + describe '#nullable' do + it { expect(adapter.nullable).to eq(nullable) } + end + + describe '#default' do + it { expect(adapter.default).to eq(default_value) } + end + end + + context 
'when the data type is not mapped' do + let(:column_name) { 'unmapped_column_type' } + let(:error_class) { Gitlab::Database::SchemaValidation::Adapters::UndefinedPGType } + + describe '#data_type' do + it { expect { adapter.data_type }.to raise_error(error_class) } + end + end +end diff --git a/spec/lib/gitlab/database/schema_validation/database_spec.rb b/spec/lib/gitlab/database/schema_validation/database_spec.rb index eadaf683a29..8fd98382625 100644 --- a/spec/lib/gitlab/database/schema_validation/database_spec.rb +++ b/spec/lib/gitlab/database/schema_validation/database_spec.rb @@ -2,109 +2,90 @@ require 'spec_helper' -RSpec.describe Gitlab::Database::SchemaValidation::Database, feature_category: :database do +RSpec.shared_examples 'database schema assertions for' do |fetch_by_name_method, exists_method, all_objects_method| subject(:database) { described_class.new(connection) } let(:database_model) { Gitlab::Database.database_base_models['main'] } let(:connection) { database_model.connection } - context 'when having indexes' do - let(:schema_object) { Gitlab::Database::SchemaValidation::SchemaObjects::Index } - let(:results) do - [['index', 'CREATE UNIQUE INDEX "index" ON public.achievements USING btree (namespace_id, lower(name))']] - end + before do + allow(connection).to receive(:select_rows).and_return(results) + allow(connection).to receive(:exec_query).and_return(results) + end - before do - allow(connection).to receive(:select_rows).and_return(results) + describe "##{fetch_by_name_method}" do + it 'returns nil when schema object does not exists' do + expect(database.public_send(fetch_by_name_method, 'invalid-object-name')).to be_nil end - describe '#fetch_index_by_name' do - context 'when index does not exist' do - it 'returns nil' do - index = database.fetch_index_by_name('non_existing_index') - - expect(index).to be_nil - end - end - - it 'returns index by name' do - index = database.fetch_index_by_name('index') - - expect(index.name).to eq('index') - 
end + it 'returns the schema object by name' do + expect(database.public_send(fetch_by_name_method, valid_schema_object_name).name).to eq(valid_schema_object_name) end + end - describe '#index_exists?' do - context 'when index exists' do - it 'returns true' do - index_exists = database.index_exists?('index') + describe "##{exists_method}" do + it 'returns true when schema object exists' do + expect(database.public_send(exists_method, valid_schema_object_name)).to be_truthy + end - expect(index_exists).to be_truthy - end - end + it 'returns false when schema object does not exists' do + expect(database.public_send(exists_method, 'invalid-object')).to be_falsey + end + end - context 'when index does not exist' do - it 'returns false' do - index_exists = database.index_exists?('non_existing_index') + describe "##{all_objects_method}" do + it 'returns all the schema objects' do + schema_objects = database.public_send(all_objects_method) - expect(index_exists).to be_falsey - end - end + expect(schema_objects).to all(be_a(schema_object)) + expect(schema_objects.map(&:name)).to eq([valid_schema_object_name]) end + end +end - describe '#indexes' do - it 'returns indexes' do - indexes = database.indexes - - expect(indexes).to all(be_a(schema_object)) - expect(indexes.map(&:name)).to eq(['index']) - end +RSpec.describe Gitlab::Database::SchemaValidation::Database, feature_category: :database do + context 'when having indexes' do + let(:schema_object) { Gitlab::Database::SchemaValidation::SchemaObjects::Index } + let(:valid_schema_object_name) { 'index' } + let(:results) do + [['index', 'CREATE UNIQUE INDEX "index" ON public.achievements USING btree (namespace_id, lower(name))']] end + + include_examples 'database schema assertions for', 'fetch_index_by_name', 'index_exists?', 'indexes' end context 'when having triggers' do let(:schema_object) { Gitlab::Database::SchemaValidation::SchemaObjects::Trigger } + let(:valid_schema_object_name) { 'my_trigger' } let(:results) do - { 
'my_trigger' => 'CREATE TRIGGER my_trigger BEFORE INSERT ON todos FOR EACH ROW EXECUTE FUNCTION trigger()' } + [['my_trigger', 'CREATE TRIGGER my_trigger BEFORE INSERT ON todos FOR EACH ROW EXECUTE FUNCTION trigger()']] end - before do - allow(database).to receive(:fetch_triggers).and_return(results) - end - - describe '#fetch_trigger_by_name' do - context 'when trigger does not exist' do - it 'returns nil' do - expect(database.fetch_trigger_by_name('non_existing_trigger')).to be_nil - end - end - - it 'returns trigger by name' do - expect(database.fetch_trigger_by_name('my_trigger').name).to eq('my_trigger') - end - end + include_examples 'database schema assertions for', 'fetch_trigger_by_name', 'trigger_exists?', 'triggers' + end - describe '#trigger_exists?' do - context 'when trigger exists' do - it 'returns true' do - expect(database.trigger_exists?('my_trigger')).to be_truthy - end - end - - context 'when trigger does not exist' do - it 'returns false' do - expect(database.trigger_exists?('non_existing_trigger')).to be_falsey - end - end + context 'when having tables' do + let(:schema_object) { Gitlab::Database::SchemaValidation::SchemaObjects::Table } + let(:valid_schema_object_name) { 'my_table' } + let(:results) do + [ + { + 'table_name' => 'my_table', + 'column_name' => 'id', + 'not_null' => true, + 'data_type' => 'bigint', + 'column_default' => "nextval('audit_events_id_seq'::regclass)" + }, + { + 'table_name' => 'my_table', + 'column_name' => 'details', + 'not_null' => false, + 'data_type' => 'text', + 'column_default' => nil + } + ] end - describe '#triggers' do - it 'returns triggers' do - triggers = database.triggers - - expect(triggers).to all(be_a(schema_object)) - expect(triggers.map(&:name)).to eq(['my_trigger']) - end - end + include_examples 'database schema assertions for', 'fetch_table_by_name', 'table_exists?', 'tables' end end diff --git a/spec/lib/gitlab/database/schema_validation/inconsistency_spec.rb 
b/spec/lib/gitlab/database/schema_validation/inconsistency_spec.rb new file mode 100644 index 00000000000..cb3df75b3fb --- /dev/null +++ b/spec/lib/gitlab/database/schema_validation/inconsistency_spec.rb @@ -0,0 +1,70 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe Gitlab::Database::SchemaValidation::Inconsistency, feature_category: :database do + let(:validator) { Gitlab::Database::SchemaValidation::Validators::DifferentDefinitionIndexes } + + let(:database_statement) { 'CREATE INDEX index_name ON public.achievements USING btree (namespace_id)' } + let(:structure_sql_statement) { 'CREATE INDEX index_name ON public.achievements USING btree (id)' } + + let(:structure_stmt) { PgQuery.parse(structure_sql_statement).tree.stmts.first.stmt.index_stmt } + let(:database_stmt) { PgQuery.parse(database_statement).tree.stmts.first.stmt.index_stmt } + + let(:structure_sql_object) { Gitlab::Database::SchemaValidation::SchemaObjects::Index.new(structure_stmt) } + let(:database_object) { Gitlab::Database::SchemaValidation::SchemaObjects::Index.new(database_stmt) } + + subject(:inconsistency) { described_class.new(validator, structure_sql_object, database_object) } + + describe '#object_name' do + it 'returns the index name' do + expect(inconsistency.object_name).to eq('index_name') + end + end + + describe '#diff' do + it 'returns a diff between the structure.sql and the database' do + expect(inconsistency.diff).to be_a(Diffy::Diff) + expect(inconsistency.diff.string1).to eq("#{structure_sql_statement}\n") + expect(inconsistency.diff.string2).to eq("#{database_statement}\n") + end + end + + describe '#error_message' do + it 'returns the error message' do + stub_const "#{validator}::ERROR_MESSAGE", 'error message %s' + + expect(inconsistency.error_message).to eq('error message index_name') + end + end + + describe '#type' do + it 'returns the type of the validator' do + expect(inconsistency.type).to eq('different_definition_indexes') + end + end + + 
describe '#table_name' do + it 'returns the table name' do + expect(inconsistency.table_name).to eq('achievements') + end + end + + describe '#inspect' do + let(:expected_output) do + <<~MSG + ------------------------------------------------------ + The index_name index has a different statement between structure.sql and database + Diff: + \e[31m-CREATE INDEX index_name ON public.achievements USING btree (id)\e[0m + \e[32m+CREATE INDEX index_name ON public.achievements USING btree (namespace_id)\e[0m + + ------------------------------------------------------ + MSG + end + + it 'prints the inconsistency message' do + expect(inconsistency.inspect).to eql(expected_output) + end + end +end diff --git a/spec/lib/gitlab/database/schema_validation/runner_spec.rb b/spec/lib/gitlab/database/schema_validation/runner_spec.rb index ddbdedcd8b4..f5d1c6ba31b 100644 --- a/spec/lib/gitlab/database/schema_validation/runner_spec.rb +++ b/spec/lib/gitlab/database/schema_validation/runner_spec.rb @@ -28,7 +28,7 @@ RSpec.describe Gitlab::Database::SchemaValidation::Runner, feature_category: :da subject(:inconsistencies) { described_class.new(structure_sql, database, validators: validators).execute } let(:class_name) { 'Gitlab::Database::SchemaValidation::Validators::ExtraIndexes' } - let(:inconsistency_class_name) { 'Gitlab::Database::SchemaValidation::Validators::BaseValidator::Inconsistency' } + let(:inconsistency_class_name) { 'Gitlab::Database::SchemaValidation::Inconsistency' } let(:extra_indexes) { class_double(class_name) } let(:instace_extra_index) { instance_double(class_name, execute: [inconsistency]) } diff --git a/spec/lib/gitlab/database/schema_validation/schema_inconsistency_spec.rb b/spec/lib/gitlab/database/schema_validation/schema_inconsistency_spec.rb new file mode 100644 index 00000000000..7d6a279def9 --- /dev/null +++ b/spec/lib/gitlab/database/schema_validation/schema_inconsistency_spec.rb @@ -0,0 +1,17 @@ +# frozen_string_literal: true + +require 'spec_helper' + 
+RSpec.describe Gitlab::Database::SchemaValidation::SchemaInconsistency, type: :model, feature_category: :database do + it { is_expected.to be_a ApplicationRecord } + + describe 'associations' do + it { is_expected.to belong_to(:issue) } + end + + describe "Validations" do + it { is_expected.to validate_presence_of(:object_name) } + it { is_expected.to validate_presence_of(:valitador_name) } + it { is_expected.to validate_presence_of(:table_name) } + end +end diff --git a/spec/lib/gitlab/database/schema_validation/schema_objects/column_spec.rb b/spec/lib/gitlab/database/schema_validation/schema_objects/column_spec.rb new file mode 100644 index 00000000000..74bc5f43b50 --- /dev/null +++ b/spec/lib/gitlab/database/schema_validation/schema_objects/column_spec.rb @@ -0,0 +1,25 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe Gitlab::Database::SchemaValidation::SchemaObjects::Column, feature_category: :database do + subject(:column) { described_class.new(adapter) } + + let(:database_adapter) { 'Gitlab::Database::SchemaValidation::Adapters::ColumnDatabaseAdapter' } + let(:adapter) do + instance_double(database_adapter, name: 'id', table_name: 'projects', + data_type: 'bigint', default: nil, nullable: 'NOT NULL') + end + + describe '#name' do + it { expect(column.name).to eq('id') } + end + + describe '#table_name' do + it { expect(column.table_name).to eq('projects') } + end + + describe '#statement' do + it { expect(column.statement).to eq('id bigint NOT NULL') } + end +end diff --git a/spec/lib/gitlab/database/schema_validation/schema_objects/index_spec.rb b/spec/lib/gitlab/database/schema_validation/schema_objects/index_spec.rb index 1aaa994e3bb..43d8fa38ec8 100644 --- a/spec/lib/gitlab/database/schema_validation/schema_objects/index_spec.rb +++ b/spec/lib/gitlab/database/schema_validation/schema_objects/index_spec.rb @@ -5,6 +5,7 @@ require 'spec_helper' RSpec.describe Gitlab::Database::SchemaValidation::SchemaObjects::Index, 
feature_category: :database do let(:statement) { 'CREATE INDEX index_name ON public.achievements USING btree (namespace_id)' } let(:name) { 'index_name' } + let(:table_name) { 'achievements' } include_examples 'schema objects assertions for', 'index_stmt' end diff --git a/spec/lib/gitlab/database/schema_validation/schema_objects/table_spec.rb b/spec/lib/gitlab/database/schema_validation/schema_objects/table_spec.rb new file mode 100644 index 00000000000..6c2efee056b --- /dev/null +++ b/spec/lib/gitlab/database/schema_validation/schema_objects/table_spec.rb @@ -0,0 +1,40 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe Gitlab::Database::SchemaValidation::SchemaObjects::Table, feature_category: :database do + subject(:table) { described_class.new(name, columns) } + + let(:name) { 'my_table' } + let(:column_class) { 'Gitlab::Database::SchemaValidation::SchemaObjects::Column' } + let(:columns) do + [ + instance_double(column_class, name: 'id', statement: 'id bigint NOT NULL'), + instance_double(column_class, name: 'col', statement: 'col text') + ] + end + + describe '#name' do + it { expect(table.name).to eq('my_table') } + end + + describe '#table_name' do + it { expect(table.table_name).to eq('my_table') } + end + + describe '#statement' do + it { expect(table.statement).to eq('CREATE TABLE my_table (id bigint NOT NULL, col text)') } + end + + describe '#fetch_column_by_name' do + it { expect(table.fetch_column_by_name('col')).not_to be_nil } + + it { expect(table.fetch_column_by_name('invalid')).to be_nil } + end + + describe '#column_exists?' 
do + it { expect(table.column_exists?('col')).to eq(true) } + + it { expect(table.column_exists?('invalid')).to eq(false) } + end +end diff --git a/spec/lib/gitlab/database/schema_validation/schema_objects/trigger_spec.rb b/spec/lib/gitlab/database/schema_validation/schema_objects/trigger_spec.rb index 8000a54ee27..3c2481dfae0 100644 --- a/spec/lib/gitlab/database/schema_validation/schema_objects/trigger_spec.rb +++ b/spec/lib/gitlab/database/schema_validation/schema_objects/trigger_spec.rb @@ -5,6 +5,7 @@ require 'spec_helper' RSpec.describe Gitlab::Database::SchemaValidation::SchemaObjects::Trigger, feature_category: :database do let(:statement) { 'CREATE TRIGGER my_trigger BEFORE INSERT ON todos FOR EACH ROW EXECUTE FUNCTION trigger()' } let(:name) { 'my_trigger' } + let(:table_name) { 'todos' } include_examples 'schema objects assertions for', 'create_trig_stmt' end diff --git a/spec/lib/gitlab/database/schema_validation/structure_sql_spec.rb b/spec/lib/gitlab/database/schema_validation/structure_sql_spec.rb index cc0bd4125ef..b0c056ff5db 100644 --- a/spec/lib/gitlab/database/schema_validation/structure_sql_spec.rb +++ b/spec/lib/gitlab/database/schema_validation/structure_sql_spec.rb @@ -2,81 +2,65 @@ require 'spec_helper' -RSpec.describe Gitlab::Database::SchemaValidation::StructureSql, feature_category: :database do - let(:structure_file_path) { Rails.root.join('spec/fixtures/structure.sql') } - let(:schema_name) { 'public' } - +RSpec.shared_examples 'structure sql schema assertions for' do |object_exists_method, all_objects_method| subject(:structure_sql) { described_class.new(structure_file_path, schema_name) } - context 'when having indexes' do - describe '#index_exists?' 
do - subject(:index_exists) { structure_sql.index_exists?(index_name) } + let(:structure_file_path) { Rails.root.join('spec/fixtures/structure.sql') } + let(:schema_name) { 'public' } - context 'when the index does not exist' do - let(:index_name) { 'non-existent-index' } + describe "##{object_exists_method}" do + it 'returns true when schema object exists' do + expect(structure_sql.public_send(object_exists_method, valid_schema_object_name)).to be_truthy + end - it 'returns false' do - expect(index_exists).to be_falsey - end - end + it 'returns false when schema object does not exists' do + expect(structure_sql.public_send(object_exists_method, 'invalid-object-name')).to be_falsey + end + end - context 'when the index exists' do - let(:index_name) { 'index' } + describe "##{all_objects_method}" do + it 'returns all the schema objects' do + schema_objects = structure_sql.public_send(all_objects_method) - it 'returns true' do - expect(index_exists).to be_truthy - end - end + expect(schema_objects).to all(be_a(schema_object)) + expect(schema_objects.map(&:name)).to eq(expected_objects) end + end +end - describe '#indexes' do - it 'returns indexes' do - indexes = structure_sql.indexes +RSpec.describe Gitlab::Database::SchemaValidation::StructureSql, feature_category: :database do + let(:structure_file_path) { Rails.root.join('spec/fixtures/structure.sql') } + let(:schema_name) { 'public' } - expected_indexes = %w[ - missing_index - wrong_index - index - index_namespaces_public_groups_name_id - index_on_deploy_keys_id_and_type_and_public - index_users_on_public_email_excluding_null_and_empty - ] + subject(:structure_sql) { described_class.new(structure_file_path, schema_name) } - expect(indexes).to all(be_a(Gitlab::Database::SchemaValidation::SchemaObjects::Index)) - expect(indexes.map(&:name)).to eq(expected_indexes) - end + context 'when having indexes' do + let(:schema_object) { Gitlab::Database::SchemaValidation::SchemaObjects::Index } + 
let(:valid_schema_object_name) { 'index' } + let(:expected_objects) do + %w[missing_index wrong_index index index_namespaces_public_groups_name_id + index_on_deploy_keys_id_and_type_and_public index_users_on_public_email_excluding_null_and_empty] end + + include_examples 'structure sql schema assertions for', 'index_exists?', 'indexes' end context 'when having triggers' do - describe '#trigger_exists?' do - subject(:trigger_exists) { structure_sql.trigger_exists?(name) } + let(:schema_object) { Gitlab::Database::SchemaValidation::SchemaObjects::Trigger } + let(:valid_schema_object_name) { 'trigger' } + let(:expected_objects) { %w[trigger wrong_trigger missing_trigger_1 projects_loose_fk_trigger] } - context 'when the trigger does not exist' do - let(:name) { 'non-existent-trigger' } - - it 'returns false' do - expect(trigger_exists).to be_falsey - end - end - - context 'when the trigger exists' do - let(:name) { 'trigger' } + include_examples 'structure sql schema assertions for', 'trigger_exists?', 'triggers' + end - it 'returns true' do - expect(trigger_exists).to be_truthy - end - end + context 'when having tables' do + let(:schema_object) { Gitlab::Database::SchemaValidation::SchemaObjects::Table } + let(:valid_schema_object_name) { 'test_table' } + let(:expected_objects) do + %w[test_table ci_project_mirrors wrong_table extra_table_columns missing_table missing_table_columns + operations_user_lists] end - describe '#triggers' do - it 'returns triggers' do - triggers = structure_sql.triggers - expected_triggers = %w[trigger wrong_trigger missing_trigger_1 projects_loose_fk_trigger] - - expect(triggers).to all(be_a(Gitlab::Database::SchemaValidation::SchemaObjects::Trigger)) - expect(triggers.map(&:name)).to eq(expected_triggers) - end - end + include_examples 'structure sql schema assertions for', 'table_exists?', 'tables' end end diff --git a/spec/lib/gitlab/database/schema_validation/track_inconsistency_spec.rb 
b/spec/lib/gitlab/database/schema_validation/track_inconsistency_spec.rb new file mode 100644 index 00000000000..84db721fc2d --- /dev/null +++ b/spec/lib/gitlab/database/schema_validation/track_inconsistency_spec.rb @@ -0,0 +1,82 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe Gitlab::Database::SchemaValidation::TrackInconsistency, feature_category: :database do + describe '#execute' do + let(:validator) { Gitlab::Database::SchemaValidation::Validators::DifferentDefinitionIndexes } + + let(:database_statement) { 'CREATE INDEX index_name ON public.achievements USING btree (namespace_id)' } + let(:structure_sql_statement) { 'CREATE INDEX index_name ON public.achievements USING btree (id)' } + + let(:structure_stmt) { PgQuery.parse(structure_sql_statement).tree.stmts.first.stmt.index_stmt } + let(:database_stmt) { PgQuery.parse(database_statement).tree.stmts.first.stmt.index_stmt } + + let(:structure_sql_object) { Gitlab::Database::SchemaValidation::SchemaObjects::Index.new(structure_stmt) } + let(:database_object) { Gitlab::Database::SchemaValidation::SchemaObjects::Index.new(database_stmt) } + + let(:inconsistency) do + Gitlab::Database::SchemaValidation::Inconsistency.new(validator, structure_sql_object, database_object) + end + + let_it_be(:project) { create(:project) } + let_it_be(:user) { create(:user) } + + subject(:execute) { described_class.new(inconsistency, project, user).execute } + + before do + stub_spam_services + end + + context 'when is not GitLab.com' do + it 'does not create a schema inconsistency record' do + allow(Gitlab).to receive(:com?).and_return(false) + + expect { execute }.not_to change { Gitlab::Database::SchemaValidation::SchemaInconsistency.count } + end + end + + context 'when the issue creation fails' do + let(:issue_creation) { instance_double(Mutations::Issues::Create, resolve: { errors: 'error' }) } + + before do + allow(Mutations::Issues::Create).to receive(:new).and_return(issue_creation) + end + + it 
'does not create a schema inconsistency record' do + allow(Gitlab).to receive(:com?).and_return(true) + + expect { execute }.not_to change { Gitlab::Database::SchemaValidation::SchemaInconsistency.count } + end + end + + context 'when a new inconsistency is found' do + before do + project.add_developer(user) + end + + it 'creates a new schema inconsistency record' do + allow(Gitlab).to receive(:com?).and_return(true) + + expect { execute }.to change { Gitlab::Database::SchemaValidation::SchemaInconsistency.count } + end + end + + context 'when the schema inconsistency already exists' do + before do + project.add_developer(user) + end + + let!(:schema_inconsistency) do + create(:schema_inconsistency, object_name: 'index_name', table_name: 'achievements', + valitador_name: 'different_definition_indexes') + end + + it 'does not create a schema inconsistency record' do + allow(Gitlab).to receive(:com?).and_return(true) + + expect { execute }.not_to change { Gitlab::Database::SchemaValidation::SchemaInconsistency.count } + end + end + end +end diff --git a/spec/lib/gitlab/database/schema_validation/validators/base_validator_spec.rb b/spec/lib/gitlab/database/schema_validation/validators/base_validator_spec.rb index 2f38c25cf68..036ad6424f0 100644 --- a/spec/lib/gitlab/database/schema_validation/validators/base_validator_spec.rb +++ b/spec/lib/gitlab/database/schema_validation/validators/base_validator_spec.rb @@ -8,10 +8,15 @@ RSpec.describe Gitlab::Database::SchemaValidation::Validators::BaseValidator, fe it 'returns an array of all validators' do expect(all_validators).to eq([ + Gitlab::Database::SchemaValidation::Validators::ExtraTables, + Gitlab::Database::SchemaValidation::Validators::ExtraTableColumns, Gitlab::Database::SchemaValidation::Validators::ExtraIndexes, Gitlab::Database::SchemaValidation::Validators::ExtraTriggers, + Gitlab::Database::SchemaValidation::Validators::MissingTables, + Gitlab::Database::SchemaValidation::Validators::MissingTableColumns, 
Gitlab::Database::SchemaValidation::Validators::MissingIndexes, Gitlab::Database::SchemaValidation::Validators::MissingTriggers, + Gitlab::Database::SchemaValidation::Validators::DifferentDefinitionTables, Gitlab::Database::SchemaValidation::Validators::DifferentDefinitionIndexes, Gitlab::Database::SchemaValidation::Validators::DifferentDefinitionTriggers ]) diff --git a/spec/lib/gitlab/database/schema_validation/validators/different_definition_tables_spec.rb b/spec/lib/gitlab/database/schema_validation/validators/different_definition_tables_spec.rb new file mode 100644 index 00000000000..746418b757e --- /dev/null +++ b/spec/lib/gitlab/database/schema_validation/validators/different_definition_tables_spec.rb @@ -0,0 +1,7 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe Gitlab::Database::SchemaValidation::Validators::DifferentDefinitionTables, feature_category: :database do + include_examples 'table validators', described_class, ['wrong_table'] +end diff --git a/spec/lib/gitlab/database/schema_validation/validators/extra_table_columns_spec.rb b/spec/lib/gitlab/database/schema_validation/validators/extra_table_columns_spec.rb new file mode 100644 index 00000000000..9d17a2fffa9 --- /dev/null +++ b/spec/lib/gitlab/database/schema_validation/validators/extra_table_columns_spec.rb @@ -0,0 +1,7 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe Gitlab::Database::SchemaValidation::Validators::ExtraTableColumns, feature_category: :database do + include_examples 'table validators', described_class, ['extra_table_columns'] +end diff --git a/spec/lib/gitlab/database/schema_validation/validators/extra_tables_spec.rb b/spec/lib/gitlab/database/schema_validation/validators/extra_tables_spec.rb new file mode 100644 index 00000000000..edaf79e3c93 --- /dev/null +++ b/spec/lib/gitlab/database/schema_validation/validators/extra_tables_spec.rb @@ -0,0 +1,7 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe 
Gitlab::Database::SchemaValidation::Validators::ExtraTables, feature_category: :database do + include_examples 'table validators', described_class, ['extra_table'] +end diff --git a/spec/lib/gitlab/database/schema_validation/validators/missing_table_columns_spec.rb b/spec/lib/gitlab/database/schema_validation/validators/missing_table_columns_spec.rb new file mode 100644 index 00000000000..de2956b4dd9 --- /dev/null +++ b/spec/lib/gitlab/database/schema_validation/validators/missing_table_columns_spec.rb @@ -0,0 +1,7 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe Gitlab::Database::SchemaValidation::Validators::MissingTableColumns, feature_category: :database do + include_examples 'table validators', described_class, ['missing_table_columns'] +end diff --git a/spec/lib/gitlab/database/schema_validation/validators/missing_tables_spec.rb b/spec/lib/gitlab/database/schema_validation/validators/missing_tables_spec.rb new file mode 100644 index 00000000000..7c80923e860 --- /dev/null +++ b/spec/lib/gitlab/database/schema_validation/validators/missing_tables_spec.rb @@ -0,0 +1,9 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe Gitlab::Database::SchemaValidation::Validators::MissingTables, feature_category: :database do + missing_tables = %w[ci_project_mirrors missing_table operations_user_lists test_table] + + include_examples 'table validators', described_class, missing_tables +end diff --git a/spec/lib/gitlab/database/tables_locker_spec.rb b/spec/lib/gitlab/database/tables_locker_spec.rb index 30f0f9376c8..aaafe27f7ca 100644 --- a/spec/lib/gitlab/database/tables_locker_spec.rb +++ b/spec/lib/gitlab/database/tables_locker_spec.rb @@ -3,9 +3,13 @@ require 'spec_helper' RSpec.describe Gitlab::Database::TablesLocker, :suppress_gitlab_schemas_validate_connection, :silence_stdout, - feature_category: :pods do + feature_category: :cell do let(:default_lock_writes_manager) do - 
instance_double(Gitlab::Database::LockWritesManager, lock_writes: nil, unlock_writes: nil) + instance_double( + Gitlab::Database::LockWritesManager, + lock_writes: { action: 'any action' }, + unlock_writes: { action: 'unlocked' } + ) end before do @@ -81,6 +85,10 @@ RSpec.describe Gitlab::Database::TablesLocker, :suppress_gitlab_schemas_validate subject end + + it 'returns list of actions' do + expect(subject).to include({ action: 'any action' }) + end end shared_examples "unlock tables" do |gitlab_schema, database_name| @@ -110,6 +118,10 @@ RSpec.describe Gitlab::Database::TablesLocker, :suppress_gitlab_schemas_validate subject end + + it 'returns list of actions' do + expect(subject).to include({ action: 'unlocked' }) + end end shared_examples "lock partitions" do |partition_identifier, database_name| @@ -154,7 +166,7 @@ RSpec.describe Gitlab::Database::TablesLocker, :suppress_gitlab_schemas_validate context 'when running on single database' do before do - skip_if_multiple_databases_are_setup(:ci) + skip_if_database_exists(:ci) end describe '#lock_writes' do @@ -191,7 +203,7 @@ RSpec.describe Gitlab::Database::TablesLocker, :suppress_gitlab_schemas_validate context 'when running on multiple databases' do before do - skip_if_multiple_databases_not_setup(:ci) + skip_if_shared_database(:ci) end describe '#lock_writes' do diff --git a/spec/lib/gitlab/database/tables_truncate_spec.rb b/spec/lib/gitlab/database/tables_truncate_spec.rb index 3bb2f4e982c..bcbed0332e2 100644 --- a/spec/lib/gitlab/database/tables_truncate_spec.rb +++ b/spec/lib/gitlab/database/tables_truncate_spec.rb @@ -3,7 +3,7 @@ require 'spec_helper' RSpec.describe Gitlab::Database::TablesTruncate, :reestablished_active_record_base, - :suppress_gitlab_schemas_validate_connection, feature_category: :pods do + :suppress_gitlab_schemas_validate_connection, feature_category: :cell do include MigrationsHelpers let(:min_batch_size) { 1 } @@ -48,7 +48,7 @@ RSpec.describe Gitlab::Database::TablesTruncate, 
:reestablished_active_record_ba end before do - skip_if_multiple_databases_not_setup(:ci) + skip_if_shared_database(:ci) # Creating some test tables on the main database main_tables_sql = <<~SQL @@ -79,8 +79,7 @@ RSpec.describe Gitlab::Database::TablesTruncate, :reestablished_active_record_ba ALTER TABLE _test_gitlab_hook_logs DETACH PARTITION gitlab_partitions_dynamic._test_gitlab_hook_logs_202201; SQL - main_connection.execute(main_tables_sql) - ci_connection.execute(main_tables_sql) + execute_on_each_database(main_tables_sql) ci_tables_sql = <<~SQL CREATE TABLE _test_gitlab_ci_items (id serial NOT NULL PRIMARY KEY); @@ -92,15 +91,13 @@ RSpec.describe Gitlab::Database::TablesTruncate, :reestablished_active_record_ba ); SQL - main_connection.execute(ci_tables_sql) - ci_connection.execute(ci_tables_sql) + execute_on_each_database(ci_tables_sql) internal_tables_sql = <<~SQL CREATE TABLE _test_gitlab_shared_items (id serial NOT NULL PRIMARY KEY); SQL - main_connection.execute(internal_tables_sql) - ci_connection.execute(internal_tables_sql) + execute_on_each_database(internal_tables_sql) # Filling the tables 5.times do |i| @@ -314,8 +311,7 @@ RSpec.describe Gitlab::Database::TablesTruncate, :reestablished_active_record_ba context 'when running with multiple shared databases' do before do skip_if_multiple_databases_not_setup(:ci) - ci_db_config = Ci::ApplicationRecord.connection_db_config - allow(::Gitlab::Database).to receive(:db_config_share_with).with(ci_db_config).and_return('main') + skip_if_database_exists(:ci) end it 'raises an error when truncating the main database that it is a single database setup' do diff --git a/spec/lib/gitlab/database/transaction_timeout_settings_spec.rb b/spec/lib/gitlab/database/transaction_timeout_settings_spec.rb index 5b68f9a3757..2725b22ca9d 100644 --- a/spec/lib/gitlab/database/transaction_timeout_settings_spec.rb +++ b/spec/lib/gitlab/database/transaction_timeout_settings_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' 
-RSpec.describe Gitlab::Database::TransactionTimeoutSettings, feature_category: :pods do +RSpec.describe Gitlab::Database::TransactionTimeoutSettings, feature_category: :cell do let(:connection) { ActiveRecord::Base.connection } subject { described_class.new(connection) } diff --git a/spec/lib/gitlab/database_spec.rb b/spec/lib/gitlab/database_spec.rb index 26d6ff431ec..f2be888e6eb 100644 --- a/spec/lib/gitlab/database_spec.rb +++ b/spec/lib/gitlab/database_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe Gitlab::Database do +RSpec.describe Gitlab::Database, feature_category: :database do before do stub_const('MigrationTest', Class.new { include Gitlab::Database }) end @@ -66,6 +66,48 @@ RSpec.describe Gitlab::Database do end end + describe '.has_database?' do + context 'three tier database config' do + it 'returns true for main' do + expect(described_class.has_database?(:main)).to eq(true) + end + + it 'returns false for shared database' do + skip_if_multiple_databases_not_setup(:ci) + skip_if_database_exists(:ci) + + expect(described_class.has_database?(:ci)).to eq(false) + end + + it 'returns false for non-existent' do + expect(described_class.has_database?(:nonexistent)).to eq(false) + end + end + end + + describe '.database_mode' do + context 'three tier database config' do + it 'returns single-database if ci is not configured' do + skip_if_multiple_databases_are_setup(:ci) + + expect(described_class.database_mode).to eq(::Gitlab::Database::MODE_SINGLE_DATABASE) + end + + it 'returns single-database-ci-connection if ci is shared with main database' do + skip_if_multiple_databases_not_setup(:ci) + skip_if_database_exists(:ci) + + expect(described_class.database_mode).to eq(::Gitlab::Database::MODE_SINGLE_DATABASE_CI_CONNECTION) + end + + it 'returns multiple-database if ci has its own database' do + skip_if_shared_database(:ci) + + expect(described_class.database_mode).to eq(::Gitlab::Database::MODE_MULTIPLE_DATABASES) + end + end + end + describe 
'.check_for_non_superuser' do subject { described_class.check_for_non_superuser } diff --git a/spec/lib/gitlab/diff/highlight_cache_spec.rb b/spec/lib/gitlab/diff/highlight_cache_spec.rb index 33e9360ee01..43e4f28b4df 100644 --- a/spec/lib/gitlab/diff/highlight_cache_spec.rb +++ b/spec/lib/gitlab/diff/highlight_cache_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe Gitlab::Diff::HighlightCache, :clean_gitlab_redis_cache do +RSpec.describe Gitlab::Diff::HighlightCache, :clean_gitlab_redis_cache, feature_category: :source_code_management do let_it_be(:merge_request) { create(:merge_request_with_diffs) } let(:diff_hash) do @@ -282,17 +282,7 @@ RSpec.describe Gitlab::Diff::HighlightCache, :clean_gitlab_redis_cache do end it 'returns cache key' do - is_expected.to eq("highlighted-diff-files:#{cache.diffable.cache_key}:2:#{options_hash([cache.diff_options, true, true])}") - end - - context 'when the `use_marker_ranges` feature flag is disabled' do - before do - stub_feature_flags(use_marker_ranges: false) - end - - it 'returns the original version of the cache' do - is_expected.to eq("highlighted-diff-files:#{cache.diffable.cache_key}:2:#{options_hash([cache.diff_options, false, true])}") - end + is_expected.to eq("highlighted-diff-files:#{cache.diffable.cache_key}:2:#{options_hash([cache.diff_options, true])}") end context 'when the `diff_line_syntax_highlighting` feature flag is disabled' do @@ -301,7 +291,7 @@ RSpec.describe Gitlab::Diff::HighlightCache, :clean_gitlab_redis_cache do end it 'returns the original version of the cache' do - is_expected.to eq("highlighted-diff-files:#{cache.diffable.cache_key}:2:#{options_hash([cache.diff_options, true, false])}") + is_expected.to eq("highlighted-diff-files:#{cache.diffable.cache_key}:2:#{options_hash([cache.diff_options, false])}") end end end diff --git a/spec/lib/gitlab/diff/highlight_spec.rb b/spec/lib/gitlab/diff/highlight_spec.rb index c378ecb8134..233dddbdad7 100644 --- 
a/spec/lib/gitlab/diff/highlight_spec.rb +++ b/spec/lib/gitlab/diff/highlight_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe Gitlab::Diff::Highlight do +RSpec.describe Gitlab::Diff::Highlight, feature_category: :source_code_management do include RepoHelpers let_it_be(:project) { create(:project, :repository) } @@ -15,7 +15,6 @@ RSpec.describe Gitlab::Diff::Highlight do let(:code) { '

    Test

    ' } before do - allow(Gitlab::Diff::InlineDiff).to receive(:for_lines).and_return([]) allow_any_instance_of(Gitlab::Diff::Line).to receive(:text).and_return(code) end @@ -121,18 +120,6 @@ RSpec.describe Gitlab::Diff::Highlight do end end - context 'when `use_marker_ranges` feature flag is disabled' do - it 'returns the same result' do - with_feature_flag = described_class.new(diff_file, repository: project.repository).highlight - - stub_feature_flags(use_marker_ranges: false) - - without_feature_flag = described_class.new(diff_file, repository: project.repository).highlight - - expect(with_feature_flag.map(&:rich_text)).to eq(without_feature_flag.map(&:rich_text)) - end - end - context 'when no inline diffs' do it_behaves_like 'without inline diffs' end diff --git a/spec/lib/gitlab/email/hook/silent_mode_interceptor_spec.rb b/spec/lib/gitlab/email/hook/silent_mode_interceptor_spec.rb new file mode 100644 index 00000000000..cc371643bee --- /dev/null +++ b/spec/lib/gitlab/email/hook/silent_mode_interceptor_spec.rb @@ -0,0 +1,74 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe Gitlab::Email::Hook::SilentModeInterceptor, :mailer, feature_category: :geo_replication do + let_it_be(:user) { create(:user) } + + before do + Mail.register_interceptor(described_class) + end + + after do + Mail.unregister_interceptor(described_class) + end + + context 'when silent mode is enabled' do + it 'prevents mail delivery' do + stub_application_setting(silent_mode_enabled: true) + + deliver_mails(user) + + should_not_email_anyone + end + + it 'logs the suppression' do + stub_application_setting(silent_mode_enabled: true) + + expect(Gitlab::AppJsonLogger).to receive(:info).with( + message: 'SilentModeInterceptor prevented sending mail', + mail_subject: 'Two-factor authentication disabled', + silent_mode_enabled: true + ) + expect(Gitlab::AppJsonLogger).to receive(:info).with( + message: 'SilentModeInterceptor prevented sending mail', + mail_subject: 
'Welcome to GitLab!', + silent_mode_enabled: true + ) + + deliver_mails(user) + end + end + + context 'when silent mode is disabled' do + it 'does not prevent mail delivery' do + stub_application_setting(silent_mode_enabled: false) + + deliver_mails(user) + + should_email(user, times: 2) + end + + it 'debug logs the no-op' do + stub_application_setting(silent_mode_enabled: false) + + expect(Gitlab::AppJsonLogger).to receive(:debug).with( + message: 'SilentModeInterceptor did nothing', + mail_subject: 'Two-factor authentication disabled', + silent_mode_enabled: false + ) + expect(Gitlab::AppJsonLogger).to receive(:debug).with( + message: 'SilentModeInterceptor did nothing', + mail_subject: 'Welcome to GitLab!', + silent_mode_enabled: false + ) + + deliver_mails(user) + end + end + + def deliver_mails(user) + Notify.disabled_two_factor_email(user).deliver_now + DeviseMailer.user_admin_approval(user).deliver_now + end +end diff --git a/spec/lib/gitlab/email/incoming_email_spec.rb b/spec/lib/gitlab/email/incoming_email_spec.rb new file mode 100644 index 00000000000..123b050aee7 --- /dev/null +++ b/spec/lib/gitlab/email/incoming_email_spec.rb @@ -0,0 +1,34 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe Gitlab::Email::IncomingEmail, feature_category: :service_desk do + let(:setting_name) { :incoming_email } + + it_behaves_like 'common email methods' + + describe 'self.key_from_address' do + before do + stub_incoming_email_setting(address: 'replies+%{key}@example.com') + end + + it "returns reply key" do + expect(described_class.key_from_address("replies+key@example.com")).to eq("key") + end + + it 'does not match emails with extra bits' do + expect(described_class.key_from_address('somereplies+somekey@example.com.someotherdomain.com')).to be nil + end + + context 'when a custom wildcard address is used' do + let(:wildcard_address) { 'custom.address+%{key}@example.com' } + + it 'finds key if email matches address pattern' do + key = 
described_class.key_from_address( + 'custom.address+foo@example.com', wildcard_address: wildcard_address + ) + expect(key).to eq('foo') + end + end + end +end diff --git a/spec/lib/gitlab/email/receiver_spec.rb b/spec/lib/gitlab/email/receiver_spec.rb index 865e40d4ecb..e58da2478bf 100644 --- a/spec/lib/gitlab/email/receiver_spec.rb +++ b/spec/lib/gitlab/email/receiver_spec.rb @@ -11,9 +11,10 @@ RSpec.describe Gitlab::Email::Receiver do shared_examples 'successful receive' do let(:handler) { double(:handler, project: project, execute: true, metrics_event: nil, metrics_params: nil) } let(:client_id) { 'email/jake@example.com' } + let(:mail_key) { 'gitlabhq/gitlabhq+auth_token' } it 'correctly finds the mail key' do - expect(Gitlab::Email::Handler).to receive(:for).with(an_instance_of(Mail::Message), 'gitlabhq/gitlabhq+auth_token').and_return(handler) + expect(Gitlab::Email::Handler).to receive(:for).with(an_instance_of(Mail::Message), mail_key).and_return(handler) receiver.execute end @@ -92,6 +93,16 @@ RSpec.describe Gitlab::Email::Receiver do it_behaves_like 'successful receive' end + context 'when mail key is in the references header with a comma' do + let(:email_raw) { fixture_file('emails/valid_reply_with_references_in_comma.eml') } + let(:meta_key) { :references } + let(:meta_value) { ['",,"'] } + + it_behaves_like 'successful receive' do + let(:mail_key) { '59d8df8370b7e95c5a49fbf86aeb2c93' } + end + end + context 'when all other headers are missing' do let(:email_raw) { fixture_file('emails/missing_delivered_to_header.eml') } let(:meta_key) { :received_recipients } diff --git a/spec/lib/gitlab/email/reply_parser_spec.rb b/spec/lib/gitlab/email/reply_parser_spec.rb index e4c68dbba92..35065b74eff 100644 --- a/spec/lib/gitlab/email/reply_parser_spec.rb +++ b/spec/lib/gitlab/email/reply_parser_spec.rb @@ -3,7 +3,7 @@ require "spec_helper" # Inspired in great part by Discourse's Email::Receiver -RSpec.describe Gitlab::Email::ReplyParser do +RSpec.describe 
Gitlab::Email::ReplyParser, feature_category: :team_planning do describe '#execute' do def test_parse_body(mail_string, params = {}) described_class.new(Mail::Message.new(mail_string), **params).execute @@ -188,67 +188,36 @@ RSpec.describe Gitlab::Email::ReplyParser do ) end - context 'properly renders email reply from gmail web client' do - context 'when feature flag is enabled' do - it do - expect(test_parse_body(fixture_file("emails/html_only.eml"))) - .to eq( - <<-BODY.strip_heredoc.chomp - ### This is a reply from standard GMail in Google Chrome. - - The quick brown fox jumps over the lazy dog. The quick brown fox jumps over the lazy dog. The quick brown fox jumps over the lazy dog. The quick brown fox jumps over the lazy dog. The quick brown fox jumps over the lazy dog. The quick brown fox jumps over the lazy dog. The quick brown fox jumps over the lazy dog. The quick brown fox jumps over the lazy dog. - - Here's some **bold** text, **strong** text and *italic* in Markdown. - - Here's a link http://example.com - - Here's an img ![Miro](http://img.png)
    - - One - Some details
    - -
    - - Two - Some details
    - - Test reply. - - First paragraph. - - Second paragraph. - BODY - ) - end - end - - context 'when feature flag is disabled' do - before do - stub_feature_flags(service_desk_html_to_text_email_handler: false) - end + context 'properly renders email reply from gmail web client', feature_category: :service_desk do + it do + expect(test_parse_body(fixture_file("emails/html_only.eml"))) + .to eq( + <<-BODY.strip_heredoc.chomp + ### This is a reply from standard GMail in Google Chrome. - it do - expect(test_parse_body(fixture_file("emails/html_only.eml"))) - .to eq( - <<-BODY.strip_heredoc.chomp - ### This is a reply from standard GMail in Google Chrome. + The quick brown fox jumps over the lazy dog. The quick brown fox jumps over the lazy dog. The quick brown fox jumps over the lazy dog. The quick brown fox jumps over the lazy dog. The quick brown fox jumps over the lazy dog. The quick brown fox jumps over the lazy dog. The quick brown fox jumps over the lazy dog. The quick brown fox jumps over the lazy dog. - The quick brown fox jumps over the lazy dog. The quick brown fox jumps over the lazy dog. The quick brown fox jumps over the lazy dog. The quick brown fox jumps over the lazy dog. The quick brown fox jumps over the lazy dog. The quick brown fox jumps over the lazy dog. The quick brown fox jumps over the lazy dog. The quick brown fox jumps over the lazy dog. + Here's some **bold** text, **strong** text and *italic* in Markdown. - Here's some **bold** text, strong text and italic in Markdown. + Here's a link http://example.com - Here's a link http://example.com + Here's an img ![Miro](http://img.png)
    + + One + Some details
    - Here's an img [Miro]One Some details Two Some details +
    + + Two + Some details
    - Test reply. + Test reply. - First paragraph. + First paragraph. - Second paragraph. - BODY - ) - end + Second paragraph. + BODY + ) end end diff --git a/spec/lib/gitlab/email/service_desk_email_spec.rb b/spec/lib/gitlab/email/service_desk_email_spec.rb new file mode 100644 index 00000000000..d59b8aa2cf7 --- /dev/null +++ b/spec/lib/gitlab/email/service_desk_email_spec.rb @@ -0,0 +1,53 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe Gitlab::Email::ServiceDeskEmail, feature_category: :service_desk do + let(:setting_name) { :service_desk_email } + + it_behaves_like 'common email methods' + + describe '.key_from_address' do + context 'when service desk address is set' do + before do + stub_service_desk_email_setting(address: 'address+%{key}@example.com') + end + + it 'returns key' do + expect(described_class.key_from_address('address+key@example.com')).to eq('key') + end + end + + context 'when service desk address is not set' do + before do + stub_service_desk_email_setting(address: nil) + end + + it 'returns nil' do + expect(described_class.key_from_address('address+key@example.com')).to be_nil + end + end + end + + describe '.address_for_key' do + context 'when service desk address is set' do + before do + stub_service_desk_email_setting(address: 'address+%{key}@example.com') + end + + it 'returns address' do + expect(described_class.address_for_key('foo')).to eq('address+foo@example.com') + end + end + + context 'when service desk address is not set' do + before do + stub_service_desk_email_setting(address: nil) + end + + it 'returns nil' do + expect(described_class.key_from_address('foo')).to be_nil + end + end + end +end diff --git a/spec/lib/gitlab/emoji_spec.rb b/spec/lib/gitlab/emoji_spec.rb index 0db3b5f3b11..44b2ec12246 100644 --- a/spec/lib/gitlab/emoji_spec.rb +++ b/spec/lib/gitlab/emoji_spec.rb @@ -3,23 +3,6 @@ require 'spec_helper' RSpec.describe Gitlab::Emoji do - describe '.emoji_image_tag' do - it 'returns emoji 
image tag' do - emoji_image = described_class.emoji_image_tag('emoji_one', 'src_url') - - expect(emoji_image).to eq("\":emoji_one:\"") - end - - it 'escapes emoji image attrs to prevent XSS' do - xss_payload = "" - escaped_xss_payload = html_escape(xss_payload) - - emoji_image = described_class.emoji_image_tag(xss_payload, 'http://aaa#' + xss_payload) - - expect(emoji_image).to eq("\":#{escaped_xss_payload}:\"") - end - end - describe '.gl_emoji_tag' do it 'returns gl emoji tag if emoji is found' do emoji = TanukiEmoji.find_by_alpha_code('small_airplane') diff --git a/spec/lib/gitlab/error_tracking_spec.rb b/spec/lib/gitlab/error_tracking_spec.rb index 0f056ee9eac..79016335a40 100644 --- a/spec/lib/gitlab/error_tracking_spec.rb +++ b/spec/lib/gitlab/error_tracking_spec.rb @@ -5,7 +5,7 @@ require 'spec_helper' require 'raven/transports/dummy' require 'sentry/transport/dummy_transport' -RSpec.describe Gitlab::ErrorTracking do +RSpec.describe Gitlab::ErrorTracking, feature_category: :shared do let(:exception) { RuntimeError.new('boom') } let(:issue_url) { 'http://gitlab.com/gitlab-org/gitlab-foss/issues/1' } let(:extra) { { issue_url: issue_url, some_other_info: 'info' } } @@ -58,7 +58,7 @@ RSpec.describe Gitlab::ErrorTracking do stub_feature_flags(enable_new_sentry_integration: true) stub_sentry_settings - allow(described_class).to receive(:sentry_configurable?) 
{ true } + allow(described_class).to receive(:sentry_configurable?).and_return(true) allow(Labkit::Correlation::CorrelationId).to receive(:current_id).and_return('cid') allow(I18n).to receive(:locale).and_return('en') @@ -82,7 +82,7 @@ RSpec.describe Gitlab::ErrorTracking do describe '.track_and_raise_for_dev_exception' do context 'when exceptions for dev should be raised' do before do - expect(described_class).to receive(:should_raise_for_dev?).and_return(true) + allow(described_class).to receive(:should_raise_for_dev?).and_return(true) end it 'raises the exception' do @@ -101,7 +101,7 @@ RSpec.describe Gitlab::ErrorTracking do context 'when exceptions for dev should not be raised' do before do - expect(described_class).to receive(:should_raise_for_dev?).and_return(false) + allow(described_class).to receive(:should_raise_for_dev?).and_return(false) end it 'logs the exception with all attributes passed' do @@ -219,7 +219,7 @@ RSpec.describe Gitlab::ErrorTracking do end end - context 'the exception implements :sentry_extra_data' do + context 'when the exception implements :sentry_extra_data' do let(:extra_info) { { event: 'explosion', size: :massive } } before do @@ -239,7 +239,7 @@ RSpec.describe Gitlab::ErrorTracking do end end - context 'the exception implements :sentry_extra_data, which returns nil' do + context 'when the exception implements :sentry_extra_data, which returns nil' do let(:extra) { { issue_url: issue_url } } before do @@ -260,7 +260,7 @@ RSpec.describe Gitlab::ErrorTracking do end end - context 'event processors' do + describe 'event processors' do subject(:track_exception) { described_class.track_exception(exception, extra) } before do @@ -269,7 +269,16 @@ RSpec.describe Gitlab::ErrorTracking do allow(Gitlab::ErrorTracking::Logger).to receive(:error) end - context 'custom GitLab context when using Raven.capture_exception directly' do + # This is a workaround for restoring Raven's user context below. 
+ # Raven.user_context(&block) does not restore the user context correctly. + around do |example| + previous_user_context = Raven.context.user.dup + example.run + ensure + Raven.context.user = previous_user_context + end + + context 'with custom GitLab context when using Raven.capture_exception directly' do subject(:track_exception) { Raven.capture_exception(exception) } it 'merges a default set of tags into the existing tags' do @@ -289,7 +298,7 @@ RSpec.describe Gitlab::ErrorTracking do end end - context 'custom GitLab context when using Sentry.capture_exception directly' do + context 'with custom GitLab context when using Sentry.capture_exception directly' do subject(:track_exception) { Sentry.capture_exception(exception) } it 'merges a default set of tags into the existing tags' do @@ -401,15 +410,17 @@ RSpec.describe Gitlab::ErrorTracking do end ['Gitlab::SidekiqMiddleware::RetryError', 'SubclassRetryError'].each do |ex| - let(:exception) { ex.constantize.new } + context "with #{ex} exception" do + let(:exception) { ex.constantize.new } - it "does not report #{ex} exception to Sentry" do - expect(Gitlab::ErrorTracking::Logger).to receive(:error) + it "does not report exception to Sentry" do + expect(Gitlab::ErrorTracking::Logger).to receive(:error) - track_exception + track_exception - expect(Raven.client.transport.events).to eq([]) - expect(Sentry.get_current_client.transport.events).to eq([]) + expect(Raven.client.transport.events).to eq([]) + expect(Sentry.get_current_client.transport.events).to eq([]) + end end end end @@ -491,7 +502,7 @@ RSpec.describe Gitlab::ErrorTracking do end end - context 'Sentry performance monitoring' do + describe 'Sentry performance monitoring' do context 'when ENABLE_SENTRY_PERFORMANCE_MONITORING env is disabled' do before do stub_env('ENABLE_SENTRY_PERFORMANCE_MONITORING', false) diff --git a/spec/lib/gitlab/favicon_spec.rb b/spec/lib/gitlab/favicon_spec.rb index 884425dab3b..033fa5d1b42 100644 --- 
a/spec/lib/gitlab/favicon_spec.rb +++ b/spec/lib/gitlab/favicon_spec.rb @@ -40,14 +40,22 @@ RSpec.describe Gitlab::Favicon, :request_store do end end - describe '.status_overlay' do - subject { described_class.status_overlay('favicon_status_created') } + describe '.ci_status_overlay' do + subject { described_class.ci_status_overlay('favicon_status_created') } it 'returns the overlay for the status' do expect(subject).to match_asset_path '/assets/ci_favicons/favicon_status_created.png' end end + describe '.mr_status_overlay' do + subject { described_class.mr_status_overlay('favicon_status_merged') } + + it 'returns the overlay for the status' do + expect(subject).to match_asset_path '/assets/mr_favicons/favicon_status_merged.png' + end + end + describe '.available_status_names' do subject { described_class.available_status_names } diff --git a/spec/lib/gitlab/git/blame_mode_spec.rb b/spec/lib/gitlab/git/blame_mode_spec.rb new file mode 100644 index 00000000000..1fc6f12c552 --- /dev/null +++ b/spec/lib/gitlab/git/blame_mode_spec.rb @@ -0,0 +1,84 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe Gitlab::Git::BlameMode, feature_category: :source_code_management do + subject(:blame_mode) { described_class.new(project, params) } + + let_it_be(:project) { build(:project) } + let(:params) { {} } + + describe '#streaming_supported?' do + subject { blame_mode.streaming_supported? } + + it { is_expected.to be_truthy } + + context 'when `blame_page_streaming` is disabled' do + before do + stub_feature_flags(blame_page_streaming: false) + end + + it { is_expected.to be_falsey } + end + end + + describe '#streaming?' do + subject { blame_mode.streaming? 
} + + it { is_expected.to be_falsey } + + context 'when streaming param is provided' do + let(:params) { { streaming: true } } + + it { is_expected.to be_truthy } + + context 'when `blame_page_streaming` is disabled' do + before do + stub_feature_flags(blame_page_streaming: false) + end + + it { is_expected.to be_falsey } + end + end + end + + describe '#pagination?' do + subject { blame_mode.pagination? } + + it { is_expected.to be_truthy } + + context 'when `streaming` params is enabled' do + let(:params) { { streaming: true } } + + it { is_expected.to be_falsey } + end + + context 'when `no_pagination` param is provided' do + let(:params) { { no_pagination: true } } + + it { is_expected.to be_falsey } + end + + context 'when `blame_page_pagination` is disabled' do + before do + stub_feature_flags(blame_page_pagination: false) + end + + it { is_expected.to be_falsey } + end + end + + describe '#full?' do + subject { blame_mode.full? } + + it { is_expected.to be_falsey } + + context 'when `blame_page_pagination` is disabled' do + before do + stub_feature_flags(blame_page_pagination: false) + end + + it { is_expected.to be_truthy } + end + end +end diff --git a/spec/lib/gitlab/git/blame_pagination_spec.rb b/spec/lib/gitlab/git/blame_pagination_spec.rb new file mode 100644 index 00000000000..1f3c0c0342e --- /dev/null +++ b/spec/lib/gitlab/git/blame_pagination_spec.rb @@ -0,0 +1,175 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe Gitlab::Git::BlamePagination, feature_category: :source_code_management do + subject(:blame_pagination) { described_class.new(blob, blame_mode, params) } + + let_it_be(:project) { create(:project, :repository) } + let_it_be(:commit) { project.repository.commit } + let_it_be(:blob) { project.repository.blob_at('HEAD', 'README.md') } + + let(:blame_mode) do + instance_double( + 'Gitlab::Git::BlameMode', + 'streaming?' => streaming_mode, + 'full?' 
=> full_mode + ) + end + + let(:params) { { page: page } } + let(:page) { 1 } + let(:streaming_mode) { false } + let(:full_mode) { false } + + using RSpec::Parameterized::TableSyntax + + describe '#page' do + subject { blame_pagination.page } + + where(:page, :expected_page) do + nil | 1 + 1 | 1 + 5 | 5 + -1 | 1 + 'a' | 1 + end + + with_them do + it { is_expected.to eq(expected_page) } + end + end + + describe '#per_page' do + subject { blame_pagination.per_page } + + it { is_expected.to eq(described_class::PAGINATION_PER_PAGE) } + + context 'when blame mode is streaming' do + let(:streaming_mode) { true } + + it { is_expected.to eq(described_class::STREAMING_PER_PAGE) } + end + end + + describe '#total_pages' do + subject { blame_pagination.total_pages } + + before do + stub_const("#{described_class.name}::PAGINATION_PER_PAGE", 2) + end + + it { is_expected.to eq(2) } + end + + describe '#total_extra_pages' do + subject { blame_pagination.total_extra_pages } + + before do + stub_const("#{described_class.name}::PAGINATION_PER_PAGE", 2) + end + + it { is_expected.to eq(1) } + end + + describe '#pagination' do + subject { blame_pagination.paginator } + + before do + stub_const("#{described_class.name}::PAGINATION_PER_PAGE", 2) + end + + it 'returns a pagination object' do + is_expected.to be_kind_of(Kaminari::PaginatableArray) + + expect(subject.current_page).to eq(1) + expect(subject.total_pages).to eq(2) + expect(subject.total_count).to eq(4) + end + + context 'when user disabled the pagination' do + let(:full_mode) { true } + + it { is_expected.to be_nil } + end + + context 'when user chose streaming' do + let(:streaming_mode) { true } + + it { is_expected.to be_nil } + end + + context 'when per_page is above the global max per page limit' do + before do + stub_const("#{described_class.name}::PAGINATION_PER_PAGE", 1000) + allow(blob).to receive_message_chain(:data, :lines, :count) { 500 } + end + + it 'returns a correct pagination object' do + is_expected.to 
be_kind_of(Kaminari::PaginatableArray) + + expect(subject.current_page).to eq(1) + expect(subject.total_pages).to eq(1) + expect(subject.total_count).to eq(500) + end + end + + describe 'Pagination attributes' do + where(:page, :current_page, :total_pages) do + 1 | 1 | 2 + 2 | 2 | 2 + 0 | 1 | 2 # Incorrect + end + + with_them do + it 'returns the correct pagination attributes' do + expect(subject.current_page).to eq(current_page) + expect(subject.total_pages).to eq(total_pages) + end + end + end + end + + describe '#blame_range' do + subject { blame_pagination.blame_range } + + before do + stub_const("#{described_class.name}::PAGINATION_PER_PAGE", 2) + end + + where(:page, :expected_range) do + 1 | (1..2) + 2 | (3..4) + 0 | (1..2) + end + + with_them do + it { is_expected.to eq(expected_range) } + end + + context 'when user disabled the pagination' do + let(:full_mode) { true } + + it { is_expected.to be_nil } + end + + context 'when streaming is enabled' do + let(:streaming_mode) { true } + + before do + stub_const("#{described_class.name}::STREAMING_FIRST_PAGE_SIZE", 1) + stub_const("#{described_class.name}::STREAMING_PER_PAGE", 1) + end + + where(:page, :expected_range) do + 1 | (1..1) + 2 | (2..2) + 0 | (1..1) + end + + with_them do + it { is_expected.to eq(expected_range) } + end + end + end +end diff --git a/spec/lib/gitlab/git/repository_spec.rb b/spec/lib/gitlab/git/repository_spec.rb index 15bce16bd7f..e78e01ae129 100644 --- a/spec/lib/gitlab/git/repository_spec.rb +++ b/spec/lib/gitlab/git/repository_spec.rb @@ -2452,107 +2452,6 @@ RSpec.describe Gitlab::Git::Repository, feature_category: :source_code_managemen end end - describe '#squash' do - let(:branch_name) { 'fix' } - let(:start_sha) { TestEnv::BRANCH_SHA['master'] } - let(:end_sha) { '12d65c8dd2b2676fa3ac47d955accc085a37a9c1' } - - subject do - opts = { - branch: branch_name, - start_sha: start_sha, - end_sha: end_sha, - author: user, - message: 'Squash commit message' - } - - 
repository.squash(user, opts) - end - - # Should be ported to gitaly-ruby rspec suite https://gitlab.com/gitlab-org/gitaly/issues/1234 - skip 'sparse checkout' do - let(:expected_files) { %w(files files/js files/js/application.js) } - - it 'checks out only the files in the diff' do - allow(repository).to receive(:with_worktree).and_wrap_original do |m, *args| - m.call(*args) do - worktree_path = args[0] - files_pattern = File.join(worktree_path, '**', '*') - expected = expected_files.map do |path| - File.expand_path(path, worktree_path) - end - - expect(Dir[files_pattern]).to eq(expected) - end - end - - subject - end - - context 'when the diff contains a rename' do - let(:end_sha) do - repository.commit_files( - user, - branch_name: repository.root_ref, - message: 'Move CHANGELOG to encoding/', - actions: [{ - action: :move, - previous_path: 'CHANGELOG', - file_path: 'encoding/CHANGELOG', - content: 'CHANGELOG' - }] - ).newrev - end - - after do - # Erase our commits so other tests get the original repo - repository.write_ref(repository.root_ref, TestEnv::BRANCH_SHA['master']) - end - - it 'does not include the renamed file in the sparse checkout' do - allow(repository).to receive(:with_worktree).and_wrap_original do |m, *args| - m.call(*args) do - worktree_path = args[0] - files_pattern = File.join(worktree_path, '**', '*') - - expect(Dir[files_pattern]).not_to include('CHANGELOG') - expect(Dir[files_pattern]).not_to include('encoding/CHANGELOG') - end - end - - subject - end - end - end - - # Should be ported to gitaly-ruby rspec suite https://gitlab.com/gitlab-org/gitaly/issues/1234 - skip 'with an ASCII-8BIT diff' do - let(:diff) { "diff --git a/README.md b/README.md\nindex faaf198..43c5edf 100644\n--- a/README.md\n+++ b/README.md\n@@ -1,4 +1,4 @@\n-testme\n+✓ testme\n ======\n \n Sample repo for testing gitlab features\n" } - - it 'applies a ASCII-8BIT diff' do - allow(repository).to receive(:run_git!).and_call_original - allow(repository).to 
receive(:run_git!).with(%W(diff --binary #{start_sha}...#{end_sha})).and_return(diff.force_encoding('ASCII-8BIT')) - - expect(subject).to match(/\h{40}/) - end - end - - # Should be ported to gitaly-ruby rspec suite https://gitlab.com/gitlab-org/gitaly/issues/1234 - skip 'with trailing whitespace in an invalid patch' do - let(:diff) { "diff --git a/README.md b/README.md\nindex faaf198..43c5edf 100644\n--- a/README.md\n+++ b/README.md\n@@ -1,4 +1,4 @@\n-testme\n+ \n ====== \n \n Sample repo for testing gitlab features\n" } - - it 'does not include whitespace warnings in the error' do - allow(repository).to receive(:run_git!).and_call_original - allow(repository).to receive(:run_git!).with(%W(diff --binary #{start_sha}...#{end_sha})).and_return(diff.force_encoding('ASCII-8BIT')) - - expect { subject }.to raise_error do |error| - expect(error).to be_a(described_class::GitError) - expect(error.message).not_to include('trailing whitespace') - end - end - end - end - def create_remote_branch(remote_name, branch_name, source_branch_name) source_branch = repository.find_branch(source_branch_name) repository.write_ref("refs/remotes/#{remote_name}/#{branch_name}", source_branch.dereferenced_target.sha) diff --git a/spec/lib/gitlab/git_ref_validator_spec.rb b/spec/lib/gitlab/git_ref_validator_spec.rb index 03dd4e7b89b..1a79817130c 100644 --- a/spec/lib/gitlab/git_ref_validator_spec.rb +++ b/spec/lib/gitlab/git_ref_validator_spec.rb @@ -37,6 +37,11 @@ RSpec.describe Gitlab::GitRefValidator do it { expect(described_class.validate("\xA0\u0000\xB0")).to be false } it { expect(described_class.validate("")).to be false } it { expect(described_class.validate(nil)).to be false } + it { expect(described_class.validate('HEAD')).to be false } + + context 'when skip_head_ref_check is true' do + it { expect(described_class.validate('HEAD', skip_head_ref_check: true)).to be true } + end end describe '.validate_merge_request_branch' do diff --git 
a/spec/lib/gitlab/github_import/bulk_importing_spec.rb b/spec/lib/gitlab/github_import/bulk_importing_spec.rb index 136ddb566aa..28fbd4d883f 100644 --- a/spec/lib/gitlab/github_import/bulk_importing_spec.rb +++ b/spec/lib/gitlab/github_import/bulk_importing_spec.rb @@ -13,6 +13,8 @@ RSpec.describe Gitlab::GithubImport::BulkImporting, feature_category: :importers :object_type end + private + def model Label end @@ -26,85 +28,153 @@ RSpec.describe Gitlab::GithubImport::BulkImporting, feature_category: :importers end describe '#build_database_rows' do - it 'returns an Array containing the rows to insert and validation errors if object invalid' do - object = double(:object, title: 'Foo') - - expect(importer) - .to receive(:build_attributes) - .with(object) - .and_return({ title: 'Foo' }) - - expect(Label) - .to receive(:new) - .with({ title: 'Foo' }) - .and_return(label) - - expect(importer) - .to receive(:already_imported?) - .with(object) - .and_return(false) - - expect(Gitlab::Import::Logger) - .to receive(:info) - .with( - import_type: :github, - project_id: 1, - importer: 'MyImporter', - message: '1 object_types fetched' - ) - - expect(Gitlab::GithubImport::ObjectCounter) - .to receive(:increment) - .with( - project, - :object_type, - :fetched, - value: 1 - ) - - enum = [[object, 1]].to_enum - - rows, errors = importer.build_database_rows(enum) + context 'without validation errors' do + let(:object) { double(:object, title: 'Foo') } + + it 'returns an array containing the rows to insert' do + expect(importer) + .to receive(:build_attributes) + .with(object) + .and_return({ title: 'Foo' }) + + expect(Label) + .to receive(:new) + .with({ title: 'Foo' }) + .and_return(label) + + expect(importer) + .to receive(:already_imported?) 
+ .with(object) + .and_return(false) + + expect(Gitlab::Import::Logger) + .to receive(:info) + .with( + import_type: :github, + project_id: 1, + importer: 'MyImporter', + message: '1 object_types fetched' + ) + + expect(Gitlab::GithubImport::ObjectCounter) + .to receive(:increment) + .with( + project, + :object_type, + :fetched, + value: 1 + ) + + enum = [[object, 1]].to_enum + + rows, errors = importer.build_database_rows(enum) + + expect(rows).to match_array([{ title: 'Foo' }]) + expect(errors).to be_empty + end - expect(rows).to match_array([{ title: 'Foo' }]) - expect(errors).to be_empty + it 'does not import objects that have already been imported' do + expect(importer) + .not_to receive(:build_attributes) + + expect(importer) + .to receive(:already_imported?) + .with(object) + .and_return(true) + + expect(Gitlab::Import::Logger) + .to receive(:info) + .with( + import_type: :github, + project_id: 1, + importer: 'MyImporter', + message: '0 object_types fetched' + ) + + expect(Gitlab::GithubImport::ObjectCounter) + .to receive(:increment) + .with( + project, + :object_type, + :fetched, + value: 0 + ) + + enum = [[object, 1]].to_enum + + rows, errors = importer.build_database_rows(enum) + + expect(rows).to be_empty + expect(errors).to be_empty + end end - it 'does not import objects that have already been imported' do - object = double(:object, title: 'Foo') - - expect(importer) - .not_to receive(:build_attributes) + context 'with validation errors' do + let(:object) { double(:object, id: 12345, title: 'bug,bug') } - expect(importer) - .to receive(:already_imported?) - .with(object) - .and_return(true) + before do + allow(importer) + .to receive(:already_imported?) 
+ .with(object) + .and_return(false) - expect(Gitlab::Import::Logger) - .to receive(:info) - .with( - import_type: :github, - project_id: 1, - importer: 'MyImporter', - message: '0 object_types fetched' - ) - - expect(Gitlab::GithubImport::ObjectCounter) - .to receive(:increment) - .with( - project, - :object_type, - :fetched, - value: 0 - ) + allow(importer) + .to receive(:build_attributes) + .with(object) + .and_return({ title: 'bug,bug' }) + end - enum = [[object, 1]].to_enum + context 'without implemented github_identifiers method' do + it 'raises NotImplementedError' do + enum = [[object, 1]].to_enum - rows, errors = importer.build_database_rows(enum) + expect { importer.build_database_rows(enum) }.to raise_error(NotImplementedError) + end + end - expect(rows).to be_empty - expect(errors).to be_empty + context 'with implemented github_identifiers method' do + it 'returns an array containing the validation errors and logs them' do + expect(importer) + .to receive(:github_identifiers) + .with(object) + .and_return( + { + id: object.id, + title: object.title, + object_type: importer.object_type + } + ) + + expect(Gitlab::Import::Logger) + .to receive(:error) + .with( + import_type: :github, + project_id: 1, + importer: 'MyImporter', + message: ['Title is invalid'], + github_identifiers: { id: 12345, title: 'bug,bug', object_type: :object_type } + ) + + expect(Gitlab::GithubImport::ObjectCounter) + .to receive(:increment) + .with( + project, + :object_type, + :fetched, + value: 0 + ) + + enum = [[object, 1]].to_enum + + rows, errors = importer.build_database_rows(enum) + + expect(rows).to be_empty + expect(errors).not_to be_empty + + expect(errors[0][:validation_errors].full_messages).to match_array(['Title is invalid']) + expect(errors[0][:github_identifiers]).to eq({ id: 12345, title: 'bug,bug', object_type: :object_type }) + end + end end end @@ -157,7 +227,8 @@ RSpec.describe Gitlab::GithubImport::BulkImporting, feature_category: :importers exception_message: 
'Title invalid', correlation_id_value: 'cid', retry_count: nil, - created_at: Time.zone.now + created_at: Time.zone.now, + external_identifiers: { id: 123456 } }] end @@ -170,8 +241,23 @@ RSpec.describe Gitlab::GithubImport::BulkImporting, feature_category: :importers expect(import_failures).to receive(:insert_all).with(formatted_errors) expect(Labkit::Correlation::CorrelationId).to receive(:current_or_new_id).and_return('cid') - importer.bulk_insert_failures([error]) + importer.bulk_insert_failures([{ + validation_errors: error, + github_identifiers: { id: 123456 } + }]) end end end + + describe '#object_type' do + let(:importer_class) do + Class.new do + include Gitlab::GithubImport::BulkImporting + end + end + + it 'raises NotImplementedError' do + expect { importer.object_type }.to raise_error(NotImplementedError) + end + end end diff --git a/spec/lib/gitlab/github_import/importer/attachments/issues_importer_spec.rb b/spec/lib/gitlab/github_import/importer/attachments/issues_importer_spec.rb index 85bc67376d3..7890561bf2d 100644 --- a/spec/lib/gitlab/github_import/importer/attachments/issues_importer_spec.rb +++ b/spec/lib/gitlab/github_import/importer/attachments/issues_importer_spec.rb @@ -17,6 +17,8 @@ RSpec.describe Gitlab::GithubImport::Importer::Attachments::IssuesImporter do let(:importer_attrs) { [instance_of(Gitlab::GithubImport::Representation::NoteText), project, client] } it 'imports each project issue attachments' do + expect(project.issues).to receive(:select).with(:id, :description, :iid).and_call_original + expect_next_instances_of( Gitlab::GithubImport::Importer::NoteAttachmentsImporter, 2, false, *importer_attrs ) do |note_attachments_importer| diff --git a/spec/lib/gitlab/github_import/importer/attachments/merge_requests_importer_spec.rb b/spec/lib/gitlab/github_import/importer/attachments/merge_requests_importer_spec.rb index e4718c2d17c..e5aa17dd81e 100644 --- 
a/spec/lib/gitlab/github_import/importer/attachments/merge_requests_importer_spec.rb +++ b/spec/lib/gitlab/github_import/importer/attachments/merge_requests_importer_spec.rb @@ -17,6 +17,8 @@ RSpec.describe Gitlab::GithubImport::Importer::Attachments::MergeRequestsImporte let(:importer_attrs) { [instance_of(Gitlab::GithubImport::Representation::NoteText), project, client] } it 'imports each project merge request attachments' do + expect(project.merge_requests).to receive(:select).with(:id, :description, :iid).and_call_original + expect_next_instances_of( Gitlab::GithubImport::Importer::NoteAttachmentsImporter, 2, false, *importer_attrs ) do |note_attachments_importer| diff --git a/spec/lib/gitlab/github_import/importer/attachments/releases_importer_spec.rb b/spec/lib/gitlab/github_import/importer/attachments/releases_importer_spec.rb index b989345ae09..e1b009c3eeb 100644 --- a/spec/lib/gitlab/github_import/importer/attachments/releases_importer_spec.rb +++ b/spec/lib/gitlab/github_import/importer/attachments/releases_importer_spec.rb @@ -17,6 +17,8 @@ RSpec.describe Gitlab::GithubImport::Importer::Attachments::ReleasesImporter do let(:importer_attrs) { [instance_of(Gitlab::GithubImport::Representation::NoteText), project, client] } it 'imports each project release' do + expect(project.releases).to receive(:select).with(:id, :description, :tag).and_call_original + expect(Gitlab::GithubImport::Importer::NoteAttachmentsImporter).to receive(:new) .with(*importer_attrs).twice.and_return(importer_stub) expect(importer_stub).to receive(:execute).twice diff --git a/spec/lib/gitlab/github_import/importer/labels_importer_spec.rb b/spec/lib/gitlab/github_import/importer/labels_importer_spec.rb index 9e295ab215a..fc8d9cee066 100644 --- a/spec/lib/gitlab/github_import/importer/labels_importer_spec.rb +++ b/spec/lib/gitlab/github_import/importer/labels_importer_spec.rb @@ -56,14 +56,14 @@ feature_category: :importers do project_id: project.id, importer: described_class.name, 
message: ['Title is invalid'], - github_identifier: 1 + github_identifiers: { title: 'bug,bug', object_type: :label } ) rows, errors = importer.build_labels expect(rows).to be_empty expect(errors.length).to eq(1) - expect(errors[0].full_messages).to match_array(['Title is invalid']) + expect(errors[0][:validation_errors].full_messages).to match_array(['Title is invalid']) end end diff --git a/spec/lib/gitlab/github_import/importer/milestones_importer_spec.rb b/spec/lib/gitlab/github_import/importer/milestones_importer_spec.rb index 47b9a41c364..cf44d510c80 100644 --- a/spec/lib/gitlab/github_import/importer/milestones_importer_spec.rb +++ b/spec/lib/gitlab/github_import/importer/milestones_importer_spec.rb @@ -74,7 +74,7 @@ RSpec.describe Gitlab::GithubImport::Importer::MilestonesImporter, :clean_gitlab end it 'does not build milestones that are invalid' do - milestone = { id: 1, title: nil } + milestone = { id: 123456, title: nil, number: 2 } expect(importer) .to receive(:each_milestone) @@ -86,14 +86,14 @@ RSpec.describe Gitlab::GithubImport::Importer::MilestonesImporter, :clean_gitlab project_id: project.id, importer: described_class.name, message: ["Title can't be blank"], - github_identifier: 1 + github_identifiers: { iid: 2, object_type: :milestone, title: nil } ) rows, errors = importer.build_milestones expect(rows).to be_empty expect(errors.length).to eq(1) - expect(errors[0].full_messages).to match_array(["Title can't be blank"]) + expect(errors[0][:validation_errors].full_messages).to match_array(["Title can't be blank"]) end end diff --git a/spec/lib/gitlab/github_import/importer/pull_requests/review_requests_importer_spec.rb b/spec/lib/gitlab/github_import/importer/pull_requests/review_requests_importer_spec.rb index 536983fea06..9e9d6c6e9cd 100644 --- a/spec/lib/gitlab/github_import/importer/pull_requests/review_requests_importer_spec.rb +++ b/spec/lib/gitlab/github_import/importer/pull_requests/review_requests_importer_spec.rb @@ -86,6 +86,7 @@ 
RSpec.describe Gitlab::GithubImport::Importer::PullRequests::ReviewRequestsImpor project.id, { merge_request_id: merge_request_1.id, + merge_request_iid: merge_request_1.iid, users: [ { id: 4, login: 'alice' }, { id: 5, login: 'bob' } @@ -97,6 +98,7 @@ RSpec.describe Gitlab::GithubImport::Importer::PullRequests::ReviewRequestsImpor project.id, { merge_request_id: merge_request_2.id, + merge_request_iid: merge_request_2.iid, users: [ { id: 4, login: 'alice' } ] diff --git a/spec/lib/gitlab/github_import/importer/pull_requests_reviews_importer_spec.rb b/spec/lib/gitlab/github_import/importer/pull_requests_reviews_importer_spec.rb index 5f9c73cbfff..92f7d906f61 100644 --- a/spec/lib/gitlab/github_import/importer/pull_requests_reviews_importer_spec.rb +++ b/spec/lib/gitlab/github_import/importer/pull_requests_reviews_importer_spec.rb @@ -53,6 +53,7 @@ RSpec.describe Gitlab::GithubImport::Importer::PullRequestsReviewsImporter do subject.each_object_to_import {} expect(review[:merge_request_id]).to eq(merge_request.id) + expect(review[:merge_request_iid]).to eq(merge_request.iid) end it 'skips cached pages' do diff --git a/spec/lib/gitlab/github_import/importer/releases_importer_spec.rb b/spec/lib/gitlab/github_import/importer/releases_importer_spec.rb index fe4d3e9d90b..a3d20af22c7 100644 --- a/spec/lib/gitlab/github_import/importer/releases_importer_spec.rb +++ b/spec/lib/gitlab/github_import/importer/releases_importer_spec.rb @@ -18,6 +18,7 @@ RSpec.describe Gitlab::GithubImport::Importer::ReleasesImporter, feature_categor let(:github_release) do { + id: 123456, tag_name: '1.0', name: github_release_name, body: 'This is my release', @@ -144,7 +145,10 @@ RSpec.describe Gitlab::GithubImport::Importer::ReleasesImporter, feature_categor expect(releases).to be_empty expect(errors.length).to eq(1) - expect(errors[0].full_messages).to match_array(['Description is too long (maximum is 1000000 characters)']) + expect(errors[0][:validation_errors].full_messages).to match_array( 
+ ['Description is too long (maximum is 1000000 characters)'] + ) + expect(errors[0][:github_identifiers]).to eq({ tag: '1.0', object_type: :release }) end end diff --git a/spec/lib/gitlab/github_import/representation/collaborator_spec.rb b/spec/lib/gitlab/github_import/representation/collaborator_spec.rb index d5952f9459b..cc52c34ec74 100644 --- a/spec/lib/gitlab/github_import/representation/collaborator_spec.rb +++ b/spec/lib/gitlab/github_import/representation/collaborator_spec.rb @@ -20,6 +20,17 @@ RSpec.describe Gitlab::GithubImport::Representation::Collaborator, feature_categ it 'includes the role' do expect(collaborator.role_name).to eq('maintainer') end + + describe '#github_identifiers' do + it 'returns a hash with needed identifiers' do + expect(collaborator.github_identifiers).to eq( + { + id: 42, + login: 'alice' + } + ) + end + end end end diff --git a/spec/lib/gitlab/github_import/representation/issue_event_spec.rb b/spec/lib/gitlab/github_import/representation/issue_event_spec.rb index 0dd281cb3b0..33f0c6d3c64 100644 --- a/spec/lib/gitlab/github_import/representation/issue_event_spec.rb +++ b/spec/lib/gitlab/github_import/representation/issue_event_spec.rb @@ -156,7 +156,11 @@ RSpec.describe Gitlab::GithubImport::Representation::IssueEvent do describe '#github_identifiers' do it 'returns a hash with needed identifiers' do - expect(issue_event.github_identifiers).to eq({ id: 6501124486 }) + expect(issue_event.github_identifiers).to eq( + id: 6501124486, + iid: 2, + event: 'closed' + ) end end end diff --git a/spec/lib/gitlab/github_import/representation/issue_spec.rb b/spec/lib/gitlab/github_import/representation/issue_spec.rb index 263ef8b1708..39447da0fac 100644 --- a/spec/lib/gitlab/github_import/representation/issue_spec.rb +++ b/spec/lib/gitlab/github_import/representation/issue_spec.rb @@ -192,7 +192,8 @@ RSpec.describe Gitlab::GithubImport::Representation::Issue do it 'returns a hash with needed identifiers' do github_identifiers = { iid: 42, - 
issuable_type: 'MergeRequest' + issuable_type: 'MergeRequest', + title: 'Implement cool feature' } other_attributes = { pull_request: true, something_else: '_something_else_' } issue = described_class.new(github_identifiers.merge(other_attributes)) diff --git a/spec/lib/gitlab/github_import/representation/lfs_object_spec.rb b/spec/lib/gitlab/github_import/representation/lfs_object_spec.rb index 6663a7366a5..799a77afb0c 100644 --- a/spec/lib/gitlab/github_import/representation/lfs_object_spec.rb +++ b/spec/lib/gitlab/github_import/representation/lfs_object_spec.rb @@ -6,7 +6,8 @@ RSpec.describe Gitlab::GithubImport::Representation::LfsObject do describe '#github_identifiers' do it 'returns a hash with needed identifiers' do github_identifiers = { - oid: 42 + oid: 42, + size: 123456 } other_attributes = { something_else: '_something_else_' } lfs_object = described_class.new(github_identifiers.merge(other_attributes)) diff --git a/spec/lib/gitlab/github_import/representation/note_text_spec.rb b/spec/lib/gitlab/github_import/representation/note_text_spec.rb index 8b57c9a0373..7aa458a1c33 100644 --- a/spec/lib/gitlab/github_import/representation/note_text_spec.rb +++ b/spec/lib/gitlab/github_import/representation/note_text_spec.rb @@ -22,35 +22,45 @@ RSpec.describe Gitlab::GithubImport::Representation::NoteText do end describe '.from_db_record' do + let(:representation) { described_class.from_db_record(record) } + context 'with Release' do - let(:record) { build_stubbed(:release, id: 42, description: 'Some text here..') } + let(:record) { build_stubbed(:release, id: 42, description: 'Some text here..', tag: 'v1.0') } + + it_behaves_like 'a Note text data', 'Release' - it_behaves_like 'a Note text data', 'Release' do - let(:representation) { described_class.from_db_record(record) } + it 'includes tag' do + expect(representation.tag).to eq 'v1.0' end end context 'with Issue' do - let(:record) { build_stubbed(:issue, id: 42, description: 'Some text here..') } + 
let(:record) { build_stubbed(:issue, id: 42, iid: 2, description: 'Some text here..') } + + it_behaves_like 'a Note text data', 'Issue' - it_behaves_like 'a Note text data', 'Issue' do - let(:representation) { described_class.from_db_record(record) } + it 'includes noteable iid' do + expect(representation.iid).to eq 2 end end context 'with MergeRequest' do - let(:record) { build_stubbed(:merge_request, id: 42, description: 'Some text here..') } + let(:record) { build_stubbed(:merge_request, id: 42, iid: 2, description: 'Some text here..') } - it_behaves_like 'a Note text data', 'MergeRequest' do - let(:representation) { described_class.from_db_record(record) } + it_behaves_like 'a Note text data', 'MergeRequest' + + it 'includes noteable iid' do + expect(representation.iid).to eq 2 end end context 'with Note' do - let(:record) { build_stubbed(:note, id: 42, note: 'Some text here..') } + let(:record) { build_stubbed(:note, id: 42, note: 'Some text here..', noteable_type: 'Issue') } + + it_behaves_like 'a Note text data', 'Note' - it_behaves_like 'a Note text data', 'Note' do - let(:representation) { described_class.from_db_record(record) } + it 'includes noteable type' do + expect(representation.noteable_type).to eq 'Issue' end end end @@ -61,7 +71,8 @@ RSpec.describe Gitlab::GithubImport::Representation::NoteText do { 'record_db_id' => 42, 'record_type' => 'Release', - 'text' => 'Some text here..' 
+ 'text' => 'Some text here..', + 'tag' => 'v1.0' } end @@ -70,11 +81,76 @@ RSpec.describe Gitlab::GithubImport::Representation::NoteText do end describe '#github_identifiers' do - it 'returns a hash with needed identifiers' do - record_id = rand(100) - representation = described_class.new(record_db_id: record_id, text: 'text') + let(:iid) { nil } + let(:tag) { nil } + let(:noteable_type) { nil } + let(:hash) do + { + 'record_db_id' => 42, + 'record_type' => record_type, + 'text' => 'Some text here..', + 'iid' => iid, + 'tag' => tag, + 'noteable_type' => noteable_type + } + end + + subject { described_class.from_json_hash(hash) } + + context 'with Release' do + let(:record_type) { 'Release' } + let(:tag) { 'v1.0' } + + it 'returns a hash with needed identifiers' do + expect(subject.github_identifiers).to eq( + { + db_id: 42, + tag: 'v1.0' + } + ) + end + end + + context 'with Issue' do + let(:record_type) { 'Issue' } + let(:iid) { 2 } + + it 'returns a hash with needed identifiers' do + expect(subject.github_identifiers).to eq( + { + db_id: 42, + noteable_iid: 2 + } + ) + end + end - expect(representation.github_identifiers).to eq({ db_id: record_id }) + context 'with Merge Request' do + let(:record_type) { 'MergeRequest' } + let(:iid) { 3 } + + it 'returns a hash with needed identifiers' do + expect(subject.github_identifiers).to eq( + { + db_id: 42, + noteable_iid: 3 + } + ) + end + end + + context 'with Note' do + let(:record_type) { 'Note' } + let(:noteable_type) { 'MergeRequest' } + + it 'returns a hash with needed identifiers' do + expect(subject.github_identifiers).to eq( + { + db_id: 42, + noteable_type: 'MergeRequest' + } + ) + end end end end diff --git a/spec/lib/gitlab/github_import/representation/pull_request_review_spec.rb b/spec/lib/gitlab/github_import/representation/pull_request_review_spec.rb index 0203da9f4fb..8925f466e27 100644 --- a/spec/lib/gitlab/github_import/representation/pull_request_review_spec.rb +++ 
b/spec/lib/gitlab/github_import/representation/pull_request_review_spec.rb @@ -77,7 +77,7 @@ RSpec.describe Gitlab::GithubImport::Representation::PullRequestReview do it 'returns a hash with needed identifiers' do github_identifiers = { review_id: 999, - merge_request_id: 42 + merge_request_iid: 1 } other_attributes = { something_else: '_something_else_' } review = described_class.new(github_identifiers.merge(other_attributes)) diff --git a/spec/lib/gitlab/github_import/representation/pull_request_spec.rb b/spec/lib/gitlab/github_import/representation/pull_request_spec.rb index b8c1c67e07c..4b8e7401e9d 100644 --- a/spec/lib/gitlab/github_import/representation/pull_request_spec.rb +++ b/spec/lib/gitlab/github_import/representation/pull_request_spec.rb @@ -287,7 +287,8 @@ RSpec.describe Gitlab::GithubImport::Representation::PullRequest do describe '#github_identifiers' do it 'returns a hash with needed identifiers' do github_identifiers = { - iid: 1 + iid: 1, + title: 'My Pull Request' } other_attributes = { something_else: '_something_else_' } pr = described_class.new(github_identifiers.merge(other_attributes)) diff --git a/spec/lib/gitlab/github_import/representation/pull_requests/review_requests_spec.rb b/spec/lib/gitlab/github_import/representation/pull_requests/review_requests_spec.rb index 0393f692a69..0259fbedee3 100644 --- a/spec/lib/gitlab/github_import/representation/pull_requests/review_requests_spec.rb +++ b/spec/lib/gitlab/github_import/representation/pull_requests/review_requests_spec.rb @@ -46,4 +46,27 @@ RSpec.describe Gitlab::GithubImport::Representation::PullRequests::ReviewRequest let(:review_requests) { described_class.from_json_hash(response) } end end + + describe '#github_identifiers' do + it 'returns a hash with needed identifiers' do + review_requests = { + merge_request_iid: 2, + merge_request_id: merge_request_id, + users: [ + { id: 4, login: 'alice' }, + { id: 5, login: 'bob' } + ] + } + + github_identifiers = { + merge_request_iid: 2, + 
requested_reviewers: %w[alice bob] + } + + other_attributes = { merge_request_id: 123, something_else: '_something_else_' } + review_requests = described_class.new(review_requests.merge(other_attributes)) + + expect(review_requests.github_identifiers).to eq(github_identifiers) + end + end end diff --git a/spec/lib/gitlab/github_import/user_finder_spec.rb b/spec/lib/gitlab/github_import/user_finder_spec.rb index d77aaa0e846..b6e369cb35b 100644 --- a/spec/lib/gitlab/github_import/user_finder_spec.rb +++ b/spec/lib/gitlab/github_import/user_finder_spec.rb @@ -259,6 +259,41 @@ RSpec.describe Gitlab::GithubImport::UserFinder, :clean_gitlab_redis_cache do expect(finder.email_for_github_username('kittens')).to be_nil end + + context 'when a username does not exist on GitHub' do + context 'when github username inexistence is not cached' do + it 'caches github username inexistence' do + expect(client) + .to receive(:user) + .with('kittens') + .and_raise(::Octokit::NotFound) + + expect(Gitlab::Cache::Import::Caching) + .to receive(:write).with( + described_class::INEXISTENCE_OF_GITHUB_USERNAME_CACHE_KEY % 'kittens', true + ) + + expect(finder.email_for_github_username('kittens')).to be_nil + end + end + + context 'when github username inexistence is already cached' do + it 'does not make request to the client' do + expect(Gitlab::Cache::Import::Caching) + .to receive(:read).with(described_class::EMAIL_FOR_USERNAME_CACHE_KEY % 'kittens') + + expect(Gitlab::Cache::Import::Caching) + .to receive(:read).with( + described_class::INEXISTENCE_OF_GITHUB_USERNAME_CACHE_KEY % 'kittens' + ).and_return('true') + + expect(client) + .not_to receive(:user) + + expect(finder.email_for_github_username('kittens')).to be_nil + end + end + end end end diff --git a/spec/lib/gitlab/gl_repository/repo_type_spec.rb b/spec/lib/gitlab/gl_repository/repo_type_spec.rb index 0ec94563cbb..40dcbe16688 100644 --- a/spec/lib/gitlab/gl_repository/repo_type_spec.rb +++ 
b/spec/lib/gitlab/gl_repository/repo_type_spec.rb @@ -136,7 +136,7 @@ RSpec.describe Gitlab::GlRepository::RepoType do let(:expected_identifier) { "design-#{project.id}" } let(:expected_id) { project.id } let(:expected_suffix) { '.design' } - let(:expected_repository) { ::DesignManagement::Repository.new(project) } + let(:expected_repository) { project.design_management_repository } let(:expected_container) { project } end diff --git a/spec/lib/gitlab/graphql/authorize/authorize_resource_spec.rb b/spec/lib/gitlab/graphql/authorize/authorize_resource_spec.rb index ac512e28e7b..1cd93d7b364 100644 --- a/spec/lib/gitlab/graphql/authorize/authorize_resource_spec.rb +++ b/spec/lib/gitlab/graphql/authorize/authorize_resource_spec.rb @@ -76,13 +76,17 @@ RSpec.describe Gitlab::Graphql::Authorize::AuthorizeResource do end end - context 'when the class does not define #find_object' do + describe '#find_object' do let(:fake_class) do Class.new { include Gitlab::Graphql::Authorize::AuthorizeResource } end - it 'raises a comprehensive error message' do - expect { fake_class.new.find_object }.to raise_error(/Implement #find_object in #{fake_class.name}/) + let(:id) { "id" } + let(:return_value) { "return value" } + + it 'calls GitlabSchema.find_by_gid' do + expect(GitlabSchema).to receive(:find_by_gid).with(id).and_return(return_value) + expect(fake_class.new.find_object(id: id)).to be return_value end end diff --git a/spec/lib/gitlab/graphql/deprecations/deprecation_spec.rb b/spec/lib/gitlab/graphql/deprecations/deprecation_spec.rb index 55650b0480e..172872fd7eb 100644 --- a/spec/lib/gitlab/graphql/deprecations/deprecation_spec.rb +++ b/spec/lib/gitlab/graphql/deprecations/deprecation_spec.rb @@ -55,7 +55,7 @@ RSpec.describe ::Gitlab::Graphql::Deprecations::Deprecation, feature_category: : it 'raises an error' do expect { parsed_deprecation }.to raise_error(ArgumentError, - '`alpha` and `deprecated` arguments cannot be passed at the same time' + '`experiment` and `deprecated` 
arguments cannot be passed at the same time' ) end end diff --git a/spec/lib/gitlab/graphql/known_operations_spec.rb b/spec/lib/gitlab/graphql/known_operations_spec.rb index 3ebfefbb43c..c7bc47e1e6a 100644 --- a/spec/lib/gitlab/graphql/known_operations_spec.rb +++ b/spec/lib/gitlab/graphql/known_operations_spec.rb @@ -2,7 +2,6 @@ require 'fast_spec_helper' require 'rspec-parameterized' -require "support/graphql/fake_query_type" RSpec.describe Gitlab::Graphql::KnownOperations do using RSpec::Parameterized::TableSyntax diff --git a/spec/lib/gitlab/graphql/loaders/lazy_relation_loader/registry_spec.rb b/spec/lib/gitlab/graphql/loaders/lazy_relation_loader/registry_spec.rb new file mode 100644 index 00000000000..265839d1236 --- /dev/null +++ b/spec/lib/gitlab/graphql/loaders/lazy_relation_loader/registry_spec.rb @@ -0,0 +1,24 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe Gitlab::Graphql::Loaders::LazyRelationLoader::Registry, feature_category: :vulnerability_management do + describe '#respond_to?' 
do + let(:relation) { Project.all } + let(:registry) { described_class.new(relation) } + + subject { registry.respond_to?(method_name) } + + context 'when the relation responds to given method' do + let(:method_name) { :sorted_by_updated_asc } + + it { is_expected.to be_truthy } + end + + context 'when the relation does not respond to given method' do + let(:method_name) { :this_method_does_not_exist } + + it { is_expected.to be_falsey } + end + end +end diff --git a/spec/lib/gitlab/graphql/loaders/lazy_relation_loader/relation_proxy_spec.rb b/spec/lib/gitlab/graphql/loaders/lazy_relation_loader/relation_proxy_spec.rb new file mode 100644 index 00000000000..f54fb6e77c5 --- /dev/null +++ b/spec/lib/gitlab/graphql/loaders/lazy_relation_loader/relation_proxy_spec.rb @@ -0,0 +1,29 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe Gitlab::Graphql::Loaders::LazyRelationLoader::RelationProxy, feature_category: :vulnerability_management do + describe '#respond_to?' do + let(:object) { double } + let(:registry) { instance_double(Gitlab::Graphql::Loaders::LazyRelationLoader::Registry) } + let(:relation_proxy) { described_class.new(object, registry) } + + subject { relation_proxy.respond_to?(:foo) } + + before do + allow(registry).to receive(:respond_to?).with(:foo, false).and_return(responds_to?) + end + + context 'when the registry responds to given method' do + let(:responds_to?) { true } + + it { is_expected.to be_truthy } + end + + context 'when the registry does not respond to given method' do + let(:responds_to?) 
{ false } + + it { is_expected.to be_falsey } + end + end +end diff --git a/spec/lib/gitlab/graphql/loaders/lazy_relation_loader_spec.rb b/spec/lib/gitlab/graphql/loaders/lazy_relation_loader_spec.rb new file mode 100644 index 00000000000..e56cb68c6cb --- /dev/null +++ b/spec/lib/gitlab/graphql/loaders/lazy_relation_loader_spec.rb @@ -0,0 +1,123 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe Gitlab::Graphql::Loaders::LazyRelationLoader, feature_category: :vulnerability_management do + let(:query_context) { {} } + let(:args) { {} } + + let_it_be(:project) { create(:project) } + + let(:loader) { loader_class.new(query_context, project, **args) } + + describe '#load' do + subject(:load_relation) { loader.load } + + context 'when the association is has many' do + let_it_be(:public_issue) { create(:issue, project: project) } + let_it_be(:confidential_issue) { create(:issue, :confidential, project: project) } + + let(:loader_class) do + Class.new(described_class) do + self.model = Project + self.association = :issues + + def relation(public_only: false) + relation = base_relation + relation = relation.public_only if public_only + + relation + end + end + end + + it { is_expected.to be_an_instance_of(described_class::RelationProxy) } + + describe '#relation' do + subject { load_relation.load } + + context 'without arguments' do + it { is_expected.to contain_exactly(public_issue, confidential_issue) } + end + + context 'with arguments' do + let(:args) { { public_only: true } } + + it { is_expected.to contain_exactly(public_issue) } + end + end + + describe 'using the same context for different records' do + let_it_be(:another_project) { create(:project) } + + let(:loader_for_another_project) { loader_class.new(query_context, another_project, **args) } + let(:records_for_another_project) { loader_for_another_project.load.load } + let(:records_for_project) { load_relation.load } + + before do + loader # register the original loader to query 
context + end + + it 'does not mix associated records' do + expect(records_for_another_project).to be_empty + expect(records_for_project).to contain_exactly(public_issue, confidential_issue) + end + + it 'does not cause N+1 queries' do + expect { records_for_another_project }.not_to exceed_query_limit(1) + end + end + + describe 'using Active Record querying methods' do + subject { load_relation.limit(1).load.count } + + it { is_expected.to be(1) } + end + + describe 'using Active Record finder methods' do + subject { load_relation.last(2) } + + it { is_expected.to contain_exactly(public_issue, confidential_issue) } + end + + describe 'calling a method that returns a non relation object' do + subject { load_relation.limit(1).limit_value } + + it { is_expected.to be(1) } + end + + describe 'calling a prohibited method' do + subject(:count) { load_relation.count } + + it 'raises a `PrematureQueryExecutionTriggered` error' do + expect { count }.to raise_error(described_class::Registry::PrematureQueryExecutionTriggered) + end + end + end + + context 'when the association is has one' do + let!(:project_setting) { create(:project_setting, project: project) } + let(:loader_class) do + Class.new(described_class) do + self.model = Project + self.association = :project_setting + end + end + + it { is_expected.to eq(project_setting) } + end + + context 'when the association is belongs to' do + let(:loader_class) do + Class.new(described_class) do + self.model = Project + self.association = :namespace + end + end + + it 'raises error' do + expect { load_relation }.to raise_error(RuntimeError) + end + end + end +end diff --git a/spec/lib/gitlab/graphql/subscriptions/action_cable_with_load_balancing_spec.rb b/spec/lib/gitlab/graphql/subscriptions/action_cable_with_load_balancing_spec.rb new file mode 100644 index 00000000000..a9bf3f1dca9 --- /dev/null +++ b/spec/lib/gitlab/graphql/subscriptions/action_cable_with_load_balancing_spec.rb @@ -0,0 +1,18 @@ +# frozen_string_literal: 
true + +require 'spec_helper' + +RSpec.describe Gitlab::Graphql::Subscriptions::ActionCableWithLoadBalancing, feature_category: :shared do + let(:session_class) { ::Gitlab::Database::LoadBalancing::Session } + let(:session) { instance_double(session_class) } + let(:event) { instance_double(::GraphQL::Subscriptions::Event) } + + subject(:subscriptions) { described_class.new(schema: GitlabSchema) } + + it 'forces use of DB primary when executing subscription updates' do + expect(session_class).to receive(:current).and_return(session) + expect(session).to receive(:use_primary!) + + subscriptions.execute_update('sub:123', event, {}) + end +end diff --git a/spec/lib/gitlab/graphql/tracers/metrics_tracer_spec.rb b/spec/lib/gitlab/graphql/tracers/metrics_tracer_spec.rb index 168f5aa529e..f0312293469 100644 --- a/spec/lib/gitlab/graphql/tracers/metrics_tracer_spec.rb +++ b/spec/lib/gitlab/graphql/tracers/metrics_tracer_spec.rb @@ -2,7 +2,6 @@ require 'spec_helper' require 'rspec-parameterized' -require "support/graphql/fake_query_type" RSpec.describe Gitlab::Graphql::Tracers::MetricsTracer do using RSpec::Parameterized::TableSyntax diff --git a/spec/lib/gitlab/graphql/tracers/timer_tracer_spec.rb b/spec/lib/gitlab/graphql/tracers/timer_tracer_spec.rb index 986120dcd95..e42883aafd8 100644 --- a/spec/lib/gitlab/graphql/tracers/timer_tracer_spec.rb +++ b/spec/lib/gitlab/graphql/tracers/timer_tracer_spec.rb @@ -1,7 +1,5 @@ # frozen_string_literal: true require "fast_spec_helper" -require "support/graphql/fake_tracer" -require "support/graphql/fake_query_type" RSpec.describe Gitlab::Graphql::Tracers::TimerTracer do let(:expected_duration) { 5 } diff --git a/spec/lib/gitlab/harbor/client_spec.rb b/spec/lib/gitlab/harbor/client_spec.rb index 4e80b8b53e3..745e22191bd 100644 --- a/spec/lib/gitlab/harbor/client_spec.rb +++ b/spec/lib/gitlab/harbor/client_spec.rb @@ -265,18 +265,20 @@ RSpec.describe Gitlab::Harbor::Client do end end - describe '#ping' do + describe 
'#check_project_availability' do before do - stub_request(:get, "https://demo.goharbor.io/api/v2.0/ping") + stub_request(:head, "https://demo.goharbor.io/api/v2.0/projects?project_name=testproject") .with( headers: { + 'Accept': 'application/json', + 'Authorization': 'Basic aGFyYm9ydXNlcm5hbWU6aGFyYm9ycGFzc3dvcmQ=', 'Content-Type': 'application/json' }) - .to_return(status: 200, body: 'pong') + .to_return(status: 200, body: '', headers: {}) end - it "calls api/v2.0/ping successfully" do - expect(client.ping).to eq(success: true) + it "calls api/v2.0/projects successfully" do + expect(client.check_project_availability).to eq(success: true) end end diff --git a/spec/lib/gitlab/http_connection_adapter_spec.rb b/spec/lib/gitlab/http_connection_adapter_spec.rb index dbf0252da46..8b8097f4885 100644 --- a/spec/lib/gitlab/http_connection_adapter_spec.rb +++ b/spec/lib/gitlab/http_connection_adapter_spec.rb @@ -111,20 +111,6 @@ RSpec.describe Gitlab::HTTPConnectionAdapter do end end - context 'when http(s) environment variable is set' do - before do - stub_env('https_proxy' => 'https://my.proxy') - end - - it 'sets up the connection' do - expect(connection).to be_a(Gitlab::NetHttpAdapter) - expect(connection.address).to eq('example.org') - expect(connection.hostname_override).to eq(nil) - expect(connection.addr_port).to eq('example.org') - expect(connection.port).to eq(443) - end - end - context 'when URL scheme is not HTTP/HTTPS' do let(:uri) { URI('ssh://example.org') } diff --git a/spec/lib/gitlab/import/metrics_spec.rb b/spec/lib/gitlab/import/metrics_spec.rb index 1a988af0dbd..9a7eb7b875e 100644 --- a/spec/lib/gitlab/import/metrics_spec.rb +++ b/spec/lib/gitlab/import/metrics_spec.rb @@ -42,11 +42,11 @@ RSpec.describe Gitlab::Import::Metrics, :aggregate_failures do it 'does not emit importer metrics' do expect(subject).not_to receive(:track_usage_event) expect_no_snowplow_event( - category: :test_importer, + category: 'Import::GithubService', action: 'create', label: 
'github_import_project_state', project: project, - extra: { import_type: 'github', state: 'failed' } + import_type: 'github', state: 'failed' ) subject.track_failed_import @@ -65,11 +65,11 @@ RSpec.describe Gitlab::Import::Metrics, :aggregate_failures do subject.track_failed_import expect_snowplow_event( - category: :test_importer, + category: 'Import::GithubService', action: 'create', label: 'github_import_project_state', project: project, - extra: { import_type: 'github', state: 'failed' } + import_type: 'github', state: 'failed' ) end end @@ -102,11 +102,11 @@ RSpec.describe Gitlab::Import::Metrics, :aggregate_failures do subject.track_finished_import expect_snowplow_event( - category: :test_importer, + category: 'Import::GithubService', action: 'create', label: 'github_import_project_state', project: project, - extra: { import_type: 'github', state: 'completed' } + import_type: 'github', state: 'completed' ) expect(subject.duration).not_to be_nil @@ -123,11 +123,11 @@ RSpec.describe Gitlab::Import::Metrics, :aggregate_failures do subject.track_finished_import expect_snowplow_event( - category: :test_importer, + category: 'Import::GithubService', action: 'create', label: 'github_import_project_state', project: project, - extra: { import_type: 'github', state: 'partially completed' } + import_type: 'github', state: 'partially completed' ) end end @@ -140,11 +140,11 @@ RSpec.describe Gitlab::Import::Metrics, :aggregate_failures do subject.track_finished_import expect_no_snowplow_event( - category: :test_importer, + category: 'Import::GithubService', action: 'create', label: 'github_import_project_state', project: project, - extra: { import_type: 'github', state: 'completed' } + import_type: 'github', state: 'completed' ) end end @@ -155,11 +155,11 @@ RSpec.describe Gitlab::Import::Metrics, :aggregate_failures do it 'does not emit importer metrics' do expect(subject).not_to receive(:track_usage_event) expect_no_snowplow_event( - category: :test_importer, + 
category: 'Import::GithubService', action: 'create', label: 'github_import_project_state', project: project, - extra: { import_type: 'github', state: 'canceled' } + import_type: 'github', state: 'canceled' ) subject.track_canceled_import @@ -178,11 +178,11 @@ RSpec.describe Gitlab::Import::Metrics, :aggregate_failures do subject.track_canceled_import expect_snowplow_event( - category: :test_importer, + category: 'Import::GithubService', action: 'create', label: 'github_import_project_state', project: project, - extra: { import_type: 'github', state: 'canceled' } + import_type: 'github', state: 'canceled' ) end end diff --git a/spec/lib/gitlab/import_export/all_models.yml b/spec/lib/gitlab/import_export/all_models.yml index f6d6a791e8c..66b57deb643 100644 --- a/spec/lib/gitlab/import_export/all_models.yml +++ b/spec/lib/gitlab/import_export/all_models.yml @@ -14,6 +14,7 @@ issues: - resource_milestone_events - resource_state_events - resource_iteration_events +- assignment_events - sent_notifications - sentry_issue - issuable_severity @@ -180,6 +181,7 @@ merge_requests: - resource_milestone_events - resource_state_events - resource_iteration_events +- assignment_events - label_links - labels - last_edited_by @@ -216,7 +218,7 @@ merge_requests: - approver_groups - approved_by_users - draft_notes -- merge_train +- merge_train_car - blocks_as_blocker - blocks_as_blockee - blocking_merge_requests @@ -260,6 +262,11 @@ ci_pipelines: - statuses - statuses_order_id_desc - latest_statuses_ordered_by_stage +- latest_statuses +- all_jobs +- current_jobs +- all_processable_jobs +- current_processable_jobs - builds - bridges - processables @@ -308,7 +315,6 @@ ci_pipelines: - latest_builds_report_results - messages - pipeline_artifacts -- latest_statuses - dast_profile - dast_profiles_pipeline - dast_site_profile @@ -396,8 +402,8 @@ builds: - job_artifacts_cluster_image_scanning - job_artifacts_cyclonedx - job_artifacts_requirements_v2 -- runner_machine -- runner_machine_build +- 
runner_manager +- runner_manager_build - runner_session - trace_metadata - terraform_state_versions @@ -493,6 +499,7 @@ container_repositories: - project - name project: +- catalog_resource - external_status_checks - base_tags - project_topics @@ -706,13 +713,14 @@ project: - packages - package_files - rpm_repository_files +- npm_metadata_caches - packages_cleanup_policy - alerting_setting - project_setting - webide_pipelines - reviews - incident_management_setting -- merge_trains +- merge_train_cars - designs - project_aliases - external_pull_requests @@ -724,6 +732,7 @@ project: - downstream_project_subscriptions - service_desk_setting - service_desk_custom_email_verification +- service_desk_custom_email_credential - security_setting - import_failures - container_expiration_policy @@ -779,6 +788,7 @@ project: - sbom_occurrences - analytics_dashboards_configuration_project - analytics_dashboards_pointer +- design_management_repository award_emoji: - awardable - user @@ -865,6 +875,7 @@ incident_management_setting: - project merge_trains: - project +merge_train_cars: - merge_request boards: - group @@ -998,3 +1009,22 @@ resource_iteration_events: iterations_cadence: - group - iterations +catalog_resource: + - project +approval_rules: + - users + - groups + - group_users + - security_orchestration_policy_configuration + - protected_branches + - approval_merge_request_rule_sources + - approval_merge_request_rules + - approval_project_rules_users + - approval_project_rules_protected_branches + - scan_result_policy_read +approval_project_rules_users: + - user + - approval_project_rule +approval_project_rules_protected_branches: + - protected_branch + - approval_project_rule diff --git a/spec/lib/gitlab/import_export/attributes_finder_spec.rb b/spec/lib/gitlab/import_export/attributes_finder_spec.rb index 767b7a3c84e..f12cbe4f82f 100644 --- a/spec/lib/gitlab/import_export/attributes_finder_spec.rb +++ b/spec/lib/gitlab/import_export/attributes_finder_spec.rb @@ -177,7 
+177,8 @@ RSpec.describe Gitlab::ImportExport::AttributesFinder, feature_category: :import end def setup_yaml(hash) - allow(YAML).to receive(:load_file).with(test_config).and_return(hash) + allow(YAML).to receive(:safe_load_file) + .with(test_config, aliases: true, permitted_classes: [Symbol]).and_return(hash) end end end diff --git a/spec/lib/gitlab/import_export/fork_spec.rb b/spec/lib/gitlab/import_export/fork_spec.rb deleted file mode 100644 index 9d766eb3af1..00000000000 --- a/spec/lib/gitlab/import_export/fork_spec.rb +++ /dev/null @@ -1,59 +0,0 @@ -# frozen_string_literal: true - -require 'spec_helper' - -RSpec.describe 'forked project import' do - include ProjectForksHelper - - let(:user) { create(:user) } - let!(:project_with_repo) { create(:project, :repository, name: 'test-repo-restorer', path: 'test-repo-restorer') } - let!(:project) { create(:project, name: 'test-repo-restorer-no-repo', path: 'test-repo-restorer-no-repo') } - let(:export_path) { "#{Dir.tmpdir}/project_tree_saver_spec" } - let(:shared) { project.import_export_shared } - let(:forked_from_project) { create(:project, :repository) } - let(:forked_project) { fork_project(project_with_repo, nil, repository: true) } - let(:repo_saver) { Gitlab::ImportExport::RepoSaver.new(exportable: project_with_repo, shared: shared) } - let(:bundle_path) { File.join(shared.export_path, Gitlab::ImportExport.project_bundle_filename) } - - let(:repo_restorer) do - Gitlab::ImportExport::RepoRestorer.new(path_to_bundle: bundle_path, shared: shared, importable: project) - end - - let!(:merge_request) do - create(:merge_request, source_project: forked_project, target_project: project_with_repo) - end - - let(:saver) do - Gitlab::ImportExport::Project::TreeSaver.new(project: project_with_repo, current_user: user, shared: shared) - end - - let(:restorer) do - Gitlab::ImportExport::Project::TreeRestorer.new(user: user, shared: shared, project: project) - end - - before do - stub_feature_flags(project_export_as_ndjson: 
false) - - allow_next_instance_of(Gitlab::ImportExport) do |instance| - allow(instance).to receive(:storage_path).and_return(export_path) - end - - saver.save # rubocop:disable Rails/SaveBang - repo_saver.save # rubocop:disable Rails/SaveBang - - repo_restorer.restore - restorer.restore - end - - after do - FileUtils.rm_rf(export_path) - project_with_repo.repository.remove - project.repository.remove - end - - it 'can access the MR', :sidekiq_might_not_need_inline do - project.merge_requests.first.fetch_ref! - - expect(project.repository.ref_exists?('refs/merge-requests/1/head')).to be_truthy - end -end diff --git a/spec/lib/gitlab/import_export/group/relation_tree_restorer_spec.rb b/spec/lib/gitlab/import_export/group/relation_tree_restorer_spec.rb index 07971d6271c..495cefa002a 100644 --- a/spec/lib/gitlab/import_export/group/relation_tree_restorer_spec.rb +++ b/spec/lib/gitlab/import_export/group/relation_tree_restorer_spec.rb @@ -14,20 +14,26 @@ RSpec.describe Gitlab::ImportExport::Group::RelationTreeRestorer, feature_catego let(:importable) { create(:group, parent: group) } include_context 'relation tree restorer shared context' do - let(:importable_name) { nil } + let(:importable_name) { 'groups/4353' } end - let(:path) { 'spec/fixtures/lib/gitlab/import_export/group_exports/no_children/group.json' } + let(:path) { Rails.root.join('spec/fixtures/lib/gitlab/import_export/group_exports/no_children/tree') } let(:relation_reader) do - Gitlab::ImportExport::Json::LegacyReader::File.new( - path, - relation_names: reader.group_relation_names) + Gitlab::ImportExport::Json::NdjsonReader.new(path) end let(:reader) do Gitlab::ImportExport::Reader.new( shared: shared, - config: Gitlab::ImportExport::Config.new(config: Gitlab::ImportExport.legacy_group_config_file).to_h + config: Gitlab::ImportExport::Config.new(config: Gitlab::ImportExport.group_config_file).to_h + ) + end + + let(:members_mapper) do + Gitlab::ImportExport::MembersMapper.new( + exported_members: 
relation_reader.consume_relation(importable_name, 'members').map(&:first), + user: user, + importable: importable ) end @@ -41,7 +47,7 @@ RSpec.describe Gitlab::ImportExport::Group::RelationTreeRestorer, feature_catego relation_factory: Gitlab::ImportExport::Group::RelationFactory, reader: reader, importable: importable, - importable_path: nil, + importable_path: importable_name, importable_attributes: attributes ) end @@ -62,20 +68,13 @@ RSpec.describe Gitlab::ImportExport::Group::RelationTreeRestorer, feature_catego end describe 'relation object saving' do - let(:importable) { create(:group) } - let(:relation_reader) do - Gitlab::ImportExport::Json::LegacyReader::File.new( - path, - relation_names: [:labels]) - end - before do allow(shared.logger).to receive(:info).and_call_original allow(relation_reader).to receive(:consume_relation).and_call_original allow(relation_reader) .to receive(:consume_relation) - .with(nil, 'labels') + .with(importable_name, 'labels') .and_return([[label, 0]]) end diff --git a/spec/lib/gitlab/import_export/group/tree_restorer_spec.rb b/spec/lib/gitlab/import_export/group/tree_restorer_spec.rb index aa30e24296e..a6afd0a36ec 100644 --- a/spec/lib/gitlab/import_export/group/tree_restorer_spec.rb +++ b/spec/lib/gitlab/import_export/group/tree_restorer_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe Gitlab::ImportExport::Group::TreeRestorer, feature: :subgroups do +RSpec.describe Gitlab::ImportExport::Group::TreeRestorer, feature: :subgroups, feature_category: :importers do include ImportExport::CommonUtil shared_examples 'group restoration' do @@ -171,7 +171,7 @@ RSpec.describe Gitlab::ImportExport::Group::TreeRestorer, feature: :subgroups do allow(shared).to receive(:export_path).and_return(tmpdir) expect(group_tree_restorer.restore).to eq(false) - expect(shared.errors).to include('Incorrect JSON format') + expect(shared.errors).to include('Invalid file') end end end diff --git 
a/spec/lib/gitlab/import_export/import_export_equivalence_spec.rb b/spec/lib/gitlab/import_export/import_export_equivalence_spec.rb deleted file mode 100644 index 6c997dc1361..00000000000 --- a/spec/lib/gitlab/import_export/import_export_equivalence_spec.rb +++ /dev/null @@ -1,67 +0,0 @@ -# frozen_string_literal: true - -require 'spec_helper' - -# Verifies that given an exported project meta-data tree, when importing this -# tree and then exporting it again, we should obtain the initial tree. -# -# This equivalence only works up to a certain extent, for instance we need -# to ignore: -# -# - row IDs and foreign key IDs -# - some timestamps -# - randomly generated fields like tokens -# -# as these are expected to change between import/export cycles. -RSpec.describe Gitlab::ImportExport, feature_category: :importers do - include ImportExport::CommonUtil - include ConfigurationHelper - include ImportExport::ProjectTreeExpectations - - let(:json_fixture) { 'complex' } - - before do - stub_feature_flags(project_export_as_ndjson: false) - end - - it 'yields the initial tree when importing and exporting it again' do - project = create(:project) - user = create(:user, :admin) - - # We first generate a test fixture dynamically from a seed-fixture, so as to - # account for any fields in the initial fixture that are missing and set to - # defaults during import (ideally we should have realistic test fixtures - # that "honestly" represent exports) - expect( - restore_then_save_project( - project, - user, - import_path: seed_fixture_path, - export_path: test_fixture_path) - ).to be true - # Import, then export again from the generated fixture. Any residual changes - # in the JSON will count towards comparison i.e. test failures. 
- expect( - restore_then_save_project( - project, - user, - import_path: test_fixture_path, - export_path: test_tmp_path) - ).to be true - - imported_json = Gitlab::Json.parse(File.read("#{test_fixture_path}/project.json")) - exported_json = Gitlab::Json.parse(File.read("#{test_tmp_path}/project.json")) - - assert_relations_match(imported_json, exported_json) - end - - private - - def seed_fixture_path - "#{fixtures_path}/#{json_fixture}" - end - - def test_fixture_path - "#{test_tmp_path}/#{json_fixture}" - end -end diff --git a/spec/lib/gitlab/import_export/json/legacy_reader/file_spec.rb b/spec/lib/gitlab/import_export/json/legacy_reader/file_spec.rb deleted file mode 100644 index 793b3ebfb9e..00000000000 --- a/spec/lib/gitlab/import_export/json/legacy_reader/file_spec.rb +++ /dev/null @@ -1,32 +0,0 @@ -# frozen_string_literal: true - -require 'spec_helper' -require_relative 'shared_example' - -RSpec.describe Gitlab::ImportExport::Json::LegacyReader::File do - it_behaves_like 'import/export json legacy reader' do - let(:valid_path) { 'spec/fixtures/lib/gitlab/import_export/light/project.json' } - let(:data) { valid_path } - let(:json_data) { Gitlab::Json.parse(File.read(valid_path)) } - end - - describe '#exist?' do - let(:legacy_reader) do - described_class.new(path, relation_names: []) - end - - subject { legacy_reader.exist? 
} - - context 'given valid path' do - let(:path) { 'spec/fixtures/lib/gitlab/import_export/light/project.json' } - - it { is_expected.to be true } - end - - context 'given invalid path' do - let(:path) { 'spec/non-existing-folder/do-not-create-this-file.json' } - - it { is_expected.to be false } - end - end -end diff --git a/spec/lib/gitlab/import_export/json/legacy_reader/hash_spec.rb b/spec/lib/gitlab/import_export/json/legacy_reader/hash_spec.rb deleted file mode 100644 index 57d66dc0f50..00000000000 --- a/spec/lib/gitlab/import_export/json/legacy_reader/hash_spec.rb +++ /dev/null @@ -1,35 +0,0 @@ -# frozen_string_literal: true - -require 'spec_helper' -require_relative 'shared_example' - -RSpec.describe Gitlab::ImportExport::Json::LegacyReader::Hash do - it_behaves_like 'import/export json legacy reader' do - let(:path) { 'spec/fixtures/lib/gitlab/import_export/light/project.json' } - - # the hash is modified by the `LegacyReader` - # we need to deep-dup it - let(:json_data) { Gitlab::Json.parse(File.read(path)) } - let(:data) { Gitlab::Json.parse(File.read(path)) } - end - - describe '#exist?' do - let(:legacy_reader) do - described_class.new(tree_hash, relation_names: []) - end - - subject { legacy_reader.exist? 
} - - context 'tree_hash is nil' do - let(:tree_hash) { nil } - - it { is_expected.to be_falsey } - end - - context 'tree_hash presents' do - let(:tree_hash) { { "issues": [] } } - - it { is_expected.to be_truthy } - end - end -end diff --git a/spec/lib/gitlab/import_export/json/legacy_reader/shared_example.rb b/spec/lib/gitlab/import_export/json/legacy_reader/shared_example.rb deleted file mode 100644 index 3e9bd3fe741..00000000000 --- a/spec/lib/gitlab/import_export/json/legacy_reader/shared_example.rb +++ /dev/null @@ -1,102 +0,0 @@ -# frozen_string_literal: true - -RSpec.shared_examples 'import/export json legacy reader' do - let(:relation_names) { [] } - - let(:legacy_reader) do - described_class.new( - data, - relation_names: relation_names, - allowed_path: "project") - end - - describe '#consume_attributes' do - context 'when valid path is passed' do - subject { legacy_reader.consume_attributes("project") } - - context 'no excluded attributes' do - let(:relation_names) { [] } - - it 'returns the whole tree from parsed JSON' do - expect(subject).to eq(json_data) - end - end - - context 'some attributes are excluded' do - let(:relation_names) { %w[milestones labels] } - - it 'returns hash without excluded attributes and relations' do - expect(subject).not_to include('milestones', 'labels') - end - end - end - - context 'when invalid path is passed' do - it 'raises an exception' do - expect { legacy_reader.consume_attributes("invalid-path") } - .to raise_error(ArgumentError) - end - end - end - - describe '#consume_relation' do - context 'when valid path is passed' do - let(:key) { 'labels' } - - subject { legacy_reader.consume_relation("project", key) } - - context 'key has not been consumed' do - it 'returns an Enumerator' do - expect(subject).to be_an_instance_of(Enumerator) - end - - context 'value is nil' do - before do - expect(legacy_reader).to receive(:relations).and_return({ key => nil }) - end - - it 'yields nothing to the Enumerator' do - 
expect(subject.to_a).to eq([]) - end - end - - context 'value is an array' do - before do - expect(legacy_reader).to receive(:relations).and_return({ key => %w[label1 label2] }) - end - - it 'yields every relation value to the Enumerator' do - expect(subject.to_a).to eq([['label1', 0], ['label2', 1]]) - end - end - - context 'value is not array' do - before do - expect(legacy_reader).to receive(:relations).and_return({ key => 'non-array value' }) - end - - it 'yields the value with index 0 to the Enumerator' do - expect(subject.to_a).to eq([['non-array value', 0]]) - end - end - end - - context 'key has been consumed' do - before do - legacy_reader.consume_relation("project", key).first - end - - it 'yields nothing to the Enumerator' do - expect(subject.to_a).to eq([]) - end - end - end - - context 'when invalid path is passed' do - it 'raises an exception' do - expect { legacy_reader.consume_relation("invalid") } - .to raise_error(ArgumentError) - end - end - end -end diff --git a/spec/lib/gitlab/import_export/json/legacy_writer_spec.rb b/spec/lib/gitlab/import_export/json/legacy_writer_spec.rb deleted file mode 100644 index 2c0f023ad2c..00000000000 --- a/spec/lib/gitlab/import_export/json/legacy_writer_spec.rb +++ /dev/null @@ -1,102 +0,0 @@ -# frozen_string_literal: true - -require 'fast_spec_helper' -require 'tmpdir' - -RSpec.describe Gitlab::ImportExport::Json::LegacyWriter, feature_category: :importers do - let(:path) { "#{Dir.tmpdir}/legacy_writer_spec/test.json" } - - subject do - described_class.new(path, allowed_path: "project") - end - - after do - FileUtils.rm_rf(path) - end - - describe "#write_attributes" do - it "writes correct json" do - expected_hash = { "key" => "value_1", "key_1" => "value_2" } - subject.write_attributes("project", expected_hash) - - expect(subject_json).to eq(expected_hash) - end - - context 'when invalid path is used' do - it 'raises an exception' do - expect { subject.write_attributes("invalid", { "key" => "value" }) } - .to 
raise_error(ArgumentError) - end - end - end - - describe "#write_relation" do - context "when key is already written" do - it "raises exception" do - subject.write_relation("project", "key", "old value") - - expect { subject.write_relation("project", "key", "new value") } - .to raise_exception("key 'key' already written") - end - end - - context "when key is not already written" do - context "when multiple key value pairs are stored" do - it "writes correct json" do - expected_hash = { "key" => "value_1", "key_1" => "value_2" } - expected_hash.each do |key, value| - subject.write_relation("project", key, value) - end - - expect(subject_json).to eq(expected_hash) - end - end - end - - context 'when invalid path is used' do - it 'raises an exception' do - expect { subject.write_relation("invalid", "key", "value") } - .to raise_error(ArgumentError) - end - end - end - - describe "#write_relation_array" do - context 'when array is used' do - it 'writes correct json' do - subject.write_relation_array("project", "key", ["value"]) - - expect(subject_json).to eq({ "key" => ["value"] }) - end - end - - context 'when enumerable is used' do - it 'writes correct json' do - values = %w(value1 value2) - - enumerator = Enumerator.new do |items| - values.each { |value| items << value } - end - - subject.write_relation_array("project", "key", enumerator) - - expect(subject_json).to eq({ "key" => values }) - end - end - - context "when key is already written" do - it "raises an exception" do - subject.write_relation_array("project", "key", %w(old_value)) - - expect { subject.write_relation_array("project", "key", %w(new_value)) } - .to raise_error(ArgumentError) - end - end - end - - def subject_json - subject.close - - ::JSON.parse(File.read(subject.path)) - end -end diff --git a/spec/lib/gitlab/import_export/json/ndjson_reader_spec.rb b/spec/lib/gitlab/import_export/json/ndjson_reader_spec.rb index 0ca4c4ccc87..98afe01c08b 100644 --- 
a/spec/lib/gitlab/import_export/json/ndjson_reader_spec.rb +++ b/spec/lib/gitlab/import_export/json/ndjson_reader_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe Gitlab::ImportExport::Json::NdjsonReader do +RSpec.describe Gitlab::ImportExport::Json::NdjsonReader, feature_category: :importers do include ImportExport::CommonUtil let(:fixture) { 'spec/fixtures/lib/gitlab/import_export/light/tree' } @@ -26,14 +26,6 @@ RSpec.describe Gitlab::ImportExport::Json::NdjsonReader do end end - describe '#legacy?' do - let(:dir_path) { fixture } - - subject { ndjson_reader.legacy? } - - it { is_expected.to be false } - end - describe '#consume_attributes' do let(:dir_path) { fixture } @@ -42,6 +34,20 @@ RSpec.describe Gitlab::ImportExport::Json::NdjsonReader do it 'returns the whole root tree from parsed JSON' do expect(subject).to eq(root_tree) end + + context 'when project.json is symlink' do + it 'raises error an error' do + Dir.mktmpdir do |tmpdir| + FileUtils.touch(File.join(tmpdir, 'passwd')) + File.symlink(File.join(tmpdir, 'passwd'), File.join(tmpdir, 'project.json')) + + ndjson_reader = described_class.new(tmpdir) + + expect { ndjson_reader.consume_attributes(importable_path) } + .to raise_error(Gitlab::ImportExport::Error, 'Invalid file') + end + end + end end describe '#consume_relation' do @@ -91,6 +97,22 @@ RSpec.describe Gitlab::ImportExport::Json::NdjsonReader do end end + context 'when relation file is a symlink' do + it 'yields nothing to the Enumerator' do + Dir.mktmpdir do |tmpdir| + Dir.mkdir(File.join(tmpdir, 'project')) + File.write(File.join(tmpdir, 'passwd'), "{}\n{}") + File.symlink(File.join(tmpdir, 'passwd'), File.join(tmpdir, 'project', 'issues.ndjson')) + + ndjson_reader = described_class.new(tmpdir) + + result = ndjson_reader.consume_relation(importable_path, 'issues') + + expect(result.to_a).to eq([]) + end + end + end + context 'relation file is empty' do let(:key) { 'empty' } diff --git 
a/spec/lib/gitlab/import_export/json/streaming_serializer_spec.rb b/spec/lib/gitlab/import_export/json/streaming_serializer_spec.rb index 103d3512e8b..f4c9189030b 100644 --- a/spec/lib/gitlab/import_export/json/streaming_serializer_spec.rb +++ b/spec/lib/gitlab/import_export/json/streaming_serializer_spec.rb @@ -28,7 +28,7 @@ RSpec.describe Gitlab::ImportExport::Json::StreamingSerializer, feature_category let(:exportable_path) { 'project' } let(:logger) { Gitlab::Export::Logger.build } - let(:json_writer) { instance_double('Gitlab::ImportExport::Json::LegacyWriter') } + let(:json_writer) { instance_double('Gitlab::ImportExport::Json::NdjsonWriter') } let(:hash) { { name: exportable.name, description: exportable.description }.stringify_keys } let(:include) { [] } let(:custom_orderer) { nil } diff --git a/spec/lib/gitlab/import_export/model_configuration_spec.rb b/spec/lib/gitlab/import_export/model_configuration_spec.rb index ce965a05a32..8e5fe96f3b4 100644 --- a/spec/lib/gitlab/import_export/model_configuration_spec.rb +++ b/spec/lib/gitlab/import_export/model_configuration_spec.rb @@ -9,7 +9,7 @@ RSpec.describe 'Import/Export model configuration', feature_category: :importers include ConfigurationHelper let(:all_models_yml) { 'spec/lib/gitlab/import_export/all_models.yml' } - let(:all_models_hash) { YAML.load_file(all_models_yml) } + let(:all_models_hash) { YAML.safe_load_file(all_models_yml, aliases: true) } let(:current_models) { setup_models } let(:model_names) { relation_names_for(:project) } diff --git a/spec/lib/gitlab/import_export/project/exported_relations_merger_spec.rb b/spec/lib/gitlab/import_export/project/exported_relations_merger_spec.rb index d70e89c6856..f8018e75879 100644 --- a/spec/lib/gitlab/import_export/project/exported_relations_merger_spec.rb +++ b/spec/lib/gitlab/import_export/project/exported_relations_merger_spec.rb @@ -64,8 +64,8 @@ RSpec.describe Gitlab::ImportExport::Project::ExportedRelationsMerger do expect(result).to eq(false) 
expect(shared.errors).to match_array( [ - "undefined method `export_file' for nil:NilClass", - "undefined method `export_file' for nil:NilClass" + /^undefined method `export_file' for nil:NilClass/, + /^undefined method `export_file' for nil:NilClass/ ] ) end diff --git a/spec/lib/gitlab/import_export/project/relation_tree_restorer_spec.rb b/spec/lib/gitlab/import_export/project/relation_tree_restorer_spec.rb index 75012aa80ec..180a6b6ff0a 100644 --- a/spec/lib/gitlab/import_export/project/relation_tree_restorer_spec.rb +++ b/spec/lib/gitlab/import_export/project/relation_tree_restorer_spec.rb @@ -55,54 +55,19 @@ RSpec.describe Gitlab::ImportExport::Project::RelationTreeRestorer, feature_cate end end - context 'with legacy reader' do - let(:path) { 'spec/fixtures/lib/gitlab/import_export/complex/project.json' } - let(:relation_reader) do - Gitlab::ImportExport::Json::LegacyReader::File.new( - path, - relation_names: reader.project_relation_names, - allowed_path: 'project' - ) - end - - let(:attributes) { relation_reader.consume_attributes('project') } - - it_behaves_like 'import project successfully' - - context 'with logging of relations creation' do - let_it_be(:group) { create(:group).tap { |g| g.add_maintainer(user) } } - let_it_be(:importable) do - create(:project, :builds_enabled, :issues_disabled, name: 'project', path: 'project', group: group) - end - - it 'logs top-level relation creation' do - expect(shared.logger) - .to receive(:info) - .with(hash_including(message: '[Project/Group Import] Created new object relation')) - .at_least(:once) - - subject - end - end - end - - context 'with ndjson reader' do + context 'when inside a group' do let(:path) { 'spec/fixtures/lib/gitlab/import_export/complex/tree' } let(:relation_reader) { Gitlab::ImportExport::Json::NdjsonReader.new(path) } - it_behaves_like 'import project successfully' - - context 'when inside a group' do - let_it_be(:group) do - create(:group, :disabled_and_unoverridable).tap { |g| 
g.add_maintainer(user) } - end - - before do - importable.update!(shared_runners_enabled: false, group: group) - end + let_it_be(:group) do + create(:group, :disabled_and_unoverridable).tap { |g| g.add_maintainer(user) } + end - it_behaves_like 'import project successfully' + before do + importable.update!(shared_runners_enabled: false, group: group) end + + it_behaves_like 'import project successfully' end context 'with invalid relations' do diff --git a/spec/lib/gitlab/import_export/project/tree_restorer_spec.rb b/spec/lib/gitlab/import_export/project/tree_restorer_spec.rb index a07fe4fd29c..5aa16f9508d 100644 --- a/spec/lib/gitlab/import_export/project/tree_restorer_spec.rb +++ b/spec/lib/gitlab/import_export/project/tree_restorer_spec.rb @@ -12,7 +12,7 @@ RSpec.describe Gitlab::ImportExport::Project::TreeRestorer, feature_category: :i let(:shared) { project.import_export_shared } - RSpec.shared_examples 'project tree restorer work properly' do |reader, ndjson_enabled| + RSpec.shared_examples 'project tree restorer work properly' do describe 'restore project tree' do before_all do # Using an admin for import, so we can check assignment of existing members @@ -27,10 +27,9 @@ RSpec.describe Gitlab::ImportExport::Project::TreeRestorer, feature_category: :i @shared = @project.import_export_shared stub_all_feature_flags - stub_feature_flags(project_import_ndjson: ndjson_enabled) setup_import_export_config('complex') - setup_reader(reader) + setup_reader allow_any_instance_of(Repository).to receive(:fetch_source_branch!).and_return(true) allow_any_instance_of(Gitlab::Git::Repository).to receive(:branch_exists?).and_return(false) @@ -606,23 +605,15 @@ RSpec.describe Gitlab::ImportExport::Project::TreeRestorer, feature_category: :i end end - context 'project.json file access check' do + context 'when expect tree structure is not present in the export path' do let(:user) { create(:user) } - let!(:project) { create(:project, :builds_disabled, :issues_disabled, name: 
'project', path: 'project') } - let(:project_tree_restorer) do - described_class.new(user: user, shared: shared, project: project) - end + let_it_be(:project) { create(:project, :builds_disabled, :issues_disabled, name: 'project', path: 'project') } - let(:restored_project_json) { project_tree_restorer.restore } + it 'fails to restore the project' do + result = described_class.new(user: user, shared: shared, project: project).restore - it 'does not read a symlink' do - Dir.mktmpdir do |tmpdir| - setup_symlink(tmpdir, 'project.json') - allow(shared).to receive(:export_path).and_call_original - - expect(project_tree_restorer.restore).to eq(false) - expect(shared.errors).to include('invalid import format') - end + expect(result).to eq(false) + expect(shared.errors).to include('invalid import format') end end @@ -635,7 +626,7 @@ RSpec.describe Gitlab::ImportExport::Project::TreeRestorer, feature_category: :i context 'with a simple project' do before do setup_import_export_config('light') - setup_reader(reader) + setup_reader expect(restored_project_json).to eq(true) end @@ -670,7 +661,7 @@ RSpec.describe Gitlab::ImportExport::Project::TreeRestorer, feature_category: :i context 'multiple pipelines reference the same external pull request' do before do setup_import_export_config('multi_pipeline_ref_one_external_pr') - setup_reader(reader) + setup_reader expect(restored_project_json).to eq(true) end @@ -698,7 +689,7 @@ RSpec.describe Gitlab::ImportExport::Project::TreeRestorer, feature_category: :i before do setup_import_export_config('light') - setup_reader(reader) + setup_reader expect(project).to receive(:merge_requests).and_call_original expect(project).to receive(:merge_requests).and_raise(exception) @@ -715,7 +706,7 @@ RSpec.describe Gitlab::ImportExport::Project::TreeRestorer, feature_category: :i before do setup_import_export_config('light') - setup_reader(reader) + setup_reader expect(project).to receive(:merge_requests).and_call_original expect(project).to 
receive(:merge_requests).and_raise(exception) @@ -747,7 +738,7 @@ RSpec.describe Gitlab::ImportExport::Project::TreeRestorer, feature_category: :i context 'when the project has overridden params in import data' do before do setup_import_export_config('light') - setup_reader(reader) + setup_reader end it 'handles string versions of visibility_level' do @@ -813,7 +804,7 @@ RSpec.describe Gitlab::ImportExport::Project::TreeRestorer, feature_category: :i before do setup_import_export_config('group') - setup_reader(reader) + setup_reader expect(restored_project_json).to eq(true) end @@ -849,7 +840,7 @@ RSpec.describe Gitlab::ImportExport::Project::TreeRestorer, feature_category: :i before do setup_import_export_config('light') - setup_reader(reader) + setup_reader end it 'imports labels' do @@ -885,7 +876,7 @@ RSpec.describe Gitlab::ImportExport::Project::TreeRestorer, feature_category: :i before do setup_import_export_config('milestone-iid') - setup_reader(reader) + setup_reader end it 'preserves the project milestone IID' do @@ -901,7 +892,7 @@ RSpec.describe Gitlab::ImportExport::Project::TreeRestorer, feature_category: :i context 'with external authorization classification labels' do before do setup_import_export_config('light') - setup_reader(reader) + setup_reader end it 'converts empty external classification authorization labels to nil' do @@ -928,76 +919,80 @@ RSpec.describe Gitlab::ImportExport::Project::TreeRestorer, feature_category: :i described_class.new(user: user, shared: shared, project: project) end - before do - allow_any_instance_of(Gitlab::ImportExport::Json::LegacyReader::File).to receive(:exist?).and_return(true) - allow_any_instance_of(Gitlab::ImportExport::Json::NdjsonReader).to receive(:exist?).and_return(false) - allow_any_instance_of(Gitlab::ImportExport::Json::LegacyReader::File).to receive(:tree_hash) { tree_hash } - end - - context 'no group visibility' do - let(:visibility) { Gitlab::VisibilityLevel::PRIVATE } + describe 'visibility 
level' do + before do + setup_import_export_config('light') - it 'uses the project visibility' do - expect(restorer.restore).to eq(true) - expect(restorer.project.visibility_level).to eq(visibility) + allow_next_instance_of(Gitlab::ImportExport::Json::NdjsonReader) do |relation_reader| + allow(relation_reader).to receive(:consume_attributes).and_return(tree_hash) + end end - end - - context 'with restricted internal visibility' do - describe 'internal project' do - let(:visibility) { Gitlab::VisibilityLevel::INTERNAL } - it 'uses private visibility' do - stub_application_setting(restricted_visibility_levels: [Gitlab::VisibilityLevel::INTERNAL]) + context 'no group visibility' do + let(:visibility) { Gitlab::VisibilityLevel::PRIVATE } + it 'uses the project visibility' do expect(restorer.restore).to eq(true) - expect(restorer.project.visibility_level).to eq(Gitlab::VisibilityLevel::PRIVATE) + expect(restorer.project.visibility_level).to eq(visibility) end end - end - context 'with group visibility' do - before do - group = create(:group, visibility_level: group_visibility) - group.add_members([user], GroupMember::MAINTAINER) - project.update!(group: group) - end + context 'with restricted internal visibility' do + describe 'internal project' do + let(:visibility) { Gitlab::VisibilityLevel::INTERNAL } - context 'private group visibility' do - let(:group_visibility) { Gitlab::VisibilityLevel::PRIVATE } - let(:visibility) { Gitlab::VisibilityLevel::PUBLIC } + it 'uses private visibility' do + stub_application_setting(restricted_visibility_levels: [Gitlab::VisibilityLevel::INTERNAL]) - it 'uses the group visibility' do - expect(restorer.restore).to eq(true) - expect(restorer.project.visibility_level).to eq(group_visibility) + expect(restorer.restore).to eq(true) + expect(restorer.project.visibility_level).to eq(Gitlab::VisibilityLevel::PRIVATE) + end end end - context 'public group visibility' do - let(:group_visibility) { Gitlab::VisibilityLevel::PUBLIC } - 
let(:visibility) { Gitlab::VisibilityLevel::PRIVATE } + context 'with group visibility' do + before do + group = create(:group, visibility_level: group_visibility) + group.add_members([user], GroupMember::MAINTAINER) + project.update!(group: group) + end - it 'uses the project visibility' do - expect(restorer.restore).to eq(true) - expect(restorer.project.visibility_level).to eq(visibility) + context 'private group visibility' do + let(:group_visibility) { Gitlab::VisibilityLevel::PRIVATE } + let(:visibility) { Gitlab::VisibilityLevel::PUBLIC } + + it 'uses the group visibility' do + expect(restorer.restore).to eq(true) + expect(restorer.project.visibility_level).to eq(group_visibility) + end end - end - context 'internal group visibility' do - let(:group_visibility) { Gitlab::VisibilityLevel::INTERNAL } - let(:visibility) { Gitlab::VisibilityLevel::PUBLIC } + context 'public group visibility' do + let(:group_visibility) { Gitlab::VisibilityLevel::PUBLIC } + let(:visibility) { Gitlab::VisibilityLevel::PRIVATE } - it 'uses the group visibility' do - expect(restorer.restore).to eq(true) - expect(restorer.project.visibility_level).to eq(group_visibility) + it 'uses the project visibility' do + expect(restorer.restore).to eq(true) + expect(restorer.project.visibility_level).to eq(visibility) + end end - context 'with restricted internal visibility' do - it 'sets private visibility' do - stub_application_setting(restricted_visibility_levels: [Gitlab::VisibilityLevel::INTERNAL]) + context 'internal group visibility' do + let(:group_visibility) { Gitlab::VisibilityLevel::INTERNAL } + let(:visibility) { Gitlab::VisibilityLevel::PUBLIC } + it 'uses the group visibility' do expect(restorer.restore).to eq(true) - expect(restorer.project.visibility_level).to eq(Gitlab::VisibilityLevel::PRIVATE) + expect(restorer.project.visibility_level).to eq(group_visibility) + end + + context 'with restricted internal visibility' do + it 'sets private visibility' do + 
stub_application_setting(restricted_visibility_levels: [Gitlab::VisibilityLevel::INTERNAL]) + + expect(restorer.restore).to eq(true) + expect(restorer.project.visibility_level).to eq(Gitlab::VisibilityLevel::PRIVATE) + end end end end @@ -1008,24 +1003,35 @@ RSpec.describe Gitlab::ImportExport::Project::TreeRestorer, feature_category: :i let(:user2) { create(:user) } let(:project_members) do [ - { - "id" => 2, - "access_level" => 40, - "source_type" => "Project", - "notification_level" => 3, - "user" => { - "id" => user2.id, - "email" => user2.email, - "username" => 'test' - } - } + [ + { + "id" => 2, + "access_level" => 40, + "source_type" => "Project", + "notification_level" => 3, + "user" => { + "id" => user2.id, + "email" => user2.email, + "username" => 'test' + } + }, + 0 + ] ] end - let(:tree_hash) { { 'project_members' => project_members } } - before do project.add_maintainer(user) + + setup_import_export_config('light') + + allow_next_instance_of(Gitlab::ImportExport::Json::NdjsonReader) do |relation_reader| + allow(relation_reader).to receive(:consume_relation).and_call_original + + allow(relation_reader).to receive(:consume_relation) + .with('project', 'project_members') + .and_return(project_members) + end end it 'restores project members' do @@ -1045,7 +1051,7 @@ RSpec.describe Gitlab::ImportExport::Project::TreeRestorer, feature_category: :i before do setup_import_export_config('with_invalid_records') - setup_reader(reader) + setup_reader subject end @@ -1138,13 +1144,5 @@ RSpec.describe Gitlab::ImportExport::Project::TreeRestorer, feature_category: :i end end - context 'enable ndjson import' do - it_behaves_like 'project tree restorer work properly', :legacy_reader, true - - it_behaves_like 'project tree restorer work properly', :ndjson_reader, true - end - - context 'disable ndjson import' do - it_behaves_like 'project tree restorer work properly', :legacy_reader, false - end + it_behaves_like 'project tree restorer work properly' end diff --git 
a/spec/lib/gitlab/import_export/project/tree_saver_spec.rb b/spec/lib/gitlab/import_export/project/tree_saver_spec.rb index b87992c4594..4166eba4e8e 100644 --- a/spec/lib/gitlab/import_export/project/tree_saver_spec.rb +++ b/spec/lib/gitlab/import_export/project/tree_saver_spec.rb @@ -9,28 +9,21 @@ RSpec.describe Gitlab::ImportExport::Project::TreeSaver, :with_license, feature_ let_it_be(:group) { create(:group) } let_it_be(:project) { setup_project } - shared_examples 'saves project tree successfully' do |ndjson_enabled| + shared_examples 'saves project tree successfully' do include ImportExport::CommonUtil - subject { get_json(full_path, exportable_path, relation_name, ndjson_enabled) } + subject { get_json(full_path, exportable_path, relation_name) } describe 'saves project tree attributes' do let_it_be(:shared) { project.import_export_shared } let(:relation_name) { :projects } - let_it_be(:full_path) do - if ndjson_enabled - File.join(shared.export_path, 'tree') - else - File.join(shared.export_path, Gitlab::ImportExport.project_filename) - end - end + let_it_be(:full_path) { File.join(shared.export_path, 'tree') } before_all do RSpec::Mocks.with_temporary_scope do stub_all_feature_flags - stub_feature_flags(project_export_as_ndjson: ndjson_enabled) project.add_maintainer(user) @@ -300,13 +293,7 @@ RSpec.describe Gitlab::ImportExport::Project::TreeSaver, :with_license, feature_ let_it_be(:group) { create(:group) } let(:project) { setup_project } - let(:full_path) do - if ndjson_enabled - File.join(shared.export_path, 'tree') - else - File.join(shared.export_path, Gitlab::ImportExport.project_filename) - end - end + let(:full_path) { File.join(shared.export_path, 'tree') } let(:shared) { project.import_export_shared } let(:params) { {} } @@ -314,7 +301,6 @@ RSpec.describe Gitlab::ImportExport::Project::TreeSaver, :with_license, feature_ let(:project_tree_saver ) { described_class.new(project: project, current_user: user, shared: shared, params: params) } before 
do - stub_feature_flags(project_export_as_ndjson: ndjson_enabled) project.add_maintainer(user) FileUtils.rm_rf(export_path) @@ -425,13 +411,7 @@ RSpec.describe Gitlab::ImportExport::Project::TreeSaver, :with_license, feature_ end end - context 'with JSON' do - it_behaves_like "saves project tree successfully", false - end - - context 'with NDJSON' do - it_behaves_like "saves project tree successfully", true - end + it_behaves_like "saves project tree successfully" context 'when streaming has to retry', :aggregate_failures do let(:shared) { double('shared', export_path: exportable_path) } diff --git a/spec/lib/gitlab/import_export/safe_model_attributes.yml b/spec/lib/gitlab/import_export/safe_model_attributes.yml index 2384baabb6b..854909fd592 100644 --- a/spec/lib/gitlab/import_export/safe_model_attributes.yml +++ b/spec/lib/gitlab/import_export/safe_model_attributes.yml @@ -1060,3 +1060,21 @@ ResourceIterationEvent: - action Iterations::Cadence: - title +ApprovalProjectRule: + - approvals_required + - name + - rule_type + - scanners + - vulnerabilities_allowed + - severity_levels + - report_type + - vulnerability_states + - orchestration_policy_idx + - applies_to_all_protected_branches +ApprovalProjectRulesUser: + - user_id + - approval_project_rule_id +ApprovalProjectRulesProtectedBranch: + - protected_branch_id + - approval_project_rule_id + - branch_name diff --git a/spec/lib/gitlab/incoming_email_spec.rb b/spec/lib/gitlab/incoming_email_spec.rb deleted file mode 100644 index acd6634058f..00000000000 --- a/spec/lib/gitlab/incoming_email_spec.rb +++ /dev/null @@ -1,34 +0,0 @@ -# frozen_string_literal: true - -require 'spec_helper' - -RSpec.describe Gitlab::IncomingEmail do - let(:setting_name) { :incoming_email } - - it_behaves_like 'common email methods' - - describe 'self.key_from_address' do - before do - stub_incoming_email_setting(address: 'replies+%{key}@example.com') - end - - it "returns reply key" do - 
expect(described_class.key_from_address("replies+key@example.com")).to eq("key") - end - - it 'does not match emails with extra bits' do - expect(described_class.key_from_address('somereplies+somekey@example.com.someotherdomain.com')).to be nil - end - - context 'when a custom wildcard address is used' do - let(:wildcard_address) { 'custom.address+%{key}@example.com' } - - it 'finds key if email matches address pattern' do - key = described_class.key_from_address( - 'custom.address+foo@example.com', wildcard_address: wildcard_address - ) - expect(key).to eq('foo') - end - end - end -end diff --git a/spec/lib/gitlab/jwt_authenticatable_spec.rb b/spec/lib/gitlab/jwt_authenticatable_spec.rb index 92d5feceb75..9a06f9b91df 100644 --- a/spec/lib/gitlab/jwt_authenticatable_spec.rb +++ b/spec/lib/gitlab/jwt_authenticatable_spec.rb @@ -172,11 +172,17 @@ RSpec.describe Gitlab::JwtAuthenticatable do end it 'raises an error if iat is invalid' do - encoded_message = JWT.encode(payload.merge(iat: 'wrong'), test_class.secret, 'HS256') + encoded_message = JWT.encode(payload.merge(iat: Time.current.to_i + 1), test_class.secret, 'HS256') expect { test_class.decode_jwt(encoded_message, iat_after: true) }.to raise_error(JWT::DecodeError) end + it 'raises InvalidPayload exception if iat is a string' do + expect do + JWT.encode(payload.merge(iat: 'wrong'), test_class.secret, 'HS256') + end.to raise_error(JWT::InvalidPayload) + end + it 'raises an error if iat is absent' do encoded_message = JWT.encode(payload, test_class.secret, 'HS256') diff --git a/spec/lib/gitlab/kas/user_access_spec.rb b/spec/lib/gitlab/kas/user_access_spec.rb index 8795ad565d0..a8296d23a18 100644 --- a/spec/lib/gitlab/kas/user_access_spec.rb +++ b/spec/lib/gitlab/kas/user_access_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe Gitlab::Kas::UserAccess, feature_category: :kubernetes_management do +RSpec.describe Gitlab::Kas::UserAccess, feature_category: :deployment_management do describe '.enabled?' 
do subject { described_class.enabled? } diff --git a/spec/lib/gitlab/kubernetes/helm/api_spec.rb b/spec/lib/gitlab/kubernetes/helm/api_spec.rb deleted file mode 100644 index e022f5bd912..00000000000 --- a/spec/lib/gitlab/kubernetes/helm/api_spec.rb +++ /dev/null @@ -1,269 +0,0 @@ -# frozen_string_literal: true - -require 'spec_helper' - -RSpec.describe Gitlab::Kubernetes::Helm::API do - let(:client) { double('kubernetes client') } - let(:helm) { described_class.new(client) } - let(:gitlab_namespace) { Gitlab::Kubernetes::Helm::NAMESPACE } - let(:gitlab_namespace_labels) { Gitlab::Kubernetes::Helm::NAMESPACE_LABELS } - let(:namespace) { Gitlab::Kubernetes::Namespace.new(gitlab_namespace, client, labels: gitlab_namespace_labels) } - let(:application_name) { 'app-name' } - let(:rbac) { false } - let(:files) { {} } - - let(:command) do - Gitlab::Kubernetes::Helm::V2::InstallCommand.new( - name: application_name, - chart: 'chart-name', - rbac: rbac, - files: files - ) - end - - subject { helm } - - before do - allow(Gitlab::Kubernetes::Namespace).to( - receive(:new).with(gitlab_namespace, client, labels: gitlab_namespace_labels).and_return(namespace) - ) - allow(client).to receive(:create_config_map) - end - - describe '#initialize' do - it 'creates a namespace object' do - expect(Gitlab::Kubernetes::Namespace).to( - receive(:new).with(gitlab_namespace, client, labels: gitlab_namespace_labels) - ) - - subject - end - end - - describe '#uninstall' do - before do - allow(client).to receive(:create_pod).and_return(nil) - allow(client).to receive(:get_config_map).and_return(nil) - allow(client).to receive(:create_config_map).and_return(nil) - allow(client).to receive(:delete_pod).and_return(nil) - allow(namespace).to receive(:ensure_exists!).once - end - - it 'ensures the namespace exists before creating the POD' do - expect(namespace).to receive(:ensure_exists!).once.ordered - expect(client).to receive(:create_pod).once.ordered - - subject.uninstall(command) - end - - it 
'removes an existing pod before installing' do - expect(client).to receive(:delete_pod).with('install-app-name', 'gitlab-managed-apps').once.ordered - expect(client).to receive(:create_pod).once.ordered - - subject.uninstall(command) - end - - context 'with a ConfigMap' do - let(:resource) { Gitlab::Kubernetes::ConfigMap.new(application_name, files).generate } - - it 'creates a ConfigMap on kubeclient' do - expect(client).to receive(:create_config_map).with(resource).once - - subject.install(command) - end - - context 'config map already exists' do - before do - expect(client).to receive(:get_config_map).with("values-content-configuration-#{application_name}", gitlab_namespace).and_return(resource) - end - - it 'updates the config map' do - expect(client).to receive(:update_config_map).with(resource).once - - subject.install(command) - end - end - end - end - - describe '#install' do - before do - allow(client).to receive(:create_pod).and_return(nil) - allow(client).to receive(:get_config_map).and_return(nil) - allow(client).to receive(:create_config_map).and_return(nil) - allow(client).to receive(:create_service_account).and_return(nil) - allow(client).to receive(:delete_pod).and_return(nil) - allow(namespace).to receive(:ensure_exists!).once - end - - it 'ensures the namespace exists before creating the POD' do - expect(namespace).to receive(:ensure_exists!).once.ordered - expect(client).to receive(:create_pod).once.ordered - - subject.install(command) - end - - it 'removes an existing pod before installing' do - expect(client).to receive(:delete_pod).with('install-app-name', 'gitlab-managed-apps').once.ordered - expect(client).to receive(:create_pod).once.ordered - - subject.install(command) - end - - context 'with a ConfigMap' do - let(:resource) { Gitlab::Kubernetes::ConfigMap.new(application_name, files).generate } - - it 'creates a ConfigMap on kubeclient' do - expect(client).to receive(:create_config_map).with(resource).once - - subject.install(command) - 
end - - context 'config map already exists' do - before do - expect(client).to receive(:get_config_map).with("values-content-configuration-#{application_name}", gitlab_namespace).and_return(resource) - end - - it 'updates the config map' do - expect(client).to receive(:update_config_map).with(resource).once - - subject.install(command) - end - end - end - - context 'without a service account' do - it 'does not create a service account on kubeclient' do - expect(client).not_to receive(:create_service_account) - expect(client).not_to receive(:update_cluster_role_binding) - - subject.install(command) - end - end - - context 'with a service account' do - let(:command) { Gitlab::Kubernetes::Helm::V2::InitCommand.new(name: application_name, files: files, rbac: rbac) } - - context 'rbac-enabled cluster' do - let(:rbac) { true } - - let(:service_account_resource) do - Kubeclient::Resource.new(metadata: { name: 'tiller', namespace: 'gitlab-managed-apps' }) - end - - let(:cluster_role_binding_resource) do - Kubeclient::Resource.new( - metadata: { name: 'tiller-admin' }, - roleRef: { apiGroup: 'rbac.authorization.k8s.io', kind: 'ClusterRole', name: 'cluster-admin' }, - subjects: [{ kind: 'ServiceAccount', name: 'tiller', namespace: 'gitlab-managed-apps' }] - ) - end - - context 'service account does not exist' do - before do - expect(client).to receive(:get_service_account).with('tiller', 'gitlab-managed-apps').and_raise(Kubeclient::ResourceNotFoundError.new(404, 'Not found', nil)) - end - - it 'creates a service account, followed the cluster role binding on kubeclient' do - expect(client).to receive(:create_service_account).with(service_account_resource).once.ordered - expect(client).to receive(:update_cluster_role_binding).with(cluster_role_binding_resource).once.ordered - - subject.install(command) - end - end - - context 'service account already exists' do - before do - expect(client).to receive(:get_service_account).with('tiller', 
'gitlab-managed-apps').and_return(service_account_resource) - end - - it 'updates the service account, followed by creating the cluster role binding' do - expect(client).to receive(:update_service_account).with(service_account_resource).once.ordered - expect(client).to receive(:update_cluster_role_binding).with(cluster_role_binding_resource).once.ordered - - subject.install(command) - end - end - - context 'a non-404 error is thrown' do - before do - expect(client).to receive(:get_service_account).with('tiller', 'gitlab-managed-apps').and_raise(Kubeclient::HttpError.new(401, 'Unauthorized', nil)) - end - - it 'raises an error' do - expect { subject.install(command) }.to raise_error(Kubeclient::HttpError) - end - end - end - - context 'legacy abac cluster' do - it 'does not create a service account on kubeclient' do - expect(client).not_to receive(:create_service_account) - expect(client).not_to receive(:update_cluster_role_binding) - - subject.install(command) - end - end - end - end - - describe '#status' do - let(:phase) { Gitlab::Kubernetes::Pod::RUNNING } - let(:pod) { Kubeclient::Resource.new(status: { phase: phase }) } # partial representation - - it 'fetches POD phase from kubernetes cluster' do - expect(client).to receive(:get_pod).with(command.pod_name, gitlab_namespace).once.and_return(pod) - - expect(subject.status(command.pod_name)).to eq(phase) - end - end - - describe '#log' do - let(:log) { 'some output' } - let(:response) { RestClient::Response.new(log) } - - it 'fetches POD phase from kubernetes cluster' do - expect(client).to receive(:get_pod_log).with(command.pod_name, gitlab_namespace).once.and_return(response) - - expect(subject.log(command.pod_name)).to eq(log) - end - end - - describe '#delete_pod!' 
do - it 'deletes the POD from kubernetes cluster' do - expect(client).to receive(:delete_pod).with('install-app-name', 'gitlab-managed-apps').once - - subject.delete_pod!('install-app-name') - end - - context 'when the resource being deleted does not exist' do - it 'catches the error' do - expect(client).to receive(:delete_pod).with('install-app-name', 'gitlab-managed-apps') - .and_raise(Kubeclient::ResourceNotFoundError.new(404, 'Not found', nil)) - - subject.delete_pod!('install-app-name') - end - end - end - - describe '#get_config_map' do - before do - allow(namespace).to receive(:ensure_exists!).once - allow(client).to receive(:get_config_map).and_return(nil) - end - - it 'ensures the namespace exists before retrieving the config map' do - expect(namespace).to receive(:ensure_exists!).once - - subject.get_config_map('example-config-map-name') - end - - it 'gets the config map on kubeclient' do - expect(client).to receive(:get_config_map) - .with('example-config-map-name', namespace.name) - .once - - subject.get_config_map('example-config-map-name') - end - end -end diff --git a/spec/lib/gitlab/kubernetes/helm/pod_spec.rb b/spec/lib/gitlab/kubernetes/helm/pod_spec.rb deleted file mode 100644 index 8aa755bffce..00000000000 --- a/spec/lib/gitlab/kubernetes/helm/pod_spec.rb +++ /dev/null @@ -1,89 +0,0 @@ -# frozen_string_literal: true - -require 'spec_helper' - -RSpec.describe Gitlab::Kubernetes::Helm::Pod do - describe '#generate' do - using RSpec::Parameterized::TableSyntax - - where(:helm_major_version, :expected_helm_version, :expected_command_env) do - 2 | '2.17.0' | [:TILLER_NAMESPACE] - 3 | '3.2.4' | nil - end - - with_them do - let(:cluster) { create(:cluster, helm_major_version: helm_major_version) } - let(:app) { create(:clusters_applications_knative, cluster: cluster) } - let(:command) { app.install_command } - let(:namespace) { Gitlab::Kubernetes::Helm::NAMESPACE } - let(:service_account_name) { nil } - - subject { described_class.new(command, 
namespace, service_account_name: service_account_name) } - - context 'with a command' do - it 'generates a Kubeclient::Resource' do - expect(subject.generate).to be_a_kind_of(Kubeclient::Resource) - end - - it 'generates the appropriate metadata' do - metadata = subject.generate.metadata - expect(metadata.name).to eq("install-#{app.name}") - expect(metadata.namespace).to eq('gitlab-managed-apps') - expect(metadata.labels['gitlab.org/action']).to eq('install') - expect(metadata.labels['gitlab.org/application']).to eq(app.name) - end - - it 'generates a container spec' do - spec = subject.generate.spec - expect(spec.containers.count).to eq(1) - end - - it 'generates the appropriate specifications for the container' do - container = subject.generate.spec.containers.first - expect(container.name).to eq('helm') - expect(container.image).to eq("registry.gitlab.com/gitlab-org/cluster-integration/helm-install-image/releases/#{expected_helm_version}-kube-1.13.12-alpine-3.12") - expect(container.env.map(&:name)).to include(:HELM_VERSION, :COMMAND_SCRIPT, *expected_command_env) - expect(container.command).to match_array(["/bin/sh"]) - expect(container.args).to match_array(["-c", "$(COMMAND_SCRIPT)"]) - end - - it 'includes a never restart policy' do - spec = subject.generate.spec - expect(spec.restartPolicy).to eq('Never') - end - - it 'includes volumes for the container' do - container = subject.generate.spec.containers.first - expect(container.volumeMounts.first['name']).to eq('configuration-volume') - expect(container.volumeMounts.first['mountPath']).to eq("/data/helm/#{app.name}/config") - end - - it 'includes a volume inside the specification' do - spec = subject.generate.spec - expect(spec.volumes.first['name']).to eq('configuration-volume') - end - - it 'mounts configMap specification in the volume' do - volume = subject.generate.spec.volumes.first - expect(volume.configMap['name']).to eq("values-content-configuration-#{app.name}") - 
expect(volume.configMap['items'].first['key']).to eq(:'values.yaml') - expect(volume.configMap['items'].first['path']).to eq(:'values.yaml') - end - - it 'has no serviceAccountName' do - spec = subject.generate.spec - expect(spec.serviceAccountName).to be_nil - end - - context 'with a service_account_name' do - let(:service_account_name) { 'sa' } - - it 'uses the serviceAccountName provided' do - spec = subject.generate.spec - expect(spec.serviceAccountName).to eq(service_account_name) - end - end - end - end - end -end diff --git a/spec/lib/gitlab/kubernetes/helm/v2/base_command_spec.rb b/spec/lib/gitlab/kubernetes/helm/v2/base_command_spec.rb deleted file mode 100644 index 3d2b36b9094..00000000000 --- a/spec/lib/gitlab/kubernetes/helm/v2/base_command_spec.rb +++ /dev/null @@ -1,50 +0,0 @@ -# frozen_string_literal: true - -require 'spec_helper' - -RSpec.describe Gitlab::Kubernetes::Helm::V2::BaseCommand do - subject(:base_command) do - test_class.new(rbac) - end - - let(:application) { create(:clusters_applications_helm) } - let(:rbac) { false } - - let(:test_class) do - Class.new(described_class) do - def initialize(rbac) - super( - name: 'test-class-name', - rbac: rbac, - files: { some: 'value' } - ) - end - end - end - - describe 'HELM_VERSION' do - subject { described_class::HELM_VERSION } - - it { is_expected.to match /^2\.\d+\.\d+$/ } - end - - describe '#env' do - subject { base_command.env } - - it { is_expected.to include(TILLER_NAMESPACE: 'gitlab-managed-apps') } - end - - it_behaves_like 'helm command generator' do - let(:commands) { '' } - end - - describe '#pod_name' do - subject { base_command.pod_name } - - it { is_expected.to eq('install-test-class-name') } - end - - it_behaves_like 'helm command' do - let(:command) { base_command } - end -end diff --git a/spec/lib/gitlab/kubernetes/helm/v2/certificate_spec.rb b/spec/lib/gitlab/kubernetes/helm/v2/certificate_spec.rb deleted file mode 100644 index 698b88c9fa1..00000000000 --- 
a/spec/lib/gitlab/kubernetes/helm/v2/certificate_spec.rb +++ /dev/null @@ -1,28 +0,0 @@ -# frozen_string_literal: true -require 'fast_spec_helper' - -RSpec.describe Gitlab::Kubernetes::Helm::V2::Certificate do - describe '.generate_root' do - subject { described_class.generate_root } - - it 'generates a root CA that expires a long way in the future' do - expect(subject.cert.not_after).to be > 999.years.from_now - end - end - - describe '#issue' do - subject { described_class.generate_root.issue } - - it 'generates a cert that expires soon' do - expect(subject.cert.not_after).to be < 60.minutes.from_now - end - - context 'passing in INFINITE_EXPIRY' do - subject { described_class.generate_root.issue(expires_in: described_class::INFINITE_EXPIRY) } - - it 'generates a cert that expires a long way in the future' do - expect(subject.cert.not_after).to be > 999.years.from_now - end - end - end -end diff --git a/spec/lib/gitlab/kubernetes/helm/v2/delete_command_spec.rb b/spec/lib/gitlab/kubernetes/helm/v2/delete_command_spec.rb deleted file mode 100644 index 4a3a41dba4a..00000000000 --- a/spec/lib/gitlab/kubernetes/helm/v2/delete_command_spec.rb +++ /dev/null @@ -1,38 +0,0 @@ -# frozen_string_literal: true - -require 'spec_helper' - -RSpec.describe Gitlab::Kubernetes::Helm::V2::DeleteCommand do - subject(:delete_command) { described_class.new(name: app_name, rbac: rbac, files: files) } - - let(:app_name) { 'app-name' } - let(:rbac) { true } - let(:files) { {} } - - it_behaves_like 'helm command generator' do - let(:commands) do - <<~EOS - export HELM_HOST="localhost:44134" - tiller -listen ${HELM_HOST} -alsologtostderr & - helm init --client-only - helm delete --purge app-name - EOS - end - end - - describe '#pod_name' do - subject { delete_command.pod_name } - - it { is_expected.to eq('uninstall-app-name') } - end - - it_behaves_like 'helm command' do - let(:command) { delete_command } - end - - describe '#delete_command' do - it 'deletes the release' do - 
expect(subject.delete_command).to eq('helm delete --purge app-name') - end - end -end diff --git a/spec/lib/gitlab/kubernetes/helm/v2/init_command_spec.rb b/spec/lib/gitlab/kubernetes/helm/v2/init_command_spec.rb deleted file mode 100644 index 8ae78ada15c..00000000000 --- a/spec/lib/gitlab/kubernetes/helm/v2/init_command_spec.rb +++ /dev/null @@ -1,35 +0,0 @@ -# frozen_string_literal: true - -require 'spec_helper' - -RSpec.describe Gitlab::Kubernetes::Helm::V2::InitCommand do - subject(:init_command) { described_class.new(name: application.name, files: files, rbac: rbac) } - - let(:application) { create(:clusters_applications_helm) } - let(:rbac) { false } - let(:files) { {} } - - it_behaves_like 'helm command generator' do - let(:commands) do - <<~EOS - helm init --tiller-tls --tiller-tls-verify --tls-ca-cert /data/helm/helm/config/ca.pem --tiller-tls-cert /data/helm/helm/config/cert.pem --tiller-tls-key /data/helm/helm/config/key.pem - EOS - end - end - - context 'on a rbac-enabled cluster' do - let(:rbac) { true } - - it_behaves_like 'helm command generator' do - let(:commands) do - <<~EOS - helm init --tiller-tls --tiller-tls-verify --tls-ca-cert /data/helm/helm/config/ca.pem --tiller-tls-cert /data/helm/helm/config/cert.pem --tiller-tls-key /data/helm/helm/config/key.pem --service-account tiller - EOS - end - end - end - - it_behaves_like 'helm command' do - let(:command) { init_command } - end -end diff --git a/spec/lib/gitlab/kubernetes/helm/v2/install_command_spec.rb b/spec/lib/gitlab/kubernetes/helm/v2/install_command_spec.rb deleted file mode 100644 index 250d1a82e7a..00000000000 --- a/spec/lib/gitlab/kubernetes/helm/v2/install_command_spec.rb +++ /dev/null @@ -1,183 +0,0 @@ -# frozen_string_literal: true - -require 'spec_helper' - -RSpec.describe Gitlab::Kubernetes::Helm::V2::InstallCommand do - subject(:install_command) do - described_class.new( - name: 'app-name', - chart: 'chart-name', - rbac: rbac, - files: files, - version: version, - repository: 
repository, - preinstall: preinstall, - postinstall: postinstall - ) - end - - let(:files) { { 'ca.pem': 'some file content' } } - let(:repository) { 'https://repository.example.com' } - let(:rbac) { false } - let(:version) { '1.2.3' } - let(:preinstall) { nil } - let(:postinstall) { nil } - - it_behaves_like 'helm command generator' do - let(:commands) do - <<~EOS - export HELM_HOST="localhost:44134" - tiller -listen ${HELM_HOST} -alsologtostderr & - helm init --client-only - helm repo add app-name https://repository.example.com - helm repo update - #{helm_install_comand} - EOS - end - - let(:helm_install_comand) do - <<~EOS.squish - helm upgrade app-name chart-name - --install - --atomic - --cleanup-on-fail - --reset-values - --version 1.2.3 - --set rbac.create\\=false,rbac.enabled\\=false - --namespace gitlab-managed-apps - -f /data/helm/app-name/config/values.yaml - EOS - end - end - - context 'when rbac is true' do - let(:rbac) { true } - - it_behaves_like 'helm command generator' do - let(:commands) do - <<~EOS - export HELM_HOST="localhost:44134" - tiller -listen ${HELM_HOST} -alsologtostderr & - helm init --client-only - helm repo add app-name https://repository.example.com - helm repo update - #{helm_install_command} - EOS - end - - let(:helm_install_command) do - <<~EOS.squish - helm upgrade app-name chart-name - --install - --atomic - --cleanup-on-fail - --reset-values - --version 1.2.3 - --set rbac.create\\=true,rbac.enabled\\=true - --namespace gitlab-managed-apps - -f /data/helm/app-name/config/values.yaml - EOS - end - end - end - - context 'when there is a pre-install script' do - let(:preinstall) { ['/bin/date', '/bin/true'] } - - it_behaves_like 'helm command generator' do - let(:commands) do - <<~EOS - export HELM_HOST="localhost:44134" - tiller -listen ${HELM_HOST} -alsologtostderr & - helm init --client-only - helm repo add app-name https://repository.example.com - helm repo update - /bin/date - /bin/true - #{helm_install_command} - EOS - end - 
- let(:helm_install_command) do - <<~EOS.squish - helm upgrade app-name chart-name - --install - --atomic - --cleanup-on-fail - --reset-values - --version 1.2.3 - --set rbac.create\\=false,rbac.enabled\\=false - --namespace gitlab-managed-apps - -f /data/helm/app-name/config/values.yaml - EOS - end - end - end - - context 'when there is a post-install script' do - let(:postinstall) { ['/bin/date', "/bin/false\n"] } - - it_behaves_like 'helm command generator' do - let(:commands) do - <<~EOS - export HELM_HOST="localhost:44134" - tiller -listen ${HELM_HOST} -alsologtostderr & - helm init --client-only - helm repo add app-name https://repository.example.com - helm repo update - #{helm_install_command} - /bin/date - /bin/false - EOS - end - - let(:helm_install_command) do - <<~EOS.squish - helm upgrade app-name chart-name - --install - --atomic - --cleanup-on-fail - --reset-values - --version 1.2.3 - --set rbac.create\\=false,rbac.enabled\\=false - --namespace gitlab-managed-apps - -f /data/helm/app-name/config/values.yaml - EOS - end - end - end - - context 'when there is no version' do - let(:version) { nil } - - it_behaves_like 'helm command generator' do - let(:commands) do - <<~EOS - export HELM_HOST="localhost:44134" - tiller -listen ${HELM_HOST} -alsologtostderr & - helm init --client-only - helm repo add app-name https://repository.example.com - helm repo update - #{helm_install_command} - EOS - end - - let(:helm_install_command) do - <<~EOS.squish - helm upgrade app-name chart-name - --install - --atomic - --cleanup-on-fail - --reset-values - --set rbac.create\\=false,rbac.enabled\\=false - --namespace gitlab-managed-apps - -f /data/helm/app-name/config/values.yaml - EOS - end - end - end - - it_behaves_like 'helm command' do - let(:command) { install_command } - end -end diff --git a/spec/lib/gitlab/kubernetes/helm/v2/patch_command_spec.rb b/spec/lib/gitlab/kubernetes/helm/v2/patch_command_spec.rb deleted file mode 100644 index 98eb77d397c..00000000000 --- 
a/spec/lib/gitlab/kubernetes/helm/v2/patch_command_spec.rb +++ /dev/null @@ -1,87 +0,0 @@ -# frozen_string_literal: true - -require 'spec_helper' - -RSpec.describe Gitlab::Kubernetes::Helm::V2::PatchCommand do - let(:files) { { 'ca.pem': 'some file content' } } - let(:repository) { 'https://repository.example.com' } - let(:rbac) { false } - let(:version) { '1.2.3' } - - subject(:patch_command) do - described_class.new( - name: 'app-name', - chart: 'chart-name', - rbac: rbac, - files: files, - version: version, - repository: repository - ) - end - - it_behaves_like 'helm command generator' do - let(:commands) do - <<~EOS - export HELM_HOST="localhost:44134" - tiller -listen ${HELM_HOST} -alsologtostderr & - helm init --client-only - helm repo add app-name https://repository.example.com - helm repo update - #{helm_upgrade_comand} - EOS - end - - let(:helm_upgrade_comand) do - <<~EOS.squish - helm upgrade app-name chart-name - --reuse-values - --version 1.2.3 - --namespace gitlab-managed-apps - -f /data/helm/app-name/config/values.yaml - EOS - end - end - - context 'when rbac is true' do - let(:rbac) { true } - - it_behaves_like 'helm command generator' do - let(:commands) do - <<~EOS - export HELM_HOST="localhost:44134" - tiller -listen ${HELM_HOST} -alsologtostderr & - helm init --client-only - helm repo add app-name https://repository.example.com - helm repo update - #{helm_upgrade_command} - EOS - end - - let(:helm_upgrade_command) do - <<~EOS.squish - helm upgrade app-name chart-name - --reuse-values - --version 1.2.3 - --namespace gitlab-managed-apps - -f /data/helm/app-name/config/values.yaml - EOS - end - end - end - - context 'when there is no version' do - let(:version) { nil } - - it { expect { patch_command }.to raise_error(ArgumentError, 'version is required') } - end - - describe '#pod_name' do - subject { patch_command.pod_name } - - it { is_expected.to eq 'install-app-name' } - end - - it_behaves_like 'helm command' do - let(:command) { patch_command } 
- end -end diff --git a/spec/lib/gitlab/kubernetes/helm/v2/reset_command_spec.rb b/spec/lib/gitlab/kubernetes/helm/v2/reset_command_spec.rb deleted file mode 100644 index 2a3a4cec2b0..00000000000 --- a/spec/lib/gitlab/kubernetes/helm/v2/reset_command_spec.rb +++ /dev/null @@ -1,32 +0,0 @@ -# frozen_string_literal: true - -require 'spec_helper' - -RSpec.describe Gitlab::Kubernetes::Helm::V2::ResetCommand do - subject(:reset_command) { described_class.new(name: name, rbac: rbac, files: files) } - - let(:rbac) { true } - let(:name) { 'helm' } - let(:files) { {} } - - it_behaves_like 'helm command generator' do - let(:commands) do - <<~EOS - export HELM_HOST="localhost:44134" - tiller -listen ${HELM_HOST} -alsologtostderr & - helm init --client-only - helm reset --force - EOS - end - end - - describe '#pod_name' do - subject { reset_command.pod_name } - - it { is_expected.to eq('uninstall-helm') } - end - - it_behaves_like 'helm command' do - let(:command) { reset_command } - end -end diff --git a/spec/lib/gitlab/kubernetes/helm/v3/base_command_spec.rb b/spec/lib/gitlab/kubernetes/helm/v3/base_command_spec.rb deleted file mode 100644 index ad5ff13b4c9..00000000000 --- a/spec/lib/gitlab/kubernetes/helm/v3/base_command_spec.rb +++ /dev/null @@ -1,44 +0,0 @@ -# frozen_string_literal: true - -require 'spec_helper' - -RSpec.describe Gitlab::Kubernetes::Helm::V3::BaseCommand do - subject(:base_command) do - test_class.new(rbac) - end - - let(:application) { create(:clusters_applications_helm) } - let(:rbac) { false } - - let(:test_class) do - Class.new(described_class) do - def initialize(rbac) - super( - name: 'test-class-name', - rbac: rbac, - files: { some: 'value' } - ) - end - end - end - - describe 'HELM_VERSION' do - subject { described_class::HELM_VERSION } - - it { is_expected.to match /^3\.\d+\.\d+$/ } - end - - it_behaves_like 'helm command generator' do - let(:commands) { '' } - end - - describe '#pod_name' do - subject { base_command.pod_name } - - it { 
is_expected.to eq('install-test-class-name') } - end - - it_behaves_like 'helm command' do - let(:command) { base_command } - end -end diff --git a/spec/lib/gitlab/kubernetes/helm/v3/delete_command_spec.rb b/spec/lib/gitlab/kubernetes/helm/v3/delete_command_spec.rb deleted file mode 100644 index 63e7a8d2f25..00000000000 --- a/spec/lib/gitlab/kubernetes/helm/v3/delete_command_spec.rb +++ /dev/null @@ -1,35 +0,0 @@ -# frozen_string_literal: true - -require 'spec_helper' - -RSpec.describe Gitlab::Kubernetes::Helm::V3::DeleteCommand do - subject(:delete_command) { described_class.new(name: app_name, rbac: rbac, files: files) } - - let(:app_name) { 'app-name' } - let(:rbac) { true } - let(:files) { {} } - - it_behaves_like 'helm command generator' do - let(:commands) do - <<~EOS - helm uninstall app-name --namespace gitlab-managed-apps - EOS - end - end - - describe '#pod_name' do - subject { delete_command.pod_name } - - it { is_expected.to eq('uninstall-app-name') } - end - - it_behaves_like 'helm command' do - let(:command) { delete_command } - end - - describe '#delete_command' do - it 'deletes the release' do - expect(subject.delete_command).to eq('helm uninstall app-name --namespace gitlab-managed-apps') - end - end -end diff --git a/spec/lib/gitlab/kubernetes/helm/v3/install_command_spec.rb b/spec/lib/gitlab/kubernetes/helm/v3/install_command_spec.rb deleted file mode 100644 index 2bf1f713b3f..00000000000 --- a/spec/lib/gitlab/kubernetes/helm/v3/install_command_spec.rb +++ /dev/null @@ -1,168 +0,0 @@ -# frozen_string_literal: true - -require 'spec_helper' - -RSpec.describe Gitlab::Kubernetes::Helm::V3::InstallCommand do - subject(:install_command) do - described_class.new( - name: 'app-name', - chart: 'chart-name', - rbac: rbac, - files: files, - version: version, - repository: repository, - preinstall: preinstall, - postinstall: postinstall - ) - end - - let(:files) { { 'ca.pem': 'some file content' } } - let(:repository) { 'https://repository.example.com' } - 
let(:rbac) { false } - let(:version) { '1.2.3' } - let(:preinstall) { nil } - let(:postinstall) { nil } - - it_behaves_like 'helm command generator' do - let(:commands) do - <<~EOS - helm repo add app-name https://repository.example.com - helm repo update - #{helm_install_comand} - EOS - end - - let(:helm_install_comand) do - <<~EOS.squish - helm upgrade app-name chart-name - --install - --atomic - --cleanup-on-fail - --reset-values - --version 1.2.3 - --set rbac.create\\=false,rbac.enabled\\=false - --namespace gitlab-managed-apps - -f /data/helm/app-name/config/values.yaml - EOS - end - end - - context 'when rbac is true' do - let(:rbac) { true } - - it_behaves_like 'helm command generator' do - let(:commands) do - <<~EOS - helm repo add app-name https://repository.example.com - helm repo update - #{helm_install_command} - EOS - end - - let(:helm_install_command) do - <<~EOS.squish - helm upgrade app-name chart-name - --install - --atomic - --cleanup-on-fail - --reset-values - --version 1.2.3 - --set rbac.create\\=true,rbac.enabled\\=true - --namespace gitlab-managed-apps - -f /data/helm/app-name/config/values.yaml - EOS - end - end - end - - context 'when there is a pre-install script' do - let(:preinstall) { ['/bin/date', '/bin/true'] } - - it_behaves_like 'helm command generator' do - let(:commands) do - <<~EOS - helm repo add app-name https://repository.example.com - helm repo update - /bin/date - /bin/true - #{helm_install_command} - EOS - end - - let(:helm_install_command) do - <<~EOS.squish - helm upgrade app-name chart-name - --install - --atomic - --cleanup-on-fail - --reset-values - --version 1.2.3 - --set rbac.create\\=false,rbac.enabled\\=false - --namespace gitlab-managed-apps - -f /data/helm/app-name/config/values.yaml - EOS - end - end - end - - context 'when there is a post-install script' do - let(:postinstall) { ['/bin/date', "/bin/false\n"] } - - it_behaves_like 'helm command generator' do - let(:commands) do - <<~EOS - helm repo add app-name 
https://repository.example.com - helm repo update - #{helm_install_command} - /bin/date - /bin/false - EOS - end - - let(:helm_install_command) do - <<~EOS.squish - helm upgrade app-name chart-name - --install - --atomic - --cleanup-on-fail - --reset-values - --version 1.2.3 - --set rbac.create\\=false,rbac.enabled\\=false - --namespace gitlab-managed-apps - -f /data/helm/app-name/config/values.yaml - EOS - end - end - end - - context 'when there is no version' do - let(:version) { nil } - - it_behaves_like 'helm command generator' do - let(:commands) do - <<~EOS - helm repo add app-name https://repository.example.com - helm repo update - #{helm_install_command} - EOS - end - - let(:helm_install_command) do - <<~EOS.squish - helm upgrade app-name chart-name - --install - --atomic - --cleanup-on-fail - --reset-values - --set rbac.create\\=false,rbac.enabled\\=false - --namespace gitlab-managed-apps - -f /data/helm/app-name/config/values.yaml - EOS - end - end - end - - it_behaves_like 'helm command' do - let(:command) { install_command } - end -end diff --git a/spec/lib/gitlab/kubernetes/helm/v3/patch_command_spec.rb b/spec/lib/gitlab/kubernetes/helm/v3/patch_command_spec.rb deleted file mode 100644 index 2f22e0f2e77..00000000000 --- a/spec/lib/gitlab/kubernetes/helm/v3/patch_command_spec.rb +++ /dev/null @@ -1,81 +0,0 @@ -# frozen_string_literal: true - -require 'spec_helper' - -RSpec.describe Gitlab::Kubernetes::Helm::V3::PatchCommand do - let(:files) { { 'ca.pem': 'some file content' } } - let(:repository) { 'https://repository.example.com' } - let(:rbac) { false } - let(:version) { '1.2.3' } - - subject(:patch_command) do - described_class.new( - name: 'app-name', - chart: 'chart-name', - rbac: rbac, - files: files, - version: version, - repository: repository - ) - end - - it_behaves_like 'helm command generator' do - let(:commands) do - <<~EOS - helm repo add app-name https://repository.example.com - helm repo update - #{helm_upgrade_comand} - EOS - end - - 
let(:helm_upgrade_comand) do - <<~EOS.squish - helm upgrade app-name chart-name - --reuse-values - --version 1.2.3 - --namespace gitlab-managed-apps - -f /data/helm/app-name/config/values.yaml - EOS - end - end - - context 'when rbac is true' do - let(:rbac) { true } - - it_behaves_like 'helm command generator' do - let(:commands) do - <<~EOS - helm repo add app-name https://repository.example.com - helm repo update - #{helm_upgrade_command} - EOS - end - - let(:helm_upgrade_command) do - <<~EOS.squish - helm upgrade app-name chart-name - --reuse-values - --version 1.2.3 - --namespace gitlab-managed-apps - -f /data/helm/app-name/config/values.yaml - EOS - end - end - end - - context 'when there is no version' do - let(:version) { nil } - - it { expect { patch_command }.to raise_error(ArgumentError, 'version is required') } - end - - describe '#pod_name' do - subject { patch_command.pod_name } - - it { is_expected.to eq 'install-app-name' } - end - - it_behaves_like 'helm command' do - let(:command) { patch_command } - end -end diff --git a/spec/lib/gitlab/legacy_github_import/user_formatter_spec.rb b/spec/lib/gitlab/legacy_github_import/user_formatter_spec.rb index bc127f74e84..0844ab7eccc 100644 --- a/spec/lib/gitlab/legacy_github_import/user_formatter_spec.rb +++ b/spec/lib/gitlab/legacy_github_import/user_formatter_spec.rb @@ -5,14 +5,15 @@ require 'spec_helper' RSpec.describe Gitlab::LegacyGithubImport::UserFormatter do let(:client) { double } let(:octocat) { { id: 123456, login: 'octocat', email: 'octocat@example.com' } } + let(:gitea_ghost) { { id: -1, login: 'Ghost', email: '' } } - subject(:user) { described_class.new(client, octocat) } + describe '#gitlab_id' do + subject(:user) { described_class.new(client, octocat) } - before do - allow(client).to receive(:user).and_return(octocat) - end + before do + allow(client).to receive(:user).and_return(octocat) + end - describe '#gitlab_id' do context 'when GitHub user is a GitLab user' do it 'return GitLab user 
id when user associated their account with GitHub' do gl_user = create(:omniauth_user, extern_uid: octocat[:id], provider: 'github') @@ -51,4 +52,16 @@ RSpec.describe Gitlab::LegacyGithubImport::UserFormatter do expect(user.gitlab_id).to be_nil end end + + describe '.email' do + subject(:user) { described_class.new(client, gitea_ghost) } + + before do + allow(client).to receive(:user).and_return(gitea_ghost) + end + + it 'assigns a dummy email address when user is a Ghost gitea user' do + expect(subject.send(:email)).to eq described_class::GITEA_GHOST_EMAIL + end + end end diff --git a/spec/lib/gitlab/metrics/subscribers/action_cable_spec.rb b/spec/lib/gitlab/metrics/subscribers/action_cable_spec.rb index 08437920e0c..77c42f57f3c 100644 --- a/spec/lib/gitlab/metrics/subscribers/action_cable_spec.rb +++ b/spec/lib/gitlab/metrics/subscribers/action_cable_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe Gitlab::Metrics::Subscribers::ActionCable, :request_store do +RSpec.describe Gitlab::Metrics::Subscribers::ActionCable, :request_store, feature_category: :application_performance do let(:subscriber) { described_class.new } let(:counter) { double(:counter) } let(:data) { { 'result' => { 'data' => { 'event' => 'updated' } } } } @@ -55,7 +55,6 @@ RSpec.describe Gitlab::Metrics::Subscribers::ActionCable, :request_store do { event: :updated } end - let(:broadcasting) { 'issues:Z2lkOi8vZ2l0bGFiL0lzc3VlLzQ0Ng' } let(:payload) do { broadcasting: broadcasting, @@ -64,14 +63,40 @@ RSpec.describe Gitlab::Metrics::Subscribers::ActionCable, :request_store do } end - it 'tracks the broadcast event' do + before do allow(::Gitlab::Metrics).to receive(:counter).with( :action_cable_broadcasts_total, /broadcast/ ).and_return(counter) + end - expect(counter).to receive(:increment) + context 'when broadcast is for a GraphQL event' do + let(:broadcasting) { 'graphql-event::issuableEpicUpdated:issuableId:Z2lkOi8vZ2l0bGFiL0lzc3VlLzM' } + + it 'tracks the event with broadcasting 
set to event topic' do + expect(counter).to receive(:increment).with({ broadcasting: 'graphql-event:issuableEpicUpdated' }) + + subscriber.broadcast(event) + end + end + + context 'when broadcast is for a GraphQL channel subscription' do + let(:broadcasting) { 'graphql-subscription:09ae595a-45c4-4ae0-b765-4e503203211d' } + + it 'strips out subscription ID from broadcasting' do + expect(counter).to receive(:increment).with({ broadcasting: 'graphql-subscription' }) + + subscriber.broadcast(event) + end + end + + context 'when broadcast is something else' do + let(:broadcasting) { 'unknown-topic' } + + it 'tracks the event as "other"' do + expect(counter).to receive(:increment).with({ broadcasting: 'other' }) - subscriber.broadcast(event) + subscriber.broadcast(event) + end end end diff --git a/spec/lib/gitlab/metrics/subscribers/active_record_spec.rb b/spec/lib/gitlab/metrics/subscribers/active_record_spec.rb index 7ce5cbec18d..afb029a96cb 100644 --- a/spec/lib/gitlab/metrics/subscribers/active_record_spec.rb +++ b/spec/lib/gitlab/metrics/subscribers/active_record_spec.rb @@ -226,7 +226,7 @@ RSpec.describe Gitlab::Metrics::Subscribers::ActiveRecord do # Emulate Marginalia pre-pending comments def sql(query, comments: true) - if comments && !%w[BEGIN COMMIT].include?(query) + if comments "/*application:web,controller:badges,action:pipeline,correlation_id:01EYN39K9VMJC56Z7808N7RSRH*/ #{query}" else query @@ -244,8 +244,9 @@ RSpec.describe Gitlab::Metrics::Subscribers::ActiveRecord do 'SQL' | 'UPDATE users SET admin = true WHERE id = 10' | true | true | false 'CACHE' | 'SELECT * FROM users WHERE id = 10' | true | false | true 'SCHEMA' | "SELECT attr.attname FROM pg_attribute attr INNER JOIN pg_constraint cons ON attr.attrelid = cons.conrelid AND attr.attnum = any(cons.conkey) WHERE cons.contype = 'p' AND cons.conrelid = '\"projects\"'::regclass" | false | false | false - nil | 'BEGIN' | false | false | false - nil | 'COMMIT' | false | false | false + 'TRANSACTION' | 
'BEGIN' | false | false | false + 'TRANSACTION' | 'COMMIT' | false | false | false + 'TRANSACTION' | 'ROLLBACK' | false | false | false end with_them do @@ -291,7 +292,7 @@ RSpec.describe Gitlab::Metrics::Subscribers::ActiveRecord do # Emulate Marginalia pre-pending comments def sql(query, comments: true) - if comments && !%w[BEGIN COMMIT].include?(query) + if comments "/*application:web,controller:badges,action:pipeline,correlation_id:01EYN39K9VMJC56Z7808N7RSRH*/ #{query}" else query @@ -313,8 +314,9 @@ RSpec.describe Gitlab::Metrics::Subscribers::ActiveRecord do 'CACHE' | 'SELECT pg_last_wal_replay_lsn()::text AS location' | true | false | true | true 'CACHE' | 'SELECT * FROM users WHERE id = 10' | true | false | true | false 'SCHEMA' | "SELECT attr.attname FROM pg_attribute attr INNER JOIN pg_constraint cons ON attr.attrelid = cons.conrelid AND attr.attnum = any(cons.conkey) WHERE cons.contype = 'p' AND cons.conrelid = '\"projects\"'::regclass" | false | false | false | false - nil | 'BEGIN' | false | false | false | false - nil | 'COMMIT' | false | false | false | false + 'TRANSACTION' | 'BEGIN' | false | false | false | false + 'TRANSACTION' | 'COMMIT' | false | false | false | false + 'TRANSACTION' | 'ROLLBACK' | false | false | false | false end with_them do diff --git a/spec/lib/gitlab/metrics/subscribers/external_http_spec.rb b/spec/lib/gitlab/metrics/subscribers/external_http_spec.rb index e489ac97b9c..18a5d2c2c3f 100644 --- a/spec/lib/gitlab/metrics/subscribers/external_http_spec.rb +++ b/spec/lib/gitlab/metrics/subscribers/external_http_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe Gitlab::Metrics::Subscribers::ExternalHttp, :request_store do +RSpec.describe Gitlab::Metrics::Subscribers::ExternalHttp, :request_store, feature_category: :logging do let(:transaction) { Gitlab::Metrics::WebTransaction.new({}) } let(:subscriber) { described_class.new } @@ -15,7 +15,7 @@ RSpec.describe Gitlab::Metrics::Subscribers::ExternalHttp, :request_store 
do :event, payload: { method: 'POST', code: "200", duration: 0.321, - scheme: 'https', host: 'gitlab.com', port: 80, path: '/api/v4/projects', + scheme: 'https', host: 'gitlab.com', port: 443, path: '/api/v4/projects', query: 'current=true' }, time: Time.current @@ -95,6 +95,47 @@ RSpec.describe Gitlab::Metrics::Subscribers::ExternalHttp, :request_store do expect(described_class.payload).to eql(external_http_count: 7, external_http_duration_s: 1.2) end end + + context 'with multiple requests' do + let(:slow_requests) do + [ + { + method: 'POST', + host: 'gitlab.com', + port: 80, + path: '/api/v4/projects/2/issues', + duration_s: 5.3 + }, + { + method: 'POST', + host: 'gitlab.com', + port: 443, + path: '/api/v4/projects', + duration_s: 0.321 + } + ] + end + + before do + stub_const("#{described_class}::MAX_SLOW_REQUESTS", 2) + stub_const("#{described_class}::THRESHOLD_SLOW_REQUEST_S", 0.01) + + subscriber.request(event_1) + subscriber.request(event_2) + subscriber.request(event_3) + end + + it 'returns a payload containing a limited set of slow requests' do + expect(described_class.payload).to eq( + external_http_count: 3, + external_http_duration_s: 5.741, + external_http_slow_requests: slow_requests + ) + expect(described_class.top_slowest_requests).to eq(slow_requests) + + expect(Gitlab::SafeRequestStore[:external_http_slow_requests].length).to eq(3) + end + end end describe '#request' do @@ -153,7 +194,7 @@ RSpec.describe Gitlab::Metrics::Subscribers::ExternalHttp, :request_store do expect(Gitlab::SafeRequestStore[:external_http_detail_store][0]).to match a_hash_including( start: be_like_time(Time.current), method: 'POST', code: "200", duration: 0.321, - scheme: 'https', host: 'gitlab.com', port: 80, path: '/api/v4/projects', + scheme: 'https', host: 'gitlab.com', port: 443, path: '/api/v4/projects', query: 'current=true', exception_object: nil, backtrace: be_a(Array) ) diff --git a/spec/lib/gitlab/metrics/subscribers/load_balancing_spec.rb 
b/spec/lib/gitlab/metrics/subscribers/load_balancing_spec.rb index b401b7cc996..c2c3bb29b16 100644 --- a/spec/lib/gitlab/metrics/subscribers/load_balancing_spec.rb +++ b/spec/lib/gitlab/metrics/subscribers/load_balancing_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe Gitlab::Metrics::Subscribers::LoadBalancing, :request_store, feature_category: :pods do +RSpec.describe Gitlab::Metrics::Subscribers::LoadBalancing, :request_store, feature_category: :cell do let(:subscriber) { described_class.new } describe '#caught_up_replica_pick' do diff --git a/spec/lib/gitlab/middleware/go_spec.rb b/spec/lib/gitlab/middleware/go_spec.rb index aaa274e252d..83d4d3fb612 100644 --- a/spec/lib/gitlab/middleware/go_spec.rb +++ b/spec/lib/gitlab/middleware/go_spec.rb @@ -138,7 +138,7 @@ RSpec.describe Gitlab::Middleware::Go, feature_category: :source_code_management context 'with a blacklisted ip' do it 'returns forbidden' do - expect(Gitlab::Auth).to receive(:find_for_git_client).and_raise(Gitlab::Auth::IpBlacklisted) + expect(Gitlab::Auth).to receive(:find_for_git_client).and_raise(Gitlab::Auth::IpBlocked) response = go expect(response[0]).to eq(403) diff --git a/spec/lib/gitlab/middleware/multipart_spec.rb b/spec/lib/gitlab/middleware/multipart_spec.rb index 294a5ee82ed..509a4bb921b 100644 --- a/spec/lib/gitlab/middleware/multipart_spec.rb +++ b/spec/lib/gitlab/middleware/multipart_spec.rb @@ -175,7 +175,7 @@ RSpec.describe Gitlab::Middleware::Multipart do end it 'raises an error' do - expect { subject }.to raise_error(JWT::VerificationError, 'Signature verification raised') + expect { subject }.to raise_error(JWT::VerificationError, 'Signature verification failed') end end @@ -191,7 +191,7 @@ RSpec.describe Gitlab::Middleware::Multipart do end it 'raises an error' do - expect { subject }.to raise_error(JWT::VerificationError, 'Signature verification raised') + expect { subject }.to raise_error(JWT::VerificationError, 'Signature verification failed') end end end diff 
--git a/spec/lib/gitlab/octokit/middleware_spec.rb b/spec/lib/gitlab/octokit/middleware_spec.rb index 5555990b113..f7063f2c4f2 100644 --- a/spec/lib/gitlab/octokit/middleware_spec.rb +++ b/spec/lib/gitlab/octokit/middleware_spec.rb @@ -6,7 +6,7 @@ RSpec.describe Gitlab::Octokit::Middleware, feature_category: :importers do let(:app) { double(:app) } let(:middleware) { described_class.new(app) } - shared_examples 'Public URL' do + shared_examples 'Allowed URL' do it 'does not raise an error' do expect(app).to receive(:call).with(env) @@ -14,7 +14,7 @@ RSpec.describe Gitlab::Octokit::Middleware, feature_category: :importers do end end - shared_examples 'Local URL' do + shared_examples 'Blocked URL' do it 'raises an error' do expect { middleware.call(env) }.to raise_error(Gitlab::UrlBlocker::BlockedUrlError) end @@ -24,7 +24,24 @@ RSpec.describe Gitlab::Octokit::Middleware, feature_category: :importers do context 'when the URL is a public URL' do let(:env) { { url: 'https://public-url.com' } } - it_behaves_like 'Public URL' + it_behaves_like 'Allowed URL' + + context 'with failed address check' do + before do + stub_env('RSPEC_ALLOW_INVALID_URLS', 'false') + allow(Addrinfo).to receive(:getaddrinfo).and_raise(SocketError) + end + + it_behaves_like 'Blocked URL' + + context 'with disabled dns rebinding check' do + before do + stub_application_setting(dns_rebinding_protection_enabled: false) + end + + it_behaves_like 'Allowed URL' + end + end end context 'when the URL is a localhost address' do @@ -35,7 +52,7 @@ RSpec.describe Gitlab::Octokit::Middleware, feature_category: :importers do stub_application_setting(allow_local_requests_from_web_hooks_and_services: false) end - it_behaves_like 'Local URL' + it_behaves_like 'Blocked URL' end context 'when localhost requests are allowed' do @@ -43,7 +60,7 @@ RSpec.describe Gitlab::Octokit::Middleware, feature_category: :importers do stub_application_setting(allow_local_requests_from_web_hooks_and_services: true) end - 
it_behaves_like 'Public URL' + it_behaves_like 'Allowed URL' end end @@ -55,7 +72,7 @@ RSpec.describe Gitlab::Octokit::Middleware, feature_category: :importers do stub_application_setting(allow_local_requests_from_web_hooks_and_services: false) end - it_behaves_like 'Local URL' + it_behaves_like 'Blocked URL' end context 'when local network requests are allowed' do @@ -63,7 +80,7 @@ RSpec.describe Gitlab::Octokit::Middleware, feature_category: :importers do stub_application_setting(allow_local_requests_from_web_hooks_and_services: true) end - it_behaves_like 'Public URL' + it_behaves_like 'Allowed URL' end end diff --git a/spec/lib/gitlab/redis/multi_store_spec.rb b/spec/lib/gitlab/redis/multi_store_spec.rb index baf2546fc5c..e45c29a9dd2 100644 --- a/spec/lib/gitlab/redis/multi_store_spec.rb +++ b/spec/lib/gitlab/redis/multi_store_spec.rb @@ -333,7 +333,7 @@ RSpec.describe Gitlab::Redis::MultiStore, feature_category: :redis do stub_feature_flags(use_primary_store_as_default_for_test_store: false) end - it 'executes only on secondary redis store', :aggregate_errors do + it 'executes only on secondary redis store', :aggregate_failures do expect(secondary_store).to receive(name).with(*expected_args).and_call_original expect(primary_store).not_to receive(name).with(*expected_args).and_call_original @@ -342,7 +342,7 @@ RSpec.describe Gitlab::Redis::MultiStore, feature_category: :redis do end context 'when using primary store as default' do - it 'executes only on primary redis store', :aggregate_errors do + it 'executes only on primary redis store', :aggregate_failures do expect(primary_store).to receive(name).with(*expected_args).and_call_original expect(secondary_store).not_to receive(name).with(*expected_args).and_call_original @@ -437,7 +437,7 @@ RSpec.describe Gitlab::Redis::MultiStore, feature_category: :redis do end RSpec.shared_examples_for 'verify that store contains values' do |store| - it "#{store} redis store contains correct values", :aggregate_errors do + 
it "#{store} redis store contains correct values", :aggregate_failures do subject redis_store = multi_store.send(store) @@ -530,7 +530,7 @@ RSpec.describe Gitlab::Redis::MultiStore, feature_category: :redis do end context 'when executing on primary instance is successful' do - it 'executes on both primary and secondary redis store', :aggregate_errors do + it 'executes on both primary and secondary redis store', :aggregate_failures do expect(primary_store).to receive(name).with(*expected_args).and_call_original expect(secondary_store).to receive(name).with(*expected_args).and_call_original @@ -551,7 +551,7 @@ RSpec.describe Gitlab::Redis::MultiStore, feature_category: :redis do stub_feature_flags(use_primary_store_as_default_for_test_store: false) end - it 'executes only on secondary redis store', :aggregate_errors do + it 'executes only on secondary redis store', :aggregate_failures do expect(secondary_store).to receive(name).with(*expected_args).and_call_original expect(primary_store).not_to receive(name).with(*expected_args).and_call_original @@ -560,7 +560,7 @@ RSpec.describe Gitlab::Redis::MultiStore, feature_category: :redis do end context 'when using primary store as default' do - it 'executes only on primary redis store', :aggregate_errors do + it 'executes only on primary redis store', :aggregate_failures do expect(primary_store).to receive(name).with(*expected_args).and_call_original expect(secondary_store).not_to receive(name).with(*expected_args).and_call_original @@ -575,7 +575,7 @@ RSpec.describe Gitlab::Redis::MultiStore, feature_category: :redis do allow(Gitlab::ErrorTracking).to receive(:log_exception) end - it 'logs the exception and execute on secondary instance', :aggregate_errors do + it 'logs the exception and execute on secondary instance', :aggregate_failures do expect(Gitlab::ErrorTracking).to receive(:log_exception).with(an_instance_of(StandardError), hash_including(:multi_store_error_message, command_name: name, instance_name: 
instance_name)) expect(secondary_store).to receive(name).with(*expected_args).and_call_original @@ -593,7 +593,7 @@ RSpec.describe Gitlab::Redis::MultiStore, feature_category: :redis do end end - it 'is executed only 1 time on each instance', :aggregate_errors do + it 'is executed only 1 time on each instance', :aggregate_failures do expect(primary_store).to receive(:pipelined).and_call_original expect_next_instance_of(Redis::PipelinedConnection) do |pipeline| expect(pipeline).to receive(name).with(*expected_args).once.and_call_original @@ -645,7 +645,7 @@ RSpec.describe Gitlab::Redis::MultiStore, feature_category: :redis do end context 'when executing on primary instance is successful' do - it 'executes on both primary and secondary redis store', :aggregate_errors do + it 'executes on both primary and secondary redis store', :aggregate_failures do expect(primary_store).to receive(name).and_call_original expect(secondary_store).to receive(name).and_call_original @@ -662,7 +662,7 @@ RSpec.describe Gitlab::Redis::MultiStore, feature_category: :redis do allow(Gitlab::ErrorTracking).to receive(:log_exception) end - it 'logs the exception and execute on secondary instance', :aggregate_errors do + it 'logs the exception and execute on secondary instance', :aggregate_failures do expect(Gitlab::ErrorTracking).to receive(:log_exception).with(an_instance_of(StandardError), hash_including(:multi_store_error_message, command_name: name)) expect(secondary_store).to receive(name).and_call_original @@ -760,7 +760,7 @@ RSpec.describe Gitlab::Redis::MultiStore, feature_category: :redis do stub_feature_flags(use_primary_store_as_default_for_test_store: false) end - it 'executes on secondary store', :aggregate_errors do + it 'executes on secondary store', :aggregate_failures do expect(primary_store).not_to receive(:send).and_call_original expect(secondary_store).to receive(:send).and_call_original @@ -769,7 +769,7 @@ RSpec.describe Gitlab::Redis::MultiStore, feature_category: :redis 
do end context 'when using primary store as default' do - it 'executes on primary store', :aggregate_errors do + it 'executes on primary store', :aggregate_failures do expect(secondary_store).not_to receive(:send).and_call_original expect(primary_store).to receive(:send).and_call_original @@ -930,7 +930,7 @@ RSpec.describe Gitlab::Redis::MultiStore, feature_category: :redis do subject end - it 'fallback and executes only on the secondary store', :aggregate_errors do + it 'fallback and executes only on the secondary store', :aggregate_failures do expect(primary_store).to receive(:command).and_call_original expect(secondary_store).not_to receive(:command) @@ -955,7 +955,7 @@ RSpec.describe Gitlab::Redis::MultiStore, feature_category: :redis do end context 'with feature flag :use_primary_store_as_default_for_test_store is enabled' do - it 'fallback and executes only on the secondary store', :aggregate_errors do + it 'fallback and executes only on the secondary store', :aggregate_failures do expect(primary_store).to receive(:command).and_call_original expect(secondary_store).not_to receive(:command) @@ -968,7 +968,7 @@ RSpec.describe Gitlab::Redis::MultiStore, feature_category: :redis do stub_feature_flags(use_primary_store_as_default_for_test_store: false) end - it 'fallback and executes only on the secondary store', :aggregate_errors do + it 'fallback and executes only on the secondary store', :aggregate_failures do expect(secondary_store).to receive(:command).and_call_original expect(primary_store).not_to receive(:command) @@ -981,7 +981,7 @@ RSpec.describe Gitlab::Redis::MultiStore, feature_category: :redis do multi_store.pipelined(&:command) end - it 'is executed only 1 time on each instance', :aggregate_errors do + it 'is executed only 1 time on each instance', :aggregate_failures do expect(primary_store).to receive(:pipelined).once.and_call_original expect(secondary_store).to receive(:pipelined).once.and_call_original diff --git 
a/spec/lib/gitlab/reference_extractor_spec.rb b/spec/lib/gitlab/reference_extractor_spec.rb index 4d608c07736..0c6a832a730 100644 --- a/spec/lib/gitlab/reference_extractor_spec.rb +++ b/spec/lib/gitlab/reference_extractor_spec.rb @@ -301,7 +301,7 @@ RSpec.describe Gitlab::ReferenceExtractor do describe 'referables prefixes' do def prefixes - described_class::REFERABLES.each_with_object({}) do |referable, result| + described_class.referrables.each_with_object({}) do |referable, result| class_name = referable.to_s.camelize klass = class_name.constantize if Object.const_defined?(class_name) @@ -314,7 +314,7 @@ RSpec.describe Gitlab::ReferenceExtractor do end it 'returns all supported prefixes' do - expect(prefixes.keys.uniq).to match_array(%w(@ # ~ % ! $ & [vulnerability: *iteration:)) + expect(prefixes.keys.uniq).to include(*%w(@ # ~ % ! $ & [vulnerability:)) end it 'does not allow one prefix for multiple referables if not allowed specificly' do diff --git a/spec/lib/gitlab/regex_spec.rb b/spec/lib/gitlab/regex_spec.rb index e51e62d5f0a..5e58282ff92 100644 --- a/spec/lib/gitlab/regex_spec.rb +++ b/spec/lib/gitlab/regex_spec.rb @@ -79,10 +79,10 @@ RSpec.describe Gitlab::Regex, feature_category: :tooling do it { is_expected - .to eq("cannot start with a non-alphanumeric character except for periods or underscores, " \ - "can contain only alphanumeric characters, forward slashes, periods, and underscores, " \ - "cannot end with a period or forward slash, and has a relative path structure " \ - "with no http protocol chars or leading or trailing forward slashes") + .to eq("must have a relative path structure with no HTTP " \ + "protocol characters, or leading or trailing forward slashes. Path segments must not start or " \ + "end with a special character, and must not contain consecutive special characters." 
+ ) } end @@ -101,13 +101,14 @@ RSpec.describe Gitlab::Regex, feature_category: :tooling do it { is_expected.not_to match('good_for+you') } it { is_expected.not_to match('source/') } it { is_expected.not_to match('.source/full./path') } + it { is_expected.not_to match('.source/.full/.path') } + it { is_expected.not_to match('_source') } + it { is_expected.not_to match('.source') } it { is_expected.to match('source') } - it { is_expected.to match('.source') } - it { is_expected.to match('_source') } it { is_expected.to match('source/full') } it { is_expected.to match('source/full/path') } - it { is_expected.to match('.source/.full/.path') } + it { is_expected.to match('sou_rce/fu-ll/pa.th') } it { is_expected.to match('domain_namespace') } it { is_expected.to match('gitlab-migration-test') } it { is_expected.to match('1-project-path') } @@ -115,10 +116,22 @@ RSpec.describe Gitlab::Regex, feature_category: :tooling do it { is_expected.to match('') } # it is possible to pass an empty string for destination_namespace in bulk_import POST request end + describe '.bulk_import_source_full_path_regex_message' do + subject { described_class.bulk_import_source_full_path_regex_message } + + it { + is_expected + .to eq( + "must have a relative path structure with no HTTP " \ + "protocol characters, or leading or trailing forward slashes. Path segments must not start or " \ + "end with a special character, and must not contain consecutive special characters." 
+ ) + } + end + describe '.bulk_import_source_full_path_regex' do subject { described_class.bulk_import_source_full_path_regex } - it { is_expected.not_to match('?gitlab') } it { is_expected.not_to match("Users's something") } it { is_expected.not_to match('/source') } it { is_expected.not_to match('http:') } @@ -126,20 +139,32 @@ RSpec.describe Gitlab::Regex, feature_category: :tooling do it { is_expected.not_to match('example.com/?stuff=true') } it { is_expected.not_to match('example.com:5000/?stuff=true') } it { is_expected.not_to match('http://gitlab.example/gitlab-org/manage/import/gitlab-migration-test') } - it { is_expected.not_to match('_good_for_me!') } - it { is_expected.not_to match('good_for+you') } it { is_expected.not_to match('source/') } - it { is_expected.not_to match('.source/full./path') } it { is_expected.not_to match('') } + it { is_expected.not_to match('.source/full./path') } + it { is_expected.not_to match('?gitlab') } + it { is_expected.not_to match('_good_for_me!') } + it { is_expected.not_to match('group/@*%_my_other-project-----') } + it { is_expected.not_to match('_foog-for-me!') } + it { is_expected.not_to match('.source/full/path.') } + it { is_expected.to match('good_for+you') } it { is_expected.to match('source') } it { is_expected.to match('.source') } it { is_expected.to match('_source') } it { is_expected.to match('source/full') } it { is_expected.to match('source/full/path') } - it { is_expected.to match('.source/.full/.path') } it { is_expected.to match('domain_namespace') } it { is_expected.to match('gitlab-migration-test') } + it { is_expected.to match('source/full/path-') } + it { is_expected.to match('.source/full/path') } + it { is_expected.to match('.source/.full/.path') } + it { is_expected.to match('source/full/.path') } + it { is_expected.to match('source/full/..path') } + it { is_expected.to match('source/full/---1path') } + it { is_expected.to match('source/full/-___path') } + it { is_expected.to 
match('source/full/path---') } + it { is_expected.to match('group/__my_other-project-----') } end describe '.group_path_regex' do @@ -1164,10 +1189,21 @@ RSpec.describe Gitlab::Regex, feature_category: :tooling do MARKDOWN end - it { is_expected.to match(%(
    \nsomething\n
    )) } - it { is_expected.not_to match(%(must start in first column
    \nsomething\n
    )) } - it { is_expected.not_to match(%(
    must be multi-line
    )) } - it { expect(subject.match(markdown)[:html]).to eq expected } + describe 'normal regular expression' do + it { is_expected.to match(%(
    \nsomething\n
    )) } + it { is_expected.not_to match(%(must start in first column
    \nsomething\n
    )) } + it { is_expected.not_to match(%(
    must be multi-line
    )) } + it { expect(subject.match(markdown)[:html]).to eq expected } + end + + describe 'untrusted regular expression' do + subject { Gitlab::UntrustedRegexp.new(described_class::MARKDOWN_HTML_BLOCK_REGEX_UNTRUSTED, multiline: true) } + + it { is_expected.to match(%(
    \nsomething\n
    )) } + it { is_expected.not_to match(%(must start in first column
    \nsomething\n
    )) } + it { is_expected.not_to match(%(
    must be multi-line
    )) } + it { expect(subject.match(markdown)[:html]).to eq expected } + end + end end context 'HTML comment lines' do diff --git a/spec/lib/gitlab/resource_events/assignment_event_recorder_spec.rb b/spec/lib/gitlab/resource_events/assignment_event_recorder_spec.rb new file mode 100644 index 00000000000..b15f95dbd9c --- /dev/null +++ b/spec/lib/gitlab/resource_events/assignment_event_recorder_spec.rb @@ -0,0 +1,91 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe Gitlab::ResourceEvents::AssignmentEventRecorder, feature_category: :value_stream_management do + using RSpec::Parameterized::TableSyntax + + let_it_be(:user1) { create(:user) } + let_it_be(:user2) { create(:user) } + let_it_be(:user3) { create(:user) } + + let_it_be_with_refind(:issue_with_two_assignees) { create(:issue, assignees: [user1, user2]) } + let_it_be_with_refind(:mr_with_no_assignees) { create(:merge_request) } + let_it_be_with_refind(:mr_with_one_assignee) { create(:merge_request, assignee: [user3]) } + + let(:parent_records) do + { + issue_with_two_assignees: issue_with_two_assignees, + mr_with_no_assignees: mr_with_no_assignees, + mr_with_one_assignee: mr_with_one_assignee + } + end + + let(:user_records) do + { + user1: user1, + user2: user2, + user3: user3 + } + end + + where(:parent, :new_assignees, :assignee_history) do + :issue_with_two_assignees | [:user1, :user2, :user3] | [[:user3, :add]] + :issue_with_two_assignees | [:user1, :user3] | [[:user2, :remove], [:user3, :add]] + :issue_with_two_assignees | [:user1] | [[:user2, :remove]] + :issue_with_two_assignees | [] | [[:user1, :remove], [:user2, :remove]] + :mr_with_no_assignees | [:user1] | [[:user1, :add]] + :mr_with_no_assignees | [] | [] + :mr_with_one_assignee | [:user3] | [] + :mr_with_one_assignee | [:user1] | [[:user3, :remove], [:user1, :add]] + end + + with_them do + it 'records the assignment history correctly' do + parent_record = parent_records[parent] + old_assignees = parent_record.assignees.to_a + 
parent_record.assignees = new_assignees.map { |user_variable_name| user_records[user_variable_name] } + + described_class.new(parent: parent_record, old_assignees: old_assignees).record + + expected_records = assignee_history.map do |user_variable_name, action| + have_attributes({ + user_id: user_records[user_variable_name].id, + action: action.to_s + }) + end + + expect(parent_record.assignment_events).to match_array(expected_records) + end + end + + context 'when batching' do + it 'invokes multiple insert queries' do + stub_const('Gitlab::ResourceEvents::AssignmentEventRecorder::BATCH_SIZE', 1) + + expect(ResourceEvents::MergeRequestAssignmentEvent).to receive(:insert_all).twice + + described_class.new(parent: mr_with_one_assignee, old_assignees: [user1]).record # 1 assignment, 1 unassignment + end + end + + context 'when duplicated old assignees were given' do + it 'deduplicates the records' do + expect do + described_class.new(parent: mr_with_one_assignee, old_assignees: [user3, user2, user2]).record + end.to change { ResourceEvents::MergeRequestAssignmentEvent.count }.by(1) + end + end + + context 'when the record_issue_and_mr_assignee_events FF is off' do + before do + stub_feature_flags(record_issue_and_mr_assignee_events: false) + end + + it 'does nothing' do + expect do + described_class.new(parent: mr_with_one_assignee, old_assignees: [user2, user3]).record + end.not_to change { mr_with_one_assignee.assignment_events.count } + end + end +end diff --git a/spec/lib/gitlab/service_desk_email_spec.rb b/spec/lib/gitlab/service_desk_email_spec.rb deleted file mode 100644 index 69569c0f194..00000000000 --- a/spec/lib/gitlab/service_desk_email_spec.rb +++ /dev/null @@ -1,53 +0,0 @@ -# frozen_string_literal: true - -require 'spec_helper' - -RSpec.describe Gitlab::ServiceDeskEmail do - let(:setting_name) { :service_desk_email } - - it_behaves_like 'common email methods' - - describe '.key_from_address' do - context 'when service desk address is set' do - before do 
- stub_service_desk_email_setting(address: 'address+%{key}@example.com') - end - - it 'returns key' do - expect(described_class.key_from_address('address+key@example.com')).to eq('key') - end - end - - context 'when service desk address is not set' do - before do - stub_service_desk_email_setting(address: nil) - end - - it 'returns nil' do - expect(described_class.key_from_address('address+key@example.com')).to be_nil - end - end - end - - describe '.address_for_key' do - context 'when service desk address is set' do - before do - stub_service_desk_email_setting(address: 'address+%{key}@example.com') - end - - it 'returns address' do - expect(described_class.address_for_key('foo')).to eq('address+foo@example.com') - end - end - - context 'when service desk address is not set' do - before do - stub_service_desk_email_setting(address: nil) - end - - it 'returns nil' do - expect(described_class.key_from_address('foo')).to be_nil - end - end - end -end diff --git a/spec/lib/gitlab/service_desk_spec.rb b/spec/lib/gitlab/service_desk_spec.rb index f554840ec78..d6725f37d39 100644 --- a/spec/lib/gitlab/service_desk_spec.rb +++ b/spec/lib/gitlab/service_desk_spec.rb @@ -4,8 +4,8 @@ require 'spec_helper' RSpec.describe Gitlab::ServiceDesk do before do - allow(Gitlab::IncomingEmail).to receive(:enabled?).and_return(true) - allow(Gitlab::IncomingEmail).to receive(:supports_wildcard?).and_return(true) + allow(Gitlab::Email::IncomingEmail).to receive(:enabled?).and_return(true) + allow(Gitlab::Email::IncomingEmail).to receive(:supports_wildcard?).and_return(true) end describe 'enabled?' 
do @@ -39,7 +39,7 @@ RSpec.describe Gitlab::ServiceDesk do context 'when incoming emails are disabled' do before do - allow(Gitlab::IncomingEmail).to receive(:enabled?).and_return(false) + allow(Gitlab::Email::IncomingEmail).to receive(:enabled?).and_return(false) end it { is_expected.to be_falsy } @@ -47,7 +47,7 @@ RSpec.describe Gitlab::ServiceDesk do context 'when email key is not supported' do before do - allow(Gitlab::IncomingEmail).to receive(:supports_wildcard?).and_return(false) + allow(Gitlab::Email::IncomingEmail).to receive(:supports_wildcard?).and_return(false) end it { is_expected.to be_falsy } diff --git a/spec/lib/gitlab/sidekiq_logging/structured_logger_spec.rb b/spec/lib/gitlab/sidekiq_logging/structured_logger_spec.rb index e3d9549a3c0..4b589dc43af 100644 --- a/spec/lib/gitlab/sidekiq_logging/structured_logger_spec.rb +++ b/spec/lib/gitlab/sidekiq_logging/structured_logger_spec.rb @@ -309,7 +309,7 @@ RSpec.describe Gitlab::SidekiqLogging::StructuredLogger do end shared_examples 'performs database queries' do - it 'logs the database time', :aggregate_errors do + it 'logs the database time', :aggregate_failures do expect(logger).to receive(:info).with(expected_start_payload).ordered expect(logger).to receive(:info).with(expected_end_payload_with_db).ordered @@ -318,7 +318,7 @@ RSpec.describe Gitlab::SidekiqLogging::StructuredLogger do end end - it 'prevents database time from leaking to the next job', :aggregate_errors do + it 'prevents database time from leaking to the next job', :aggregate_failures do expect(logger).to receive(:info).with(expected_start_payload).ordered expect(logger).to receive(:info).with(expected_end_payload_with_db).ordered expect(logger).to receive(:info).with(expected_start_payload).ordered diff --git a/spec/lib/gitlab/sidekiq_middleware/duplicate_jobs/duplicate_job_spec.rb b/spec/lib/gitlab/sidekiq_middleware/duplicate_jobs/duplicate_job_spec.rb index 6a515a2b8a5..31258c42b5f 100644 --- 
a/spec/lib/gitlab/sidekiq_middleware/duplicate_jobs/duplicate_job_spec.rb +++ b/spec/lib/gitlab/sidekiq_middleware/duplicate_jobs/duplicate_job_spec.rb @@ -79,10 +79,10 @@ RSpec.describe Gitlab::SidekiqMiddleware::DuplicateJobs::DuplicateJob, :clean_gi context 'with Redis cookies' do def with_redis(&block) - Sidekiq.redis(&block) + Gitlab::Redis::Queues.with(&block) end - let(:cookie_key) { "#{idempotency_key}:cookie:v2" } + let(:cookie_key) { "#{Gitlab::Redis::Queues::SIDEKIQ_NAMESPACE}:#{idempotency_key}:cookie:v2" } let(:cookie) { get_redis_msgpack(cookie_key) } describe '#check!' do diff --git a/spec/lib/gitlab/slug/environment_spec.rb b/spec/lib/gitlab/slug/environment_spec.rb index e8f0fba27b2..8e23ad118d4 100644 --- a/spec/lib/gitlab/slug/environment_spec.rb +++ b/spec/lib/gitlab/slug/environment_spec.rb @@ -1,38 +1,41 @@ # frozen_string_literal: true require 'fast_spec_helper' +require 'rspec-parameterized' -RSpec.describe Gitlab::Slug::Environment do +RSpec.describe Gitlab::Slug::Environment, feature_category: :environment_management do describe '#generate' do - { - "staging-12345678901234567" => "staging-123456789-q517sa", - "9-staging-123456789012345" => "env-9-staging-123-q517sa", - "staging-1234567890123456" => "staging-1234567890123456", - "staging-1234567890123456-" => "staging-123456789-q517sa", - "production" => "production", - "PRODUCTION" => "production-q517sa", - "review/1-foo" => "review-1-foo-q517sa", - "1-foo" => "env-1-foo-q517sa", - "1/foo" => "env-1-foo-q517sa", - "foo-" => "foo", - "foo--bar" => "foo-bar-q517sa", - "foo**bar" => "foo-bar-q517sa", - "*-foo" => "env-foo-q517sa", - "staging-12345678-" => "staging-12345678", - "staging-12345678-01234567" => "staging-12345678-q517sa", - "" => "env-q517sa", - nil => "env-q517sa" - }.each do |name, matcher| - before do - # ('a' * 64).to_i(16).to_s(36).last(6) gives 'q517sa' - allow(Digest::SHA2).to receive(:hexdigest).with(name).and_return('a' * 64) - end + using 
RSpec::Parameterized::TableSyntax - it "returns a slug matching #{matcher}, given #{name}" do - slug = described_class.new(name).generate + subject { described_class.new(name).generate } - expect(slug).to match(/\A#{matcher}\z/) - end + before do + # ('a' * 64).to_i(16).to_s(36).last(6) gives 'q517sa' + allow(Digest::SHA2).to receive(:hexdigest).with(name.to_s).and_return('a' * 64) + end + + where(:name, :slug) do + "staging-12345678901234567" | "staging-123456789-q517sa" + "9-staging-123456789012345" | "env-9-staging-123-q517sa" + "staging-1234567890123456" | "staging-1234567890123456" + "staging-1234567890123456-" | "staging-123456789-q517sa" + "production" | "production" + "PRODUCTION" | "production-q517sa" + "review/1-foo" | "review-1-foo-q517sa" + "1-foo" | "env-1-foo-q517sa" + "1/foo" | "env-1-foo-q517sa" + "foo-" | "foo" + "foo--bar" | "foo-bar-q517sa" + "foo**bar" | "foo-bar-q517sa" + "*-foo" | "env-foo-q517sa" + "staging-12345678-" | "staging-12345678" + "staging-12345678-01234567" | "staging-12345678-q517sa" + "" | "env-q517sa" + nil | "env-q517sa" + end + + with_them do + it { is_expected.to eq(slug) } end end end diff --git a/spec/lib/gitlab/subscription_portal_spec.rb b/spec/lib/gitlab/subscription_portal_spec.rb index f93eb6f96cc..52c7a68921b 100644 --- a/spec/lib/gitlab/subscription_portal_spec.rb +++ b/spec/lib/gitlab/subscription_portal_spec.rb @@ -64,6 +64,7 @@ RSpec.describe ::Gitlab::SubscriptionPortal do :subscriptions_more_minutes_url | "#{staging_customers_url}/buy_pipeline_minutes" :subscriptions_more_storage_url | "#{staging_customers_url}/buy_storage" :subscriptions_manage_url | "#{staging_customers_url}/subscriptions" + :subscriptions_legacy_sign_in_url | "#{staging_customers_url}/customers/sign_in?legacy=true" :subscriptions_instance_review_url | "#{staging_customers_url}/instance_review" :subscriptions_gitlab_plans_url | "#{staging_customers_url}/gitlab_plans" :edit_account_url | "#{staging_customers_url}/customers/edit" diff --git 
a/spec/lib/gitlab/template/finders/global_template_finder_spec.rb b/spec/lib/gitlab/template/finders/global_template_finder_spec.rb index 38ec28c2b9a..c1dfee3cccb 100644 --- a/spec/lib/gitlab/template/finders/global_template_finder_spec.rb +++ b/spec/lib/gitlab/template/finders/global_template_finder_spec.rb @@ -16,10 +16,12 @@ RSpec.describe Gitlab::Template::Finders::GlobalTemplateFinder do end subject(:finder) do - described_class.new(base_dir, '', - { 'General' => '', 'Bar' => 'Bar' }, - include_categories_for_file, - excluded_patterns: excluded_patterns) + described_class.new( + base_dir, '', + { 'General' => '', 'Bar' => 'Bar' }, + include_categories_for_file, + excluded_patterns: excluded_patterns + ) end let(:excluded_patterns) { [] } diff --git a/spec/lib/gitlab/tracking/destinations/database_events_snowplow_spec.rb b/spec/lib/gitlab/tracking/destinations/database_events_snowplow_spec.rb new file mode 100644 index 00000000000..0f2082c1f25 --- /dev/null +++ b/spec/lib/gitlab/tracking/destinations/database_events_snowplow_spec.rb @@ -0,0 +1,113 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe Gitlab::Tracking::Destinations::DatabaseEventsSnowplow, :do_not_stub_snowplow_by_default, feature_category: :application_instrumentation do + let(:emitter) { SnowplowTracker::Emitter.new(endpoint: 'localhost', options: { buffer_size: 1 }) } + + let(:tracker) do + SnowplowTracker::Tracker + .new( + emitters: [emitter], + subject: SnowplowTracker::Subject.new, + namespace: 'namespace', + app_id: 'app_id' + ) + end + + before do + stub_application_setting(snowplow_app_id: '_abc123_') + end + + around do |example| + freeze_time { example.run } + end + + context 'when snowplow is enabled' do + before do + allow(SnowplowTracker::AsyncEmitter) + .to receive(:new) + .with(endpoint: 'localhost:9091', + options: + { + protocol: 'https', + on_success: subject.method(:increment_successful_events_emissions), + on_failure: subject.method(:failure_callback) 
+ } + ).and_return(emitter) + + allow(SnowplowTracker::Tracker) + .to receive(:new) + .with( + emitters: [emitter], + subject: an_instance_of(SnowplowTracker::Subject), + namespace: described_class::SNOWPLOW_NAMESPACE, + app_id: '_abc123_' + ).and_return(tracker) + end + + describe '#event' do + it 'sends event to tracker' do + allow(tracker).to receive(:track_struct_event).and_call_original + + subject.event('category', 'action', label: 'label', property: 'property', value: 1.5) + + expect(tracker) + .to have_received(:track_struct_event) + .with(category: 'category', action: 'action', label: 'label', property: 'property', value: 1.5, context: nil, + tstamp: (Time.now.to_f * 1000).to_i) + end + + it 'increase total snowplow events counter' do + counter = double + + expect(counter).to receive(:increment) + expect(Gitlab::Metrics).to receive(:counter) + .with(:gitlab_db_events_snowplow_events_total, 'Number of Snowplow events') + .and_return(counter) + + subject.event('category', 'action', label: 'label', property: 'property', value: 1.5) + end + end + end + + context 'for callbacks' do + describe 'on success' do + it 'increase gitlab_successful_snowplow_events_total counter' do + counter = double + + expect(counter).to receive(:increment).with({}, 2) + expect(Gitlab::Metrics).to receive(:counter) + .with( + :gitlab_db_events_snowplow_successful_events_total, + 'Number of successful Snowplow events emissions').and_return(counter) + + subject.method(:increment_successful_events_emissions).call(2) + end + end + + describe 'on failure' do + it 'increase gitlab_failed_snowplow_events_total counter and logs failures', :aggregate_failures do + counter = double + error_message = "Issue database_event_update failed to be reported to collector at localhost:9091" + failures = [{ "e" => "se", + "se_ca" => "Issue", + "se_la" => "issues", + "se_ac" => "database_event_update" }] + allow(Gitlab::Metrics).to receive(:counter) + .with( + 
:gitlab_db_events_snowplow_successful_events_total, + 'Number of successful Snowplow events emissions').and_call_original + + expect(Gitlab::AppLogger).to receive(:error).with(error_message) + expect(counter).to receive(:increment).with({}, 1) + expect(Gitlab::Metrics).to receive(:counter) + .with( + :gitlab_db_events_snowplow_failed_events_total, + 'Number of failed Snowplow events emissions').and_return(counter) + + subject.method(:failure_callback).call(2, failures) + end + end + end +end diff --git a/spec/lib/gitlab/tracking_spec.rb b/spec/lib/gitlab/tracking_spec.rb index e79bb2ef129..56be80678e9 100644 --- a/spec/lib/gitlab/tracking_spec.rb +++ b/spec/lib/gitlab/tracking_spec.rb @@ -1,7 +1,7 @@ # frozen_string_literal: true require 'spec_helper' -RSpec.describe Gitlab::Tracking do +RSpec.describe Gitlab::Tracking, feature_category: :application_instrumentation do include StubENV before do @@ -102,12 +102,28 @@ RSpec.describe Gitlab::Tracking do end end - describe '.event' do + context 'event tracking' do let(:namespace) { create(:namespace) } - shared_examples 'delegates to destination' do |klass| + shared_examples 'rescued error raised by destination class' do + it 'rescues error' do + error = StandardError.new("something went wrong") + allow_any_instance_of(destination_class).to receive(:event).and_raise(error) + + expect(Gitlab::ErrorTracking).to receive(:track_and_raise_for_dev_exception) + .with( + error, + snowplow_category: category, + snowplow_action: action + ) + + expect { tracking_method }.not_to raise_error + end + end + + shared_examples 'delegates to destination' do |klass, method| before do - allow_any_instance_of(Gitlab::Tracking::Destinations::Snowplow).to receive(:event) + allow_any_instance_of(klass).to receive(:event) end it "delegates to #{klass} destination" do @@ -118,8 +134,8 @@ RSpec.describe Gitlab::Tracking do expect(Gitlab::Tracking::StandardContext) .to receive(:new) - .with(project: project, user: user, namespace: namespace, 
extra_key_1: 'extra value 1', extra_key_2: 'extra value 2') - .and_call_original + .with(project: project, user: user, namespace: namespace, extra_key_1: 'extra value 1', extra_key_2: 'extra value 2') + .and_call_original expect_any_instance_of(klass).to receive(:event) do |_, category, action, args| expect(category).to eq('category') @@ -132,7 +148,7 @@ RSpec.describe Gitlab::Tracking do expect(args[:context].last).to eq(other_context) end - described_class.event('category', 'action', + described_class.method(method).call('category', 'action', label: 'label', property: 'property', value: 1.5, @@ -141,44 +157,95 @@ RSpec.describe Gitlab::Tracking do user: user, namespace: namespace, extra_key_1: 'extra value 1', - extra_key_2: 'extra value 2') + extra_key_2: 'extra value 2' + ) end end - context 'when the action is not passed in as a string' do - it 'allows symbols' do - expect(Gitlab::ErrorTracking).not_to receive(:track_and_raise_for_dev_exception) + describe '.database_event' do + context 'when the action is not passed in as a string' do + it 'allows symbols' do + expect(Gitlab::ErrorTracking).not_to receive(:track_and_raise_for_dev_exception) - described_class.event('category', :some_action) - end + described_class.database_event('category', :some_action) + end + + it 'allows nil' do + expect(Gitlab::ErrorTracking).not_to receive(:track_and_raise_for_dev_exception) + + described_class.database_event('category', nil) + end - it 'allows nil' do - expect(Gitlab::ErrorTracking).not_to receive(:track_and_raise_for_dev_exception) + it 'allows integers' do + expect(Gitlab::ErrorTracking).not_to receive(:track_and_raise_for_dev_exception) - described_class.event('category', nil) + described_class.database_event('category', 1) + end end - it 'allows integers' do - expect(Gitlab::ErrorTracking).not_to receive(:track_and_raise_for_dev_exception) + it_behaves_like 'rescued error raised by destination class' do + let(:category) { 'Issue' } + let(:action) { 'created' } + 
let(:destination_class) { Gitlab::Tracking::Destinations::DatabaseEventsSnowplow } - described_class.event('category', 1) + subject(:tracking_method) { described_class.database_event(category, action) } end + + it_behaves_like 'delegates to destination', Gitlab::Tracking::Destinations::DatabaseEventsSnowplow, :database_event end - context 'when destination is Snowplow' do - before do - allow(Rails.env).to receive(:development?).and_return(true) + describe '.event' do + context 'when the action is not passed in as a string' do + it 'allows symbols' do + expect(Gitlab::ErrorTracking).not_to receive(:track_and_raise_for_dev_exception) + + described_class.event('category', :some_action) + end + + it 'allows nil' do + expect(Gitlab::ErrorTracking).not_to receive(:track_and_raise_for_dev_exception) + + described_class.event('category', nil) + end + + it 'allows integers' do + expect(Gitlab::ErrorTracking).not_to receive(:track_and_raise_for_dev_exception) + + described_class.event('category', 1) + end end - it_behaves_like 'delegates to destination', Gitlab::Tracking::Destinations::Snowplow - end + context 'when destination is Snowplow' do + before do + allow(Rails.env).to receive(:development?).and_return(true) + end - context 'when destination is SnowplowMicro' do - before do - allow(Rails.env).to receive(:development?).and_return(true) + it_behaves_like 'rescued error raised by destination class' do + let(:category) { 'category' } + let(:action) { 'action' } + let(:destination_class) { Gitlab::Tracking::Destinations::Snowplow } + + subject(:tracking_method) { described_class.event(category, action) } + end + + it_behaves_like 'delegates to destination', Gitlab::Tracking::Destinations::Snowplow, :event end - it_behaves_like 'delegates to destination', Gitlab::Tracking::Destinations::SnowplowMicro + context 'when destination is SnowplowMicro' do + before do + allow(Rails.env).to receive(:development?).and_return(true) + end + + it_behaves_like 'rescued error raised by 
destination class' do + let(:category) { 'category' } + let(:action) { 'action' } + let(:destination_class) { Gitlab::Tracking::Destinations::Snowplow } + + subject(:tracking_method) { described_class.event(category, action) } + end + + it_behaves_like 'delegates to destination', Gitlab::Tracking::Destinations::SnowplowMicro, :event + end end end diff --git a/spec/lib/gitlab/untrusted_regexp_spec.rb b/spec/lib/gitlab/untrusted_regexp_spec.rb index 66675b20107..232329a5a1b 100644 --- a/spec/lib/gitlab/untrusted_regexp_spec.rb +++ b/spec/lib/gitlab/untrusted_regexp_spec.rb @@ -3,7 +3,7 @@ require 'fast_spec_helper' require 'support/shared_examples/lib/gitlab/malicious_regexp_shared_examples' -RSpec.describe Gitlab::UntrustedRegexp do +RSpec.describe Gitlab::UntrustedRegexp, feature_category: :shared do describe '#initialize' do subject { described_class.new(pattern) } @@ -22,6 +22,39 @@ RSpec.describe Gitlab::UntrustedRegexp do end end + describe '#replace_gsub' do + let(:regex_str) { '(?P(ftp))' } + let(:regex) { described_class.new(regex_str, multiline: true) } + + def result(regex, text) + regex.replace_gsub(text) do |match| + if match[:scheme] + "http|#{match[:scheme]}|rss" + else + match.to_s + end + end + end + + it 'replaces all instances of the match in a string' do + text = 'Use only https instead of ftp' + + expect(result(regex, text)).to eq('Use only https instead of http|ftp|rss') + end + + it 'replaces nothing when no match' do + text = 'Use only https instead of gopher' + + expect(result(regex, text)).to eq(text) + end + + it 'handles empty text' do + text = '' + + expect(result(regex, text)).to eq('') + end + end + describe '#replace' do it 'replaces the first instance of the match in a string' do result = described_class.new('foo').replace('foo bar foo', 'oof') diff --git a/spec/lib/gitlab/url_blocker_spec.rb b/spec/lib/gitlab/url_blocker_spec.rb index 912093be29f..7b6c89b5dd3 100644 --- a/spec/lib/gitlab/url_blocker_spec.rb +++ 
b/spec/lib/gitlab/url_blocker_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe Gitlab::UrlBlocker, :stub_invalid_dns_only do +RSpec.describe Gitlab::UrlBlocker, :stub_invalid_dns_only, feature_category: :shared do include StubRequests let(:schemes) { %w[http https] } @@ -21,7 +21,7 @@ RSpec.describe Gitlab::UrlBlocker, :stub_invalid_dns_only do end end - shared_context 'instance configured to deny all requests' do + shared_context 'when instance configured to deny all requests' do before do allow(Gitlab::CurrentSettings).to receive(:current_application_settings?).and_return(true) stub_application_setting(deny_all_requests_except_allowed: true) @@ -30,7 +30,7 @@ RSpec.describe Gitlab::UrlBlocker, :stub_invalid_dns_only do shared_examples 'a URI denied by `deny_all_requests_except_allowed`' do context 'when instance setting is enabled' do - include_context 'instance configured to deny all requests' + include_context 'when instance configured to deny all requests' it 'blocks the request' do expect { subject }.to raise_error(described_class::BlockedUrlError) @@ -81,7 +81,7 @@ RSpec.describe Gitlab::UrlBlocker, :stub_invalid_dns_only do end shared_examples 'a URI exempt from `deny_all_requests_except_allowed`' do - include_context 'instance configured to deny all requests' + include_context 'when instance configured to deny all requests' it 'does not block the request' do expect { subject }.not_to raise_error @@ -248,15 +248,30 @@ RSpec.describe Gitlab::UrlBlocker, :stub_invalid_dns_only do context 'when domain cannot be resolved' do let(:import_url) { 'http://foobar.x' } - it 'raises an error' do + before do stub_env('RSPEC_ALLOW_INVALID_URLS', 'false') + end + it 'raises an error' do expect { subject }.to raise_error(described_class::BlockedUrlError) end + + context 'with HTTP_PROXY' do + let(:import_url) { 'http://foobar.x' } + + before do + allow(Gitlab).to receive(:http_proxy_env?).and_return(true) + end + + it_behaves_like 'validates URI and hostname' 
do + let(:expected_uri) { import_url } + let(:expected_hostname) { nil } + end + end end context 'when domain is too long' do - let(:import_url) { 'https://example' + 'a' * 1024 + '.com' } + let(:import_url) { "https://example#{'a' * 1024}.com" } it 'raises an error' do expect { subject }.to raise_error(described_class::BlockedUrlError) @@ -285,7 +300,7 @@ RSpec.describe Gitlab::UrlBlocker, :stub_invalid_dns_only do end end - context 'DNS rebinding protection with IP allowed' do + context 'when DNS rebinding protection with IP allowed' do let(:import_url) { 'http://a.192.168.0.120.3times.127.0.0.1.1time.repeat.rebind.network:9121/scrape?target=unix:///var/opt/gitlab/redis/redis.socket&check-keys=*' } before do @@ -300,9 +315,31 @@ RSpec.describe Gitlab::UrlBlocker, :stub_invalid_dns_only do end it_behaves_like 'a URI exempt from `deny_all_requests_except_allowed`' + + context 'with HTTP_PROXY' do + before do + allow(Gitlab).to receive(:http_proxy_env?).and_return(true) + end + + it_behaves_like 'validates URI and hostname' do + let(:expected_uri) { import_url } + let(:expected_hostname) { nil } + end + + context 'when domain is in no_proxy env' do + before do + stub_env('no_proxy', 'a.192.168.0.120.3times.127.0.0.1.1time.repeat.rebind.network') + end + + it_behaves_like 'validates URI and hostname' do + let(:expected_uri) { 'http://192.168.0.120:9121/scrape?target=unix:///var/opt/gitlab/redis/redis.socket&check-keys=*' } + let(:expected_hostname) { 'a.192.168.0.120.3times.127.0.0.1.1time.repeat.rebind.network' } + end + end + end end - context 'disabled DNS rebinding protection' do + context 'with disabled DNS rebinding protection' do let(:options) { { dns_rebind_protection: false, schemes: schemes } } context 'when URI is internal' do @@ -483,7 +520,7 @@ RSpec.describe Gitlab::UrlBlocker, :stub_invalid_dns_only do expect(described_class.blocked_url?('https://gitlab.com/foo/foo.git', schemes: schemes)).to be false end - context 'when allow_local_network is' do + 
describe 'allow_local_network' do let(:shared_address_space_ips) { ['100.64.0.0', '100.64.127.127', '100.64.255.255'] } let(:local_ips) do @@ -564,11 +601,11 @@ RSpec.describe Gitlab::UrlBlocker, :stub_invalid_dns_only do end end - context 'true (default)' do + context 'when true (default)' do it_behaves_like 'allows local requests', { allow_localhost: true, allow_local_network: true, schemes: %w[http https] } end - context 'false' do + context 'when false' do it 'blocks urls from private networks' do local_ips.each do |ip| stub_domain_resolv(fake_domain, ip) do @@ -721,14 +758,14 @@ RSpec.describe Gitlab::UrlBlocker, :stub_invalid_dns_only do end end - context 'when dns_rebinding_setting is' do - context 'enabled' do + describe 'dns_rebinding_setting' do + context 'when enabled' do let(:dns_rebind_value) { true } it_behaves_like 'allowlists the domain' end - context 'disabled' do + context 'when disabled' do let(:dns_rebind_value) { false } it_behaves_like 'allowlists the domain' @@ -768,8 +805,8 @@ RSpec.describe Gitlab::UrlBlocker, :stub_invalid_dns_only do end end - context 'when enforce_user is' do - context 'false (default)' do + describe 'enforce_user' do + context 'when false (default)' do it 'does not block urls with a non-alphanumeric username' do expect(described_class).not_to be_blocked_url('ssh://-oProxyCommand=whoami@example.com/a', schemes: ['ssh']) @@ -781,7 +818,7 @@ RSpec.describe Gitlab::UrlBlocker, :stub_invalid_dns_only do end end - context 'true' do + context 'when true' do it 'blocks urls with a non-alphanumeric username' do aggregate_failures do expect(described_class).to be_blocked_url('ssh://-oProxyCommand=whoami@example.com/a', enforce_user: true, schemes: ['ssh']) @@ -849,7 +886,7 @@ RSpec.describe Gitlab::UrlBlocker, :stub_invalid_dns_only do end end - def stub_domain_resolv(domain, ip, port = 80, &block) + def stub_domain_resolv(domain, ip, port = 80) address = instance_double(Addrinfo, ip_address: ip, ipv4_private?: true, diff --git 
a/spec/lib/gitlab/url_blockers/ip_allowlist_entry_spec.rb b/spec/lib/gitlab/url_blockers/ip_allowlist_entry_spec.rb index 8dcb402dfb2..c56e5ce4e7a 100644 --- a/spec/lib/gitlab/url_blockers/ip_allowlist_entry_spec.rb +++ b/spec/lib/gitlab/url_blockers/ip_allowlist_entry_spec.rb @@ -2,7 +2,7 @@ require 'fast_spec_helper' -RSpec.describe Gitlab::UrlBlockers::IpAllowlistEntry do +RSpec.describe Gitlab::UrlBlockers::IpAllowlistEntry, feature_category: :integrations do let(:ipv4) { IPAddr.new('192.168.1.1') } describe '#initialize' do @@ -65,11 +65,31 @@ RSpec.describe Gitlab::UrlBlockers::IpAllowlistEntry do end it 'matches IPv6 within IPv6 range' do - ipv6_range = IPAddr.new('fd84:6d02:f6d8:c89e::/124') + ipv6_range = IPAddr.new('::ffff:192.168.1.0/8') ip_allowlist_entry = described_class.new(ipv6_range) expect(ip_allowlist_entry).to be_match(ipv6_range.to_range.last.to_s, 8080) expect(ip_allowlist_entry).not_to be_match('fd84:6d02:f6d8:f::f', 8080) end + + it 'matches IPv4 to IPv6 mapped addresses in allow list' do + ipv6_range = IPAddr.new('::ffff:192.168.1.1') + ip_allowlist_entry = described_class.new(ipv6_range) + + expect(ip_allowlist_entry).to be_match(ipv4, 8080) + expect(ip_allowlist_entry).to be_match(ipv6_range.to_range.last.to_s, 8080) + expect(ip_allowlist_entry).not_to be_match('::ffff:192.168.1.0', 8080) + expect(ip_allowlist_entry).not_to be_match('::ffff:169.254.168.101', 8080) + end + + it 'matches IPv4 to IPv6 mapped addresses in requested IP' do + ipv4_range = IPAddr.new('192.168.1.1/24') + ip_allowlist_entry = described_class.new(ipv4_range) + + expect(ip_allowlist_entry).to be_match(ipv4, 8080) + expect(ip_allowlist_entry).to be_match('::ffff:192.168.1.0', 8080) + expect(ip_allowlist_entry).to be_match('::ffff:192.168.1.1', 8080) + expect(ip_allowlist_entry).not_to be_match('::ffff:169.254.170.100/8', 8080) + end end end diff --git a/spec/lib/gitlab/usage/metric_definition_spec.rb b/spec/lib/gitlab/usage/metric_definition_spec.rb index 
4b835d11975..c336a4850d2 100644 --- a/spec/lib/gitlab/usage/metric_definition_spec.rb +++ b/spec/lib/gitlab/usage/metric_definition_spec.rb @@ -7,7 +7,6 @@ RSpec.describe Gitlab::Usage::MetricDefinition do { description: 'GitLab instance unique identifier', value_type: 'string', - product_category: 'collection', product_stage: 'growth', product_section: 'devops', status: 'active', @@ -263,7 +262,6 @@ RSpec.describe Gitlab::Usage::MetricDefinition do { description: 'Test metric definition', value_type: 'string', - product_category: 'collection', product_stage: 'growth', product_section: 'devops', status: 'active', diff --git a/spec/lib/gitlab/usage/metric_spec.rb b/spec/lib/gitlab/usage/metric_spec.rb index 8e0fce37e46..d0ea4e7aa16 100644 --- a/spec/lib/gitlab/usage/metric_spec.rb +++ b/spec/lib/gitlab/usage/metric_spec.rb @@ -13,7 +13,6 @@ RSpec.describe Gitlab::Usage::Metric do product_section: "dev", product_stage: "plan", product_group: "plan", - product_category: "issue_tracking", value_type: "number", status: "active", time_frame: "all", diff --git a/spec/lib/gitlab/usage/metrics/instrumentations/database_mode_spec.rb b/spec/lib/gitlab/usage/metrics/instrumentations/database_mode_spec.rb new file mode 100644 index 00000000000..a6128b4df1f --- /dev/null +++ b/spec/lib/gitlab/usage/metrics/instrumentations/database_mode_spec.rb @@ -0,0 +1,9 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe Gitlab::Usage::Metrics::Instrumentations::DatabaseMode, feature_category: :cell do + let(:expected_value) { Gitlab::Database.database_mode } + + it_behaves_like 'a correct instrumented metric value', { time_frame: 'none' } +end diff --git a/spec/lib/gitlab/usage/metrics/instrumentations/incoming_email_encrypted_secrets_enabled_metric_spec.rb b/spec/lib/gitlab/usage/metrics/instrumentations/incoming_email_encrypted_secrets_enabled_metric_spec.rb index ed35b2c8cde..b1b193c8d04 100644 --- 
a/spec/lib/gitlab/usage/metrics/instrumentations/incoming_email_encrypted_secrets_enabled_metric_spec.rb +++ b/spec/lib/gitlab/usage/metrics/instrumentations/incoming_email_encrypted_secrets_enabled_metric_spec.rb @@ -5,6 +5,6 @@ require 'spec_helper' RSpec.describe Gitlab::Usage::Metrics::Instrumentations::IncomingEmailEncryptedSecretsEnabledMetric, feature_category: :service_ping do it_behaves_like 'a correct instrumented metric value', { time_frame: 'none', data_source: 'ruby' } do - let(:expected_value) { ::Gitlab::IncomingEmail.encrypted_secrets.active? } + let(:expected_value) { ::Gitlab::Email::IncomingEmail.encrypted_secrets.active? } end end diff --git a/spec/lib/gitlab/usage/metrics/instrumentations/index_inconsistencies_metric_spec.rb b/spec/lib/gitlab/usage/metrics/instrumentations/index_inconsistencies_metric_spec.rb index afc9d610207..92a576d1a9f 100644 --- a/spec/lib/gitlab/usage/metrics/instrumentations/index_inconsistencies_metric_spec.rb +++ b/spec/lib/gitlab/usage/metrics/instrumentations/index_inconsistencies_metric_spec.rb @@ -13,7 +13,7 @@ RSpec.describe Gitlab::Usage::Metrics::Instrumentations::IndexInconsistenciesMet end let(:runner) { instance_double(Gitlab::Database::SchemaValidation::Runner, execute: inconsistencies) } - let(:inconsistency_class) { Gitlab::Database::SchemaValidation::Validators::BaseValidator::Inconsistency } + let(:inconsistency_class) { Gitlab::Database::SchemaValidation::Inconsistency } let(:inconsistencies) do [ diff --git a/spec/lib/gitlab/usage/metrics/instrumentations/service_desk_email_encrypted_secrets_enabled_metric_spec.rb b/spec/lib/gitlab/usage/metrics/instrumentations/service_desk_email_encrypted_secrets_enabled_metric_spec.rb index d602eae3159..ea239e53d01 100644 --- a/spec/lib/gitlab/usage/metrics/instrumentations/service_desk_email_encrypted_secrets_enabled_metric_spec.rb +++ b/spec/lib/gitlab/usage/metrics/instrumentations/service_desk_email_encrypted_secrets_enabled_metric_spec.rb @@ -5,6 +5,6 @@ 
require 'spec_helper' RSpec.describe Gitlab::Usage::Metrics::Instrumentations::ServiceDeskEmailEncryptedSecretsEnabledMetric, feature_category: :service_ping do it_behaves_like 'a correct instrumented metric value', { time_frame: 'none', data_source: 'ruby' } do - let(:expected_value) { ::Gitlab::ServiceDeskEmail.encrypted_secrets.active? } + let(:expected_value) { ::Gitlab::Email::ServiceDeskEmail.encrypted_secrets.active? } end end diff --git a/spec/lib/gitlab/usage/metrics/names_suggestions/generator_spec.rb b/spec/lib/gitlab/usage/metrics/names_suggestions/generator_spec.rb index 4f647c2700a..271e9595703 100644 --- a/spec/lib/gitlab/usage/metrics/names_suggestions/generator_spec.rb +++ b/spec/lib/gitlab/usage/metrics/names_suggestions/generator_spec.rb @@ -75,7 +75,7 @@ RSpec.describe Gitlab::Usage::Metrics::NamesSuggestions::Generator, feature_cate end end - context 'for redis metrics' do + context 'for redis metrics', quarantine: 'https://gitlab.com/gitlab-org/gitlab/-/issues/399421' do it_behaves_like 'name suggestion' do let(:key_path) { 'usage_activity_by_stage_monthly.create.merge_requests_users' } let(:name_suggestion) { // } diff --git a/spec/lib/gitlab/usage/service_ping_report_spec.rb b/spec/lib/gitlab/usage/service_ping_report_spec.rb index 730c05b7dcb..f1ce48468fe 100644 --- a/spec/lib/gitlab/usage/service_ping_report_spec.rb +++ b/spec/lib/gitlab/usage/service_ping_report_spec.rb @@ -72,25 +72,34 @@ RSpec.describe Gitlab::Usage::ServicePingReport, :use_clean_rails_memory_store_c context 'when using cached' do let(:new_usage_data) { { 'uuid' => '1112' } } + let(:instrumented_payload) { { 'instrumented' => { 'metric' => 1 } } } + let(:full_payload) { usage_data.merge(instrumented_payload) } + let(:new_full_payload) { new_usage_data.merge(instrumented_payload) } + + before do + allow_next_instance_of(Gitlab::Usage::ServicePing::InstrumentedPayload) do |instance| + allow(instance).to receive(:build).and_return(instrumented_payload) + end + end context 
'for cached: true' do it 'caches the values' do allow(Gitlab::UsageData).to receive(:data).and_return(usage_data, new_usage_data) - expect(described_class.for(output: :all_metrics_values)).to eq(usage_data) - expect(described_class.for(output: :all_metrics_values, cached: true)).to eq(usage_data) + expect(described_class.for(output: :all_metrics_values)).to eq(full_payload) + expect(described_class.for(output: :all_metrics_values, cached: true)).to eq(full_payload) - expect(Rails.cache.fetch('usage_data')).to eq(usage_data) + expect(Rails.cache.fetch('usage_data')).to eq(full_payload) end it 'writes to cache and returns fresh data' do allow(Gitlab::UsageData).to receive(:data).and_return(usage_data, new_usage_data) - expect(described_class.for(output: :all_metrics_values)).to eq(usage_data) - expect(described_class.for(output: :all_metrics_values)).to eq(new_usage_data) - expect(described_class.for(output: :all_metrics_values, cached: true)).to eq(new_usage_data) + expect(described_class.for(output: :all_metrics_values)).to eq(full_payload) + expect(described_class.for(output: :all_metrics_values)).to eq(new_full_payload) + expect(described_class.for(output: :all_metrics_values, cached: true)).to eq(new_full_payload) - expect(Rails.cache.fetch('usage_data')).to eq(new_usage_data) + expect(Rails.cache.fetch('usage_data')).to eq(new_full_payload) end end @@ -98,10 +107,10 @@ RSpec.describe Gitlab::Usage::ServicePingReport, :use_clean_rails_memory_store_c it 'returns fresh data' do allow(Gitlab::UsageData).to receive(:data).and_return(usage_data, new_usage_data) - expect(described_class.for(output: :all_metrics_values)).to eq(usage_data) - expect(described_class.for(output: :all_metrics_values)).to eq(new_usage_data) + expect(described_class.for(output: :all_metrics_values)).to eq(full_payload) + expect(described_class.for(output: :all_metrics_values)).to eq(new_full_payload) - expect(Rails.cache.fetch('usage_data')).to eq(new_usage_data) + 
expect(Rails.cache.fetch('usage_data')).to eq(new_full_payload) end end end diff --git a/spec/lib/gitlab/usage_data_counters/hll_redis_counter_spec.rb b/spec/lib/gitlab/usage_data_counters/hll_redis_counter_spec.rb index 8c497970555..aadd398e5fd 100644 --- a/spec/lib/gitlab/usage_data_counters/hll_redis_counter_spec.rb +++ b/spec/lib/gitlab/usage_data_counters/hll_redis_counter_spec.rb @@ -428,7 +428,7 @@ RSpec.describe Gitlab::UsageDataCounters::HLLRedisCounter, :clean_gitlab_redis_s described_class.track_event('event4', values: entity2, time: 2.days.ago) end - it 'calculates union of given events', :aggregate_failure do + it 'calculates union of given events', :aggregate_failures do expect(described_class.calculate_events_union(**time_range.merge(event_names: %w[event4]))).to eq 2 expect(described_class.calculate_events_union(**time_range.merge(event_names: %w[event1_slot event2_slot event3_slot]))).to eq 3 end diff --git a/spec/lib/gitlab/usage_data_counters/issue_activity_unique_counter_spec.rb b/spec/lib/gitlab/usage_data_counters/issue_activity_unique_counter_spec.rb index 383938b0324..d6a99b5ea8b 100644 --- a/spec/lib/gitlab/usage_data_counters/issue_activity_unique_counter_spec.rb +++ b/spec/lib/gitlab/usage_data_counters/issue_activity_unique_counter_spec.rb @@ -6,11 +6,12 @@ RSpec.describe Gitlab::UsageDataCounters::IssueActivityUniqueCounter, :clean_git let_it_be(:user1) { build(:user, id: 1) } let_it_be(:user2) { build(:user, id: 2) } let_it_be(:user3) { build(:user, id: 3) } - let_it_be(:project) { build(:project) } + let_it_be(:project) { create(:project) } let_it_be(:category) { Gitlab::UsageDataCounters::IssueActivityUniqueCounter::ISSUE_CATEGORY } let_it_be(:event_action) { Gitlab::UsageDataCounters::IssueActivityUniqueCounter::ISSUE_ACTION } let_it_be(:event_label) { Gitlab::UsageDataCounters::IssueActivityUniqueCounter::ISSUE_LABEL } + let(:original_params) { nil } let(:event_property) { action } let(:time) { Time.zone.now } @@ -67,6 +68,7 @@ 
RSpec.describe Gitlab::UsageDataCounters::IssueActivityUniqueCounter, :clean_git context 'for Issue created actions' do it_behaves_like 'daily tracked issuable snowplow and service ping events with project' do let(:action) { described_class::ISSUE_CREATED } + let(:original_params) { { namespace: project.project_namespace.reload } } def track_action(params) described_class.track_issue_created_action(**params) diff --git a/spec/lib/gitlab/usage_data_spec.rb b/spec/lib/gitlab/usage_data_spec.rb index d529319e6e9..f2b332501be 100644 --- a/spec/lib/gitlab/usage_data_spec.rb +++ b/spec/lib/gitlab/usage_data_spec.rb @@ -29,10 +29,8 @@ RSpec.describe Gitlab::UsageData, :aggregate_failures, feature_category: :servic .to include(:configure, :create, :manage, :monitor, :plan, :release, :verify) expect(subject[:usage_activity_by_stage_monthly]) .to include(:configure, :create, :manage, :monitor, :plan, :release, :verify) - expect(subject[:usage_activity_by_stage][:create]) - .not_to include(:merge_requests_users) expect(subject[:usage_activity_by_stage_monthly][:create]) - .to include(:merge_requests_users) + .to include(:snippets) end it 'clears memoized values' do @@ -715,7 +713,7 @@ RSpec.describe Gitlab::UsageData, :aggregate_failures, feature_category: :servic expect(subject[:ldap_enabled]).to eq(Gitlab.config.ldap.enabled) expect(subject[:gravatar_enabled]).to eq(Gitlab::CurrentSettings.gravatar_enabled?) expect(subject[:omniauth_enabled]).to eq(Gitlab::Auth.omniauth_enabled?) - expect(subject[:reply_by_email_enabled]).to eq(Gitlab::IncomingEmail.enabled?) + expect(subject[:reply_by_email_enabled]).to eq(Gitlab::Email::IncomingEmail.enabled?) 
expect(subject[:container_registry_enabled]).to eq(Gitlab.config.registry.enabled) expect(subject[:dependency_proxy_enabled]).to eq(Gitlab.config.dependency_proxy.enabled) expect(subject[:gitlab_shared_runners_enabled]).to eq(Gitlab.config.gitlab_ci.shared_runners_enabled) @@ -1021,24 +1019,6 @@ RSpec.describe Gitlab::UsageData, :aggregate_failures, feature_category: :servic end end - describe '.merge_requests_users', :clean_gitlab_redis_shared_state do - let(:time_period) { { created_at: 2.days.ago..time } } - let(:time) { Time.current } - - before do - Gitlab::UsageDataCounters::HLLRedisCounter.track_event(:merge_request_action, values: 1, time: time) - Gitlab::UsageDataCounters::HLLRedisCounter.track_event(:merge_request_action, values: 1, time: time) - Gitlab::UsageDataCounters::HLLRedisCounter.track_event(:merge_request_action, values: 2, time: time) - Gitlab::UsageDataCounters::HLLRedisCounter.track_event(:merge_request_action, values: 3, time: time) - Gitlab::UsageDataCounters::HLLRedisCounter.track_event(:merge_request_action, values: 4, time: time - 3.days) - Gitlab::UsageDataCounters::HLLRedisCounter.track_event(:design_action, values: 5, time: time) - end - - it 'returns the distinct count of users using merge requests (via events table) within the specified time period' do - expect(described_class.merge_requests_users(time_period)).to eq(3) - end - end - def for_defined_days_back(days: [31, 3]) days.each do |n| travel_to(n.days.ago) do @@ -1067,7 +1047,7 @@ RSpec.describe Gitlab::UsageData, :aggregate_failures, feature_category: :servic expect(result.duration).to be_an(Float) end - it 'records error and returns nil', :aggregated_errors do + it 'records error and returns nil', :aggregate_failures do allow(Gitlab::ErrorTracking).to receive(:track_and_raise_for_dev_exception) result = described_class.with_metadata { raise } diff --git a/spec/lib/gitlab/utils/error_message_spec.rb b/spec/lib/gitlab/utils/error_message_spec.rb index 2c2d16656e8..17786f2c8ef 
100644 --- a/spec/lib/gitlab/utils/error_message_spec.rb +++ b/spec/lib/gitlab/utils/error_message_spec.rb @@ -9,15 +9,20 @@ RSpec.describe Gitlab::Utils::ErrorMessage, feature_category: :error_tracking do end end - subject(:object) { klass.new } + let(:message) { 'Something went wrong' } - describe 'error message' do - subject { object.to_user_facing(string) } + subject(:object) { klass.new } - let(:string) { 'Error Message' } + describe '#to_user_facing' do + it 'returns a user-facing error message with the UF prefix' do + expect(described_class.to_user_facing(message)).to eq("UF: #{message}") + end + end - it "returns input prefixed with UF:" do - is_expected.to eq 'UF: Error Message' + describe '#prefixed_error_message' do + it 'returns a message with the given prefix' do + prefix = 'ERROR' + expect(described_class.prefixed_error_message(message, prefix)).to eq("#{prefix}: #{message}") end end end diff --git a/spec/lib/gitlab/utils/measuring_spec.rb b/spec/lib/gitlab/utils/measuring_spec.rb index 5dad79b1c5f..4d2791f771f 100644 --- a/spec/lib/gitlab/utils/measuring_spec.rb +++ b/spec/lib/gitlab/utils/measuring_spec.rb @@ -17,7 +17,7 @@ RSpec.describe Gitlab::Utils::Measuring do measurement.with_measuring { result } end - it 'measures and logs data', :aggregate_failure do + it 'measures and logs data', :aggregate_failures do expect(measurement).to receive(:with_measure_time).and_call_original expect(measurement).to receive(:with_count_queries).and_call_original expect(measurement).to receive(:with_gc_stats).and_call_original diff --git a/spec/lib/gitlab/utils/strong_memoize_spec.rb b/spec/lib/gitlab/utils/strong_memoize_spec.rb index 27bfe181ef6..ea8083e7d7f 100644 --- a/spec/lib/gitlab/utils/strong_memoize_spec.rb +++ b/spec/lib/gitlab/utils/strong_memoize_spec.rb @@ -3,12 +3,15 @@ require 'fast_spec_helper' require 'rspec-benchmark' require 'rspec-parameterized' +require 'active_support/testing/time_helpers' RSpec.configure do |config| config.include 
RSpec::Benchmark::Matchers end RSpec.describe Gitlab::Utils::StrongMemoize, feature_category: :shared do + include ActiveSupport::Testing::TimeHelpers + let(:klass) do strong_memoize_class = described_class @@ -30,6 +33,13 @@ RSpec.describe Gitlab::Utils::StrongMemoize, feature_category: :shared do end end + def method_name_with_expiration + strong_memoize_with_expiration(:method_name_with_expiration, 1) do + trace << value + value + end + end + def method_name_attr trace << value value @@ -142,6 +152,43 @@ RSpec.describe Gitlab::Utils::StrongMemoize, feature_category: :shared do end end + describe '#strong_memoize_with_expiration' do + [nil, false, true, 'value', 0, [0]].each do |value| + context "with value #{value}" do + let(:value) { value } + let(:method_name) { :method_name_with_expiration } + + it_behaves_like 'caching the value' + + it 'raises exception for invalid type as key' do + expect { object.strong_memoize_with_expiration(10, 1) { 20 } }.to raise_error /Invalid type of '10'/ + end + + it 'raises exception for invalid characters in key' do + expect { object.strong_memoize_with_expiration(:enabled?, 1) { 20 } } + .to raise_error /is not allowed as an instance variable name/ + end + end + end + + context 'value memoization test' do + let(:value) { 'value' } + + it 'caches the value for specified number of seconds' do + object.method_name_with_expiration + object.method_name_with_expiration + + expect(object.trace.count).to eq(1) + + travel_to(Time.current + 2.seconds) do + object.method_name_with_expiration + + expect(object.trace.count).to eq(2) + end + end + end + end + describe '#strong_memoize_with' do [nil, false, true, 'value', 0, [0]].each do |value| context "with value #{value}" do @@ -215,19 +262,21 @@ RSpec.describe Gitlab::Utils::StrongMemoize, feature_category: :shared do describe '.strong_memoize_attr' do [nil, false, true, 'value', 0, [0]].each do |value| - let(:value) { value } + context "with value '#{value}'" do + let(:value) { value } 
- context "memoized after method definition with value #{value}" do - let(:method_name) { :method_name_attr } + context 'memoized after method definition' do + let(:method_name) { :method_name_attr } - it_behaves_like 'caching the value' + it_behaves_like 'caching the value' - it 'calls the existing .method_added' do - expect(klass.method_added_list).to include(:method_name_attr) - end + it 'calls the existing .method_added' do + expect(klass.method_added_list).to include(:method_name_attr) + end - it 'retains method arity' do - expect(klass.instance_method(method_name).arity).to eq(0) + it 'retains method arity' do + expect(klass.instance_method(method_name).arity).to eq(0) + end end end end diff --git a/spec/lib/json_web_token/hmac_token_spec.rb b/spec/lib/json_web_token/hmac_token_spec.rb index 016084eaf69..7c486b2fe1b 100644 --- a/spec/lib/json_web_token/hmac_token_spec.rb +++ b/spec/lib/json_web_token/hmac_token_spec.rb @@ -50,8 +50,8 @@ RSpec.describe JSONWebToken::HMACToken do context 'that was generated using a different secret' do let(:encoded_token) { described_class.new('some other secret').encoded } - it "raises exception saying 'Signature verification raised" do - expect { decoded_token }.to raise_error(JWT::VerificationError, 'Signature verification raised') + it "raises exception saying 'Signature verification failed" do + expect { decoded_token }.to raise_error(JWT::VerificationError, 'Signature verification failed') end end diff --git a/spec/lib/product_analytics/settings_spec.rb b/spec/lib/product_analytics/settings_spec.rb new file mode 100644 index 00000000000..2cacd55b871 --- /dev/null +++ b/spec/lib/product_analytics/settings_spec.rb @@ -0,0 +1,81 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe ProductAnalytics::Settings, feature_category: :product_analytics do + describe 'config settings' do + context 'when configured' do + before do + mock_settings('test') + end + + it 'will be configured' do + 
expect(described_class.configured?).to be_truthy + end + end + + context 'when not configured' do + before do + mock_settings('') + end + + it 'will not be configured' do + expect(described_class.configured?).to be_falsey + end + end + + context 'when one configuration setting is missing' do + before do + missing_key = ProductAnalytics::Settings::CONFIG_KEYS.last + mock_settings('test', ProductAnalytics::Settings::CONFIG_KEYS - [missing_key]) + allow(::Gitlab::CurrentSettings).to receive(missing_key).and_return('') + end + + it 'will not be configured' do + expect(described_class.configured?).to be_falsey + end + end + + ProductAnalytics::Settings::CONFIG_KEYS.each do |key| + it "can read #{key}" do + expect(::Gitlab::CurrentSettings).to receive(key).and_return('test') + + expect(described_class.send(key)).to eq('test') + end + end + end + + describe '.enabled?' do + before do + allow(described_class).to receive(:configured?).and_return(true) + end + + context 'when enabled' do + before do + allow(::Gitlab::CurrentSettings).to receive(:product_analytics_enabled?).and_return(true) + end + + it 'will be enabled' do + expect(described_class.enabled?).to be_truthy + end + end + + context 'when disabled' do + before do + allow(::Gitlab::CurrentSettings).to receive(:product_analytics_enabled?).and_return(false) + end + + it 'will be enabled' do + expect(described_class.enabled?).to be_falsey + end + end + end + + private + + def mock_settings(setting, keys = ProductAnalytics::Settings::CONFIG_KEYS) + keys.each do |key| + allow(::Gitlab::CurrentSettings).to receive(key).and_return(setting) + end + end +end diff --git a/spec/lib/sidebars/admin/menus/abuse_reports_menu_spec.rb b/spec/lib/sidebars/admin/menus/abuse_reports_menu_spec.rb new file mode 100644 index 00000000000..5926852ff57 --- /dev/null +++ b/spec/lib/sidebars/admin/menus/abuse_reports_menu_spec.rb @@ -0,0 +1,42 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe 
Sidebars::Admin::Menus::AbuseReportsMenu, feature_category: :navigation do + it_behaves_like 'Admin menu', + link: '/admin/abuse_reports', + title: _('Abuse Reports'), + icon: 'slight-frown' + + it_behaves_like 'Admin menu without sub menus', active_routes: { controller: :abuse_reports } + + describe '#pill_count' do + let_it_be(:user) { create(:user, :admin) } + + let(:context) { Sidebars::Context.new(current_user: user, container: nil) } + + subject { described_class.new(context) } + + it 'returns zero when there are no abuse reports' do + expect(subject.pill_count).to eq 0 + end + + it 'memoizes the query' do + subject.pill_count + + control = ActiveRecord::QueryRecorder.new do + subject.pill_count + end + + expect(control.count).to eq 0 + end + + context 'when there are abuse reports' do + it 'returns the number of abuse reports' do + create_list(:abuse_report, 2) + + expect(subject.pill_count).to eq 2 + end + end + end +end diff --git a/spec/lib/sidebars/admin/menus/admin_overview_menu_spec.rb b/spec/lib/sidebars/admin/menus/admin_overview_menu_spec.rb new file mode 100644 index 00000000000..d076e73fdd1 --- /dev/null +++ b/spec/lib/sidebars/admin/menus/admin_overview_menu_spec.rb @@ -0,0 +1,12 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe Sidebars::Admin::Menus::AdminOverviewMenu, feature_category: :navigation do + it_behaves_like 'Admin menu', + link: '/admin', + title: s_('Admin|Overview'), + icon: 'overview' + + it_behaves_like 'Admin menu with sub menus' +end diff --git a/spec/lib/sidebars/admin/menus/admin_settings_menu_spec.rb b/spec/lib/sidebars/admin/menus/admin_settings_menu_spec.rb new file mode 100644 index 00000000000..be23dd4d25b --- /dev/null +++ b/spec/lib/sidebars/admin/menus/admin_settings_menu_spec.rb @@ -0,0 +1,12 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe Sidebars::Admin::Menus::AdminSettingsMenu, feature_category: :navigation do + it_behaves_like 'Admin menu', + link: 
'/admin/application_settings/general', + title: s_('Admin|Settings'), + icon: 'settings' + + it_behaves_like 'Admin menu with sub menus' +end diff --git a/spec/lib/sidebars/admin/menus/analytics_menu_spec.rb b/spec/lib/sidebars/admin/menus/analytics_menu_spec.rb new file mode 100644 index 00000000000..b4aa6e9aeb6 --- /dev/null +++ b/spec/lib/sidebars/admin/menus/analytics_menu_spec.rb @@ -0,0 +1,12 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe Sidebars::Admin::Menus::AnalyticsMenu, feature_category: :navigation do + it_behaves_like 'Admin menu', + link: '/admin/dev_ops_reports', + title: s_('Admin|Analytics'), + icon: 'chart' + + it_behaves_like 'Admin menu with sub menus' +end diff --git a/spec/lib/sidebars/admin/menus/applications_menu_spec.rb b/spec/lib/sidebars/admin/menus/applications_menu_spec.rb new file mode 100644 index 00000000000..0346fa4adfa --- /dev/null +++ b/spec/lib/sidebars/admin/menus/applications_menu_spec.rb @@ -0,0 +1,12 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe Sidebars::Admin::Menus::ApplicationsMenu, feature_category: :navigation do + it_behaves_like 'Admin menu', + link: '/admin/applications', + title: s_('Admin|Applications'), + icon: 'applications' + + it_behaves_like 'Admin menu without sub menus', active_routes: { controller: :applications } +end diff --git a/spec/lib/sidebars/admin/menus/ci_cd_menu_spec.rb b/spec/lib/sidebars/admin/menus/ci_cd_menu_spec.rb new file mode 100644 index 00000000000..b0d46abbee2 --- /dev/null +++ b/spec/lib/sidebars/admin/menus/ci_cd_menu_spec.rb @@ -0,0 +1,12 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe Sidebars::Admin::Menus::CiCdMenu, feature_category: :navigation do + it_behaves_like 'Admin menu', + link: '/admin/runners', + title: s_('Admin|CI/CD'), + icon: 'rocket' + + it_behaves_like 'Admin menu with sub menus' +end diff --git a/spec/lib/sidebars/admin/menus/deploy_keys_menu_spec.rb 
b/spec/lib/sidebars/admin/menus/deploy_keys_menu_spec.rb new file mode 100644 index 00000000000..f0ee846fb42 --- /dev/null +++ b/spec/lib/sidebars/admin/menus/deploy_keys_menu_spec.rb @@ -0,0 +1,12 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe Sidebars::Admin::Menus::DeployKeysMenu, feature_category: :navigation do + it_behaves_like 'Admin menu', + link: '/admin/deploy_keys', + title: s_('Admin|Deploy Keys'), + icon: 'key' + + it_behaves_like 'Admin menu without sub menus', active_routes: { controller: :deploy_keys } +end diff --git a/spec/lib/sidebars/admin/menus/labels_menu_spec.rb b/spec/lib/sidebars/admin/menus/labels_menu_spec.rb new file mode 100644 index 00000000000..63e4927ab0d --- /dev/null +++ b/spec/lib/sidebars/admin/menus/labels_menu_spec.rb @@ -0,0 +1,12 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe Sidebars::Admin::Menus::LabelsMenu, feature_category: :navigation do + it_behaves_like 'Admin menu', + link: '/admin/labels', + title: s_('Admin|Labels'), + icon: 'labels' + + it_behaves_like 'Admin menu without sub menus', active_routes: { controller: :labels } +end diff --git a/spec/lib/sidebars/admin/menus/messages_menu_spec.rb b/spec/lib/sidebars/admin/menus/messages_menu_spec.rb new file mode 100644 index 00000000000..14979b7e47a --- /dev/null +++ b/spec/lib/sidebars/admin/menus/messages_menu_spec.rb @@ -0,0 +1,12 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe Sidebars::Admin::Menus::MessagesMenu, feature_category: :navigation do + it_behaves_like 'Admin menu', + link: '/admin/broadcast_messages', + title: s_('Admin|Messages'), + icon: 'messages' + + it_behaves_like 'Admin menu without sub menus', active_routes: { controller: :broadcast_messages } +end diff --git a/spec/lib/sidebars/admin/menus/monitoring_menu_spec.rb b/spec/lib/sidebars/admin/menus/monitoring_menu_spec.rb new file mode 100644 index 00000000000..0483159da7a --- /dev/null +++ 
b/spec/lib/sidebars/admin/menus/monitoring_menu_spec.rb @@ -0,0 +1,12 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe Sidebars::Admin::Menus::MonitoringMenu, feature_category: :navigation do + it_behaves_like 'Admin menu', + link: '/admin/system_info', + title: s_('Admin|Monitoring'), + icon: 'monitor' + + it_behaves_like 'Admin menu with sub menus' +end diff --git a/spec/lib/sidebars/admin/menus/system_hooks_menu_spec.rb b/spec/lib/sidebars/admin/menus/system_hooks_menu_spec.rb new file mode 100644 index 00000000000..a2d0b851091 --- /dev/null +++ b/spec/lib/sidebars/admin/menus/system_hooks_menu_spec.rb @@ -0,0 +1,12 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe Sidebars::Admin::Menus::SystemHooksMenu, feature_category: :navigation do + it_behaves_like 'Admin menu', + link: '/admin/hooks', + title: s_('Admin|System Hooks'), + icon: 'hook' + + it_behaves_like 'Admin menu without sub menus', active_routes: { controller: :hooks } +end diff --git a/spec/lib/sidebars/admin/panel_spec.rb b/spec/lib/sidebars/admin/panel_spec.rb new file mode 100644 index 00000000000..a12fc8f8d2a --- /dev/null +++ b/spec/lib/sidebars/admin/panel_spec.rb @@ -0,0 +1,15 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe Sidebars::Admin::Panel, feature_category: :navigation do + let_it_be(:user) { build(:admin) } + + let(:context) { Sidebars::Context.new(current_user: user, container: nil) } + + subject { described_class.new(context) } + + it 'implements #super_sidebar_context_header' do + expect(subject.super_sidebar_context_header).to eq({ title: 'Admin Area' }) + end +end diff --git a/spec/lib/sidebars/concerns/super_sidebar_panel_spec.rb b/spec/lib/sidebars/concerns/super_sidebar_panel_spec.rb index f33cb4ab7f6..e0c05379a9e 100644 --- a/spec/lib/sidebars/concerns/super_sidebar_panel_spec.rb +++ b/spec/lib/sidebars/concerns/super_sidebar_panel_spec.rb @@ -83,8 +83,10 @@ RSpec.describe 
Sidebars::Concerns::SuperSidebarPanel, feature_category: :navigat expect(uncategorized_menu.renderable_items).to eq([]) end - it 'adds Menu Items to defined super_sidebar_parent, before super_sidebar_before' do - allow(menu_item).to receive(:super_sidebar_before).and_return(:exists) + it 'replaces placeholder Menu Items in the defined super_sidebar_parent' do + menu_foo.insert_item_before(:exists, nil_menu_item) + allow(menu_item).to receive(:item_id).and_return(:nil_item) + subject.transform_old_menus(current_menus, menu_bar) expect(menu_foo.renderable_items).to eq([menu_item, existing_item]) diff --git a/spec/lib/sidebars/groups/super_sidebar_menus/analyze_menu_spec.rb b/spec/lib/sidebars/groups/super_sidebar_menus/analyze_menu_spec.rb new file mode 100644 index 00000000000..3d3d304a5a0 --- /dev/null +++ b/spec/lib/sidebars/groups/super_sidebar_menus/analyze_menu_spec.rb @@ -0,0 +1,28 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe Sidebars::Groups::SuperSidebarMenus::AnalyzeMenu, feature_category: :navigation do + subject { described_class.new({}) } + + let(:items) { subject.instance_variable_get(:@items) } + + it 'has title and sprite_icon' do + expect(subject.title).to eq(s_("Navigation|Analyze")) + expect(subject.sprite_icon).to eq("chart") + end + + it 'defines list of NilMenuItem placeholders' do + expect(items.map(&:class).uniq).to eq([Sidebars::NilMenuItem]) + expect(items.map(&:item_id)).to eq([ + :cycle_analytics, + :ci_cd_analytics, + :contribution_analytics, + :devops_adoption, + :insights, + :issues_analytics, + :productivity_analytics, + :repository_analytics + ]) + end +end diff --git a/spec/lib/sidebars/groups/super_sidebar_menus/build_menu_spec.rb b/spec/lib/sidebars/groups/super_sidebar_menus/build_menu_spec.rb new file mode 100644 index 00000000000..9437e11c1b6 --- /dev/null +++ b/spec/lib/sidebars/groups/super_sidebar_menus/build_menu_spec.rb @@ -0,0 +1,21 @@ +# frozen_string_literal: true + +require 'spec_helper' 
+ +RSpec.describe Sidebars::Groups::SuperSidebarMenus::BuildMenu, feature_category: :navigation do + subject { described_class.new({}) } + + let(:items) { subject.instance_variable_get(:@items) } + + it 'has title and sprite_icon' do + expect(subject.title).to eq(s_("Navigation|Build")) + expect(subject.sprite_icon).to eq("rocket") + end + + it 'defines list of NilMenuItem placeholders' do + expect(items.map(&:class).uniq).to eq([Sidebars::NilMenuItem]) + expect(items.map(&:item_id)).to eq([ + :runners + ]) + end +end diff --git a/spec/lib/sidebars/groups/super_sidebar_menus/manage_menu_spec.rb b/spec/lib/sidebars/groups/super_sidebar_menus/manage_menu_spec.rb new file mode 100644 index 00000000000..cde9ab0d6fe --- /dev/null +++ b/spec/lib/sidebars/groups/super_sidebar_menus/manage_menu_spec.rb @@ -0,0 +1,25 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe Sidebars::Groups::SuperSidebarMenus::ManageMenu, feature_category: :navigation do + subject { described_class.new({}) } + + let(:items) { subject.instance_variable_get(:@items) } + + it 'has title and sprite_icon' do + expect(subject.title).to eq(s_("Navigation|Manage")) + expect(subject.sprite_icon).to eq("users") + end + + it 'defines list of NilMenuItem placeholders' do + expect(items.map(&:class).uniq).to eq([Sidebars::NilMenuItem]) + expect(items.map(&:item_id)).to eq([ + :activity, + :members, + :labels, + :milestones, + :iterations + ]) + end +end diff --git a/spec/lib/sidebars/groups/super_sidebar_menus/monitor_menu_spec.rb b/spec/lib/sidebars/groups/super_sidebar_menus/monitor_menu_spec.rb new file mode 100644 index 00000000000..759975856b8 --- /dev/null +++ b/spec/lib/sidebars/groups/super_sidebar_menus/monitor_menu_spec.rb @@ -0,0 +1,22 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe Sidebars::Groups::SuperSidebarMenus::MonitorMenu, feature_category: :navigation do + subject { described_class.new({}) } + + let(:items) { 
subject.instance_variable_get(:@items) } + + it 'has title and sprite_icon' do + expect(subject.title).to eq(s_("Navigation|Monitor")) + expect(subject.sprite_icon).to eq("monitor") + end + + it 'defines list of NilMenuItem placeholders' do + expect(items.map(&:class).uniq).to eq([Sidebars::NilMenuItem]) + expect(items.map(&:item_id)).to eq([ + :explore, + :datasources + ]) + end +end diff --git a/spec/lib/sidebars/groups/super_sidebar_menus/operations_menu_spec.rb b/spec/lib/sidebars/groups/super_sidebar_menus/operations_menu_spec.rb new file mode 100644 index 00000000000..e9c2701021c --- /dev/null +++ b/spec/lib/sidebars/groups/super_sidebar_menus/operations_menu_spec.rb @@ -0,0 +1,24 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe Sidebars::Groups::SuperSidebarMenus::OperationsMenu, feature_category: :navigation do + subject { described_class.new({}) } + + let(:items) { subject.instance_variable_get(:@items) } + + it 'has title and sprite_icon' do + expect(subject.title).to eq(s_("Navigation|Operate")) + expect(subject.sprite_icon).to eq("deployments") + end + + it 'defines list of NilMenuItem placeholders' do + expect(items.map(&:class).uniq).to eq([Sidebars::NilMenuItem]) + expect(items.map(&:item_id)).to eq([ + :dependency_proxy, + :packages_registry, + :container_registry, + :group_kubernetes_clusters + ]) + end +end diff --git a/spec/lib/sidebars/groups/super_sidebar_menus/plan_menu_spec.rb b/spec/lib/sidebars/groups/super_sidebar_menus/plan_menu_spec.rb new file mode 100644 index 00000000000..d289295d119 --- /dev/null +++ b/spec/lib/sidebars/groups/super_sidebar_menus/plan_menu_spec.rb @@ -0,0 +1,26 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe Sidebars::Groups::SuperSidebarMenus::PlanMenu, feature_category: :navigation do + subject { described_class.new({}) } + + let(:items) { subject.instance_variable_get(:@items) } + + it 'has title and sprite_icon' do + expect(subject.title).to 
eq(s_("Navigation|Plan")) + expect(subject.sprite_icon).to eq("planning") + end + + it 'defines list of NilMenuItem placeholders' do + expect(items.map(&:class).uniq).to eq([Sidebars::NilMenuItem]) + expect(items.map(&:item_id)).to eq([ + :issue_boards, + :epic_boards, + :roadmap, + :group_wiki, + :crm_contacts, + :crm_organizations + ]) + end +end diff --git a/spec/lib/sidebars/groups/super_sidebar_menus/secure_menu_spec.rb b/spec/lib/sidebars/groups/super_sidebar_menus/secure_menu_spec.rb new file mode 100644 index 00000000000..9eb81dda462 --- /dev/null +++ b/spec/lib/sidebars/groups/super_sidebar_menus/secure_menu_spec.rb @@ -0,0 +1,25 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe Sidebars::Groups::SuperSidebarMenus::SecureMenu, feature_category: :navigation do + subject { described_class.new({}) } + + let(:items) { subject.instance_variable_get(:@items) } + + it 'has title and sprite_icon' do + expect(subject.title).to eq(s_("Navigation|Secure")) + expect(subject.sprite_icon).to eq("shield") + end + + it 'defines list of NilMenuItem placeholders' do + expect(items.map(&:class).uniq).to eq([Sidebars::NilMenuItem]) + expect(items.map(&:item_id)).to eq([ + :security_dashboard, + :vulnerability_report, + :audit_events, + :compliance, + :scan_policies + ]) + end +end diff --git a/spec/lib/sidebars/groups/super_sidebar_panel_spec.rb b/spec/lib/sidebars/groups/super_sidebar_panel_spec.rb index beaf3875f1c..e9e9b87b588 100644 --- a/spec/lib/sidebars/groups/super_sidebar_panel_spec.rb +++ b/spec/lib/sidebars/groups/super_sidebar_panel_spec.rb @@ -26,15 +26,16 @@ RSpec.describe Sidebars::Groups::SuperSidebarPanel, feature_category: :navigatio let(:category_menu) do [ Sidebars::StaticMenu, + Sidebars::Groups::SuperSidebarMenus::ManageMenu, Sidebars::Groups::SuperSidebarMenus::PlanMenu, - Sidebars::Groups::Menus::CiCdMenu, - (Sidebars::Groups::Menus::SecurityComplianceMenu if Gitlab.ee?), + Sidebars::Groups::SuperSidebarMenus::BuildMenu, + 
Sidebars::Groups::SuperSidebarMenus::SecureMenu, Sidebars::Groups::SuperSidebarMenus::OperationsMenu, - Sidebars::Groups::Menus::ObservabilityMenu, - (Sidebars::Groups::Menus::AnalyticsMenu if Gitlab.ee?), + Sidebars::Groups::SuperSidebarMenus::MonitorMenu, + Sidebars::Groups::SuperSidebarMenus::AnalyzeMenu, Sidebars::UncategorizedMenu, Sidebars::Groups::Menus::SettingsMenu - ].compact + ] end it "is exposed as a renderable menu" do diff --git a/spec/lib/sidebars/menu_spec.rb b/spec/lib/sidebars/menu_spec.rb index 641f1c6e7e6..74ed344dd24 100644 --- a/spec/lib/sidebars/menu_spec.rb +++ b/spec/lib/sidebars/menu_spec.rb @@ -29,8 +29,14 @@ RSpec.describe Sidebars::Menu, feature_category: :navigation do end it 'returns a tree-like structure of itself and all menu items' do - menu.add_item(Sidebars::MenuItem.new(title: 'Is active', link: 'foo2', active_routes: { controller: 'fooc' })) menu.add_item(Sidebars::MenuItem.new( + item_id: 'id1', + title: 'Is active', + link: 'foo2', + active_routes: { controller: 'fooc' } + )) + menu.add_item(Sidebars::MenuItem.new( + item_id: 'id2', title: 'Not active', link: 'foo3', active_routes: { controller: 'barc' }, @@ -50,6 +56,7 @@ RSpec.describe Sidebars::Menu, feature_category: :navigation do pill_count: nil, items: [ { + id: 'id1', title: "Is active", icon: nil, link: "foo2", @@ -57,6 +64,7 @@ RSpec.describe Sidebars::Menu, feature_category: :navigation do pill_count: nil }, { + id: 'id2', title: "Not active", icon: nil, link: "foo3", @@ -230,6 +238,47 @@ RSpec.describe Sidebars::Menu, feature_category: :navigation do end end + describe '#replace_placeholder' do + let(:item1) { Sidebars::NilMenuItem.new(item_id: :foo1) } + let(:item2) { Sidebars::MenuItem.new(item_id: :foo2, title: 'foo2', link: 'foo2', active_routes: {}) } + let(:item3) { Sidebars::NilMenuItem.new(item_id: :foo3) } + + subject { menu.instance_variable_get(:@items) } + + before do + menu.add_item(item1) + menu.add_item(item2) + menu.add_item(item3) + end + + 
context 'when a NilMenuItem reference element exists' do + it 'replaces the reference element with the provided item' do + item = Sidebars::MenuItem.new(item_id: :foo1, title: 'target', active_routes: {}, link: 'target') + menu.replace_placeholder(item) + + expect(subject).to eq [item, item2, item3] + end + end + + context 'when a MenuItem reference element exists' do + it 'does not replace the reference element and adds to the end of the list' do + item = Sidebars::MenuItem.new(item_id: :foo2, title: 'target', active_routes: {}, link: 'target') + menu.replace_placeholder(item) + + expect(subject).to eq [item1, item2, item3, item] + end + end + + context 'when reference element does not exist' do + it 'adds the element to the end of the list' do + item = Sidebars::MenuItem.new(item_id: :new_element, title: 'target', active_routes: {}, link: 'target') + menu.replace_placeholder(item) + + expect(subject).to eq [item1, item2, item3, item] + end + end + end + describe '#remove_element' do let(:item1) { Sidebars::MenuItem.new(title: 'foo1', link: 'foo1', active_routes: {}, item_id: :foo1) } let(:item2) { Sidebars::MenuItem.new(title: 'foo2', link: 'foo2', active_routes: {}, item_id: :foo2) } diff --git a/spec/lib/sidebars/projects/menus/deployments_menu_spec.rb b/spec/lib/sidebars/projects/menus/deployments_menu_spec.rb index 5065c261cf8..a63acdb5dc2 100644 --- a/spec/lib/sidebars/projects/menus/deployments_menu_spec.rb +++ b/spec/lib/sidebars/projects/menus/deployments_menu_spec.rb @@ -51,7 +51,7 @@ RSpec.describe Sidebars::Projects::Menus::DeploymentsMenu, feature_category: :na end end - describe 'Feature Flags' do + describe 'Feature flags' do let(:item_id) { :feature_flags } it_behaves_like 'access rights checks' diff --git a/spec/lib/sidebars/projects/menus/issues_menu_spec.rb b/spec/lib/sidebars/projects/menus/issues_menu_spec.rb index c7ff846bc95..f783e7fcff2 100644 --- a/spec/lib/sidebars/projects/menus/issues_menu_spec.rb +++ 
b/spec/lib/sidebars/projects/menus/issues_menu_spec.rb @@ -56,7 +56,7 @@ RSpec.describe Sidebars::Projects::Menus::IssuesMenu, feature_category: :navigat describe '#pill_count' do it 'returns zero when there are no open issues' do - expect(subject.pill_count).to eq 0 + expect(subject.pill_count).to eq '0' end it 'memoizes the query' do @@ -74,7 +74,14 @@ RSpec.describe Sidebars::Projects::Menus::IssuesMenu, feature_category: :navigat create_list(:issue, 2, :opened, project: project) create(:issue, :closed, project: project) - expect(subject.pill_count).to eq 2 + expect(subject.pill_count).to eq '2' + end + end + + describe 'formatting' do + it 'returns truncated digits for count value over 1000' do + allow(project).to receive(:open_issues_count).and_return 1001 + expect(subject.pill_count).to eq('1k') end end end diff --git a/spec/lib/sidebars/projects/menus/merge_requests_menu_spec.rb b/spec/lib/sidebars/projects/menus/merge_requests_menu_spec.rb index a19df559b58..697f8a6d7c7 100644 --- a/spec/lib/sidebars/projects/menus/merge_requests_menu_spec.rb +++ b/spec/lib/sidebars/projects/menus/merge_requests_menu_spec.rb @@ -51,7 +51,7 @@ RSpec.describe Sidebars::Projects::Menus::MergeRequestsMenu, feature_category: : describe '#pill_count' do it 'returns zero when there are no open merge requests' do - expect(subject.pill_count).to eq 0 + expect(subject.pill_count).to eq '0' end it 'memoizes the query' do @@ -69,7 +69,16 @@ RSpec.describe Sidebars::Projects::Menus::MergeRequestsMenu, feature_category: : create_list(:merge_request, 2, :unique_branches, source_project: project, author: user, state: :opened) create(:merge_request, source_project: project, state: :merged) - expect(subject.pill_count).to eq 2 + expect(subject.pill_count).to eq '2' + end + end + + describe 'formatting' do + it 'returns truncated digits for count value over 1000' do + create_list(:merge_request, 1001, :unique_branches, source_project: project, author: user, state: :opened) + 
create(:merge_request, source_project: project, state: :merged) + + expect(subject.pill_count).to eq('1k') end end end diff --git a/spec/lib/sidebars/projects/menus/packages_registries_menu_spec.rb b/spec/lib/sidebars/projects/menus/packages_registries_menu_spec.rb index 554bc763345..860206dc6af 100644 --- a/spec/lib/sidebars/projects/menus/packages_registries_menu_spec.rb +++ b/spec/lib/sidebars/projects/menus/packages_registries_menu_spec.rb @@ -39,7 +39,7 @@ RSpec.describe Sidebars::Projects::Menus::PackagesRegistriesMenu, feature_catego before do stub_container_registry_config(enabled: registry_enabled) stub_config(packages: { enabled: packages_enabled }) - stub_feature_flags(harbor_registry_integration: false) + stub_feature_flags(harbor_registry_integration: false, ml_experiment_tracking: false) end context 'when Packages Registry is visible' do @@ -168,6 +168,7 @@ RSpec.describe Sidebars::Projects::Menus::PackagesRegistriesMenu, feature_catego stub_feature_flags(harbor_registry_integration: true) is_expected.not_to be_nil + expect(subject.active_routes[:controller]).to eq('projects/harbor/repositories') end end @@ -180,5 +181,25 @@ RSpec.describe Sidebars::Projects::Menus::PackagesRegistriesMenu, feature_catego end end end + + describe 'Model experiments' do + let(:item_id) { :model_experiments } + + context 'when :ml_experiment_tracking is enabled' do + it 'shows the menu item' do + stub_feature_flags(ml_experiment_tracking: true) + + is_expected.not_to be_nil + end + end + + context 'when :ml_experiment_tracking is disabled' do + it 'does not show the menu item' do + stub_feature_flags(ml_experiment_tracking: false) + + is_expected.to be_nil + end + end + end end end diff --git a/spec/lib/sidebars/projects/menus/repository_menu_spec.rb b/spec/lib/sidebars/projects/menus/repository_menu_spec.rb index b0631aacdb9..1aa0ea30d0a 100644 --- a/spec/lib/sidebars/projects/menus/repository_menu_spec.rb +++ b/spec/lib/sidebars/projects/menus/repository_menu_spec.rb 
@@ -6,7 +6,11 @@ RSpec.describe Sidebars::Projects::Menus::RepositoryMenu, feature_category: :sou let_it_be(:project) { create(:project, :repository) } let(:user) { project.first_owner } - let(:context) { Sidebars::Projects::Context.new(current_user: user, container: project, current_ref: 'master') } + let(:is_super_sidebar) { false } + let(:context) do + Sidebars::Projects::Context.new(current_user: user, container: project, current_ref: 'master', + is_super_sidebar: is_super_sidebar) + end subject { described_class.new(context) } @@ -36,9 +40,8 @@ RSpec.describe Sidebars::Projects::Menus::RepositoryMenu, feature_category: :sou end context 'for menu items' do - shared_examples_for 'repository menu item link for' do |item_id| + shared_examples_for 'repository menu item link for' do let(:ref) { 'master' } - let(:item_id) { item_id } subject { described_class.new(context).renderable_items.find { |e| e.item_id == item_id }.link } using RSpec::Parameterized::TableSyntax @@ -77,10 +80,34 @@ RSpec.describe Sidebars::Projects::Menus::RepositoryMenu, feature_category: :sou end end + shared_examples_for 'repository menu item with different super sidebar title' do |title, super_sidebar_title| + subject { described_class.new(context).renderable_items.find { |e| e.item_id == item_id } } + + specify do + expect(subject.title).to eq(title) + end + + context 'when inside the super sidebar' do + let(:is_super_sidebar) { true } + + specify do + expect(subject.title).to eq(super_sidebar_title) + end + end + end + + describe 'Files' do + let_it_be(:item_id) { :files } + + it_behaves_like 'repository menu item with different super sidebar title', + _('Files'), + _('Repository') + end + describe 'Commits' do let_it_be(:item_id) { :commits } - it_behaves_like 'repository menu item link for', :commits do + it_behaves_like 'repository menu item link for' do let(:route) { "/#{project.full_path}/-/commits/#{ref}" } end end @@ -103,16 +130,22 @@ RSpec.describe 
Sidebars::Projects::Menus::RepositoryMenu, feature_category: :sou project.project_feature.update!(analytics_access_level: ProjectFeature::ENABLED) end - it_behaves_like 'repository menu item link for', :contributors do + it_behaves_like 'repository menu item link for' do let(:route) { "/#{project.full_path}/-/graphs/#{ref}" } end end end describe 'Network' do - it_behaves_like 'repository menu item link for', :graphs do + let_it_be(:item_id) { :graphs } + + it_behaves_like 'repository menu item link for' do let(:route) { "/#{project.full_path}/-/network/#{ref}" } end + + it_behaves_like 'repository menu item with different super sidebar title', + _('Graph'), + _('Repository graph') end end end diff --git a/spec/lib/sidebars/projects/menus/snippets_menu_spec.rb b/spec/lib/sidebars/projects/menus/snippets_menu_spec.rb index c5fd407dae9..9d50eb6f817 100644 --- a/spec/lib/sidebars/projects/menus/snippets_menu_spec.rb +++ b/spec/lib/sidebars/projects/menus/snippets_menu_spec.rb @@ -13,8 +13,7 @@ RSpec.describe Sidebars::Projects::Menus::SnippetsMenu, feature_category: :navig let(:menu) { subject } let(:extra_attrs) do { - super_sidebar_parent: ::Sidebars::Projects::Menus::RepositoryMenu, - super_sidebar_before: :contributors, + super_sidebar_parent: ::Sidebars::Projects::SuperSidebarMenus::CodeMenu, item_id: :project_snippets } end diff --git a/spec/lib/sidebars/projects/super_sidebar_menus/analyze_menu_spec.rb b/spec/lib/sidebars/projects/super_sidebar_menus/analyze_menu_spec.rb new file mode 100644 index 00000000000..8f07241d2e2 --- /dev/null +++ b/spec/lib/sidebars/projects/super_sidebar_menus/analyze_menu_spec.rb @@ -0,0 +1,30 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe Sidebars::Projects::SuperSidebarMenus::AnalyzeMenu, feature_category: :navigation do + subject { described_class.new({}) } + + let(:items) { subject.instance_variable_get(:@items) } + + it 'has title and sprite_icon' do + expect(subject.title).to 
eq(s_("Navigation|Analyze")) + expect(subject.sprite_icon).to eq("chart") + end + + it 'defines list of NilMenuItem placeholders' do + expect(items.map(&:class).uniq).to eq([Sidebars::NilMenuItem]) + expect(items.map(&:item_id)).to eq([ + :dashboards_analytics, + :cycle_analytics, + :contributors, + :ci_cd_analytics, + :repository_analytics, + :code_review, + :merge_requests, + :issues, + :insights, + :model_experiments + ]) + end +end diff --git a/spec/lib/sidebars/projects/super_sidebar_menus/build_menu_spec.rb b/spec/lib/sidebars/projects/super_sidebar_menus/build_menu_spec.rb new file mode 100644 index 00000000000..3f2a40e1c7d --- /dev/null +++ b/spec/lib/sidebars/projects/super_sidebar_menus/build_menu_spec.rb @@ -0,0 +1,29 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe Sidebars::Projects::SuperSidebarMenus::BuildMenu, feature_category: :navigation do + subject { described_class.new({}) } + + let(:items) { subject.instance_variable_get(:@items) } + + it 'has title and sprite_icon' do + expect(subject.title).to eq(s_("Navigation|Build")) + expect(subject.sprite_icon).to eq("rocket") + end + + it 'defines list of NilMenuItem placeholders' do + expect(items.map(&:class).uniq).to eq([Sidebars::NilMenuItem]) + expect(items.map(&:item_id)).to eq([ + :pipelines, + :jobs, + :pipelines_editor, + :releases, + :environments, + :pipeline_schedules, + :feature_flags, + :test_cases, + :artifacts + ]) + end +end diff --git a/spec/lib/sidebars/projects/super_sidebar_menus/code_menu_spec.rb b/spec/lib/sidebars/projects/super_sidebar_menus/code_menu_spec.rb new file mode 100644 index 00000000000..e307ff91234 --- /dev/null +++ b/spec/lib/sidebars/projects/super_sidebar_menus/code_menu_spec.rb @@ -0,0 +1,28 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe Sidebars::Projects::SuperSidebarMenus::CodeMenu, feature_category: :navigation do + subject { described_class.new({}) } + + let(:items) { 
subject.instance_variable_get(:@items) } + + it 'has title and sprite_icon' do + expect(subject.title).to eq(s_("Navigation|Code")) + expect(subject.sprite_icon).to eq("code") + end + + it 'defines list of NilMenuItem placeholders' do + expect(items.map(&:class).uniq).to eq([Sidebars::NilMenuItem]) + expect(items.map(&:item_id)).to eq([ + :files, + :branches, + :commits, + :tags, + :graphs, + :compare, + :project_snippets, + :file_locks + ]) + end +end diff --git a/spec/lib/sidebars/projects/super_sidebar_menus/manage_menu_spec.rb b/spec/lib/sidebars/projects/super_sidebar_menus/manage_menu_spec.rb new file mode 100644 index 00000000000..6a6d61496ea --- /dev/null +++ b/spec/lib/sidebars/projects/super_sidebar_menus/manage_menu_spec.rb @@ -0,0 +1,25 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe Sidebars::Projects::SuperSidebarMenus::ManageMenu, feature_category: :navigation do + subject { described_class.new({}) } + + let(:items) { subject.instance_variable_get(:@items) } + + it 'has title and sprite_icon' do + expect(subject.title).to eq(s_("Navigation|Manage")) + expect(subject.sprite_icon).to eq("users") + end + + it 'defines list of NilMenuItem placeholders' do + expect(items.map(&:class).uniq).to eq([Sidebars::NilMenuItem]) + expect(items.map(&:item_id)).to eq([ + :activity, + :members, + :labels, + :milestones, + :iterations + ]) + end +end diff --git a/spec/lib/sidebars/projects/super_sidebar_menus/monitor_menu_spec.rb b/spec/lib/sidebars/projects/super_sidebar_menus/monitor_menu_spec.rb new file mode 100644 index 00000000000..5c7f11bafe5 --- /dev/null +++ b/spec/lib/sidebars/projects/super_sidebar_menus/monitor_menu_spec.rb @@ -0,0 +1,26 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe Sidebars::Projects::SuperSidebarMenus::MonitorMenu, feature_category: :navigation do + subject { described_class.new({}) } + + let(:items) { subject.instance_variable_get(:@items) } + + it 'has title and sprite_icon' 
do + expect(subject.title).to eq(s_("Navigation|Monitor")) + expect(subject.sprite_icon).to eq("monitor") + end + + it 'defines list of NilMenuItem placeholders' do + expect(items.map(&:class).uniq).to eq([Sidebars::NilMenuItem]) + expect(items.map(&:item_id)).to eq([ + :metrics, + :error_tracking, + :alert_management, + :incidents, + :on_call_schedules, + :escalation_policies + ]) + end +end diff --git a/spec/lib/sidebars/projects/super_sidebar_menus/operations_menu_spec.rb b/spec/lib/sidebars/projects/super_sidebar_menus/operations_menu_spec.rb index df3f7e6cdab..85162635857 100644 --- a/spec/lib/sidebars/projects/super_sidebar_menus/operations_menu_spec.rb +++ b/spec/lib/sidebars/projects/super_sidebar_menus/operations_menu_spec.rb @@ -5,8 +5,24 @@ require 'spec_helper' RSpec.describe Sidebars::Projects::SuperSidebarMenus::OperationsMenu, feature_category: :navigation do subject { described_class.new({}) } + let(:items) { subject.instance_variable_get(:@items) } + it 'has title and sprite_icon' do - expect(subject.title).to eq(_("Operations")) + expect(subject.title).to eq(s_("Navigation|Operate")) expect(subject.sprite_icon).to eq("deployments") end + + it 'defines list of NilMenuItem placeholders' do + expect(items.map(&:class).uniq).to eq([Sidebars::NilMenuItem]) + expect(items.map(&:item_id)).to eq([ + :packages_registry, + :container_registry, + :kubernetes, + :terraform, + :infrastructure_registry, + :activity, + :google_cloud, + :aws + ]) + end end diff --git a/spec/lib/sidebars/projects/super_sidebar_menus/plan_menu_spec.rb b/spec/lib/sidebars/projects/super_sidebar_menus/plan_menu_spec.rb index 3917d26f6f2..9f3aa62a364 100644 --- a/spec/lib/sidebars/projects/super_sidebar_menus/plan_menu_spec.rb +++ b/spec/lib/sidebars/projects/super_sidebar_menus/plan_menu_spec.rb @@ -5,8 +5,20 @@ require 'spec_helper' RSpec.describe Sidebars::Projects::SuperSidebarMenus::PlanMenu, feature_category: :navigation do subject { described_class.new({}) } + let(:items) { 
subject.instance_variable_get(:@items) } + it 'has title and sprite_icon' do - expect(subject.title).to eq(_("Plan")) + expect(subject.title).to eq(s_("Navigation|Plan")) expect(subject.sprite_icon).to eq("planning") end + + it 'defines list of NilMenuItem placeholders' do + expect(items.map(&:class).uniq).to eq([Sidebars::NilMenuItem]) + expect(items.map(&:item_id)).to eq([ + :boards, + :project_wiki, + :service_desk, + :requirements + ]) + end end diff --git a/spec/lib/sidebars/projects/super_sidebar_menus/secure_menu_spec.rb b/spec/lib/sidebars/projects/super_sidebar_menus/secure_menu_spec.rb new file mode 100644 index 00000000000..74ef761332e --- /dev/null +++ b/spec/lib/sidebars/projects/super_sidebar_menus/secure_menu_spec.rb @@ -0,0 +1,29 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe Sidebars::Projects::SuperSidebarMenus::SecureMenu, feature_category: :navigation do + subject { described_class.new({}) } + + let(:items) { subject.instance_variable_get(:@items) } + + it 'has title and sprite_icon' do + expect(subject.title).to eq(s_("Navigation|Secure")) + expect(subject.sprite_icon).to eq("shield") + end + + it 'defines list of NilMenuItem placeholders' do + expect(items.map(&:class).uniq).to eq([Sidebars::NilMenuItem]) + expect(items.map(&:item_id)).to eq([ + :discover_project_security, + :dashboard, + :vulnerability_report, + :dependency_list, + :license_compliance, + :audit_events, + :scan_policies, + :on_demand_scans, + :configuration + ]) + end +end diff --git a/spec/lib/sidebars/projects/super_sidebar_panel_spec.rb b/spec/lib/sidebars/projects/super_sidebar_panel_spec.rb index d6fc3fd8fe1..25554bba6f1 100644 --- a/spec/lib/sidebars/projects/super_sidebar_panel_spec.rb +++ b/spec/lib/sidebars/projects/super_sidebar_panel_spec.rb @@ -26,13 +26,14 @@ RSpec.describe Sidebars::Projects::SuperSidebarPanel, feature_category: :navigat let(:category_menu) do [ Sidebars::StaticMenu, + 
Sidebars::Projects::SuperSidebarMenus::ManageMenu, Sidebars::Projects::SuperSidebarMenus::PlanMenu, - Sidebars::Projects::Menus::RepositoryMenu, - Sidebars::Projects::Menus::CiCdMenu, - Sidebars::Projects::Menus::SecurityComplianceMenu, + Sidebars::Projects::SuperSidebarMenus::CodeMenu, + Sidebars::Projects::SuperSidebarMenus::BuildMenu, + Sidebars::Projects::SuperSidebarMenus::SecureMenu, Sidebars::Projects::SuperSidebarMenus::OperationsMenu, - Sidebars::Projects::Menus::MonitorMenu, - Sidebars::Projects::Menus::AnalyticsMenu, + Sidebars::Projects::SuperSidebarMenus::MonitorMenu, + Sidebars::Projects::SuperSidebarMenus::AnalyzeMenu, Sidebars::UncategorizedMenu, Sidebars::Projects::Menus::SettingsMenu ] diff --git a/spec/lib/sidebars/search/panel_spec.rb b/spec/lib/sidebars/search/panel_spec.rb new file mode 100644 index 00000000000..8561dc0b875 --- /dev/null +++ b/spec/lib/sidebars/search/panel_spec.rb @@ -0,0 +1,29 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe Sidebars::Search::Panel, feature_category: :navigation do + let_it_be(:current_user) { create(:user) } + let_it_be(:user) { create(:user) } + + let(:context) { Sidebars::Context.new(current_user: current_user, container: user) } + let(:panel) { described_class.new(context) } + + subject { described_class.new(context) } + + describe '#aria_label' do + it 'returns the correct aria label' do + expect(panel.aria_label).to eq(_('Search')) + end + end + + describe '#super_sidebar_context_header' do + it 'returns a hash with the correct title and icon' do + expected_header = { + title: 'Search', + icon: 'search' + } + expect(panel.super_sidebar_context_header).to eq(expected_header) + end + end +end diff --git a/spec/lib/sidebars/static_menu_spec.rb b/spec/lib/sidebars/static_menu_spec.rb index 086eb332a15..b336b457302 100644 --- a/spec/lib/sidebars/static_menu_spec.rb +++ b/spec/lib/sidebars/static_menu_spec.rb @@ -9,8 +9,10 @@ RSpec.describe Sidebars::StaticMenu, feature_category: 
:navigation do describe '#serialize_for_super_sidebar' do it 'returns flat list of all menu items' do - subject.add_item(Sidebars::MenuItem.new(title: 'Is active', link: 'foo2', active_routes: { controller: 'fooc' })) - subject.add_item(Sidebars::MenuItem.new(title: 'Not active', link: 'foo3', active_routes: { controller: 'barc' })) + subject.add_item(Sidebars::MenuItem.new(item_id: 'id1', title: 'Is active', link: 'foo2', + active_routes: { controller: 'fooc' })) + subject.add_item(Sidebars::MenuItem.new(item_id: 'id2', title: 'Not active', link: 'foo3', + active_routes: { controller: 'barc' })) subject.add_item(Sidebars::NilMenuItem.new(item_id: 'nil_item')) allow(context).to receive(:route_is_active).and_return(->(x) { x[:controller] == 'fooc' }) @@ -18,6 +20,7 @@ RSpec.describe Sidebars::StaticMenu, feature_category: :navigation do expect(subject.serialize_for_super_sidebar).to eq( [ { + id: 'id1', title: "Is active", icon: nil, link: "foo2", @@ -25,6 +28,7 @@ RSpec.describe Sidebars::StaticMenu, feature_category: :navigation do pill_count: nil }, { + id: 'id2', title: "Not active", icon: nil, link: "foo3", diff --git a/spec/lib/sidebars/user_settings/menus/comment_templates_menu_spec.rb b/spec/lib/sidebars/user_settings/menus/comment_templates_menu_spec.rb new file mode 100644 index 00000000000..4f9620d6c8d --- /dev/null +++ b/spec/lib/sidebars/user_settings/menus/comment_templates_menu_spec.rb @@ -0,0 +1,65 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe Sidebars::UserSettings::Menus::CommentTemplatesMenu, feature_category: :navigation do + it_behaves_like 'User settings menu', + link: '/-/profile/comment_templates', + title: _('Comment Templates'), + icon: 'symlink', + active_routes: { controller: :comment_templates } + + describe '#render?' 
do + subject { described_class.new(context) } + + let_it_be(:user) { build(:user) } + + context 'when comment templates are enabled' do + before do + allow(subject).to receive(:saved_replies_enabled?).and_return(true) + end + + context 'when user is logged in' do + let(:context) { Sidebars::Context.new(current_user: user, container: nil) } + + it 'renders' do + expect(subject.render?).to be true + end + end + + context 'when user is not logged in' do + let(:context) { Sidebars::Context.new(current_user: nil, container: nil) } + + subject { described_class.new(context) } + + it 'does not render' do + expect(subject.render?).to be false + end + end + end + + context 'when comment templates are disabled' do + before do + allow(subject).to receive(:saved_replies_enabled?).and_return(false) + end + + context 'when user is logged in' do + let(:context) { Sidebars::Context.new(current_user: user, container: nil) } + + it 'does not render' do + expect(subject.render?).to be false + end + end + + context 'when user is not logged in' do + let(:context) { Sidebars::Context.new(current_user: nil, container: nil) } + + subject { described_class.new(context) } + + it 'does not render' do + expect(subject.render?).to be false + end + end + end + end +end diff --git a/spec/lib/sidebars/user_settings/menus/saved_replies_menu_spec.rb b/spec/lib/sidebars/user_settings/menus/saved_replies_menu_spec.rb deleted file mode 100644 index ea1a2a3539f..00000000000 --- a/spec/lib/sidebars/user_settings/menus/saved_replies_menu_spec.rb +++ /dev/null @@ -1,65 +0,0 @@ -# frozen_string_literal: true - -require 'spec_helper' - -RSpec.describe Sidebars::UserSettings::Menus::SavedRepliesMenu, feature_category: :navigation do - it_behaves_like 'User settings menu', - link: '/-/profile/saved_replies', - title: _('Saved Replies'), - icon: 'symlink', - active_routes: { controller: :saved_replies } - - describe '#render?' 
do - subject { described_class.new(context) } - - let_it_be(:user) { build(:user) } - - context 'when saved replies are enabled' do - before do - allow(subject).to receive(:saved_replies_enabled?).and_return(true) - end - - context 'when user is logged in' do - let(:context) { Sidebars::Context.new(current_user: user, container: nil) } - - it 'does not render' do - expect(subject.render?).to be true - end - end - - context 'when user is not logged in' do - let(:context) { Sidebars::Context.new(current_user: nil, container: nil) } - - subject { described_class.new(context) } - - it 'does not render' do - expect(subject.render?).to be false - end - end - end - - context 'when saved replies are disabled' do - before do - allow(subject).to receive(:saved_replies_enabled?).and_return(false) - end - - context 'when user is logged in' do - let(:context) { Sidebars::Context.new(current_user: user, container: nil) } - - it 'renders' do - expect(subject.render?).to be false - end - end - - context 'when user is not logged in' do - let(:context) { Sidebars::Context.new(current_user: nil, container: nil) } - - subject { described_class.new(context) } - - it 'does not render' do - expect(subject.render?).to be false - end - end - end - end -end diff --git a/spec/lib/uploaded_file_spec.rb b/spec/lib/uploaded_file_spec.rb index 0aba6cb0065..721b3d70feb 100644 --- a/spec/lib/uploaded_file_spec.rb +++ b/spec/lib/uploaded_file_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe UploadedFile do +RSpec.describe UploadedFile, feature_category: :package_registry do let(:temp_dir) { Dir.tmpdir } let(:temp_file) { Tempfile.new(%w[test test], temp_dir) } @@ -15,7 +15,7 @@ RSpec.describe UploadedFile do end context 'from_params functions' do - RSpec.shared_examples 'using the file path' do |filename:, content_type:, sha256:, path_suffix:, upload_duration:| + RSpec.shared_examples 'using the file path' do |filename:, content_type:, sha256:, path_suffix:, upload_duration:, sha1:, 
md5:| it { is_expected.not_to be_nil } it 'sets properly the attributes' do @@ -25,6 +25,8 @@ RSpec.describe UploadedFile do expect(subject.remote_id).to be_nil expect(subject.path).to end_with(path_suffix) expect(subject.upload_duration).to eq(upload_duration) + expect(subject.sha1).to eq(sha1) + expect(subject.md5).to eq(md5) end it 'handles a blank path' do @@ -38,7 +40,7 @@ RSpec.describe UploadedFile do end end - RSpec.shared_examples 'using the remote id' do |filename:, content_type:, sha256:, size:, remote_id:, upload_duration:| + RSpec.shared_examples 'using the remote id' do |filename:, content_type:, sha256:, size:, remote_id:, upload_duration:, sha1:, md5:| it { is_expected.not_to be_nil } it 'sets properly the attributes' do @@ -49,6 +51,8 @@ RSpec.describe UploadedFile do expect(subject.size).to eq(size) expect(subject.remote_id).to eq(remote_id) expect(subject.upload_duration).to eq(upload_duration) + expect(subject.sha1).to eq(sha1) + expect(subject.md5).to eq(md5) end end @@ -81,7 +85,9 @@ RSpec.describe UploadedFile do 'name' => 'dir/my file&.txt', 'type' => 'my/type', 'upload_duration' => '5.05', - 'sha256' => 'sha256' } + 'sha256' => 'sha256', + 'sha1' => 'sha1', + 'md5' => 'md5' } end it_behaves_like 'using the file path', @@ -89,7 +95,9 @@ RSpec.describe UploadedFile do content_type: 'my/type', sha256: 'sha256', path_suffix: 'test', - upload_duration: 5.05 + upload_duration: 5.05, + sha1: 'sha1', + md5: 'md5' end context 'with a remote id' do @@ -101,7 +109,9 @@ RSpec.describe UploadedFile do 'remote_id' => '1234567890', 'etag' => 'etag1234567890', 'upload_duration' => '5.05', - 'size' => '123456' + 'size' => '123456', + 'sha1' => 'sha1', + 'md5' => 'md5' } end @@ -111,7 +121,9 @@ RSpec.describe UploadedFile do sha256: 'sha256', size: 123456, remote_id: '1234567890', - upload_duration: 5.05 + upload_duration: 5.05, + sha1: 'sha1', + md5: 'md5' end context 'with a path and a remote id' do @@ -124,7 +136,9 @@ RSpec.describe UploadedFile do 
'remote_id' => '1234567890', 'etag' => 'etag1234567890', 'upload_duration' => '5.05', - 'size' => '123456' + 'size' => '123456', + 'sha1' => 'sha1', + 'md5' => 'md5' } end @@ -134,7 +148,9 @@ RSpec.describe UploadedFile do sha256: 'sha256', size: 123456, remote_id: '1234567890', - upload_duration: 5.05 + upload_duration: 5.05, + sha1: 'sha1', + md5: 'md5' end end end @@ -262,6 +278,14 @@ RSpec.describe UploadedFile do end end end + + context 'when unknown keyword params are provided' do + it 'raises an exception' do + expect do + described_class.new(temp_file.path, foo: 'param1', bar: 'param2') + end.to raise_error(ArgumentError, 'unknown keyword(s): foo, bar') + end + end end describe '#sanitize_filename' do diff --git a/spec/mailers/emails/profile_spec.rb b/spec/mailers/emails/profile_spec.rb index c796801fdf9..140b067f7aa 100644 --- a/spec/mailers/emails/profile_spec.rb +++ b/spec/mailers/emails/profile_spec.rb @@ -3,7 +3,7 @@ require 'spec_helper' require 'email_spec' -RSpec.describe Emails::Profile do +RSpec.describe Emails::Profile, feature_category: :user_profile do include EmailSpec::Matchers include_context 'gitlab email notification' @@ -541,4 +541,31 @@ RSpec.describe Emails::Profile do is_expected.to have_body_text /#{profile_emails_path}/ end end + + describe 'awarded a new achievement' do + let(:user) { build(:user) } + let(:achievement) { build(:achievement) } + + subject { Notify.new_achievement_email(user, achievement) } + + it_behaves_like 'an email sent from GitLab' + it_behaves_like 'it should not have Gmail Actions links' + it_behaves_like 'a user cannot unsubscribe through footer link' + + it 'is sent to the user' do + is_expected.to deliver_to user.email + end + + it 'has the correct subject' do + is_expected.to have_subject("#{achievement.namespace.full_path} awarded you the #{achievement.name} achievement") + end + + it 'includes a link to the profile page' do + is_expected.to have_body_text(group_url(achievement.namespace)) + end + + it 
'includes a link to the awarding group' do + is_expected.to have_body_text(user_url(user)) + end + end end diff --git a/spec/mailers/emails/service_desk_spec.rb b/spec/mailers/emails/service_desk_spec.rb index 25afa8b48ce..76036fcd0b3 100644 --- a/spec/mailers/emails/service_desk_spec.rb +++ b/spec/mailers/emails/service_desk_spec.rb @@ -3,7 +3,7 @@ require 'spec_helper' require 'email_spec' -RSpec.describe Emails::ServiceDesk do +RSpec.describe Emails::ServiceDesk, feature_category: :service_desk do include EmailSpec::Helpers include EmailSpec::Matchers include EmailHelpers @@ -16,6 +16,9 @@ RSpec.describe Emails::ServiceDesk do let_it_be(:issue) { create(:issue, project: project) } let_it_be(:email) { 'someone@gitlab.com' } let_it_be(:expected_unsubscribe_url) { unsubscribe_sent_notification_url('b7721fc7e8419911a8bea145236a0519') } + let_it_be(:credential) { create(:service_desk_custom_email_credential, project: project) } + let_it_be(:verification) { create(:service_desk_custom_email_verification, project: project) } + let_it_be(:service_desk_setting) { create(:service_desk_setting, project: project, custom_email: 'user@example.com') } let(:template) { double(content: template_content) } @@ -81,6 +84,37 @@ RSpec.describe Emails::ServiceDesk do end end + shared_examples 'a custom email verification process email' do + it 'contains custom email and project in subject' do + expect(subject.subject).to include(service_desk_setting.custom_email) + expect(subject.subject).to include(service_desk_setting.project.name) + end + end + + shared_examples 'a custom email verification process notification email' do + it 'has correct recipient' do + expect(subject.to).to eq(['owner@example.com']) + end + + it 'contains custom email and project in body' do + is_expected.to have_body_text(service_desk_setting.custom_email) + is_expected.to have_body_text(service_desk_setting.project.name) + end + end + + shared_examples 'a custom email verification process result email with 
error' do |error_identifier, expected_text| + context "when having #{error_identifier} error" do + before do + service_desk_setting.custom_email_verification.error = error_identifier + end + + it 'contains correct error message headline in text part' do + # look for text part because we can ignore HTML tags then + expect(subject.text_part.body).to match(expected_text) + end + end + end + describe '.service_desk_thank_you_email' do let_it_be(:reply_in_subject) { true } let_it_be(:default_text) do @@ -305,4 +339,59 @@ RSpec.describe Emails::ServiceDesk do end end end + + describe '.service_desk_custom_email_verification_email' do + subject { Notify.service_desk_custom_email_verification_email(service_desk_setting) } + + it_behaves_like 'a custom email verification process email' + + it 'uses service bot name and custom email as sender' do + expect_sender(User.support_bot, sender_email: service_desk_setting.custom_email) + end + + it 'forcibly uses SMTP delivery method and has correct settings' do + expect_service_desk_custom_email_delivery_options(service_desk_setting) + end + + it 'uses verification email address as recipient' do + expect(subject.to).to eq([service_desk_setting.custom_email_address_for_verification]) + end + + it 'contains verification token' do + is_expected.to have_body_text("Verification token: #{verification.token}") + end + end + + describe '.service_desk_verification_triggered_email' do + before do + service_desk_setting.custom_email_verification.triggerer = user + end + + subject { Notify.service_desk_verification_triggered_email(service_desk_setting, 'owner@example.com') } + + it_behaves_like 'an email sent from GitLab' + it_behaves_like 'a custom email verification process email' + it_behaves_like 'a custom email verification process notification email' + + it 'contains triggerer username' do + is_expected.to have_body_text("@#{user.username}") + end + end + + describe '.service_desk_verification_result_email' do + before do + 
service_desk_setting.custom_email_verification.triggerer = user + end + + subject { Notify.service_desk_verification_result_email(service_desk_setting, 'owner@example.com') } + + it_behaves_like 'an email sent from GitLab' + it_behaves_like 'a custom email verification process email' + it_behaves_like 'a custom email verification process notification email' + it_behaves_like 'a custom email verification process result email with error', 'smtp_host_issue', 'SMTP host issue' + it_behaves_like 'a custom email verification process result email with error', 'invalid_credentials', 'Invalid credentials' + it_behaves_like 'a custom email verification process result email with error', 'mail_not_received_within_timeframe', 'Verification email not received within timeframe' + it_behaves_like 'a custom email verification process result email with error', 'incorrect_from', 'Incorrect From header' + it_behaves_like 'a custom email verification process result email with error', 'incorrect_token', 'Incorrect verification token' + end end diff --git a/spec/mailers/notify_spec.rb b/spec/mailers/notify_spec.rb index 7f838e0caf9..eb681846e82 100644 --- a/spec/mailers/notify_spec.rb +++ b/spec/mailers/notify_spec.rb @@ -78,7 +78,7 @@ RSpec.describe Notify do end end - context 'for issues' do + context 'for issues', feature_category: :team_planning do describe 'that are new' do subject { described_class.new_issue_email(issue.assignees.first.id, issue.id) } @@ -143,6 +143,8 @@ RSpec.describe Notify do it_behaves_like 'an unsubscribeable thread' it_behaves_like 'appearance header and footer enabled' it_behaves_like 'appearance header and footer not enabled' + it_behaves_like 'email with default notification reason' + it_behaves_like 'email with link to issue' it 'is sent as the author' do expect_sender(current_user) @@ -151,9 +153,34 @@ RSpec.describe Notify do it 'has the correct subject and body' do aggregate_failures do is_expected.to have_referable_subject(issue, reply: true) - 
is_expected.to have_body_text(previous_assignee.name) - is_expected.to have_body_text(assignee.name) - is_expected.to have_body_text(project_issue_path(project, issue)) + is_expected.to have_body_text("Assignee changed from #{previous_assignee.name} to #{assignee.name}") + is_expected.to have_plain_text_content("Assignee changed from #{previous_assignee.name} to #{assignee.name}") + end + end + + context 'without new assignee' do + before do + issue.update!(assignees: []) + end + + it_behaves_like 'email with default notification reason' + it_behaves_like 'email with link to issue' + + it 'uses "Unassigned" placeholder' do + is_expected.to have_body_text("Assignee changed from #{previous_assignee.name} to Unassigned") + is_expected.to have_plain_text_content("Assignee changed from #{previous_assignee.name} to Unassigned") + end + end + + context 'without previous assignees' do + subject { described_class.reassigned_issue_email(recipient.id, issue.id, [], current_user.id) } + + it_behaves_like 'email with default notification reason' + it_behaves_like 'email with link to issue' + + it 'uses short text' do + is_expected.to have_body_text("Assignee changed to #{assignee.name}") + is_expected.to have_plain_text_content("Assignee changed to #{assignee.name}") end end @@ -270,6 +297,81 @@ RSpec.describe Notify do end end + describe 'closed' do + subject { described_class.closed_issue_email(recipient.id, issue.id, current_user.id) } + + it_behaves_like 'an answer to an existing thread with reply-by-email enabled' do + let(:model) { issue } + end + + it_behaves_like 'it should show Gmail Actions View Issue link' + it_behaves_like 'an unsubscribeable thread' + it_behaves_like 'appearance header and footer enabled' + it_behaves_like 'appearance header and footer not enabled' + it_behaves_like 'email with default notification reason' + it_behaves_like 'email with link to issue' + + it 'is sent as the author' do + expect_sender(current_user) + end + + it 'has the correct 
subject and body' do + aggregate_failures do + is_expected.to have_referable_subject(issue, reply: true) + is_expected.to have_body_text("Issue was closed by #{current_user_sanitized}") + is_expected.to have_plain_text_content("Issue was closed by #{current_user_sanitized}") + end + end + + context 'via commit' do + let(:closing_commit) { project.commit } + + subject { described_class.closed_issue_email(recipient.id, issue.id, current_user.id, closed_via: closing_commit.id) } + + before do + allow(Ability).to receive(:allowed?).with(recipient, :mark_note_as_internal, anything).and_return(true) + allow(Ability).to receive(:allowed?).with(recipient, :download_code, project).and_return(true) + end + + it_behaves_like 'email with default notification reason' + it_behaves_like 'email with link to issue' + + it 'has the correct subject and body' do + aggregate_failures do + is_expected.to have_referable_subject(issue, reply: true) + is_expected.to have_body_text("Issue was closed by #{current_user_sanitized} via #{closing_commit.id}") + is_expected.to have_plain_text_content("Issue was closed by #{current_user_sanitized} via #{closing_commit.id}") + end + end + end + + context 'via merge request' do + let(:closing_merge_request) { merge_request } + + subject { described_class.closed_issue_email(recipient.id, issue.id, current_user.id, closed_via: closing_merge_request) } + + before do + allow(Ability).to receive(:allowed?).with(recipient, :read_cross_project, :global).and_return(true) + allow(Ability).to receive(:allowed?).with(recipient, :mark_note_as_internal, anything).and_return(true) + allow(Ability).to receive(:allowed?).with(recipient, :read_merge_request, anything).and_return(true) + end + + it_behaves_like 'email with default notification reason' + it_behaves_like 'email with link to issue' + + it 'has the correct subject and body' do + aggregate_failures do + url = project_merge_request_url(project, closing_merge_request) + is_expected.to 
have_referable_subject(issue, reply: true) + is_expected.to have_body_text("Issue was closed by #{current_user_sanitized} via merge request " + + %(#{closing_merge_request.to_reference})) + is_expected.to have_plain_text_content("Issue was closed by #{current_user_sanitized} via merge request " \ + "#{closing_merge_request.to_reference} (#{url})") + end + end + end + end + describe 'moved to another project' do let(:new_issue) { create(:issue) } @@ -1406,7 +1508,7 @@ RSpec.describe Notify do issue.issue_email_participants.create!(email: 'service.desk@example.com') end - describe 'thank you email' do + describe 'thank you email', feature_category: :service_desk do subject { described_class.service_desk_thank_you_email(issue.id) } it_behaves_like 'an unsubscribeable thread' @@ -1459,16 +1561,19 @@ RSpec.describe Notify do end context 'when custom email is enabled' do + let_it_be(:credentials) do + create( + :service_desk_custom_email_credential, + project: project + ) + end + let_it_be(:settings) do create( :service_desk_setting, project: project, custom_email_enabled: true, - custom_email: 'supersupport@example.com', - custom_email_smtp_address: 'smtp.example.com', - custom_email_smtp_port: 587, - custom_email_smtp_username: 'supersupport@example.com', - custom_email_smtp_password: 'supersecret' + custom_email: 'supersupport@example.com' ) end @@ -1483,7 +1588,7 @@ RSpec.describe Notify do end end - describe 'new note email' do + describe 'new note email', feature_category: :service_desk do let_it_be(:first_note) { create(:discussion_note_on_issue, note: 'Hello world') } subject { described_class.service_desk_new_note_email(issue.id, first_note.id, 'service.desk@example.com') } @@ -1520,16 +1625,19 @@ RSpec.describe Notify do end context 'when custom email is enabled' do + let_it_be(:credentials) do + create( + :service_desk_custom_email_credential, + project: project + ) + end + let_it_be(:settings) do create( :service_desk_setting, project: project, 
custom_email_enabled: true, - custom_email: 'supersupport@example.com', - custom_email_smtp_address: 'smtp.example.com', - custom_email_smtp_port: 587, - custom_email_smtp_username: 'supersupport@example.com', - custom_email_smtp_password: 'supersecret' + custom_email: 'supersupport@example.com' ) end @@ -2343,21 +2451,4 @@ RSpec.describe Notify do expect(mail.body.parts.first.to_s).to include('Start a GitLab Ultimate trial today in less than one minute, no credit card required.') end end - - def expect_sender(user, sender_email: nil) - sender = subject.header[:from].addrs[0] - expect(sender.display_name).to eq("#{user.name} (@#{user.username})") - expect(sender.address).to eq(sender_email.presence || gitlab_sender) - end - - def expect_service_desk_custom_email_delivery_options(service_desk_setting) - expect(subject.delivery_method).to be_a Mail::SMTP - expect(subject.delivery_method.settings).to include( - address: service_desk_setting.custom_email_smtp_address, - port: service_desk_setting.custom_email_smtp_port, - user_name: service_desk_setting.custom_email_smtp_username, - password: service_desk_setting.custom_email_smtp_password, - domain: service_desk_setting.custom_email.split('@').last - ) - end end diff --git a/spec/migrations/20210831203408_upsert_base_work_item_types_spec.rb b/spec/migrations/20210831203408_upsert_base_work_item_types_spec.rb deleted file mode 100644 index 4c7ef9ac1e8..00000000000 --- a/spec/migrations/20210831203408_upsert_base_work_item_types_spec.rb +++ /dev/null @@ -1,69 +0,0 @@ -# frozen_string_literal: true - -require 'spec_helper' -require_migration! 
- -RSpec.describe UpsertBaseWorkItemTypes, :migration, feature_category: :team_planning do - include MigrationHelpers::WorkItemTypesHelper - - let!(:work_item_types) { table(:work_item_types) } - - let(:base_types) do - { - issue: 0, - incident: 1, - test_case: 2, - requirement: 3 - } - end - - append_after(:all) do - # Make sure base types are recreated after running the migration - # because migration specs are not run in a transaction - reset_work_item_types - end - - context 'when no default types exist' do - it 'creates default data' do - # Need to delete all as base types are seeded before entire test suite - work_item_types.delete_all - - expect(work_item_types.count).to eq(0) - - reversible_migration do |migration| - migration.before -> { - # Depending on whether the migration has been run before, - # the size could be 4, or 0, so we don't set any expectations - # as we don't delete base types on migration reverse - } - - migration.after -> { - expect(work_item_types.count).to eq(4) - expect(work_item_types.all.pluck(:base_type)).to match_array(base_types.values) - } - end - end - end - - context 'when default types already exist' do - it 'does not create default types again' do - # Database needs to be in a similar state as when this migration was created - work_item_types.delete_all - work_item_types.find_or_create_by!(name: 'Issue', namespace_id: nil, base_type: base_types[:issue], icon_name: 'issue-type-issue') - work_item_types.find_or_create_by!(name: 'Incident', namespace_id: nil, base_type: base_types[:incident], icon_name: 'issue-type-incident') - work_item_types.find_or_create_by!(name: 'Test Case', namespace_id: nil, base_type: base_types[:test_case], icon_name: 'issue-type-test-case') - work_item_types.find_or_create_by!(name: 'Requirement', namespace_id: nil, base_type: base_types[:requirement], icon_name: 'issue-type-requirements') - - reversible_migration do |migration| - migration.before -> { - 
expect(work_item_types.all.pluck(:base_type)).to match_array(base_types.values) - } - - migration.after -> { - expect(work_item_types.count).to eq(4) - expect(work_item_types.all.pluck(:base_type)).to match_array(base_types.values) - } - end - end - end -end diff --git a/spec/migrations/20210902144144_drop_temporary_columns_and_triggers_for_ci_build_needs_spec.rb b/spec/migrations/20210902144144_drop_temporary_columns_and_triggers_for_ci_build_needs_spec.rb deleted file mode 100644 index 56482e8bd25..00000000000 --- a/spec/migrations/20210902144144_drop_temporary_columns_and_triggers_for_ci_build_needs_spec.rb +++ /dev/null @@ -1,21 +0,0 @@ -# frozen_string_literal: true - -require 'spec_helper' -require_migration! - -RSpec.describe DropTemporaryColumnsAndTriggersForCiBuildNeeds, feature_category: :pipeline_composition do - let(:ci_build_needs_table) { table(:ci_build_needs) } - - it 'correctly migrates up and down' do - reversible_migration do |migration| - migration.before -> { - expect(ci_build_needs_table.column_names).to include('build_id_convert_to_bigint') - } - - migration.after -> { - ci_build_needs_table.reset_column_information - expect(ci_build_needs_table.column_names).not_to include('build_id_convert_to_bigint') - } - end - end -end diff --git a/spec/migrations/20210906100316_drop_temporary_columns_and_triggers_for_ci_build_trace_chunks_spec.rb b/spec/migrations/20210906100316_drop_temporary_columns_and_triggers_for_ci_build_trace_chunks_spec.rb deleted file mode 100644 index eef4c7bc9fd..00000000000 --- a/spec/migrations/20210906100316_drop_temporary_columns_and_triggers_for_ci_build_trace_chunks_spec.rb +++ /dev/null @@ -1,21 +0,0 @@ -# frozen_string_literal: true - -require 'spec_helper' -require_migration! 
- -RSpec.describe DropTemporaryColumnsAndTriggersForCiBuildTraceChunks, feature_category: :continuous_integration do - let(:ci_build_trace_chunks_table) { table(:ci_build_trace_chunks) } - - it 'correctly migrates up and down' do - reversible_migration do |migration| - migration.before -> { - expect(ci_build_trace_chunks_table.column_names).to include('build_id_convert_to_bigint') - } - - migration.after -> { - ci_build_trace_chunks_table.reset_column_information - expect(ci_build_trace_chunks_table.column_names).not_to include('build_id_convert_to_bigint') - } - end - end -end diff --git a/spec/migrations/20210906130643_drop_temporary_columns_and_triggers_for_taggings_spec.rb b/spec/migrations/20210906130643_drop_temporary_columns_and_triggers_for_taggings_spec.rb deleted file mode 100644 index 208cbac2ae9..00000000000 --- a/spec/migrations/20210906130643_drop_temporary_columns_and_triggers_for_taggings_spec.rb +++ /dev/null @@ -1,23 +0,0 @@ -# frozen_string_literal: true - -require 'spec_helper' -require_migration! 
- -RSpec.describe DropTemporaryColumnsAndTriggersForTaggings, feature_category: :continuous_integration do - let(:taggings_table) { table(:taggings) } - - it 'correctly migrates up and down' do - reversible_migration do |migration| - migration.before -> { - expect(taggings_table.column_names).to include('id_convert_to_bigint') - expect(taggings_table.column_names).to include('taggable_id_convert_to_bigint') - } - - migration.after -> { - taggings_table.reset_column_information - expect(taggings_table.column_names).not_to include('id_convert_to_bigint') - expect(taggings_table.column_names).not_to include('taggable_id_convert_to_bigint') - } - end - end -end diff --git a/spec/migrations/20210907013944_cleanup_bigint_conversion_for_ci_builds_metadata_spec.rb b/spec/migrations/20210907013944_cleanup_bigint_conversion_for_ci_builds_metadata_spec.rb deleted file mode 100644 index 63664803fba..00000000000 --- a/spec/migrations/20210907013944_cleanup_bigint_conversion_for_ci_builds_metadata_spec.rb +++ /dev/null @@ -1,23 +0,0 @@ -# frozen_string_literal: true - -require 'spec_helper' -require_migration! 
- -RSpec.describe CleanupBigintConversionForCiBuildsMetadata, feature_category: :continuous_integration do - let(:ci_builds_metadata) { table(:ci_builds_metadata) } - - it 'correctly migrates up and down' do - reversible_migration do |migration| - migration.before -> { - expect(ci_builds_metadata.column_names).to include('id_convert_to_bigint') - expect(ci_builds_metadata.column_names).to include('build_id_convert_to_bigint') - } - - migration.after -> { - ci_builds_metadata.reset_column_information - expect(ci_builds_metadata.column_names).not_to include('id_convert_to_bigint') - expect(ci_builds_metadata.column_names).not_to include('build_id_convert_to_bigint') - } - end - end -end diff --git a/spec/migrations/20210907211557_finalize_ci_builds_bigint_conversion_spec.rb b/spec/migrations/20210907211557_finalize_ci_builds_bigint_conversion_spec.rb deleted file mode 100644 index 663b90f3fa7..00000000000 --- a/spec/migrations/20210907211557_finalize_ci_builds_bigint_conversion_spec.rb +++ /dev/null @@ -1,18 +0,0 @@ -# frozen_string_literal: true - -require 'spec_helper' -require_migration! - -RSpec.describe FinalizeCiBuildsBigintConversion, :migration, schema: 20210907182359, feature_category: :continuous_integration do - context 'with an unexpected FK fk_3f0c88d7dc' do - it 'removes the FK and migrates successfully' do - # Add the unexpected FK - subject.add_foreign_key(:ci_sources_pipelines, :ci_builds, column: :source_job_id, name: 'fk_3f0c88d7dc') - - expect { migrate! 
}.to change { subject.foreign_key_exists?(:ci_sources_pipelines, :ci_builds, column: :source_job_id, name: 'fk_3f0c88d7dc') }.from(true).to(false) - - # Additional check: The actually expected FK should still exist - expect(subject.foreign_key_exists?(:ci_sources_pipelines, :ci_builds, column: :source_job_id, name: 'fk_be5624bf37')).to be_truthy - end - end -end diff --git a/spec/migrations/20210910194952_update_report_type_for_existing_approval_project_rules_spec.rb b/spec/migrations/20210910194952_update_report_type_for_existing_approval_project_rules_spec.rb deleted file mode 100644 index e9d34fad76d..00000000000 --- a/spec/migrations/20210910194952_update_report_type_for_existing_approval_project_rules_spec.rb +++ /dev/null @@ -1,48 +0,0 @@ -# frozen_string_literal: true - -require 'spec_helper' -require_migration! - -RSpec.describe UpdateReportTypeForExistingApprovalProjectRules, :migration, feature_category: :source_code_management do - using RSpec::Parameterized::TableSyntax - - let(:group) { table(:namespaces).create!(name: 'user', path: 'user') } - let(:project) { table(:projects).create!(namespace_id: group.id) } - let(:approval_project_rule) { table(:approval_project_rules).create!(name: rule_name, rule_type: rule_type, project_id: project.id) } - let(:rule_type) { 2 } - let(:rule_name) { 'Vulnerability-Check' } - - context 'with rule_type set to :report_approver' do - where(:rule_name, :report_type) do - [ - ['Vulnerability-Check', 1], - ['License-Check', 2], - ['Coverage-Check', 3] - ] - end - - with_them do - context "with names associated with report type" do - it 'updates report_type' do - expect { migrate! }.to change { approval_project_rule.reload.report_type }.from(nil).to(report_type) - end - end - end - end - - context 'with rule_type set to another value (e.g., :regular)' do - let(:rule_type) { 0 } - - it 'does not update report_type' do - expect { migrate! 
}.not_to change { approval_project_rule.reload.report_type } - end - end - - context 'with the rule name set to another value (e.g., Test Rule)' do - let(:rule_name) { 'Test Rule' } - - it 'does not update report_type' do - expect { migrate! }.not_to change { approval_project_rule.reload.report_type } - end - end -end diff --git a/spec/migrations/20210914095310_cleanup_orphan_project_access_tokens_spec.rb b/spec/migrations/20210914095310_cleanup_orphan_project_access_tokens_spec.rb deleted file mode 100644 index a198ae9e473..00000000000 --- a/spec/migrations/20210914095310_cleanup_orphan_project_access_tokens_spec.rb +++ /dev/null @@ -1,47 +0,0 @@ -# frozen_string_literal: true - -require 'spec_helper' -require_migration! - -RSpec.describe CleanupOrphanProjectAccessTokens, :migration, feature_category: :user_profile do - def create_user(**extra_options) - defaults = { state: 'active', projects_limit: 0, email: "#{extra_options[:username]}@example.com" } - - table(:users).create!(defaults.merge(extra_options)) - end - - def create_membership(**extra_options) - defaults = { access_level: 30, notification_level: 0, source_id: 1, source_type: 'Project' } - - table(:members).create!(defaults.merge(extra_options)) - end - - let!(:regular_user) { create_user(username: 'regular') } - let!(:orphan_bot) { create_user(username: 'orphaned_bot', user_type: 6) } - let!(:used_bot) do - create_user(username: 'used_bot', user_type: 6).tap do |bot| - create_membership(user_id: bot.id) - end - end - - it 'marks all bots without memberships as deactivated' do - expect do - migrate! 
- regular_user.reload - orphan_bot.reload - used_bot.reload - end.to change { - [regular_user.state, orphan_bot.state, used_bot.state] - }.from(%w[active active active]).to(%w[active deactivated active]) - end - - it 'schedules for deletion all bots without memberships' do - job_class = 'DeleteUserWorker'.safe_constantize - - if job_class - expect(job_class).to receive(:bulk_perform_async).with([[orphan_bot.id, orphan_bot.id, skip_authorization: true]]) - - migrate! - end - end -end diff --git a/spec/migrations/20210915022415_cleanup_bigint_conversion_for_ci_builds_spec.rb b/spec/migrations/20210915022415_cleanup_bigint_conversion_for_ci_builds_spec.rb deleted file mode 100644 index 808c5371018..00000000000 --- a/spec/migrations/20210915022415_cleanup_bigint_conversion_for_ci_builds_spec.rb +++ /dev/null @@ -1,23 +0,0 @@ -# frozen_string_literal: true - -require 'spec_helper' -require_migration! - -RSpec.describe CleanupBigintConversionForCiBuilds, feature_category: :continuous_integration do - let(:ci_builds) { table(:ci_builds) } - - it 'correctly migrates up and down' do - reversible_migration do |migration| - migration.before -> { - expect(ci_builds.column_names).to include('id_convert_to_bigint') - expect(ci_builds.column_names).to include('stage_id_convert_to_bigint') - } - - migration.after -> { - ci_builds.reset_column_information - expect(ci_builds.column_names).not_to include('id_convert_to_bigint') - expect(ci_builds.column_names).not_to include('stage_id_convert_to_bigint') - } - end - end -end diff --git a/spec/migrations/20210918201050_remove_old_pending_jobs_for_recalculate_vulnerabilities_occurrences_uuid_spec.rb b/spec/migrations/20210918201050_remove_old_pending_jobs_for_recalculate_vulnerabilities_occurrences_uuid_spec.rb deleted file mode 100644 index b3d1b41c330..00000000000 --- a/spec/migrations/20210918201050_remove_old_pending_jobs_for_recalculate_vulnerabilities_occurrences_uuid_spec.rb +++ /dev/null @@ -1,47 +0,0 @@ -# 
frozen_string_literal: true - -require 'spec_helper' -require_migration! - -def create_background_migration_jobs(ids, status, created_at) - proper_status = case status - when :pending - Gitlab::Database::BackgroundMigrationJob.statuses['pending'] - when :succeeded - Gitlab::Database::BackgroundMigrationJob.statuses['succeeded'] - else - raise ArgumentError - end - - background_migration_jobs.create!( - class_name: 'RecalculateVulnerabilitiesOccurrencesUuid', - arguments: Array(ids), - status: proper_status, - created_at: created_at - ) -end - -RSpec.describe RemoveOldPendingJobsForRecalculateVulnerabilitiesOccurrencesUuid, :migration, -feature_category: :vulnerability_management do - let!(:background_migration_jobs) { table(:background_migration_jobs) } - let!(:before_target_date) { -Float::INFINITY..(DateTime.new(2021, 8, 17, 23, 59, 59)) } - let!(:after_target_date) { (DateTime.new(2021, 8, 18, 0, 0, 0))..Float::INFINITY } - - context 'when old RecalculateVulnerabilitiesOccurrencesUuid jobs are pending' do - before do - create_background_migration_jobs([1, 2, 3], :succeeded, DateTime.new(2021, 5, 5, 0, 2)) - create_background_migration_jobs([4, 5, 6], :pending, DateTime.new(2021, 5, 5, 0, 4)) - - create_background_migration_jobs([1, 2, 3], :succeeded, DateTime.new(2021, 8, 18, 0, 0)) - create_background_migration_jobs([4, 5, 6], :pending, DateTime.new(2021, 8, 18, 0, 2)) - create_background_migration_jobs([7, 8, 9], :pending, DateTime.new(2021, 8, 18, 0, 4)) - end - - it 'removes old, pending jobs' do - migrate! 
- - expect(background_migration_jobs.where(created_at: before_target_date).count).to eq(1) - expect(background_migration_jobs.where(created_at: after_target_date).count).to eq(3) - end - end -end diff --git a/spec/migrations/20210922021816_drop_int4_columns_for_ci_job_artifacts_spec.rb b/spec/migrations/20210922021816_drop_int4_columns_for_ci_job_artifacts_spec.rb deleted file mode 100644 index c463f69c80c..00000000000 --- a/spec/migrations/20210922021816_drop_int4_columns_for_ci_job_artifacts_spec.rb +++ /dev/null @@ -1,23 +0,0 @@ -# frozen_string_literal: true - -require 'spec_helper' -require_migration! - -RSpec.describe DropInt4ColumnsForCiJobArtifacts, feature_category: :build_artifacts do - let(:ci_job_artifacts) { table(:ci_job_artifacts) } - - it 'correctly migrates up and down' do - reversible_migration do |migration| - migration.before -> { - expect(ci_job_artifacts.column_names).to include('id_convert_to_bigint') - expect(ci_job_artifacts.column_names).to include('job_id_convert_to_bigint') - } - - migration.after -> { - ci_job_artifacts.reset_column_information - expect(ci_job_artifacts.column_names).not_to include('id_convert_to_bigint') - expect(ci_job_artifacts.column_names).not_to include('job_id_convert_to_bigint') - } - end - end -end diff --git a/spec/migrations/20210922025631_drop_int4_column_for_ci_sources_pipelines_spec.rb b/spec/migrations/20210922025631_drop_int4_column_for_ci_sources_pipelines_spec.rb deleted file mode 100644 index 5a3ba16fcc0..00000000000 --- a/spec/migrations/20210922025631_drop_int4_column_for_ci_sources_pipelines_spec.rb +++ /dev/null @@ -1,21 +0,0 @@ -# frozen_string_literal: true - -require 'spec_helper' -require_migration! 
- -RSpec.describe DropInt4ColumnForCiSourcesPipelines, feature_category: :pipeline_composition do - let(:ci_sources_pipelines) { table(:ci_sources_pipelines) } - - it 'correctly migrates up and down' do - reversible_migration do |migration| - migration.before -> { - expect(ci_sources_pipelines.column_names).to include('source_job_id_convert_to_bigint') - } - - migration.after -> { - ci_sources_pipelines.reset_column_information - expect(ci_sources_pipelines.column_names).not_to include('source_job_id_convert_to_bigint') - } - end - end -end diff --git a/spec/migrations/20210922082019_drop_int4_column_for_events_spec.rb b/spec/migrations/20210922082019_drop_int4_column_for_events_spec.rb deleted file mode 100644 index 49cf1a01f2a..00000000000 --- a/spec/migrations/20210922082019_drop_int4_column_for_events_spec.rb +++ /dev/null @@ -1,21 +0,0 @@ -# frozen_string_literal: true - -require 'spec_helper' -require_migration! - -RSpec.describe DropInt4ColumnForEvents, feature_category: :user_profile do - let(:events) { table(:events) } - - it 'correctly migrates up and down' do - reversible_migration do |migration| - migration.before -> { - expect(events.column_names).to include('id_convert_to_bigint') - } - - migration.after -> { - events.reset_column_information - expect(events.column_names).not_to include('id_convert_to_bigint') - } - end - end -end diff --git a/spec/migrations/20210922091402_drop_int4_column_for_push_event_payloads_spec.rb b/spec/migrations/20210922091402_drop_int4_column_for_push_event_payloads_spec.rb deleted file mode 100644 index 3e241438339..00000000000 --- a/spec/migrations/20210922091402_drop_int4_column_for_push_event_payloads_spec.rb +++ /dev/null @@ -1,21 +0,0 @@ -# frozen_string_literal: true - -require 'spec_helper' -require_migration! 
- -RSpec.describe DropInt4ColumnForPushEventPayloads, feature_category: :user_profile do - let(:push_event_payloads) { table(:push_event_payloads) } - - it 'correctly migrates up and down' do - reversible_migration do |migration| - migration.before -> { - expect(push_event_payloads.column_names).to include('event_id_convert_to_bigint') - } - - migration.after -> { - push_event_payloads.reset_column_information - expect(push_event_payloads.column_names).not_to include('event_id_convert_to_bigint') - } - end - end -end diff --git a/spec/migrations/20211006060436_schedule_populate_topics_total_projects_count_cache_spec.rb b/spec/migrations/20211006060436_schedule_populate_topics_total_projects_count_cache_spec.rb deleted file mode 100644 index 2f3903a20a9..00000000000 --- a/spec/migrations/20211006060436_schedule_populate_topics_total_projects_count_cache_spec.rb +++ /dev/null @@ -1,29 +0,0 @@ -# frozen_string_literal: true - -require 'spec_helper' -require_migration! - -RSpec.describe SchedulePopulateTopicsTotalProjectsCountCache, feature_category: :projects do - let(:topics) { table(:topics) } - let!(:topic_1) { topics.create!(name: 'Topic1') } - let!(:topic_2) { topics.create!(name: 'Topic2') } - let!(:topic_3) { topics.create!(name: 'Topic3') } - - describe '#up' do - before do - stub_const("#{described_class}::BATCH_SIZE", 2) - end - - it 'schedules BackfillProjectsWithCoverage background jobs', :aggregate_failures do - Sidekiq::Testing.fake! do - freeze_time do - migrate! 
- - expect(described_class::MIGRATION).to be_scheduled_delayed_migration(2.minutes, topic_1.id, topic_2.id) - expect(described_class::MIGRATION).to be_scheduled_delayed_migration(4.minutes, topic_3.id, topic_3.id) - expect(BackgroundMigrationWorker.jobs.size).to eq(2) - end - end - end - end -end diff --git a/spec/migrations/20211012134316_clean_up_migrate_merge_request_diff_commit_users_spec.rb b/spec/migrations/20211012134316_clean_up_migrate_merge_request_diff_commit_users_spec.rb deleted file mode 100644 index a61e450d9ab..00000000000 --- a/spec/migrations/20211012134316_clean_up_migrate_merge_request_diff_commit_users_spec.rb +++ /dev/null @@ -1,48 +0,0 @@ -# frozen_string_literal: true - -require 'spec_helper' -require_migration! 'clean_up_migrate_merge_request_diff_commit_users' - -RSpec.describe CleanUpMigrateMergeRequestDiffCommitUsers, :migration, feature_category: :code_review_workflow do - describe '#up' do - context 'when there are pending jobs' do - it 'processes the jobs immediately' do - Gitlab::Database::BackgroundMigrationJob.create!( - class_name: 'MigrateMergeRequestDiffCommitUsers', - status: :pending, - arguments: [10, 20] - ) - - spy = Gitlab::BackgroundMigration::MigrateMergeRequestDiffCommitUsers - migration = described_class.new - - allow(Gitlab::BackgroundMigration::MigrateMergeRequestDiffCommitUsers) - .to receive(:new) - .and_return(spy) - - expect(migration).to receive(:say) - expect(spy).to receive(:perform).with(10, 20) - - migration.up - end - end - - context 'when all jobs are completed' do - it 'does nothing' do - Gitlab::Database::BackgroundMigrationJob.create!( - class_name: 'MigrateMergeRequestDiffCommitUsers', - status: :succeeded, - arguments: [10, 20] - ) - - migration = described_class.new - - expect(migration).not_to receive(:say) - expect(Gitlab::BackgroundMigration::MigrateMergeRequestDiffCommitUsers) - .not_to receive(:new) - - migration.up - end - end - end -end diff --git 
a/spec/migrations/20211018152654_schedule_remove_duplicate_vulnerabilities_findings3_spec.rb b/spec/migrations/20211018152654_schedule_remove_duplicate_vulnerabilities_findings3_spec.rb deleted file mode 100644 index 3e8176a36a1..00000000000 --- a/spec/migrations/20211018152654_schedule_remove_duplicate_vulnerabilities_findings3_spec.rb +++ /dev/null @@ -1,166 +0,0 @@ -# frozen_string_literal: true -require 'spec_helper' -require_migration!('schedule_remove_duplicate_vulnerabilities_findings3') - -RSpec.describe ScheduleRemoveDuplicateVulnerabilitiesFindings3, :migration, feature_category: :vulnerability_management do - let(:namespace) { table(:namespaces).create!(name: 'user', path: 'user') } - let(:users) { table(:users) } - let(:user) { create_user! } - let(:project) { table(:projects).create!(id: 14219619, namespace_id: namespace.id) } - let(:scanners) { table(:vulnerability_scanners) } - let!(:scanner1) { scanners.create!(project_id: project.id, external_id: 'test 1', name: 'test scanner 1') } - let!(:scanner2) { scanners.create!(project_id: project.id, external_id: 'test 2', name: 'test scanner 2') } - let!(:scanner3) { scanners.create!(project_id: project.id, external_id: 'test 3', name: 'test scanner 3') } - let!(:unrelated_scanner) { scanners.create!(project_id: project.id, external_id: 'unreleated_scanner', name: 'unrelated scanner') } - let(:vulnerabilities) { table(:vulnerabilities) } - let(:vulnerability_findings) { table(:vulnerability_occurrences) } - let(:vulnerability_identifiers) { table(:vulnerability_identifiers) } - let(:vulnerability_identifier) do - vulnerability_identifiers.create!( - id: 1244459, - project_id: project.id, - external_type: 'vulnerability-identifier', - external_id: 'vulnerability-identifier', - fingerprint: '0a203e8cd5260a1948edbedc76c7cb91ad6a2e45', - name: 'vulnerability identifier') - end - - let!(:vulnerability_for_first_duplicate) do - create_vulnerability!( - project_id: project.id, - author_id: user.id - ) - end - - 
let!(:first_finding_duplicate) do - create_finding!( - id: 5606961, - uuid: "bd95c085-71aa-51d7-9bb6-08ae669c262e", - vulnerability_id: vulnerability_for_first_duplicate.id, - report_type: 0, - location_fingerprint: '00049d5119c2cb3bfb3d1ee1f6e031fe925aed75', - primary_identifier_id: vulnerability_identifier.id, - scanner_id: scanner1.id, - project_id: project.id - ) - end - - let!(:vulnerability_for_second_duplicate) do - create_vulnerability!( - project_id: project.id, - author_id: user.id - ) - end - - let!(:second_finding_duplicate) do - create_finding!( - id: 8765432, - uuid: "5b714f58-1176-5b26-8fd5-e11dfcb031b5", - vulnerability_id: vulnerability_for_second_duplicate.id, - report_type: 0, - location_fingerprint: '00049d5119c2cb3bfb3d1ee1f6e031fe925aed75', - primary_identifier_id: vulnerability_identifier.id, - scanner_id: scanner2.id, - project_id: project.id - ) - end - - let!(:vulnerability_for_third_duplicate) do - create_vulnerability!( - project_id: project.id, - author_id: user.id - ) - end - - let!(:third_finding_duplicate) do - create_finding!( - id: 8832995, - uuid: "cfe435fa-b25b-5199-a56d-7b007cc9e2d4", - vulnerability_id: vulnerability_for_third_duplicate.id, - report_type: 0, - location_fingerprint: '00049d5119c2cb3bfb3d1ee1f6e031fe925aed75', - primary_identifier_id: vulnerability_identifier.id, - scanner_id: scanner3.id, - project_id: project.id - ) - end - - let!(:unrelated_finding) do - create_finding!( - id: 9999999, - vulnerability_id: nil, - report_type: 1, - location_fingerprint: 'random_location_fingerprint', - primary_identifier_id: vulnerability_identifier.id, - scanner_id: unrelated_scanner.id, - project_id: project.id - ) - end - - before do - stub_const("#{described_class}::BATCH_SIZE", 1) - end - - around do |example| - freeze_time { Sidekiq::Testing.fake! { example.run } } - end - - it 'schedules background migration' do - migrate! 
- - expect(BackgroundMigrationWorker.jobs.size).to eq(4) - expect(described_class::MIGRATION).to be_scheduled_migration(first_finding_duplicate.id, first_finding_duplicate.id) - expect(described_class::MIGRATION).to be_scheduled_migration(second_finding_duplicate.id, second_finding_duplicate.id) - expect(described_class::MIGRATION).to be_scheduled_migration(third_finding_duplicate.id, third_finding_duplicate.id) - expect(described_class::MIGRATION).to be_scheduled_migration(unrelated_finding.id, unrelated_finding.id) - end - - private - - def create_vulnerability!(project_id:, author_id:, title: 'test', severity: 7, confidence: 7, report_type: 0) - vulnerabilities.create!( - project_id: project_id, - author_id: author_id, - title: title, - severity: severity, - confidence: confidence, - report_type: report_type - ) - end - - # rubocop:disable Metrics/ParameterLists - def create_finding!( - vulnerability_id:, project_id:, scanner_id:, primary_identifier_id:, id: nil, - name: "test", severity: 7, confidence: 7, report_type: 0, - project_fingerprint: '123qweasdzxc', location_fingerprint: 'test', - metadata_version: 'test', raw_metadata: 'test', uuid: SecureRandom.uuid) - vulnerability_findings.create!({ - id: id, - vulnerability_id: vulnerability_id, - project_id: project_id, - name: name, - severity: severity, - confidence: confidence, - report_type: report_type, - project_fingerprint: project_fingerprint, - scanner_id: scanner_id, - primary_identifier_id: vulnerability_identifier.id, - location_fingerprint: location_fingerprint, - metadata_version: metadata_version, - raw_metadata: raw_metadata, - uuid: uuid - }.compact) - end - # rubocop:enable Metrics/ParameterLists - - def create_user!(name: "Example User", email: "user@example.com", user_type: nil, created_at: Time.zone.now, confirmed_at: Time.zone.now) - users.create!( - name: name, - email: email, - username: name, - projects_limit: 0, - user_type: user_type, - confirmed_at: confirmed_at - ) - end -end diff 
--git a/spec/migrations/20211028155449_schedule_fix_merge_request_diff_commit_users_migration_spec.rb b/spec/migrations/20211028155449_schedule_fix_merge_request_diff_commit_users_migration_spec.rb deleted file mode 100644 index 968d9cf176c..00000000000 --- a/spec/migrations/20211028155449_schedule_fix_merge_request_diff_commit_users_migration_spec.rb +++ /dev/null @@ -1,63 +0,0 @@ -# frozen_string_literal: true - -require 'spec_helper' -require_migration! 'schedule_fix_merge_request_diff_commit_users_migration' - -RSpec.describe ScheduleFixMergeRequestDiffCommitUsersMigration, :migration, feature_category: :code_review_workflow do - let(:migration) { described_class.new } - let(:namespaces) { table(:namespaces) } - let(:projects) { table(:projects) } - let(:namespace) { namespaces.create!(name: 'foo', path: 'foo') } - - describe '#up' do - it 'does nothing when there are no projects to correct' do - migration.up - - expect(Gitlab::Database::BackgroundMigrationJob.count).to be_zero - end - - it 'schedules imported projects created after July' do - project = projects.create!( - namespace_id: namespace.id, - import_type: 'gitlab_project', - created_at: '2021-08-01' - ) - - expect(migration) - .to receive(:migrate_in) - .with(2.minutes, 'FixMergeRequestDiffCommitUsers', [project.id]) - - migration.up - - expect(Gitlab::Database::BackgroundMigrationJob.count).to eq(1) - - job = Gitlab::Database::BackgroundMigrationJob.first - - expect(job.class_name).to eq('FixMergeRequestDiffCommitUsers') - expect(job.arguments).to eq([project.id]) - end - - it 'ignores projects imported before July' do - projects.create!( - namespace_id: namespace.id, - import_type: 'gitlab_project', - created_at: '2020-08-01' - ) - - migration.up - - expect(Gitlab::Database::BackgroundMigrationJob.count).to be_zero - end - - it 'ignores projects that are not imported' do - projects.create!( - namespace_id: namespace.id, - created_at: '2021-08-01' - ) - - migration.up - - 
expect(Gitlab::Database::BackgroundMigrationJob.count).to be_zero - end - end -end diff --git a/spec/migrations/20211101222614_consume_remaining_user_namespace_jobs_spec.rb b/spec/migrations/20211101222614_consume_remaining_user_namespace_jobs_spec.rb deleted file mode 100644 index 1688ebf7cb1..00000000000 --- a/spec/migrations/20211101222614_consume_remaining_user_namespace_jobs_spec.rb +++ /dev/null @@ -1,21 +0,0 @@ -# frozen_string_literal: true - -require 'spec_helper' -require_migration! - -RSpec.describe ConsumeRemainingUserNamespaceJobs, feature_category: :subgroups do - let(:namespaces) { table(:namespaces) } - let!(:namespace) { namespaces.create!(name: 'gitlab', path: 'gitlab-org', type: nil) } - - context 'when Namespaces with nil `type` still exist' do - it 'steals sidekiq jobs from BackfillUserNamespace background migration' do - expect(Gitlab::BackgroundMigration).to receive(:steal).with('BackfillUserNamespace') - - migrate! - end - - it 'migrates namespaces without type' do - expect { migrate! }.to change { namespaces.where(type: 'User').count }.from(0).to(1) - end - end -end diff --git a/spec/migrations/20211110143306_add_not_null_constraint_to_security_findings_uuid_spec.rb b/spec/migrations/20211110143306_add_not_null_constraint_to_security_findings_uuid_spec.rb deleted file mode 100644 index 3b69169b2d6..00000000000 --- a/spec/migrations/20211110143306_add_not_null_constraint_to_security_findings_uuid_spec.rb +++ /dev/null @@ -1,23 +0,0 @@ -# frozen_string_literal: true -require 'spec_helper' -require_migration! 
- -RSpec.describe AddNotNullConstraintToSecurityFindingsUuid, feature_category: :vulnerability_management do - let!(:security_findings) { table(:security_findings) } - let!(:migration) { described_class.new } - - before do - allow(migration).to receive(:transaction_open?).and_return(false) - allow(migration).to receive(:with_lock_retries).and_yield - end - - it 'adds a check constraint' do - constraint = security_findings.connection.check_constraints(:security_findings).find { |constraint| constraint.expression == "uuid IS NOT NULL" } - expect(constraint).to be_nil - - migration.up - - constraint = security_findings.connection.check_constraints(:security_findings).find { |constraint| constraint.expression == "uuid IS NOT NULL" } - expect(constraint).to be_a(ActiveRecord::ConnectionAdapters::CheckConstraintDefinition) - end -end diff --git a/spec/migrations/20211110151350_schedule_drop_invalid_security_findings_spec.rb b/spec/migrations/20211110151350_schedule_drop_invalid_security_findings_spec.rb deleted file mode 100644 index d05828112e6..00000000000 --- a/spec/migrations/20211110151350_schedule_drop_invalid_security_findings_spec.rb +++ /dev/null @@ -1,72 +0,0 @@ -# frozen_string_literal: true - -require 'spec_helper' -require_migration! - -RSpec.describe ScheduleDropInvalidSecurityFindings, :migration, :suppress_gitlab_schemas_validate_connection, schema: 20211108211434, - feature_category: :vulnerability_management do - let!(:background_migration_jobs) { table(:background_migration_jobs) } - - let!(:namespace) { table(:namespaces).create!(name: 'user', path: 'user', type: Namespaces::UserNamespace.sti_name) } - let!(:project) { table(:projects).create!(namespace_id: namespace.id) } - - let!(:pipelines) { table(:ci_pipelines) } - let!(:pipeline) { pipelines.create!(project_id: project.id) } - - let!(:ci_builds) { table(:ci_builds) } - let!(:ci_build) { ci_builds.create! 
} - - let!(:security_scans) { table(:security_scans) } - let!(:security_scan) do - security_scans.create!( - scan_type: 1, - status: 1, - build_id: ci_build.id, - project_id: project.id, - pipeline_id: pipeline.id - ) - end - - let!(:vulnerability_scanners) { table(:vulnerability_scanners) } - let!(:vulnerability_scanner) { vulnerability_scanners.create!(project_id: project.id, external_id: 'test 1', name: 'test scanner 1') } - - let!(:security_findings) { table(:security_findings) } - let!(:security_finding_without_uuid) do - security_findings.create!( - severity: 1, - confidence: 1, - scan_id: security_scan.id, - scanner_id: vulnerability_scanner.id, - uuid: nil - ) - end - - let!(:security_finding_with_uuid) do - security_findings.create!( - severity: 1, - confidence: 1, - scan_id: security_scan.id, - scanner_id: vulnerability_scanner.id, - uuid: 'bd95c085-71aa-51d7-9bb6-08ae669c262e' - ) - end - - before do - stub_const("#{described_class}::BATCH_SIZE", 1) - stub_const("#{described_class}::SUB_BATCH_SIZE", 1) - end - - around do |example| - freeze_time { Sidekiq::Testing.fake! { example.run } } - end - - it 'schedules background migrations' do - migrate! 
- - expect(background_migration_jobs.count).to eq(1) - expect(background_migration_jobs.first.arguments).to match_array([security_finding_without_uuid.id, security_finding_without_uuid.id, described_class::SUB_BATCH_SIZE]) - - expect(BackgroundMigrationWorker.jobs.size).to eq(1) - expect(described_class::MIGRATION).to be_scheduled_delayed_migration(2.minutes, security_finding_without_uuid.id, security_finding_without_uuid.id, described_class::SUB_BATCH_SIZE) - end -end diff --git a/spec/migrations/20211116091751_change_namespace_type_default_to_user_spec.rb b/spec/migrations/20211116091751_change_namespace_type_default_to_user_spec.rb deleted file mode 100644 index deba6f9b87c..00000000000 --- a/spec/migrations/20211116091751_change_namespace_type_default_to_user_spec.rb +++ /dev/null @@ -1,5 +0,0 @@ -# frozen_string_literal: true - -# With https://gitlab.com/gitlab-org/gitlab/-/merge_requests/73495, we no longer allow -# a Namespace type to be nil. There is nothing left to test for this migration, -# but we'll keep this file here as a tombstone. diff --git a/spec/migrations/20211116111644_schedule_remove_occurrence_pipelines_and_duplicate_vulnerabilities_findings_spec.rb b/spec/migrations/20211116111644_schedule_remove_occurrence_pipelines_and_duplicate_vulnerabilities_findings_spec.rb deleted file mode 100644 index 18513656029..00000000000 --- a/spec/migrations/20211116111644_schedule_remove_occurrence_pipelines_and_duplicate_vulnerabilities_findings_spec.rb +++ /dev/null @@ -1,190 +0,0 @@ -# frozen_string_literal: true -require 'spec_helper' - -require_migration! 
- -RSpec.describe ScheduleRemoveOccurrencePipelinesAndDuplicateVulnerabilitiesFindings, - :suppress_gitlab_schemas_validate_connection, :migration, feature_category: :vulnerability_management do - let!(:background_migration_jobs) { table(:background_migration_jobs) } - let!(:namespace) { table(:namespaces).create!(name: 'user', path: 'user') } - let!(:users) { table(:users) } - let!(:user) { create_user! } - let!(:project) { table(:projects).create!(id: 14219619, namespace_id: namespace.id) } - let!(:pipelines) { table(:ci_pipelines) } - let!(:scanners) { table(:vulnerability_scanners) } - let!(:scanner1) { scanners.create!(project_id: project.id, external_id: 'test 1', name: 'test scanner 1') } - let!(:scanner2) { scanners.create!(project_id: project.id, external_id: 'test 2', name: 'test scanner 2') } - let!(:scanner3) { scanners.create!(project_id: project.id, external_id: 'test 3', name: 'test scanner 3') } - let!(:unrelated_scanner) { scanners.create!(project_id: project.id, external_id: 'unreleated_scanner', name: 'unrelated scanner') } - let!(:vulnerabilities) { table(:vulnerabilities) } - let!(:vulnerability_findings) { table(:vulnerability_occurrences) } - let!(:vulnerability_finding_pipelines) { table(:vulnerability_occurrence_pipelines) } - let!(:vulnerability_identifiers) { table(:vulnerability_identifiers) } - let!(:vulnerability_identifier) do - vulnerability_identifiers.create!( - id: 1244459, - project_id: project.id, - external_type: 'vulnerability-identifier', - external_id: 'vulnerability-identifier', - fingerprint: '0a203e8cd5260a1948edbedc76c7cb91ad6a2e45', - name: 'vulnerability identifier') - end - - let!(:vulnerability_for_first_duplicate) do - create_vulnerability!( - project_id: project.id, - author_id: user.id - ) - end - - let!(:first_finding_duplicate) do - create_finding!( - id: 5606961, - uuid: "bd95c085-71aa-51d7-9bb6-08ae669c262e", - vulnerability_id: vulnerability_for_first_duplicate.id, - report_type: 0, - location_fingerprint: 
'00049d5119c2cb3bfb3d1ee1f6e031fe925aed75', - primary_identifier_id: vulnerability_identifier.id, - scanner_id: scanner1.id, - project_id: project.id - ) - end - - let!(:vulnerability_for_second_duplicate) do - create_vulnerability!( - project_id: project.id, - author_id: user.id - ) - end - - let!(:second_finding_duplicate) do - create_finding!( - id: 8765432, - uuid: "5b714f58-1176-5b26-8fd5-e11dfcb031b5", - vulnerability_id: vulnerability_for_second_duplicate.id, - report_type: 0, - location_fingerprint: '00049d5119c2cb3bfb3d1ee1f6e031fe925aed75', - primary_identifier_id: vulnerability_identifier.id, - scanner_id: scanner2.id, - project_id: project.id - ) - end - - let!(:vulnerability_for_third_duplicate) do - create_vulnerability!( - project_id: project.id, - author_id: user.id - ) - end - - let!(:third_finding_duplicate) do - create_finding!( - id: 8832995, - uuid: "cfe435fa-b25b-5199-a56d-7b007cc9e2d4", - vulnerability_id: vulnerability_for_third_duplicate.id, - report_type: 0, - location_fingerprint: '00049d5119c2cb3bfb3d1ee1f6e031fe925aed75', - primary_identifier_id: vulnerability_identifier.id, - scanner_id: scanner3.id, - project_id: project.id - ) - end - - let!(:unrelated_finding) do - create_finding!( - id: 9999999, - vulnerability_id: nil, - report_type: 1, - location_fingerprint: 'random_location_fingerprint', - primary_identifier_id: vulnerability_identifier.id, - scanner_id: unrelated_scanner.id, - project_id: project.id - ) - end - - before do - stub_const("#{described_class}::BATCH_SIZE", 1) - - 4.times do - create_finding_pipeline!(project_id: project.id, finding_id: first_finding_duplicate.id) - create_finding_pipeline!(project_id: project.id, finding_id: second_finding_duplicate.id) - create_finding_pipeline!(project_id: project.id, finding_id: third_finding_duplicate.id) - create_finding_pipeline!(project_id: project.id, finding_id: unrelated_finding.id) - end - end - - around do |example| - freeze_time { Sidekiq::Testing.fake! 
{ example.run } } - end - - it 'schedules background migrations' do - migrate! - - expect(background_migration_jobs.count).to eq(4) - expect(background_migration_jobs.first.arguments).to match_array([first_finding_duplicate.id, first_finding_duplicate.id]) - expect(background_migration_jobs.second.arguments).to match_array([second_finding_duplicate.id, second_finding_duplicate.id]) - expect(background_migration_jobs.third.arguments).to match_array([third_finding_duplicate.id, third_finding_duplicate.id]) - expect(background_migration_jobs.fourth.arguments).to match_array([unrelated_finding.id, unrelated_finding.id]) - - expect(BackgroundMigrationWorker.jobs.size).to eq(4) - expect(described_class::MIGRATION).to be_scheduled_delayed_migration(2.minutes, first_finding_duplicate.id, first_finding_duplicate.id) - expect(described_class::MIGRATION).to be_scheduled_delayed_migration(4.minutes, second_finding_duplicate.id, second_finding_duplicate.id) - expect(described_class::MIGRATION).to be_scheduled_delayed_migration(6.minutes, third_finding_duplicate.id, third_finding_duplicate.id) - expect(described_class::MIGRATION).to be_scheduled_delayed_migration(8.minutes, unrelated_finding.id, unrelated_finding.id) - end - - private - - def create_vulnerability!(project_id:, author_id:, title: 'test', severity: 7, confidence: 7, report_type: 0) - vulnerabilities.create!( - project_id: project_id, - author_id: author_id, - title: title, - severity: severity, - confidence: confidence, - report_type: report_type - ) - end - - # rubocop:disable Metrics/ParameterLists - def create_finding!( - vulnerability_id:, project_id:, scanner_id:, primary_identifier_id:, id: nil, - name: "test", severity: 7, confidence: 7, report_type: 0, - project_fingerprint: '123qweasdzxc', location_fingerprint: 'test', - metadata_version: 'test', raw_metadata: 'test', uuid: SecureRandom.uuid) - params = { - vulnerability_id: vulnerability_id, - project_id: project_id, - name: name, - severity: severity, - 
confidence: confidence, - report_type: report_type, - project_fingerprint: project_fingerprint, - scanner_id: scanner_id, - primary_identifier_id: vulnerability_identifier.id, - location_fingerprint: location_fingerprint, - metadata_version: metadata_version, - raw_metadata: raw_metadata, - uuid: uuid - } - params[:id] = id unless id.nil? - vulnerability_findings.create!(params) - end - # rubocop:enable Metrics/ParameterLists - - def create_user!(name: "Example User", email: "user@example.com", user_type: nil, created_at: Time.zone.now, confirmed_at: Time.zone.now) - users.create!( - name: name, - email: email, - username: name, - projects_limit: 0, - user_type: user_type, - confirmed_at: confirmed_at - ) - end - - def create_finding_pipeline!(project_id:, finding_id:) - pipeline = pipelines.create!(project_id: project_id) - vulnerability_finding_pipelines.create!(pipeline_id: pipeline.id, occurrence_id: finding_id) - end -end diff --git a/spec/migrations/20211117084814_migrate_remaining_u2f_registrations_spec.rb b/spec/migrations/20211117084814_migrate_remaining_u2f_registrations_spec.rb deleted file mode 100644 index ede9c5ea7e8..00000000000 --- a/spec/migrations/20211117084814_migrate_remaining_u2f_registrations_spec.rb +++ /dev/null @@ -1,43 +0,0 @@ -# frozen_string_literal: true - -require 'spec_helper' -require_migration! 
- -RSpec.describe MigrateRemainingU2fRegistrations, :migration, feature_category: :system_access do - let(:u2f_registrations) { table(:u2f_registrations) } - let(:webauthn_registrations) { table(:webauthn_registrations) } - let(:users) { table(:users) } - - let(:user) { users.create!(email: 'email@email.com', name: 'foo', username: 'foo', projects_limit: 0) } - - before do - create_u2f_registration(1, 'reg1') - create_u2f_registration(2, 'reg2') - create_u2f_registration(3, '') - create_u2f_registration(4, nil) - webauthn_registrations.create!({ name: 'reg1', u2f_registration_id: 1, credential_xid: '', public_key: '', user_id: user.id }) - end - - it 'correctly migrates u2f registrations previously not migrated' do - expect { migrate! }.to change { webauthn_registrations.count }.from(1).to(4) - end - - it 'migrates all valid u2f registrations depite errors' do - create_u2f_registration(5, 'reg3', 'invalid!') - create_u2f_registration(6, 'reg4') - - expect { migrate! }.to change { webauthn_registrations.count }.from(1).to(5) - end - - def create_u2f_registration(id, name, public_key = nil) - device = U2F::FakeU2F.new(FFaker::BaconIpsum.characters(5), { key_handle: SecureRandom.random_bytes(255) }) - public_key ||= Base64.strict_encode64(device.origin_public_key_raw) - u2f_registrations.create!({ id: id, - certificate: Base64.strict_encode64(device.cert_raw), - key_handle: U2F.urlsafe_encode64(device.key_handle_raw), - public_key: public_key, - counter: 5, - name: name, - user_id: user.id }) - end -end diff --git a/spec/migrations/20211126115449_encrypt_static_objects_external_storage_auth_token_spec.rb b/spec/migrations/20211126115449_encrypt_static_objects_external_storage_auth_token_spec.rb deleted file mode 100644 index 09a8bb44d88..00000000000 --- a/spec/migrations/20211126115449_encrypt_static_objects_external_storage_auth_token_spec.rb +++ /dev/null @@ -1,78 +0,0 @@ -# frozen_string_literal: true - -require 'spec_helper' -require_migration! 
- -RSpec.describe EncryptStaticObjectsExternalStorageAuthToken, :migration, feature_category: :source_code_management do - let(:application_settings) do - Class.new(ActiveRecord::Base) do - self.table_name = 'application_settings' - end - end - - context 'when static_objects_external_storage_auth_token is not set' do - it 'does nothing' do - application_settings.create! - - reversible_migration do |migration| - migration.before -> { - settings = application_settings.first - - expect(settings.static_objects_external_storage_auth_token).to be_nil - expect(settings.static_objects_external_storage_auth_token_encrypted).to be_nil - } - - migration.after -> { - settings = application_settings.first - - expect(settings.static_objects_external_storage_auth_token).to be_nil - expect(settings.static_objects_external_storage_auth_token_encrypted).to be_nil - } - end - end - end - - context 'when static_objects_external_storage_auth_token is set' do - it 'encrypts static_objects_external_storage_auth_token' do - settings = application_settings.create! - settings.update_column(:static_objects_external_storage_auth_token, 'Test') - - reversible_migration do |migration| - migration.before -> { - settings = application_settings.first - - expect(settings.static_objects_external_storage_auth_token).to eq('Test') - expect(settings.static_objects_external_storage_auth_token_encrypted).to be_nil - } - migration.after -> { - settings = application_settings.first - - expect(settings.static_objects_external_storage_auth_token).to eq('Test') - expect(settings.static_objects_external_storage_auth_token_encrypted).to be_present - } - end - end - end - - context 'when static_objects_external_storage_auth_token is empty string' do - it 'does not break' do - settings = application_settings.create! 
- settings.update_column(:static_objects_external_storage_auth_token, '') - - reversible_migration do |migration| - migration.before -> { - settings = application_settings.first - - expect(settings.static_objects_external_storage_auth_token).to eq('') - expect(settings.static_objects_external_storage_auth_token_encrypted).to be_nil - } - migration.after -> { - settings = application_settings.first - - expect(settings.static_objects_external_storage_auth_token).to eq('') - expect(settings.static_objects_external_storage_auth_token_encrypted).to be_nil - } - end - end - end -end diff --git a/spec/migrations/20211126204445_add_task_to_work_item_types_spec.rb b/spec/migrations/20211126204445_add_task_to_work_item_types_spec.rb deleted file mode 100644 index db68e895b61..00000000000 --- a/spec/migrations/20211126204445_add_task_to_work_item_types_spec.rb +++ /dev/null @@ -1,54 +0,0 @@ -# frozen_string_literal: true - -require 'spec_helper' -require_migration! - -RSpec.describe AddTaskToWorkItemTypes, :migration, feature_category: :team_planning do - include MigrationHelpers::WorkItemTypesHelper - - let!(:work_item_types) { table(:work_item_types) } - - let(:base_types) do - { - issue: 0, - incident: 1, - test_case: 2, - requirement: 3, - task: 4 - } - end - - append_after(:all) do - # Make sure base types are recreated after running the migration - # because migration specs are not run in a transaction - reset_work_item_types - end - - it 'skips creating the record if it already exists' do - reset_db_state_prior_to_migration - work_item_types.find_or_create_by!(name: 'Task', namespace_id: nil, base_type: base_types[:task], icon_name: 'issue-type-task') - - expect do - migrate! - end.to not_change(work_item_types, :count) - end - - it 'adds task to base work item types' do - reset_db_state_prior_to_migration - - expect do - migrate! 
- end.to change(work_item_types, :count).from(4).to(5) - - expect(work_item_types.all.pluck(:base_type)).to include(base_types[:task]) - end - - def reset_db_state_prior_to_migration - # Database needs to be in a similar state as when this migration was created - work_item_types.delete_all - work_item_types.find_or_create_by!(name: 'Issue', namespace_id: nil, base_type: base_types[:issue], icon_name: 'issue-type-issue') - work_item_types.find_or_create_by!(name: 'Incident', namespace_id: nil, base_type: base_types[:incident], icon_name: 'issue-type-incident') - work_item_types.find_or_create_by!(name: 'Test Case', namespace_id: nil, base_type: base_types[:test_case], icon_name: 'issue-type-test-case') - work_item_types.find_or_create_by!(name: 'Requirement', namespace_id: nil, base_type: base_types[:requirement], icon_name: 'issue-type-requirements') - end -end diff --git a/spec/migrations/20211130165043_backfill_sequence_column_for_sprints_table_spec.rb b/spec/migrations/20211130165043_backfill_sequence_column_for_sprints_table_spec.rb deleted file mode 100644 index 91646da4791..00000000000 --- a/spec/migrations/20211130165043_backfill_sequence_column_for_sprints_table_spec.rb +++ /dev/null @@ -1,42 +0,0 @@ -# frozen_string_literal: true - -require 'spec_helper' - -require_migration! 
- -RSpec.describe BackfillSequenceColumnForSprintsTable, :migration, schema: 20211126042235, feature_category: :team_planning do - let(:migration) { described_class.new } - let(:namespaces) { table(:namespaces) } - let(:sprints) { table(:sprints) } - let(:iterations_cadences) { table(:iterations_cadences) } - - let!(:group) { namespaces.create!(name: 'foo', path: 'foo') } - let!(:cadence_1) { iterations_cadences.create!(group_id: group.id, title: "cadence 1") } - let!(:cadence_2) { iterations_cadences.create!(group_id: group.id, title: "cadence 2") } - let!(:iteration_1) { sprints.create!(id: 1, group_id: group.id, iterations_cadence_id: cadence_1.id, start_date: Date.new(2021, 11, 1), due_date: Date.new(2021, 11, 5), iid: 1, title: 'a' ) } - let!(:iteration_2) { sprints.create!(id: 2, group_id: group.id, iterations_cadence_id: cadence_1.id, start_date: Date.new(2021, 12, 1), due_date: Date.new(2021, 12, 5), iid: 2, title: 'b') } - let!(:iteration_3) { sprints.create!(id: 3, group_id: group.id, iterations_cadence_id: cadence_2.id, start_date: Date.new(2021, 12, 1), due_date: Date.new(2021, 12, 5), iid: 4, title: 'd') } - let!(:iteration_4) { sprints.create!(id: 4, group_id: group.id, iterations_cadence_id: nil, start_date: Date.new(2021, 11, 15), due_date: Date.new(2021, 11, 20), iid: 3, title: 'c') } - - describe '#up' do - it "correctly sets the sequence attribute with idempotency" do - migration.up - - expect(iteration_1.reload.sequence).to be 1 - expect(iteration_2.reload.sequence).to be 2 - expect(iteration_3.reload.sequence).to be 1 - expect(iteration_4.reload.sequence).to be nil - - iteration_5 = sprints.create!(id: 5, group_id: group.id, iterations_cadence_id: cadence_1.id, start_date: Date.new(2022, 1, 1), due_date: Date.new(2022, 1, 5), iid: 1, title: 'e' ) - - migration.down - migration.up - - expect(iteration_1.reload.sequence).to be 1 - expect(iteration_2.reload.sequence).to be 2 - expect(iteration_5.reload.sequence).to be 3 - 
expect(iteration_3.reload.sequence).to be 1 - expect(iteration_4.reload.sequence).to be nil - end - end -end diff --git a/spec/migrations/20211203091642_add_index_to_projects_on_marked_for_deletion_at_spec.rb b/spec/migrations/20211203091642_add_index_to_projects_on_marked_for_deletion_at_spec.rb deleted file mode 100644 index 7be54bc13cc..00000000000 --- a/spec/migrations/20211203091642_add_index_to_projects_on_marked_for_deletion_at_spec.rb +++ /dev/null @@ -1,18 +0,0 @@ -# frozen_string_literal: true - -require 'spec_helper' -require_migration! - -RSpec.describe AddIndexToProjectsOnMarkedForDeletionAt, feature_category: :projects do - it 'correctly migrates up and down' do - reversible_migration do |migration| - migration.before -> { - expect(ActiveRecord::Base.connection.indexes('projects').map(&:name)).not_to include('index_projects_not_aimed_for_deletion') - } - - migration.after -> { - expect(ActiveRecord::Base.connection.indexes('projects').map(&:name)).to include('index_projects_not_aimed_for_deletion') - } - end - end -end diff --git a/spec/migrations/20211207125331_remove_jobs_for_recalculate_vulnerabilities_occurrences_uuid_spec.rb b/spec/migrations/20211207125331_remove_jobs_for_recalculate_vulnerabilities_occurrences_uuid_spec.rb deleted file mode 100644 index be89ee9d2aa..00000000000 --- a/spec/migrations/20211207125331_remove_jobs_for_recalculate_vulnerabilities_occurrences_uuid_spec.rb +++ /dev/null @@ -1,45 +0,0 @@ -# frozen_string_literal: true -require 'spec_helper' -require_migration! 
- -def create_background_migration_jobs(ids, status, created_at) - proper_status = case status - when :pending - Gitlab::Database::BackgroundMigrationJob.statuses['pending'] - when :succeeded - Gitlab::Database::BackgroundMigrationJob.statuses['succeeded'] - else - raise ArgumentError - end - - background_migration_jobs.create!( - class_name: 'RecalculateVulnerabilitiesOccurrencesUuid', - arguments: Array(ids), - status: proper_status, - created_at: created_at - ) -end - -RSpec.describe RemoveJobsForRecalculateVulnerabilitiesOccurrencesUuid, :migration, -feature_category: :vulnerability_management do - let!(:background_migration_jobs) { table(:background_migration_jobs) } - - context 'when RecalculateVulnerabilitiesOccurrencesUuid jobs are present' do - before do - create_background_migration_jobs([1, 2, 3], :succeeded, DateTime.new(2021, 5, 5, 0, 2)) - create_background_migration_jobs([4, 5, 6], :pending, DateTime.new(2021, 5, 5, 0, 4)) - - create_background_migration_jobs([1, 2, 3], :succeeded, DateTime.new(2021, 8, 18, 0, 0)) - create_background_migration_jobs([4, 5, 6], :pending, DateTime.new(2021, 8, 18, 0, 2)) - create_background_migration_jobs([7, 8, 9], :pending, DateTime.new(2021, 8, 18, 0, 4)) - end - - it 'removes all jobs' do - expect(background_migration_jobs.count).to eq(5) - - migrate! - - expect(background_migration_jobs.count).to eq(0) - end - end -end diff --git a/spec/migrations/20211207135331_schedule_recalculate_uuid_on_vulnerabilities_occurrences4_spec.rb b/spec/migrations/20211207135331_schedule_recalculate_uuid_on_vulnerabilities_occurrences4_spec.rb deleted file mode 100644 index c7401c4790d..00000000000 --- a/spec/migrations/20211207135331_schedule_recalculate_uuid_on_vulnerabilities_occurrences4_spec.rb +++ /dev/null @@ -1,148 +0,0 @@ -# frozen_string_literal: true - -require 'spec_helper' -require_migration! 
- -RSpec.describe ScheduleRecalculateUuidOnVulnerabilitiesOccurrences4, feature_category: :vulnerability_management do - let(:namespace) { table(:namespaces).create!(name: 'user', path: 'user') } - let(:users) { table(:users) } - let(:user) { create_user! } - let(:project) { table(:projects).create!(id: 123, namespace_id: namespace.id) } - let(:scanners) { table(:vulnerability_scanners) } - let(:scanner) { scanners.create!(project_id: project.id, external_id: 'test 1', name: 'test scanner 1') } - let(:different_scanner) { scanners.create!(project_id: project.id, external_id: 'test 2', name: 'test scanner 2') } - let(:vulnerabilities) { table(:vulnerabilities) } - let(:vulnerabilities_findings) { table(:vulnerability_occurrences) } - let(:vulnerability_finding_signatures) { table(:vulnerability_finding_signatures) } - let(:vulnerability_identifiers) { table(:vulnerability_identifiers) } - let(:vulnerability_identifier) do - vulnerability_identifiers.create!( - project_id: project.id, - external_type: 'uuid-v5', - external_id: 'uuid-v5', - fingerprint: '7e394d1b1eb461a7406d7b1e08f057a1cf11287a', - name: 'Identifier for UUIDv5') - end - - let(:different_vulnerability_identifier) do - vulnerability_identifiers.create!( - project_id: project.id, - external_type: 'uuid-v4', - external_id: 'uuid-v4', - fingerprint: '772da93d34a1ba010bcb5efa9fb6f8e01bafcc89', - name: 'Identifier for UUIDv4') - end - - let!(:uuidv4_finding) do - create_finding!( - vulnerability_id: vulnerability_for_uuidv4.id, - project_id: project.id, - scanner_id: different_scanner.id, - primary_identifier_id: different_vulnerability_identifier.id, - location_fingerprint: Gitlab::Database::ShaAttribute.serialize('fa18f432f1d56675f4098d318739c3cd5b14eb3e'), - uuid: 'b3cc2518-5446-4dea-871c-89d5e999c1ac' - ) - end - - let(:vulnerability_for_uuidv4) do - create_vulnerability!( - project_id: project.id, - author_id: user.id - ) - end - - let!(:uuidv5_finding) do - create_finding!( - vulnerability_id: 
vulnerability_for_uuidv5.id, - project_id: project.id, - scanner_id: scanner.id, - primary_identifier_id: vulnerability_identifier.id, - location_fingerprint: Gitlab::Database::ShaAttribute.serialize('838574be0210968bf6b9f569df9c2576242cbf0a'), - uuid: '77211ed6-7dff-5f6b-8c9a-da89ad0a9b60' - ) - end - - let(:vulnerability_for_uuidv5) do - create_vulnerability!( - project_id: project.id, - author_id: user.id - ) - end - - let(:vulnerability_for_finding_with_signature) do - create_vulnerability!( - project_id: project.id, - author_id: user.id - ) - end - - let!(:finding_with_signature) do - create_finding!( - vulnerability_id: vulnerability_for_finding_with_signature.id, - project_id: project.id, - scanner_id: scanner.id, - primary_identifier_id: vulnerability_identifier.id, - report_type: 0, # "sast" - location_fingerprint: Gitlab::Database::ShaAttribute.serialize('123609eafffffa2207a9ca2425ba4337h34fga1b'), - uuid: '252aa474-d689-5d2b-ab42-7bbb5a100c02' - ) - end - - before do - stub_const("#{described_class}::BATCH_SIZE", 1) - end - - around do |example| - freeze_time { Sidekiq::Testing.fake! { example.run } } - end - - it 'schedules background migrations', :aggregate_failures do - migrate! 
- - expect(BackgroundMigrationWorker.jobs.size).to eq(3) - expect(described_class::MIGRATION).to be_scheduled_delayed_migration(2.minutes, uuidv4_finding.id, uuidv4_finding.id) - expect(described_class::MIGRATION).to be_scheduled_delayed_migration(4.minutes, uuidv5_finding.id, uuidv5_finding.id) - expect(described_class::MIGRATION).to be_scheduled_delayed_migration(6.minutes, finding_with_signature.id, finding_with_signature.id) - end - - private - - def create_vulnerability!(project_id:, author_id:, title: 'test', severity: 7, confidence: 7, report_type: 0) - vulnerabilities.create!( - project_id: project_id, - author_id: author_id, - title: title, - severity: severity, - confidence: confidence, - report_type: report_type - ) - end - - def create_finding!( - vulnerability_id:, project_id:, scanner_id:, primary_identifier_id:, location_fingerprint:, uuid:, report_type: 0) - vulnerabilities_findings.create!( - vulnerability_id: vulnerability_id, - project_id: project_id, - name: 'test', - severity: 7, - confidence: 7, - report_type: report_type, - project_fingerprint: '123qweasdzxc', - scanner_id: scanner_id, - primary_identifier_id: primary_identifier_id, - location_fingerprint: location_fingerprint, - metadata_version: 'test', - raw_metadata: 'test', - uuid: uuid - ) - end - - def create_user!(name: "Example User", email: "user@example.com", user_type: nil) - users.create!( - name: name, - email: email, - username: name, - projects_limit: 0 - ) - end -end diff --git a/spec/migrations/20211210140629_encrypt_static_object_token_spec.rb b/spec/migrations/20211210140629_encrypt_static_object_token_spec.rb deleted file mode 100644 index f103ee54990..00000000000 --- a/spec/migrations/20211210140629_encrypt_static_object_token_spec.rb +++ /dev/null @@ -1,50 +0,0 @@ -# frozen_string_literal: true -require 'spec_helper' - -require_migration! 
- -RSpec.describe EncryptStaticObjectToken, :migration, feature_category: :source_code_management do - let!(:background_migration_jobs) { table(:background_migration_jobs) } - let!(:users) { table(:users) } - - let!(:user_without_tokens) { create_user!(name: 'notoken') } - let!(:user_with_plaintext_token_1) { create_user!(name: 'plaintext_1', token: 'token') } - let!(:user_with_plaintext_token_2) { create_user!(name: 'plaintext_2', token: 'TOKEN') } - let!(:user_with_encrypted_token) { create_user!(name: 'encrypted', encrypted_token: 'encrypted') } - let!(:user_with_both_tokens) { create_user!(name: 'both', token: 'token2', encrypted_token: 'encrypted2') } - - before do - stub_const("#{described_class}::BATCH_SIZE", 1) - end - - around do |example| - freeze_time { Sidekiq::Testing.fake! { example.run } } - end - - it 'schedules background migrations' do - migrate! - - expect(background_migration_jobs.count).to eq(2) - expect(background_migration_jobs.first.arguments).to match_array([user_with_plaintext_token_1.id, user_with_plaintext_token_1.id]) - expect(background_migration_jobs.second.arguments).to match_array([user_with_plaintext_token_2.id, user_with_plaintext_token_2.id]) - - expect(BackgroundMigrationWorker.jobs.size).to eq(2) - expect(described_class::MIGRATION).to be_scheduled_delayed_migration(2.minutes, user_with_plaintext_token_1.id, user_with_plaintext_token_1.id) - expect(described_class::MIGRATION).to be_scheduled_delayed_migration(4.minutes, user_with_plaintext_token_2.id, user_with_plaintext_token_2.id) - end - - private - - def create_user!(name:, token: nil, encrypted_token: nil) - email = "#{name}@example.com" - - table(:users).create!( - name: name, - email: email, - username: name, - projects_limit: 0, - static_object_token: token, - static_object_token_encrypted: encrypted_token - ) - end -end diff --git a/spec/migrations/20211214012507_backfill_incident_issue_escalation_statuses_spec.rb 
b/spec/migrations/20211214012507_backfill_incident_issue_escalation_statuses_spec.rb deleted file mode 100644 index 0df52df43d8..00000000000 --- a/spec/migrations/20211214012507_backfill_incident_issue_escalation_statuses_spec.rb +++ /dev/null @@ -1,19 +0,0 @@ -# frozen_string_literal: true - -require 'spec_helper' -require_migration! - -RSpec.describe BackfillIncidentIssueEscalationStatuses, feature_category: :incident_management do - let(:namespaces) { table(:namespaces) } - let(:projects) { table(:projects) } - let(:issues) { table(:issues) } - let(:namespace) { namespaces.create!(name: 'foo', path: 'foo') } - let(:project) { projects.create!(namespace_id: namespace.id) } - - # Backfill removed - see db/migrate/20220321234317_remove_all_issuable_escalation_statuses.rb. - it 'does nothing' do - issues.create!(project_id: project.id, issue_type: 1) - - expect { migrate! }.not_to change { BackgroundMigrationWorker.jobs.size } - end -end diff --git a/spec/migrations/20211217174331_mark_recalculate_finding_signatures_as_completed_spec.rb b/spec/migrations/20211217174331_mark_recalculate_finding_signatures_as_completed_spec.rb deleted file mode 100644 index 2d808adf578..00000000000 --- a/spec/migrations/20211217174331_mark_recalculate_finding_signatures_as_completed_spec.rb +++ /dev/null @@ -1,64 +0,0 @@ -# frozen_string_literal: true -require 'spec_helper' -require_migration! 
- -def create_background_migration_jobs(ids, status, created_at) - proper_status = case status - when :pending - Gitlab::Database::BackgroundMigrationJob.statuses['pending'] - when :succeeded - Gitlab::Database::BackgroundMigrationJob.statuses['succeeded'] - else - raise ArgumentError - end - - background_migration_jobs.create!( - class_name: 'RecalculateVulnerabilitiesOccurrencesUuid', - arguments: Array(ids), - status: proper_status, - created_at: created_at - ) -end - -RSpec.describe MarkRecalculateFindingSignaturesAsCompleted, :migration, feature_category: :vulnerability_management do - let!(:background_migration_jobs) { table(:background_migration_jobs) } - - context 'when RecalculateVulnerabilitiesOccurrencesUuid jobs are present' do - before do - create_background_migration_jobs([1, 2, 3], :succeeded, DateTime.new(2021, 5, 5, 0, 2)) - create_background_migration_jobs([4, 5, 6], :pending, DateTime.new(2021, 5, 5, 0, 4)) - - create_background_migration_jobs([1, 2, 3], :succeeded, DateTime.new(2021, 8, 18, 0, 0)) - create_background_migration_jobs([4, 5, 6], :pending, DateTime.new(2021, 8, 18, 0, 2)) - create_background_migration_jobs([7, 8, 9], :pending, DateTime.new(2021, 8, 18, 0, 4)) - end - - describe 'gitlab.com' do - before do - allow(::Gitlab).to receive(:com?).and_return(true) - end - - it 'marks all jobs as succeeded' do - expect(background_migration_jobs.where(status: 1).count).to eq(2) - - migrate! - - expect(background_migration_jobs.where(status: 1).count).to eq(5) - end - end - - describe 'self managed' do - before do - allow(::Gitlab).to receive(:com?).and_return(false) - end - - it 'does not change job status' do - expect(background_migration_jobs.where(status: 1).count).to eq(2) - - migrate! 
- - expect(background_migration_jobs.where(status: 1).count).to eq(2) - end - end - end -end diff --git a/spec/migrations/20220106111958_add_insert_or_update_vulnerability_reads_trigger_spec.rb b/spec/migrations/20220106111958_add_insert_or_update_vulnerability_reads_trigger_spec.rb deleted file mode 100644 index 263289462ba..00000000000 --- a/spec/migrations/20220106111958_add_insert_or_update_vulnerability_reads_trigger_spec.rb +++ /dev/null @@ -1,151 +0,0 @@ -# frozen_string_literal: true - -require 'spec_helper' - -require_migration! - -RSpec.describe AddInsertOrUpdateVulnerabilityReadsTrigger, feature_category: :vulnerability_management do - let(:migration) { described_class.new } - let(:vulnerabilities) { table(:vulnerabilities) } - let(:vulnerability_reads) { table(:vulnerability_reads) } - let(:vulnerabilities_findings) { table(:vulnerability_occurrences) } - let(:namespace) { table(:namespaces).create!(name: 'user', path: 'user') } - let(:user) { table(:users).create!(id: 13, email: 'author@example.com', username: 'author', projects_limit: 10) } - let(:project) { table(:projects).create!(id: 123, namespace_id: namespace.id) } - let(:scanner) { table(:vulnerability_scanners).create!(project_id: project.id, external_id: 'test 1', name: 'test scanner 1') } - - let(:vulnerability) do - create_vulnerability!( - project_id: project.id, - author_id: user.id - ) - end - - let(:vulnerability2) do - create_vulnerability!( - project_id: project.id, - author_id: user.id - ) - end - - let(:identifier) do - table(:vulnerability_identifiers).create!( - project_id: project.id, - external_type: 'uuid-v5', - external_id: 'uuid-v5', - fingerprint: '7e394d1b1eb461a7406d7b1e08f057a1cf11287a', - name: 'Identifier for UUIDv5') - end - - let(:finding) do - create_finding!( - project_id: project.id, - scanner_id: scanner.id, - primary_identifier_id: identifier.id - ) - end - - describe '#up' do - before do - migrate! 
- end - - describe 'UPDATE trigger' do - context 'when vulnerability_id is updated' do - it 'creates a new vulnerability_reads row' do - expect do - finding.update!(vulnerability_id: vulnerability.id) - end.to change { vulnerability_reads.count }.from(0).to(1) - end - end - - context 'when vulnerability_id is not updated' do - it 'does not create a new vulnerability_reads row' do - finding.update!(vulnerability_id: nil) - - expect do - finding.update!(location: '') - end.not_to change { vulnerability_reads.count } - end - end - end - - describe 'INSERT trigger' do - context 'when vulnerability_id is set' do - it 'creates a new vulnerability_reads row' do - expect do - create_finding!( - vulnerability_id: vulnerability2.id, - project_id: project.id, - scanner_id: scanner.id, - primary_identifier_id: identifier.id - ) - end.to change { vulnerability_reads.count }.from(0).to(1) - end - end - - context 'when vulnerability_id is not set' do - it 'does not create a new vulnerability_reads row' do - expect do - create_finding!( - project_id: project.id, - scanner_id: scanner.id, - primary_identifier_id: identifier.id - ) - end.not_to change { vulnerability_reads.count } - end - end - end - end - - describe '#down' do - before do - migration.up - migration.down - end - - it 'drops the trigger' do - expect do - finding.update!(vulnerability_id: vulnerability.id) - end.not_to change { vulnerability_reads.count } - end - end - - private - - def create_vulnerability!(project_id:, author_id:, title: 'test', severity: 7, confidence: 7, report_type: 0) - vulnerabilities.create!( - project_id: project_id, - author_id: author_id, - title: title, - severity: severity, - confidence: confidence, - report_type: report_type - ) - end - - # rubocop:disable Metrics/ParameterLists - def create_finding!( - project_id:, scanner_id:, primary_identifier_id:, vulnerability_id: nil, - name: "test", severity: 7, confidence: 7, report_type: 0, - project_fingerprint: '123qweasdzxc', location: { 
"image" => "alpine:3.4" }, location_fingerprint: 'test', - metadata_version: 'test', raw_metadata: 'test', uuid: SecureRandom.uuid) - vulnerabilities_findings.create!( - vulnerability_id: vulnerability_id, - project_id: project_id, - name: name, - severity: severity, - confidence: confidence, - report_type: report_type, - project_fingerprint: project_fingerprint, - scanner_id: scanner_id, - primary_identifier_id: primary_identifier_id, - location: location, - location_fingerprint: location_fingerprint, - metadata_version: metadata_version, - raw_metadata: raw_metadata, - uuid: uuid - ) - end - # rubocop:enable Metrics/ParameterLists -end diff --git a/spec/migrations/20220106112043_add_update_vulnerability_reads_trigger_spec.rb b/spec/migrations/20220106112043_add_update_vulnerability_reads_trigger_spec.rb deleted file mode 100644 index 152a551bc7b..00000000000 --- a/spec/migrations/20220106112043_add_update_vulnerability_reads_trigger_spec.rb +++ /dev/null @@ -1,128 +0,0 @@ -# frozen_string_literal: true - -require 'spec_helper' - -require_migration! 
- -RSpec.describe AddUpdateVulnerabilityReadsTrigger, feature_category: :vulnerability_management do - let(:migration) { described_class.new } - let(:vulnerability_reads) { table(:vulnerability_reads) } - let(:issue_links) { table(:vulnerability_issue_links) } - let(:vulnerabilities) { table(:vulnerabilities) } - let(:vulnerabilities_findings) { table(:vulnerability_occurrences) } - - let(:namespace) { table(:namespaces).create!(name: 'user', path: 'user') } - let(:user) { table(:users).create!(id: 13, email: 'author@example.com', username: 'author', projects_limit: 10) } - let(:project) { table(:projects).create!(id: 123, namespace_id: namespace.id) } - let(:issue) { table(:issues).create!(description: '1234', state_id: 1, project_id: project.id) } - let(:scanner) { table(:vulnerability_scanners).create!(project_id: project.id, external_id: 'test 1', name: 'test scanner 1') } - - let(:vulnerability) do - create_vulnerability!( - project_id: project.id, - report_type: 7, - author_id: user.id - ) - end - - let(:identifier) do - table(:vulnerability_identifiers).create!( - project_id: project.id, - external_type: 'uuid-v5', - external_id: 'uuid-v5', - fingerprint: '7e394d1b1eb461a7406d7b1e08f057a1cf11287a', - name: 'Identifier for UUIDv5') - end - - describe '#up' do - before do - migrate! 
- end - - describe 'UPDATE trigger' do - before do - create_finding!( - vulnerability_id: vulnerability.id, - project_id: project.id, - scanner_id: scanner.id, - report_type: 7, - primary_identifier_id: identifier.id - ) - end - - context 'when vulnerability attributes are updated' do - it 'updates vulnerability attributes in vulnerability_reads' do - expect do - vulnerability.update!(severity: 6) - end.to change { vulnerability_reads.first.severity }.from(7).to(6) - end - end - - context 'when vulnerability attributes are not updated' do - it 'does not update vulnerability attributes in vulnerability_reads' do - expect do - vulnerability.update!(title: "New vulnerability") - end.not_to change { vulnerability_reads.first } - end - end - end - end - - describe '#down' do - before do - migration.up - migration.down - create_finding!( - vulnerability_id: vulnerability.id, - project_id: project.id, - scanner_id: scanner.id, - report_type: 7, - primary_identifier_id: identifier.id - ) - end - - it 'drops the trigger' do - expect do - vulnerability.update!(severity: 6) - end.not_to change { vulnerability_reads.first.severity } - end - end - - private - - def create_vulnerability!(project_id:, author_id:, title: 'test', severity: 7, confidence: 7, report_type: 0) - vulnerabilities.create!( - project_id: project_id, - author_id: author_id, - title: title, - severity: severity, - confidence: confidence, - report_type: report_type - ) - end - - # rubocop:disable Metrics/ParameterLists - def create_finding!( - project_id:, scanner_id:, primary_identifier_id:, vulnerability_id: nil, - name: "test", severity: 7, confidence: 7, report_type: 0, - project_fingerprint: '123qweasdzxc', location: { "image" => "alpine:3.4" }, location_fingerprint: 'test', - metadata_version: 'test', raw_metadata: 'test', uuid: SecureRandom.uuid) - vulnerabilities_findings.create!( - vulnerability_id: vulnerability_id, - project_id: project_id, - name: name, - severity: severity, - confidence: 
confidence, - report_type: report_type, - project_fingerprint: project_fingerprint, - scanner_id: scanner_id, - primary_identifier_id: primary_identifier_id, - location: location, - location_fingerprint: location_fingerprint, - metadata_version: metadata_version, - raw_metadata: raw_metadata, - uuid: uuid - ) - end - # rubocop:enable Metrics/ParameterLists -end diff --git a/spec/migrations/20220106112085_add_update_vulnerability_reads_location_trigger_spec.rb b/spec/migrations/20220106112085_add_update_vulnerability_reads_location_trigger_spec.rb deleted file mode 100644 index 9fc40b0b5f1..00000000000 --- a/spec/migrations/20220106112085_add_update_vulnerability_reads_location_trigger_spec.rb +++ /dev/null @@ -1,136 +0,0 @@ -# frozen_string_literal: true - -require 'spec_helper' - -require_migration! - -RSpec.describe AddUpdateVulnerabilityReadsLocationTrigger, feature_category: :vulnerability_management do - let(:migration) { described_class.new } - let(:vulnerability_reads) { table(:vulnerability_reads) } - let(:issue_links) { table(:vulnerability_issue_links) } - let(:vulnerabilities) { table(:vulnerabilities) } - let(:vulnerabilities_findings) { table(:vulnerability_occurrences) } - - let(:namespace) { table(:namespaces).create!(name: 'user', path: 'user') } - let(:user) { table(:users).create!(id: 13, email: 'author@example.com', username: 'author', projects_limit: 10) } - let(:project) { table(:projects).create!(id: 123, namespace_id: namespace.id) } - let(:issue) { table(:issues).create!(description: '1234', state_id: 1, project_id: project.id) } - let(:scanner) { table(:vulnerability_scanners).create!(project_id: project.id, external_id: 'test 1', name: 'test scanner 1') } - - let(:vulnerability) do - create_vulnerability!( - project_id: project.id, - report_type: 7, - author_id: user.id - ) - end - - let(:identifier) do - table(:vulnerability_identifiers).create!( - project_id: project.id, - external_type: 'uuid-v5', - external_id: 'uuid-v5', - 
fingerprint: '7e394d1b1eb461a7406d7b1e08f057a1cf11287a', - name: 'Identifier for UUIDv5') - end - - describe '#up' do - before do - migrate! - end - - describe 'UPDATE trigger' do - context 'when image is updated' do - it 'updates location_image in vulnerability_reads' do - finding = create_finding!( - vulnerability_id: vulnerability.id, - project_id: project.id, - scanner_id: scanner.id, - report_type: 7, - location: { "image" => "alpine:3.4" }, - primary_identifier_id: identifier.id - ) - - expect do - finding.update!(location: { "image" => "alpine:4", "kubernetes_resource" => { "agent_id" => "1234" } }) - end.to change { vulnerability_reads.first.location_image }.from("alpine:3.4").to("alpine:4") - end - end - - context 'when image is not updated' do - it 'updates location_image in vulnerability_reads' do - finding = create_finding!( - vulnerability_id: vulnerability.id, - project_id: project.id, - scanner_id: scanner.id, - report_type: 7, - location: { "image" => "alpine:3.4", "kubernetes_resource" => { "agent_id" => "1234" } }, - primary_identifier_id: identifier.id - ) - - expect do - finding.update!(project_fingerprint: "123qweasdzx") - end.not_to change { vulnerability_reads.first.location_image } - end - end - end - end - - describe '#down' do - before do - migration.up - migration.down - end - - it 'drops the trigger' do - finding = create_finding!( - vulnerability_id: vulnerability.id, - project_id: project.id, - scanner_id: scanner.id, - primary_identifier_id: identifier.id - ) - - expect do - finding.update!(location: '{"image":"alpine:4"}') - end.not_to change { vulnerability_reads.first.location_image } - end - end - - private - - def create_vulnerability!(project_id:, author_id:, title: 'test', severity: 7, confidence: 7, report_type: 0) - vulnerabilities.create!( - project_id: project_id, - author_id: author_id, - title: title, - severity: severity, - confidence: confidence, - report_type: report_type - ) - end - - # rubocop:disable 
Metrics/ParameterLists - def create_finding!( - project_id:, scanner_id:, primary_identifier_id:, vulnerability_id: nil, - name: "test", severity: 7, confidence: 7, report_type: 0, - project_fingerprint: '123qweasdzxc', location: { "image" => "alpine:3.4" }, location_fingerprint: 'test', - metadata_version: 'test', raw_metadata: 'test', uuid: SecureRandom.uuid) - vulnerabilities_findings.create!( - vulnerability_id: vulnerability_id, - project_id: project_id, - name: name, - severity: severity, - confidence: confidence, - report_type: report_type, - project_fingerprint: project_fingerprint, - scanner_id: scanner_id, - primary_identifier_id: primary_identifier_id, - location: location, - location_fingerprint: location_fingerprint, - metadata_version: metadata_version, - raw_metadata: raw_metadata, - uuid: uuid - ) - end - # rubocop:enable Metrics/ParameterLists -end diff --git a/spec/migrations/20220106163326_add_has_issues_on_vulnerability_reads_trigger_spec.rb b/spec/migrations/20220106163326_add_has_issues_on_vulnerability_reads_trigger_spec.rb deleted file mode 100644 index e58fdfb1591..00000000000 --- a/spec/migrations/20220106163326_add_has_issues_on_vulnerability_reads_trigger_spec.rb +++ /dev/null @@ -1,134 +0,0 @@ -# frozen_string_literal: true - -require 'spec_helper' - -require_migration! 
- -RSpec.describe AddHasIssuesOnVulnerabilityReadsTrigger, feature_category: :vulnerability_management do - let(:migration) { described_class.new } - let(:vulnerability_reads) { table(:vulnerability_reads) } - let(:issue_links) { table(:vulnerability_issue_links) } - let(:vulnerabilities) { table(:vulnerabilities) } - let(:vulnerabilities_findings) { table(:vulnerability_occurrences) } - - let(:namespace) { table(:namespaces).create!(name: 'user', path: 'user') } - let(:user) { table(:users).create!(id: 13, email: 'author@example.com', username: 'author', projects_limit: 10) } - let(:project) { table(:projects).create!(id: 123, namespace_id: namespace.id) } - let(:issue) { table(:issues).create!(description: '1234', state_id: 1, project_id: project.id) } - let(:scanner) { table(:vulnerability_scanners).create!(project_id: project.id, external_id: 'test 1', name: 'test scanner 1') } - - let(:vulnerability) do - create_vulnerability!( - project_id: project.id, - author_id: user.id - ) - end - - let(:identifier) do - table(:vulnerability_identifiers).create!( - project_id: project.id, - external_type: 'uuid-v5', - external_id: 'uuid-v5', - fingerprint: '7e394d1b1eb461a7406d7b1e08f057a1cf11287a', - name: 'Identifier for UUIDv5') - end - - before do - create_finding!( - vulnerability_id: vulnerability.id, - project_id: project.id, - scanner_id: scanner.id, - primary_identifier_id: identifier.id - ) - - @vulnerability_read = vulnerability_reads.first - end - - describe '#up' do - before do - migrate! 
- end - - describe 'INSERT trigger' do - it 'updates has_issues in vulnerability_reads' do - expect do - issue_links.create!(vulnerability_id: vulnerability.id, issue_id: issue.id) - end.to change { @vulnerability_read.reload.has_issues }.from(false).to(true) - end - end - - describe 'DELETE trigger' do - let(:issue2) { table(:issues).create!(description: '1234', state_id: 1, project_id: project.id) } - - it 'does not change has_issues when there exists another issue' do - issue_link1 = issue_links.create!(vulnerability_id: vulnerability.id, issue_id: issue.id) - issue_links.create!(vulnerability_id: vulnerability.id, issue_id: issue2.id) - - expect do - issue_link1.delete - end.not_to change { @vulnerability_read.reload.has_issues } - end - - it 'unsets has_issues when all issues are deleted' do - issue_link1 = issue_links.create!(vulnerability_id: vulnerability.id, issue_id: issue.id) - issue_link2 = issue_links.create!(vulnerability_id: vulnerability.id, issue_id: issue2.id) - - expect do - issue_link1.delete - issue_link2.delete - end.to change { @vulnerability_read.reload.has_issues }.from(true).to(false) - end - end - end - - describe '#down' do - before do - migration.up - migration.down - end - - it 'drops the trigger' do - expect do - issue_links.create!(vulnerability_id: vulnerability.id, issue_id: issue.id) - end.not_to change { @vulnerability_read.has_issues } - end - end - - private - - def create_vulnerability!(project_id:, author_id:, title: 'test', severity: 7, confidence: 7, report_type: 0) - vulnerabilities.create!( - project_id: project_id, - author_id: author_id, - title: title, - severity: severity, - confidence: confidence, - report_type: report_type - ) - end - - # rubocop:disable Metrics/ParameterLists - def create_finding!( - project_id:, scanner_id:, primary_identifier_id:, vulnerability_id: nil, - name: "test", severity: 7, confidence: 7, report_type: 0, - project_fingerprint: '123qweasdzxc', location: { "image" => "alpine:3.4" }, 
location_fingerprint: 'test', - metadata_version: 'test', raw_metadata: 'test', uuid: SecureRandom.uuid) - vulnerabilities_findings.create!( - vulnerability_id: vulnerability_id, - project_id: project_id, - name: name, - severity: severity, - confidence: confidence, - report_type: report_type, - project_fingerprint: project_fingerprint, - scanner_id: scanner_id, - primary_identifier_id: primary_identifier_id, - location: location, - location_fingerprint: location_fingerprint, - metadata_version: metadata_version, - raw_metadata: raw_metadata, - uuid: uuid - ) - end - # rubocop:enable Metrics/ParameterLists -end diff --git a/spec/migrations/20220107064845_populate_vulnerability_reads_spec.rb b/spec/migrations/20220107064845_populate_vulnerability_reads_spec.rb deleted file mode 100644 index 1338f826537..00000000000 --- a/spec/migrations/20220107064845_populate_vulnerability_reads_spec.rb +++ /dev/null @@ -1,106 +0,0 @@ -# frozen_string_literal: true -require 'spec_helper' - -require_migration! 
- -RSpec.describe PopulateVulnerabilityReads, :migration, feature_category: :vulnerability_management do - let!(:namespace) { table(:namespaces).create!(name: 'user', path: 'user') } - let!(:user) { table(:users).create!(email: 'author@example.com', username: 'author', projects_limit: 10) } - let!(:project) { table(:projects).create!(namespace_id: namespace.id) } - let!(:scanner) { table(:vulnerability_scanners).create!(project_id: project.id, external_id: 'test 1', name: 'test scanner 1') } - let!(:background_migration_jobs) { table(:background_migration_jobs) } - let!(:vulnerabilities) { table(:vulnerabilities) } - let!(:vulnerability_reads) { table(:vulnerability_reads) } - let!(:vulnerabilities_findings) { table(:vulnerability_occurrences) } - let!(:vulnerability_issue_links) { table(:vulnerability_issue_links) } - let!(:vulnerability_ids) { [] } - - before do - stub_const("#{described_class}::BATCH_SIZE", 1) - stub_const("#{described_class}::SUB_BATCH_SIZE", 1) - - 5.times.each do |x| - vulnerability = create_vulnerability!( - project_id: project.id, - report_type: 7, - author_id: user.id - ) - identifier = table(:vulnerability_identifiers).create!( - project_id: project.id, - external_type: 'uuid-v5', - external_id: 'uuid-v5', - fingerprint: Digest::SHA1.hexdigest(vulnerability.id.to_s), - name: 'Identifier for UUIDv5') - - create_finding!( - vulnerability_id: vulnerability.id, - project_id: project.id, - scanner_id: scanner.id, - primary_identifier_id: identifier.id - ) - - vulnerability_ids << vulnerability.id - end - end - - around do |example| - freeze_time { Sidekiq::Testing.fake! { example.run } } - end - - it 'schedules background migrations' do - migrate! 
- - expect(background_migration_jobs.count).to eq(5) - expect(background_migration_jobs.first.arguments).to match_array([vulnerability_ids.first, vulnerability_ids.first, 1]) - expect(background_migration_jobs.second.arguments).to match_array([vulnerability_ids.second, vulnerability_ids.second, 1]) - expect(background_migration_jobs.third.arguments).to match_array([vulnerability_ids.third, vulnerability_ids.third, 1]) - expect(background_migration_jobs.fourth.arguments).to match_array([vulnerability_ids.fourth, vulnerability_ids.fourth, 1]) - expect(background_migration_jobs.fifth.arguments).to match_array([vulnerability_ids.fifth, vulnerability_ids.fifth, 1]) - - expect(BackgroundMigrationWorker.jobs.size).to eq(5) - expect(described_class::MIGRATION_NAME).to be_scheduled_delayed_migration(2.minutes, vulnerability_ids.first, vulnerability_ids.first, 1) - expect(described_class::MIGRATION_NAME).to be_scheduled_delayed_migration(4.minutes, vulnerability_ids.second, vulnerability_ids.second, 1) - expect(described_class::MIGRATION_NAME).to be_scheduled_delayed_migration(6.minutes, vulnerability_ids.third, vulnerability_ids.third, 1) - expect(described_class::MIGRATION_NAME).to be_scheduled_delayed_migration(8.minutes, vulnerability_ids.fourth, vulnerability_ids.fourth, 1) - expect(described_class::MIGRATION_NAME).to be_scheduled_delayed_migration(10.minutes, vulnerability_ids.fifth, vulnerability_ids.fifth, 1) - end - - private - - def create_vulnerability!(project_id:, author_id:, title: 'test', severity: 7, confidence: 7, report_type: 0) - vulnerabilities.create!( - project_id: project_id, - author_id: author_id, - title: title, - severity: severity, - confidence: confidence, - report_type: report_type - ) - end - - # rubocop:disable Metrics/ParameterLists - def create_finding!( - vulnerability_id:, project_id:, scanner_id:, primary_identifier_id:, id: nil, - name: "test", severity: 7, confidence: 7, report_type: 0, - project_fingerprint: '123qweasdzxc', 
location_fingerprint: 'test', - metadata_version: 'test', raw_metadata: 'test', uuid: SecureRandom.uuid) - params = { - vulnerability_id: vulnerability_id, - project_id: project_id, - name: name, - severity: severity, - confidence: confidence, - report_type: report_type, - project_fingerprint: project_fingerprint, - scanner_id: scanner_id, - primary_identifier_id: primary_identifier_id, - location_fingerprint: location_fingerprint, - metadata_version: metadata_version, - raw_metadata: raw_metadata, - uuid: uuid - } - params[:id] = id unless id.nil? - vulnerabilities_findings.create!(params) - end - # rubocop:enable Metrics/ParameterLists -end diff --git a/spec/migrations/20220120094340_drop_position_from_security_findings_spec.rb b/spec/migrations/20220120094340_drop_position_from_security_findings_spec.rb deleted file mode 100644 index 1470f2b3cad..00000000000 --- a/spec/migrations/20220120094340_drop_position_from_security_findings_spec.rb +++ /dev/null @@ -1,21 +0,0 @@ -# frozen_string_literal: true - -require 'spec_helper' -require_migration!('drop_position_from_security_findings') - -RSpec.describe DropPositionFromSecurityFindings, feature_category: :vulnerability_management do - let(:events) { table(:security_findings) } - - it 'correctly migrates up and down' do - reversible_migration do |migration| - migration.before -> { - expect(events.column_names).to include('position') - } - - migration.after -> { - events.reset_column_information - expect(events.column_names).not_to include('position') - } - end - end -end diff --git a/spec/migrations/20220124130028_dedup_runner_projects_spec.rb b/spec/migrations/20220124130028_dedup_runner_projects_spec.rb deleted file mode 100644 index ee468f40908..00000000000 --- a/spec/migrations/20220124130028_dedup_runner_projects_spec.rb +++ /dev/null @@ -1,66 +0,0 @@ -# frozen_string_literal: true - -require 'spec_helper' -require_migration! 
- -RSpec.describe DedupRunnerProjects, :migration, :suppress_gitlab_schemas_validate_connection, -schema: 20220120085655, feature_category: :runner do - let(:namespaces) { table(:namespaces) } - let(:projects) { table(:projects) } - let(:runners) { table(:ci_runners) } - let(:runner_projects) { table(:ci_runner_projects) } - - let!(:namespace) { namespaces.create!(name: 'foo', path: 'foo') } - let!(:project) { projects.create!(namespace_id: namespace.id) } - let!(:project_2) { projects.create!(namespace_id: namespace.id) } - let!(:runner) { runners.create!(runner_type: 'project_type') } - let!(:runner_2) { runners.create!(runner_type: 'project_type') } - let!(:runner_3) { runners.create!(runner_type: 'project_type') } - - let!(:duplicated_runner_project_1) { runner_projects.create!(runner_id: runner.id, project_id: project.id) } - let!(:duplicated_runner_project_2) { runner_projects.create!(runner_id: runner.id, project_id: project.id) } - let!(:duplicated_runner_project_3) { runner_projects.create!(runner_id: runner_2.id, project_id: project_2.id) } - let!(:duplicated_runner_project_4) { runner_projects.create!(runner_id: runner_2.id, project_id: project_2.id) } - - let!(:non_duplicated_runner_project) { runner_projects.create!(runner_id: runner_3.id, project_id: project.id) } - - it 'deduplicates ci_runner_projects table' do - expect { migrate! }.to change { runner_projects.count }.from(5).to(3) - end - - it 'merges `duplicated_runner_project_1` with `duplicated_runner_project_2`', :aggregate_failures do - migrate! 
- - expect(runner_projects.where(id: duplicated_runner_project_1.id)).not_to(exist) - - merged_runner_projects = runner_projects.find_by(id: duplicated_runner_project_2.id) - - expect(merged_runner_projects).to be_present - expect(merged_runner_projects.created_at).to be_like_time(duplicated_runner_project_1.created_at) - expect(merged_runner_projects.created_at).to be_like_time(duplicated_runner_project_2.created_at) - end - - it 'merges `duplicated_runner_project_3` with `duplicated_runner_project_4`', :aggregate_failures do - migrate! - - expect(runner_projects.where(id: duplicated_runner_project_3.id)).not_to(exist) - - merged_runner_projects = runner_projects.find_by(id: duplicated_runner_project_4.id) - - expect(merged_runner_projects).to be_present - expect(merged_runner_projects.created_at).to be_like_time(duplicated_runner_project_3.created_at) - expect(merged_runner_projects.created_at).to be_like_time(duplicated_runner_project_4.created_at) - end - - it 'does not change non duplicated records' do - expect { migrate! }.not_to change { non_duplicated_runner_project.reload.attributes } - end - - it 'does nothing when there are no runner projects' do - runner_projects.delete_all - - migrate! 
- - expect(runner_projects.count).to eq(0) - end -end diff --git a/spec/migrations/20220128155251_remove_dangling_running_builds_spec.rb b/spec/migrations/20220128155251_remove_dangling_running_builds_spec.rb deleted file mode 100644 index ea88cf1a2ce..00000000000 --- a/spec/migrations/20220128155251_remove_dangling_running_builds_spec.rb +++ /dev/null @@ -1,53 +0,0 @@ -# frozen_string_literal: true - -require 'spec_helper' -require_migration!('remove_dangling_running_builds') - -RSpec.describe RemoveDanglingRunningBuilds, :suppress_gitlab_schemas_validate_connection, -feature_category: :continuous_integration do - let(:namespace) { table(:namespaces).create!(name: 'user', path: 'user') } - let(:project) { table(:projects).create!(namespace_id: namespace.id) } - let(:runner) { table(:ci_runners).create!(runner_type: 1) } - let(:builds) { table(:ci_builds) } - let(:running_builds) { table(:ci_running_builds) } - - let(:running_build) do - builds.create!( - name: 'test 1', - status: 'running', - project_id: project.id, - type: 'Ci::Build') - end - - let(:failed_build) do - builds.create!( - name: 'test 2', - status: 'failed', - project_id: project.id, - type: 'Ci::Build') - end - - let!(:running_metadata) do - running_builds.create!( - build_id: running_build.id, - project_id: project.id, - runner_id: runner.id, - runner_type: - runner.runner_type) - end - - let!(:failed_metadata) do - running_builds.create!( - build_id: failed_build.id, - project_id: project.id, - runner_id: runner.id, - runner_type: runner.runner_type) - end - - it 'removes failed builds' do - migrate! 
- - expect(running_metadata.reload).to be_present - expect { failed_metadata.reload }.to raise_error(ActiveRecord::RecordNotFound) - end -end diff --git a/spec/migrations/20220128155814_fix_approval_rules_code_owners_rule_type_index_spec.rb b/spec/migrations/20220128155814_fix_approval_rules_code_owners_rule_type_index_spec.rb deleted file mode 100644 index 3f3fdd0889d..00000000000 --- a/spec/migrations/20220128155814_fix_approval_rules_code_owners_rule_type_index_spec.rb +++ /dev/null @@ -1,33 +0,0 @@ -# frozen_string_literal: true - -require 'spec_helper' -require_migration!('fix_approval_rules_code_owners_rule_type_index') - -RSpec.describe FixApprovalRulesCodeOwnersRuleTypeIndex, :migration, feature_category: :source_code_management do - let(:table_name) { :approval_merge_request_rules } - let(:index_name) { 'index_approval_rules_code_owners_rule_type' } - - it 'correctly migrates up and down' do - reversible_migration do |migration| - migration.before -> { - expect(subject.index_exists_by_name?(table_name, index_name)).to be_truthy - } - - migration.after -> { - expect(subject.index_exists_by_name?(table_name, index_name)).to be_truthy - } - end - end - - context 'when the index already exists' do - before do - subject.add_concurrent_index table_name, :merge_request_id, where: 'rule_type = 2', name: index_name - end - - it 'keeps the index' do - migrate! - - expect(subject.index_exists_by_name?(table_name, index_name)).to be_truthy - end - end -end diff --git a/spec/migrations/20220202105733_delete_service_template_records_spec.rb b/spec/migrations/20220202105733_delete_service_template_records_spec.rb deleted file mode 100644 index 41762a3a5c3..00000000000 --- a/spec/migrations/20220202105733_delete_service_template_records_spec.rb +++ /dev/null @@ -1,42 +0,0 @@ -# frozen_string_literal: true -require 'spec_helper' - -require_migration! 
- -RSpec.describe DeleteServiceTemplateRecords, feature_category: :integrations do - let(:integrations) { table(:integrations) } - let(:chat_names) { table(:chat_names) } - let(:web_hooks) { table(:web_hooks) } - let(:slack_integrations) { table(:slack_integrations) } - let(:zentao_tracker_data) { table(:zentao_tracker_data) } - let(:jira_tracker_data) { table(:jira_tracker_data) } - let(:issue_tracker_data) { table(:issue_tracker_data) } - - before do - template = integrations.create!(template: true) - chat_names.create!(service_id: template.id, user_id: 1, team_id: 1, chat_id: 1) - web_hooks.create!(service_id: template.id) - slack_integrations.create!(service_id: template.id, team_id: 1, team_name: 'team', alias: 'alias', user_id: 1) - zentao_tracker_data.create!(integration_id: template.id) - jira_tracker_data.create!(service_id: template.id) - issue_tracker_data.create!(service_id: template.id) - - integrations.create!(template: false) - end - - it 'deletes template records and associated data' do - expect { migrate! } - .to change { integrations.where(template: true).count }.from(1).to(0) - .and change { chat_names.count }.from(1).to(0) - .and change { web_hooks.count }.from(1).to(0) - .and change { slack_integrations.count }.from(1).to(0) - .and change { zentao_tracker_data.count }.from(1).to(0) - .and change { jira_tracker_data.count }.from(1).to(0) - .and change { issue_tracker_data.count }.from(1).to(0) - end - - it 'does not delete non template records' do - expect { migrate! 
} - .not_to change { integrations.where(template: false).count } - end -end diff --git a/spec/migrations/20220204095121_backfill_namespace_statistics_with_dependency_proxy_size_spec.rb b/spec/migrations/20220204095121_backfill_namespace_statistics_with_dependency_proxy_size_spec.rb deleted file mode 100644 index cbae5674d78..00000000000 --- a/spec/migrations/20220204095121_backfill_namespace_statistics_with_dependency_proxy_size_spec.rb +++ /dev/null @@ -1,64 +0,0 @@ -# frozen_string_literal: true - -require 'spec_helper' -require_migration! - -RSpec.describe BackfillNamespaceStatisticsWithDependencyProxySize, feature_category: :dependency_proxy do - let!(:groups) { table(:namespaces) } - let!(:group1) { groups.create!(id: 10, name: 'test1', path: 'test1', type: 'Group') } - let!(:group2) { groups.create!(id: 20, name: 'test2', path: 'test2', type: 'Group') } - let!(:group3) { groups.create!(id: 30, name: 'test3', path: 'test3', type: 'Group') } - let!(:group4) { groups.create!(id: 40, name: 'test4', path: 'test4', type: 'Group') } - - let!(:dependency_proxy_blobs) { table(:dependency_proxy_blobs) } - let!(:dependency_proxy_manifests) { table(:dependency_proxy_manifests) } - - let!(:group1_manifest) { create_manifest(10, 10) } - let!(:group2_manifest) { create_manifest(20, 20) } - let!(:group3_manifest) { create_manifest(30, 30) } - - let!(:group1_blob) { create_blob(10, 10) } - let!(:group2_blob) { create_blob(20, 20) } - let!(:group3_blob) { create_blob(30, 30) } - - describe '#up' do - it 'correctly schedules background migrations' do - stub_const("#{described_class}::BATCH_SIZE", 2) - - Sidekiq::Testing.fake! do - freeze_time do - migrate! 
- - aggregate_failures do - expect(described_class::MIGRATION) - .to be_scheduled_migration([10, 30], ['dependency_proxy_size']) - - expect(described_class::MIGRATION) - .to be_scheduled_delayed_migration(2.minutes, [20], ['dependency_proxy_size']) - - expect(BackgroundMigrationWorker.jobs.size).to eq(2) - end - end - end - end - end - - def create_manifest(group_id, size) - dependency_proxy_manifests.create!( - group_id: group_id, - size: size, - file_name: 'test-file', - file: 'test', - digest: 'abc123' - ) - end - - def create_blob(group_id, size) - dependency_proxy_blobs.create!( - group_id: group_id, - size: size, - file_name: 'test-file', - file: 'test' - ) - end -end diff --git a/spec/migrations/20220204194347_encrypt_integration_properties_spec.rb b/spec/migrations/20220204194347_encrypt_integration_properties_spec.rb deleted file mode 100644 index 5e728bb396c..00000000000 --- a/spec/migrations/20220204194347_encrypt_integration_properties_spec.rb +++ /dev/null @@ -1,40 +0,0 @@ -# frozen_string_literal: true - -require 'spec_helper' - -require_migration! - -RSpec.describe EncryptIntegrationProperties, :migration, schema: 20220204193000, feature_category: :integrations do - subject(:migration) { described_class.new } - - let(:integrations) { table(:integrations) } - - before do - stub_const("#{described_class.name}::BATCH_SIZE", 2) - end - - it 'correctly schedules background migrations', :aggregate_failures do - # update required - record1 = integrations.create!(properties: some_props) - record2 = integrations.create!(properties: some_props) - record3 = integrations.create!(properties: some_props) - record4 = integrations.create!(properties: nil) - record5 = integrations.create!(properties: nil) - - Sidekiq::Testing.fake! do - freeze_time do - migrate! 
- - expect(described_class::MIGRATION).to be_scheduled_migration(record1.id, record2.id) - expect(described_class::MIGRATION).to be_scheduled_migration(record3.id, record4.id) - expect(described_class::MIGRATION).to be_scheduled_migration(record5.id, record5.id) - - expect(BackgroundMigrationWorker.jobs.size).to eq(3) - end - end - end - - def some_props - { iid: generate(:iid), url: generate(:url), username: generate(:username) }.to_json - end -end diff --git a/spec/migrations/20220208080921_schedule_migrate_personal_namespace_project_maintainer_to_owner_spec.rb b/spec/migrations/20220208080921_schedule_migrate_personal_namespace_project_maintainer_to_owner_spec.rb deleted file mode 100644 index 89583d1050b..00000000000 --- a/spec/migrations/20220208080921_schedule_migrate_personal_namespace_project_maintainer_to_owner_spec.rb +++ /dev/null @@ -1,20 +0,0 @@ -# frozen_string_literal: true - -require 'spec_helper' -require_migration! - -RSpec.describe ScheduleMigratePersonalNamespaceProjectMaintainerToOwner, feature_category: :subgroups do - let!(:migration) { described_class::MIGRATION } - - describe '#up' do - it 'schedules background jobs for each batch of members' do - migrate! - - expect(migration).to have_scheduled_batched_migration( - table_name: :members, - column_name: :id, - interval: described_class::INTERVAL - ) - end - end -end diff --git a/spec/migrations/20220211214605_update_integrations_trigger_type_new_on_insert_null_safe_spec.rb b/spec/migrations/20220211214605_update_integrations_trigger_type_new_on_insert_null_safe_spec.rb deleted file mode 100644 index 8a6a542bc5e..00000000000 --- a/spec/migrations/20220211214605_update_integrations_trigger_type_new_on_insert_null_safe_spec.rb +++ /dev/null @@ -1,37 +0,0 @@ -# frozen_string_literal: true - -require 'spec_helper' - -require_migration! 
- -RSpec.describe UpdateIntegrationsTriggerTypeNewOnInsertNullSafe, :migration, feature_category: :integrations do - let(:integrations) { table(:integrations) } - - before do - migrate! - end - - it 'leaves defined values alone' do - record = integrations.create!(type: 'XService', type_new: 'Integrations::Y') - - expect(integrations.find(record.id)).to have_attributes(type: 'XService', type_new: 'Integrations::Y') - end - - it 'keeps type_new synchronized with type' do - record = integrations.create!(type: 'AbcService', type_new: nil) - - expect(integrations.find(record.id)).to have_attributes( - type: 'AbcService', - type_new: 'Integrations::Abc' - ) - end - - it 'keeps type synchronized with type_new' do - record = integrations.create!(type: nil, type_new: 'Integrations::Abc') - - expect(integrations.find(record.id)).to have_attributes( - type: 'AbcService', - type_new: 'Integrations::Abc' - ) - end -end diff --git a/spec/migrations/20220213103859_remove_integrations_type_spec.rb b/spec/migrations/20220213103859_remove_integrations_type_spec.rb deleted file mode 100644 index 8f6d9b0d9b5..00000000000 --- a/spec/migrations/20220213103859_remove_integrations_type_spec.rb +++ /dev/null @@ -1,31 +0,0 @@ -# frozen_string_literal: true - -require 'spec_helper' - -require_migration! - -RSpec.describe RemoveIntegrationsType, :migration, feature_category: :integrations do - subject(:migration) { described_class.new } - - let(:integrations) { table(:integrations) } - let(:bg_migration) { instance_double(bg_migration_class) } - - before do - stub_const("#{described_class.name}::BATCH_SIZE", 2) - end - - it 'performs remaining background migrations', :aggregate_failures do - # Already migrated - integrations.create!(type: 'SlackService', type_new: 'Integrations::Slack') - # update required - record1 = integrations.create!(type: 'SlackService') - record2 = integrations.create!(type: 'JiraService') - record3 = integrations.create!(type: 'SlackService') - - migrate! 
- - expect(record1.reload.type_new).to eq 'Integrations::Slack' - expect(record2.reload.type_new).to eq 'Integrations::Jira' - expect(record3.reload.type_new).to eq 'Integrations::Slack' - end -end diff --git a/spec/migrations/20220222192524_create_not_null_constraint_releases_tag_spec.rb b/spec/migrations/20220222192524_create_not_null_constraint_releases_tag_spec.rb deleted file mode 100644 index b8a37dcd6d9..00000000000 --- a/spec/migrations/20220222192524_create_not_null_constraint_releases_tag_spec.rb +++ /dev/null @@ -1,23 +0,0 @@ -# frozen_string_literal: true -require 'spec_helper' -require_migration! - -RSpec.describe CreateNotNullConstraintReleasesTag, feature_category: :release_orchestration do - let!(:releases) { table(:releases) } - let!(:migration) { described_class.new } - - before do - allow(migration).to receive(:transaction_open?).and_return(false) - allow(migration).to receive(:with_lock_retries).and_yield - end - - it 'adds a check constraint to tags' do - constraint = releases.connection.check_constraints(:releases).find { |constraint| constraint.expression == "tag IS NOT NULL" } - expect(constraint).to be_nil - - migration.up - - constraint = releases.connection.check_constraints(:releases).find { |constraint| constraint.expression == "tag IS NOT NULL" } - expect(constraint).to be_a(ActiveRecord::ConnectionAdapters::CheckConstraintDefinition) - end -end diff --git a/spec/migrations/20220222192525_remove_null_releases_spec.rb b/spec/migrations/20220222192525_remove_null_releases_spec.rb deleted file mode 100644 index ce42dea077d..00000000000 --- a/spec/migrations/20220222192525_remove_null_releases_spec.rb +++ /dev/null @@ -1,22 +0,0 @@ -# frozen_string_literal: true -require 'spec_helper' - -require_migration! - -RSpec.describe RemoveNullReleases, feature_category: :release_orchestration do - let(:releases) { table(:releases) } - - before do - # we need to migrate to before previous migration so an invalid record can be created - migrate! 
- migration_context.down(previous_migration(3).version) - - releases.create!(tag: 'good', name: 'good release', released_at: Time.now) - releases.create!(tag: nil, name: 'bad release', released_at: Time.now) - end - - it 'deletes template records and associated data' do - expect { migrate! } - .to change { releases.count }.from(2).to(1) - end -end diff --git a/spec/migrations/20220223124428_schedule_merge_topics_with_same_name_spec.rb b/spec/migrations/20220223124428_schedule_merge_topics_with_same_name_spec.rb deleted file mode 100644 index 425f622581b..00000000000 --- a/spec/migrations/20220223124428_schedule_merge_topics_with_same_name_spec.rb +++ /dev/null @@ -1,36 +0,0 @@ -# frozen_string_literal: true - -require 'spec_helper' -require_migration! - -RSpec.describe ScheduleMergeTopicsWithSameName, feature_category: :projects do - let(:topics) { table(:topics) } - - describe '#up' do - before do - stub_const("#{described_class}::BATCH_SIZE", 2) - - topics.create!(name: 'topic1') - topics.create!(name: 'Topic2') - topics.create!(name: 'Topic3') - topics.create!(name: 'Topic4') - topics.create!(name: 'topic2') - topics.create!(name: 'topic3') - topics.create!(name: 'topic4') - topics.create!(name: 'TOPIC2') - topics.create!(name: 'topic5') - end - - it 'schedules MergeTopicsWithSameName background jobs', :aggregate_failures do - Sidekiq::Testing.fake! do - freeze_time do - migrate! 
- - expect(described_class::MIGRATION).to be_scheduled_delayed_migration(2.minutes, %w[topic2 topic3]) - expect(described_class::MIGRATION).to be_scheduled_delayed_migration(4.minutes, %w[topic4]) - expect(BackgroundMigrationWorker.jobs.size).to eq(2) - end - end - end - end -end diff --git a/spec/migrations/20220305223212_add_security_training_providers_spec.rb b/spec/migrations/20220305223212_add_security_training_providers_spec.rb deleted file mode 100644 index f67db3b68cd..00000000000 --- a/spec/migrations/20220305223212_add_security_training_providers_spec.rb +++ /dev/null @@ -1,25 +0,0 @@ -# frozen_string_literal: true - -require 'spec_helper' -require_migration! - -RSpec.describe AddSecurityTrainingProviders, :migration, feature_category: :vulnerability_management do - include MigrationHelpers::WorkItemTypesHelper - - let!(:security_training_providers) { table(:security_training_providers) } - - it 'creates default data' do - # Need to delete all as security training providers are seeded before entire test suite - security_training_providers.delete_all - - reversible_migration do |migration| - migration.before -> { - expect(security_training_providers.count).to eq(0) - } - - migration.after -> { - expect(security_training_providers.count).to eq(2) - } - end - end -end diff --git a/spec/migrations/20220307192610_remove_duplicate_project_tag_releases_spec.rb b/spec/migrations/20220307192610_remove_duplicate_project_tag_releases_spec.rb deleted file mode 100644 index 3bdd6e5fab9..00000000000 --- a/spec/migrations/20220307192610_remove_duplicate_project_tag_releases_spec.rb +++ /dev/null @@ -1,47 +0,0 @@ -# frozen_string_literal: true - -require 'spec_helper' -require_migration! 
- -RSpec.describe RemoveDuplicateProjectTagReleases, feature_category: :release_orchestration do - let(:namespaces) { table(:namespaces) } - let(:projects) { table(:projects) } - let(:users) { table(:users) } - let(:releases) { table(:releases) } - - let(:namespace) { namespaces.create!(name: 'gitlab', path: 'gitlab-org') } - let(:project) { projects.create!(namespace_id: namespace.id, name: 'foo') } - - let(:dup_releases) do - Array.new(4).fill do |i| - rel = releases.new(project_id: project.id, - tag: "duplicate tag", - released_at: (DateTime.now + i.days)) - rel.save!(validate: false) - rel - end - end - - let(:valid_release) do - releases.create!( - project_id: project.id, - tag: "valid tag", - released_at: DateTime.now - ) - end - - describe '#up' do - it "correctly removes duplicate tags from the same project" do - expect(dup_releases.length).to eq 4 - expect(valid_release).not_to be nil - expect(releases.where(tag: 'duplicate tag').count).to eq 4 - expect(releases.where(tag: 'valid tag').count).to eq 1 - - migrate! - - expect(releases.where(tag: 'duplicate tag').count).to eq 1 - expect(releases.where(tag: 'valid tag').count).to eq 1 - expect(releases.all.map(&:tag)).to match_array ['valid tag', 'duplicate tag'] - end - end -end diff --git a/spec/migrations/20220309084954_remove_leftover_external_pull_request_deletions_spec.rb b/spec/migrations/20220309084954_remove_leftover_external_pull_request_deletions_spec.rb deleted file mode 100644 index c0b94313d4d..00000000000 --- a/spec/migrations/20220309084954_remove_leftover_external_pull_request_deletions_spec.rb +++ /dev/null @@ -1,43 +0,0 @@ -# frozen_string_literal: true -require 'spec_helper' - -require_migration! 
- -RSpec.describe RemoveLeftoverExternalPullRequestDeletions, feature_category: :pods do - let(:deleted_records) { table(:loose_foreign_keys_deleted_records) } - - let(:pending_record1) { deleted_records.create!(id: 1, fully_qualified_table_name: 'public.external_pull_requests', primary_key_value: 1, status: 1) } - let(:pending_record2) { deleted_records.create!(id: 2, fully_qualified_table_name: 'public.external_pull_requests', primary_key_value: 2, status: 1) } - let(:other_pending_record1) { deleted_records.create!(id: 3, fully_qualified_table_name: 'public.projects', primary_key_value: 1, status: 1) } - let(:other_pending_record2) { deleted_records.create!(id: 4, fully_qualified_table_name: 'public.ci_builds', primary_key_value: 1, status: 1) } - let(:processed_record1) { deleted_records.create!(id: 5, fully_qualified_table_name: 'public.external_pull_requests', primary_key_value: 3, status: 2) } - let(:other_processed_record1) { deleted_records.create!(id: 6, fully_qualified_table_name: 'public.ci_builds', primary_key_value: 2, status: 2) } - - let!(:persisted_ids_before) do - [ - pending_record1, - pending_record2, - other_pending_record1, - other_pending_record2, - processed_record1, - other_processed_record1 - ].map(&:id).sort - end - - let!(:persisted_ids_after) do - [ - other_pending_record1, - other_pending_record2, - processed_record1, - other_processed_record1 - ].map(&:id).sort - end - - def all_ids - deleted_records.all.map(&:id).sort - end - - it 'deletes pending external_pull_requests records' do - expect { migrate! 
}.to change { all_ids }.from(persisted_ids_before).to(persisted_ids_after) - end -end diff --git a/spec/migrations/20220310141349_remove_dependency_list_usage_data_from_redis_spec.rb b/spec/migrations/20220310141349_remove_dependency_list_usage_data_from_redis_spec.rb deleted file mode 100644 index f40f9c70833..00000000000 --- a/spec/migrations/20220310141349_remove_dependency_list_usage_data_from_redis_spec.rb +++ /dev/null @@ -1,24 +0,0 @@ -# frozen_string_literal: true - -require 'spec_helper' -require_migration! - -RSpec.describe RemoveDependencyListUsageDataFromRedis, :migration, :clean_gitlab_redis_shared_state, -feature_category: :dependency_management do - let(:key) { "DEPENDENCY_LIST_USAGE_COUNTER" } - - describe "#up" do - it 'removes the hash from redis' do - with_redis do |redis| - redis.hincrby(key, 1, 1) - redis.hincrby(key, 2, 1) - end - - expect { migrate! }.to change { with_redis { |r| r.hgetall(key) } }.from({ '1' => '1', '2' => '1' }).to({}) - end - end - - def with_redis(&block) - Gitlab::Redis::SharedState.with(&block) - end -end diff --git a/spec/migrations/20220324032250_migrate_shimo_confluence_service_category_spec.rb b/spec/migrations/20220324032250_migrate_shimo_confluence_service_category_spec.rb index 15c16a2b232..6f9e70aa8c8 100644 --- a/spec/migrations/20220324032250_migrate_shimo_confluence_service_category_spec.rb +++ b/spec/migrations/20220324032250_migrate_shimo_confluence_service_category_spec.rb @@ -11,8 +11,9 @@ RSpec.describe MigrateShimoConfluenceServiceCategory, :migration, feature_catego before do namespace = namespaces.create!(name: 'test', path: 'test') projects.create!(id: 1, namespace_id: namespace.id, name: 'gitlab', path: 'gitlab') - integrations.create!(id: 1, active: true, type_new: "Integrations::SlackSlashCommands", - category: 'chat', project_id: 1) + integrations.create!( + id: 1, active: true, type_new: "Integrations::SlackSlashCommands", category: 'chat', project_id: 1 + ) integrations.create!(id: 3, active: 
true, type_new: "Integrations::Confluence", category: 'common', project_id: 1) integrations.create!(id: 5, active: true, type_new: "Integrations::Shimo", category: 'common', project_id: 1) end diff --git a/spec/migrations/20220329175119_remove_leftover_ci_job_artifact_deletions_spec.rb b/spec/migrations/20220329175119_remove_leftover_ci_job_artifact_deletions_spec.rb index e9bca42f37f..ca2ee6d8aba 100644 --- a/spec/migrations/20220329175119_remove_leftover_ci_job_artifact_deletions_spec.rb +++ b/spec/migrations/20220329175119_remove_leftover_ci_job_artifact_deletions_spec.rb @@ -3,7 +3,7 @@ require 'spec_helper' require_migration! -RSpec.describe RemoveLeftoverCiJobArtifactDeletions, feature_category: :pods do +RSpec.describe RemoveLeftoverCiJobArtifactDeletions, feature_category: :cell do let(:deleted_records) { table(:loose_foreign_keys_deleted_records) } target_table_name = Ci::JobArtifact.table_name diff --git a/spec/migrations/20220505044348_fix_automatic_iterations_cadences_start_date_spec.rb b/spec/migrations/20220505044348_fix_automatic_iterations_cadences_start_date_spec.rb index 3a6a8f5dbe5..16258eeb0fb 100644 --- a/spec/migrations/20220505044348_fix_automatic_iterations_cadences_start_date_spec.rb +++ b/spec/migrations/20220505044348_fix_automatic_iterations_cadences_start_date_spec.rb @@ -28,19 +28,19 @@ RSpec.describe FixAutomaticIterationsCadencesStartDate, feature_category: :team_ before do sprints.create!(id: 2, start_date: jan2022, due_date: jan2022 + 1.week, iterations_cadence_id: cadence1.id, - group_id: group1.id, iid: 1) + group_id: group1.id, iid: 1) sprints.create!(id: 1, start_date: dec2022, due_date: dec2022 + 1.week, iterations_cadence_id: cadence1.id, - group_id: group1.id, iid: 2) + group_id: group1.id, iid: 2) sprints.create!(id: 4, start_date: feb2022, due_date: feb2022 + 1.week, iterations_cadence_id: cadence3.id, - group_id: group2.id, iid: 1) + group_id: group2.id, iid: 1) sprints.create!(id: 3, start_date: may2022, due_date: 
may2022 + 1.week, iterations_cadence_id: cadence3.id, - group_id: group2.id, iid: 2) + group_id: group2.id, iid: 2) sprints.create!(id: 5, start_date: may2022, due_date: may2022 + 1.week, iterations_cadence_id: cadence4.id, - group_id: group2.id, iid: 4) + group_id: group2.id, iid: 4) sprints.create!(id: 6, start_date: feb2022, due_date: feb2022 + 1.week, iterations_cadence_id: cadence4.id, - group_id: group2.id, iid: 3) + group_id: group2.id, iid: 3) end describe '#up' do diff --git a/spec/migrations/20220601152916_add_user_id_and_ip_address_success_index_to_authentication_events_spec.rb b/spec/migrations/20220601152916_add_user_id_and_ip_address_success_index_to_authentication_events_spec.rb index 2eff65d5873..c01d982c34e 100644 --- a/spec/migrations/20220601152916_add_user_id_and_ip_address_success_index_to_authentication_events_spec.rb +++ b/spec/migrations/20220601152916_add_user_id_and_ip_address_success_index_to_authentication_events_spec.rb @@ -4,7 +4,7 @@ require 'spec_helper' require_migration! RSpec.describe AddUserIdAndIpAddressSuccessIndexToAuthenticationEvents, -feature_category: :system_access do + feature_category: :system_access do let(:db) { described_class.new } let(:old_index) { described_class::OLD_INDEX_NAME } let(:new_index) { described_class::NEW_INDEX_NAME } diff --git a/spec/migrations/20220606082910_add_tmp_index_for_potentially_misassociated_vulnerability_occurrences_spec.rb b/spec/migrations/20220606082910_add_tmp_index_for_potentially_misassociated_vulnerability_occurrences_spec.rb index b74e15d804f..4ae40933541 100644 --- a/spec/migrations/20220606082910_add_tmp_index_for_potentially_misassociated_vulnerability_occurrences_spec.rb +++ b/spec/migrations/20220606082910_add_tmp_index_for_potentially_misassociated_vulnerability_occurrences_spec.rb @@ -5,7 +5,7 @@ require "spec_helper" require_migration! 
RSpec.describe AddTmpIndexForPotentiallyMisassociatedVulnerabilityOccurrences, -feature_category: :vulnerability_management do + feature_category: :vulnerability_management do let(:async_index) { Gitlab::Database::AsyncIndexes::PostgresAsyncIndex } let(:index_name) { described_class::INDEX_NAME } diff --git a/spec/migrations/20220607082910_add_sync_tmp_index_for_potentially_misassociated_vulnerability_occurrences_spec.rb b/spec/migrations/20220607082910_add_sync_tmp_index_for_potentially_misassociated_vulnerability_occurrences_spec.rb index 8d3ef9a46d7..d4a800eb1db 100644 --- a/spec/migrations/20220607082910_add_sync_tmp_index_for_potentially_misassociated_vulnerability_occurrences_spec.rb +++ b/spec/migrations/20220607082910_add_sync_tmp_index_for_potentially_misassociated_vulnerability_occurrences_spec.rb @@ -5,7 +5,7 @@ require "spec_helper" require_migration! RSpec.describe AddSyncTmpIndexForPotentiallyMisassociatedVulnerabilityOccurrences, -feature_category: :vulnerability_management do + feature_category: :vulnerability_management do let(:table) { "vulnerability_occurrences" } let(:index) { described_class::INDEX_NAME } diff --git a/spec/migrations/20220628012902_finalise_project_namespace_members_spec.rb b/spec/migrations/20220628012902_finalise_project_namespace_members_spec.rb index 55cabc21997..fb1a4782f3b 100644 --- a/spec/migrations/20220628012902_finalise_project_namespace_members_spec.rb +++ b/spec/migrations/20220628012902_finalise_project_namespace_members_spec.rb @@ -12,12 +12,16 @@ RSpec.describe FinaliseProjectNamespaceMembers, :migration, feature_category: :s shared_examples 'finalizes the migration' do it 'finalizes the migration' do allow_next_instance_of(Gitlab::Database::BackgroundMigration::BatchedMigrationRunner) do |runner| - expect(runner).to receive(:finalize).with('BackfillProjectMemberNamespaceId', :members, :id, []) + expect(runner).to receive(:finalize).with(migration, :members, :id, []) end end end context 'when migration is 
missing' do + before do + batched_migrations.where(job_class_name: migration).delete_all + end + it 'warns migration not found' do expect(Gitlab::AppLogger) .to receive(:warn).with(/Could not find batched background migration for the given configuration:/) @@ -29,7 +33,7 @@ RSpec.describe FinaliseProjectNamespaceMembers, :migration, feature_category: :s context 'with migration present' do let!(:project_member_namespace_id_backfill) do batched_migrations.create!( - job_class_name: 'BackfillProjectMemberNamespaceId', + job_class_name: migration, table_name: :members, column_name: :id, job_arguments: [], diff --git a/spec/migrations/20220801155858_schedule_disable_legacy_open_source_licence_for_recent_public_projects_spec.rb b/spec/migrations/20220801155858_schedule_disable_legacy_open_source_licence_for_recent_public_projects_spec.rb index f8f1565fe4c..a9f0bdc8487 100644 --- a/spec/migrations/20220801155858_schedule_disable_legacy_open_source_licence_for_recent_public_projects_spec.rb +++ b/spec/migrations/20220801155858_schedule_disable_legacy_open_source_licence_for_recent_public_projects_spec.rb @@ -3,8 +3,8 @@ require 'spec_helper' require_migration! 
-RSpec.describe ScheduleDisableLegacyOpenSourceLicenceForRecentPublicProjects, schema: 20220801155858, - feature_category: :projects do +RSpec.describe ScheduleDisableLegacyOpenSourceLicenceForRecentPublicProjects, + schema: 20220801155858, feature_category: :projects do context 'when on gitlab.com' do let(:background_migration) { described_class::MIGRATION } let(:migration) { described_class.new } diff --git a/spec/migrations/20220802204737_remove_deactivated_user_highest_role_stats_spec.rb b/spec/migrations/20220802204737_remove_deactivated_user_highest_role_stats_spec.rb index 36c65612bb9..b731a8c8c18 100644 --- a/spec/migrations/20220802204737_remove_deactivated_user_highest_role_stats_spec.rb +++ b/spec/migrations/20220802204737_remove_deactivated_user_highest_role_stats_spec.rb @@ -3,7 +3,7 @@ require 'spec_helper' require_migration! -RSpec.describe RemoveDeactivatedUserHighestRoleStats, feature_category: :subscription_cost_management do +RSpec.describe RemoveDeactivatedUserHighestRoleStats, feature_category: :seat_cost_management do let!(:users) { table(:users) } let!(:user_highest_roles) { table(:user_highest_roles) } diff --git a/spec/migrations/20220816163444_update_start_date_for_iterations_cadences_spec.rb b/spec/migrations/20220816163444_update_start_date_for_iterations_cadences_spec.rb index 25b2b5c2e18..0807f5d4e38 100644 --- a/spec/migrations/20220816163444_update_start_date_for_iterations_cadences_spec.rb +++ b/spec/migrations/20220816163444_update_start_date_for_iterations_cadences_spec.rb @@ -31,31 +31,31 @@ RSpec.describe UpdateStartDateForIterationsCadences, :freeze_time, feature_categ before do # Past iteratioin sprints.create!(id: 1, iid: 1, **cadence_params(auto_cadence1), - start_date: Date.current - 1.week, due_date: Date.current - 1.day) + start_date: Date.current - 1.week, due_date: Date.current - 1.day) # Current iteraition sprints.create!(id: 3, iid: 5, **cadence_params(auto_cadence1), - start_date: Date.current, due_date: Date.current 
+ 1.week) + start_date: Date.current, due_date: Date.current + 1.week) # First upcoming iteration sprints.create!(id: 4, iid: 8, **cadence_params(auto_cadence1), - start_date: first_upcoming_start_date, due_date: first_upcoming_start_date + 1.week) + start_date: first_upcoming_start_date, due_date: first_upcoming_start_date + 1.week) # Second upcoming iteration sprints.create!(id: 5, iid: 9, **cadence_params(auto_cadence1), - start_date: first_upcoming_start_date + 2.weeks, due_date: first_upcoming_start_date + 3.weeks) + start_date: first_upcoming_start_date + 2.weeks, due_date: first_upcoming_start_date + 3.weeks) sprints.create!(id: 6, iid: 1, **cadence_params(manual_cadence2), - start_date: Date.current, due_date: Date.current + 1.week) + start_date: Date.current, due_date: Date.current + 1.week) sprints.create!(id: 7, iid: 5, **cadence_params(manual_cadence2), - start_date: Date.current + 2.weeks, due_date: Date.current + 3.weeks) + start_date: Date.current + 2.weeks, due_date: Date.current + 3.weeks) end describe '#up' do it "updates the start date of an automatic cadence to the start date of its first upcoming sprint record." do expect { migration.up } - .to change { auto_cadence1.reload.start_date }.to(first_upcoming_start_date) - .and not_change { auto_cadence2.reload.start_date } # the cadence doesn't have any upcoming iteration. - .and not_change { auto_cadence3.reload.start_date } # the cadence is empty; it has no iterations. - .and not_change { manual_cadence1.reload.start_date } # manual cadence don't need to be touched. - .and not_change { manual_cadence2.reload.start_date } # manual cadence don't need to be touched. + .to change { auto_cadence1.reload.start_date }.to(first_upcoming_start_date) + .and not_change { auto_cadence2.reload.start_date } # the cadence doesn't have any upcoming iteration. + .and not_change { auto_cadence3.reload.start_date } # the cadence is empty; it has no iterations. 
+ .and not_change { manual_cadence1.reload.start_date } # manual cadence don't need to be touched. + .and not_change { manual_cadence2.reload.start_date } # manual cadence don't need to be touched. end end @@ -64,10 +64,10 @@ RSpec.describe UpdateStartDateForIterationsCadences, :freeze_time, feature_categ migration.up expect { migration.down } - .to change { auto_cadence1.reload.start_date }.to(original_cadence_start_date) - .and not_change { auto_cadence2.reload.start_date } # the cadence is empty; it has no iterations. - .and not_change { manual_cadence1.reload.start_date } # manual cadence don't need to be touched. - .and not_change { manual_cadence2.reload.start_date } # manual cadence don't need to be touched. + .to change { auto_cadence1.reload.start_date }.to(original_cadence_start_date) + .and not_change { auto_cadence2.reload.start_date } # the cadence is empty; it has no iterations. + .and not_change { manual_cadence1.reload.start_date } # manual cadence don't need to be touched. + .and not_change { manual_cadence2.reload.start_date } # manual cadence don't need to be touched. end end end diff --git a/spec/migrations/20220819153725_add_vulnerability_advisory_foreign_key_to_sbom_vulnerable_component_versions_spec.rb b/spec/migrations/20220819153725_add_vulnerability_advisory_foreign_key_to_sbom_vulnerable_component_versions_spec.rb index 5a61f49485c..1d18862c8ee 100644 --- a/spec/migrations/20220819153725_add_vulnerability_advisory_foreign_key_to_sbom_vulnerable_component_versions_spec.rb +++ b/spec/migrations/20220819153725_add_vulnerability_advisory_foreign_key_to_sbom_vulnerable_component_versions_spec.rb @@ -5,7 +5,7 @@ require "spec_helper" require_migration! 
RSpec.describe AddVulnerabilityAdvisoryForeignKeyToSbomVulnerableComponentVersions, -feature_category: :dependency_management do + feature_category: :dependency_management do let(:table) { described_class::SOURCE_TABLE } let(:column) { described_class::COLUMN } let(:foreign_key) { -> { described_class.new.foreign_keys_for(table, column).first } } diff --git a/spec/migrations/20220819162852_add_sbom_component_version_foreign_key_to_sbom_vulnerable_component_versions_spec.rb b/spec/migrations/20220819162852_add_sbom_component_version_foreign_key_to_sbom_vulnerable_component_versions_spec.rb index 999c833f9e3..a280795380d 100644 --- a/spec/migrations/20220819162852_add_sbom_component_version_foreign_key_to_sbom_vulnerable_component_versions_spec.rb +++ b/spec/migrations/20220819162852_add_sbom_component_version_foreign_key_to_sbom_vulnerable_component_versions_spec.rb @@ -5,7 +5,7 @@ require "spec_helper" require_migration! RSpec.describe AddSbomComponentVersionForeignKeyToSbomVulnerableComponentVersions, -feature_category: :dependency_management do + feature_category: :dependency_management do let(:table) { described_class::SOURCE_TABLE } let(:column) { described_class::COLUMN } let(:foreign_key) { -> { described_class.new.foreign_keys_for(table, column).first } } diff --git a/spec/migrations/20220921144258_remove_orphan_group_token_users_spec.rb b/spec/migrations/20220921144258_remove_orphan_group_token_users_spec.rb index 7b0df403e30..5cfcb2eb3dd 100644 --- a/spec/migrations/20220921144258_remove_orphan_group_token_users_spec.rb +++ b/spec/migrations/20220921144258_remove_orphan_group_token_users_spec.rb @@ -5,7 +5,7 @@ require 'spec_helper' require_migration! 
RSpec.describe RemoveOrphanGroupTokenUsers, :migration, :sidekiq_inline, -feature_category: :system_access do + feature_category: :system_access do subject(:migration) { described_class.new } let(:users) { table(:users) } @@ -18,12 +18,14 @@ feature_category: :system_access do let!(:valid_used_bot) do create_bot(username: 'used_bot', email: 'used_bot@bot.com').tap do |bot| group = namespaces.create!(type: 'Group', path: 'used_bot_group', name: 'used_bot_group') - members.create!(user_id: bot.id, - source_id: group.id, - member_namespace_id: group.id, - source_type: 'Group', - access_level: 10, - notification_level: 0) + members.create!( + user_id: bot.id, + source_id: group.id, + member_namespace_id: group.id, + source_type: 'Group', + access_level: 10, + notification_level: 0 + ) end end diff --git a/spec/migrations/20220928225711_schedule_update_ci_pipeline_artifacts_locked_status_spec.rb b/spec/migrations/20220928225711_schedule_update_ci_pipeline_artifacts_locked_status_spec.rb index 5c1b5c8f2a7..085e9726663 100644 --- a/spec/migrations/20220928225711_schedule_update_ci_pipeline_artifacts_locked_status_spec.rb +++ b/spec/migrations/20220928225711_schedule_update_ci_pipeline_artifacts_locked_status_spec.rb @@ -3,8 +3,8 @@ require 'spec_helper' require_migration! 
-RSpec.describe ScheduleUpdateCiPipelineArtifactsLockedStatus, migration: :gitlab_ci, - feature_category: :build_artifacts do +RSpec.describe ScheduleUpdateCiPipelineArtifactsLockedStatus, + migration: :gitlab_ci, feature_category: :build_artifacts do let!(:migration) { described_class::MIGRATION } describe '#up' do diff --git a/spec/migrations/20221002234454_finalize_group_member_namespace_id_migration_spec.rb b/spec/migrations/20221002234454_finalize_group_member_namespace_id_migration_spec.rb index 4ff16111417..632b23a8384 100644 --- a/spec/migrations/20221002234454_finalize_group_member_namespace_id_migration_spec.rb +++ b/spec/migrations/20221002234454_finalize_group_member_namespace_id_migration_spec.rb @@ -12,12 +12,16 @@ RSpec.describe FinalizeGroupMemberNamespaceIdMigration, :migration, feature_cate shared_examples 'finalizes the migration' do it 'finalizes the migration' do allow_next_instance_of(Gitlab::Database::BackgroundMigration::BatchedMigrationRunner) do |runner| - expect(runner).to receive(:finalize).with('BackfillMemberNamespaceForGroupMembers', :members, :id, []) + expect(runner).to receive(:finalize).with(migration, :members, :id, []) end end end context 'when migration is missing' do + before do + batched_migrations.where(job_class_name: migration).delete_all + end + it 'warns migration not found' do expect(Gitlab::AppLogger) .to receive(:warn).with(/Could not find batched background migration for the given configuration:/) @@ -29,7 +33,7 @@ RSpec.describe FinalizeGroupMemberNamespaceIdMigration, :migration, feature_cate context 'with migration present' do let!(:group_member_namespace_id_backfill) do batched_migrations.create!( - job_class_name: 'BackfillMemberNamespaceForGroupMembers', + job_class_name: migration, table_name: :members, column_name: :id, job_arguments: [], diff --git a/spec/migrations/20221018050323_add_objective_and_keyresult_to_work_item_types_spec.rb 
b/spec/migrations/20221018050323_add_objective_and_keyresult_to_work_item_types_spec.rb index 6284608becb..d591b370d65 100644 --- a/spec/migrations/20221018050323_add_objective_and_keyresult_to_work_item_types_spec.rb +++ b/spec/migrations/20221018050323_add_objective_and_keyresult_to_work_item_types_spec.rb @@ -28,10 +28,12 @@ RSpec.describe AddObjectiveAndKeyresultToWorkItemTypes, :migration, feature_cate it 'skips creating both objective & keyresult type record if it already exists' do reset_db_state_prior_to_migration - work_item_types.find_or_create_by!(name: 'Key Result', namespace_id: nil, base_type: base_types[:key_result], - icon_name: 'issue-type-keyresult') - work_item_types.find_or_create_by!(name: 'Objective', namespace_id: nil, base_type: base_types[:objective], - icon_name: 'issue-type-objective') + work_item_types.find_or_create_by!( + name: 'Key Result', namespace_id: nil, base_type: base_types[:key_result], icon_name: 'issue-type-keyresult' + ) + work_item_types.find_or_create_by!( + name: 'Objective', namespace_id: nil, base_type: base_types[:objective], icon_name: 'issue-type-objective' + ) expect do migrate! 
@@ -52,15 +54,20 @@ RSpec.describe AddObjectiveAndKeyresultToWorkItemTypes, :migration, feature_cate def reset_db_state_prior_to_migration # Database needs to be in a similar state as when this migration was created work_item_types.delete_all - work_item_types.find_or_create_by!(name: 'Issue', namespace_id: nil, base_type: base_types[:issue], - icon_name: 'issue-type-issue') - work_item_types.find_or_create_by!(name: 'Incident', namespace_id: nil, base_type: base_types[:incident], - icon_name: 'issue-type-incident') - work_item_types.find_or_create_by!(name: 'Test Case', namespace_id: nil, base_type: base_types[:test_case], - icon_name: 'issue-type-test-case') - work_item_types.find_or_create_by!(name: 'Requirement', namespace_id: nil, base_type: base_types[:requirement], - icon_name: 'issue-type-requirements') - work_item_types.find_or_create_by!(name: 'Task', namespace_id: nil, base_type: base_types[:task], - icon_name: 'issue-type-task') + work_item_types.find_or_create_by!( + name: 'Issue', namespace_id: nil, base_type: base_types[:issue], icon_name: 'issue-type-issue' + ) + work_item_types.find_or_create_by!( + name: 'Incident', namespace_id: nil, base_type: base_types[:incident], icon_name: 'issue-type-incident' + ) + work_item_types.find_or_create_by!( + name: 'Test Case', namespace_id: nil, base_type: base_types[:test_case], icon_name: 'issue-type-test-case' + ) + work_item_types.find_or_create_by!( + name: 'Requirement', namespace_id: nil, base_type: base_types[:requirement], icon_name: 'issue-type-requirements' + ) + work_item_types.find_or_create_by!( + name: 'Task', namespace_id: nil, base_type: base_types[:task], icon_name: 'issue-type-task' + ) end end diff --git a/spec/migrations/20221018193635_ensure_task_note_renaming_background_migration_finished_spec.rb b/spec/migrations/20221018193635_ensure_task_note_renaming_background_migration_finished_spec.rb index 8b599881359..da1df92691e 100644 --- 
a/spec/migrations/20221018193635_ensure_task_note_renaming_background_migration_finished_spec.rb +++ b/spec/migrations/20221018193635_ensure_task_note_renaming_background_migration_finished_spec.rb @@ -25,6 +25,10 @@ RSpec.describe EnsureTaskNoteRenamingBackgroundMigrationFinished, :migration, fe end context 'when migration is missing' do + before do + batched_migrations.where(job_class_name: migration).delete_all + end + it 'warns migration not found' do expect(Gitlab::AppLogger) .to receive(:warn).with(/Could not find batched background migration for the given configuration:/) @@ -36,7 +40,7 @@ RSpec.describe EnsureTaskNoteRenamingBackgroundMigrationFinished, :migration, fe context 'with migration present' do let!(:task_renaming_migration) do batched_migrations.create!( - job_class_name: 'RenameTaskSystemNoteToChecklistItem', + job_class_name: migration, table_name: :system_note_metadata, column_name: :id, job_arguments: [], diff --git a/spec/migrations/20221102231130_finalize_backfill_user_details_fields_spec.rb b/spec/migrations/20221102231130_finalize_backfill_user_details_fields_spec.rb index 37bff128edd..da2f4364e5c 100644 --- a/spec/migrations/20221102231130_finalize_backfill_user_details_fields_spec.rb +++ b/spec/migrations/20221102231130_finalize_backfill_user_details_fields_spec.rb @@ -26,6 +26,10 @@ RSpec.describe FinalizeBackfillUserDetailsFields, :migration, feature_category: end context 'when migration is missing' do + before do + batched_migrations.where(job_class_name: migration).delete_all + end + it 'warns migration not found' do expect(Gitlab::AppLogger) .to receive(:warn).with(/Could not find batched background migration for the given configuration:/) diff --git a/spec/migrations/20221104115712_backfill_project_statistics_storage_size_without_uploads_size_spec.rb b/spec/migrations/20221104115712_backfill_project_statistics_storage_size_without_uploads_size_spec.rb index d86720365c4..9658b5a699a 100644 --- 
a/spec/migrations/20221104115712_backfill_project_statistics_storage_size_without_uploads_size_spec.rb +++ b/spec/migrations/20221104115712_backfill_project_statistics_storage_size_without_uploads_size_spec.rb @@ -4,7 +4,7 @@ require 'spec_helper' require_migration! RSpec.describe BackfillProjectStatisticsStorageSizeWithoutUploadsSize, - feature_category: :subscription_cost_management do + feature_category: :consumables_cost_management do let!(:batched_migration) { described_class::MIGRATION_CLASS } it 'does not schedule background jobs when Gitlab.org_or_com? is false' do diff --git a/spec/migrations/20221115173607_ensure_work_item_type_backfill_migration_finished_spec.rb b/spec/migrations/20221115173607_ensure_work_item_type_backfill_migration_finished_spec.rb index e9250625832..d560da40c21 100644 --- a/spec/migrations/20221115173607_ensure_work_item_type_backfill_migration_finished_spec.rb +++ b/spec/migrations/20221115173607_ensure_work_item_type_backfill_migration_finished_spec.rb @@ -13,6 +13,10 @@ RSpec.describe EnsureWorkItemTypeBackfillMigrationFinished, :migration, feature_ describe '#up', :redis do context 'when migration is missing' do + before do + batched_migrations.where(job_class_name: migration_class).delete_all + end + it 'warns migration not found' do expect(Gitlab::AppLogger) .to receive(:warn).with(/Could not find batched background migration for the given configuration:/) diff --git a/spec/migrations/20221215151822_schedule_backfill_releases_author_id_spec.rb b/spec/migrations/20221215151822_schedule_backfill_releases_author_id_spec.rb index d7aa53ec35b..7cc0bd96a0d 100644 --- a/spec/migrations/20221215151822_schedule_backfill_releases_author_id_spec.rb +++ b/spec/migrations/20221215151822_schedule_backfill_releases_author_id_spec.rb @@ -10,21 +10,27 @@ RSpec.describe ScheduleBackfillReleasesAuthorId, feature_category: :release_orch let(:date_time) { DateTime.now } let!(:batched_migration) { described_class::MIGRATION } let!(:test_user) do - 
user_table.create!(name: 'test', - email: 'test@example.com', - username: 'test', - projects_limit: 10) + user_table.create!( + name: 'test', + email: 'test@example.com', + username: 'test', + projects_limit: 10 + ) end before do - releases_table.create!(tag: 'tag1', name: 'tag1', - released_at: (date_time - 1.minute), author_id: test_user.id) - releases_table.create!(tag: 'tag2', name: 'tag2', - released_at: (date_time - 2.minutes), author_id: test_user.id) - releases_table.new(tag: 'tag3', name: 'tag3', - released_at: (date_time - 3.minutes), author_id: nil).save!(validate: false) - releases_table.new(tag: 'tag4', name: 'tag4', - released_at: (date_time - 4.minutes), author_id: nil).save!(validate: false) + releases_table.create!( + tag: 'tag1', name: 'tag1', released_at: (date_time - 1.minute), author_id: test_user.id + ) + releases_table.create!( + tag: 'tag2', name: 'tag2', released_at: (date_time - 2.minutes), author_id: test_user.id + ) + releases_table.new( + tag: 'tag3', name: 'tag3', released_at: (date_time - 3.minutes), author_id: nil + ).save!(validate: false) + releases_table.new( + tag: 'tag4', name: 'tag4', released_at: (date_time - 4.minutes), author_id: nil + ).save!(validate: false) end it 'schedules a new batched migration' do diff --git a/spec/migrations/20221221110733_remove_temp_index_for_project_statistics_upload_size_migration_spec.rb b/spec/migrations/20221221110733_remove_temp_index_for_project_statistics_upload_size_migration_spec.rb index 6f9cfe4764a..440a932c76b 100644 --- a/spec/migrations/20221221110733_remove_temp_index_for_project_statistics_upload_size_migration_spec.rb +++ b/spec/migrations/20221221110733_remove_temp_index_for_project_statistics_upload_size_migration_spec.rb @@ -4,7 +4,7 @@ require 'spec_helper' require_migration! 
RSpec.describe RemoveTempIndexForProjectStatisticsUploadSizeMigration, -feature_category: :subscription_cost_management do + feature_category: :consumables_cost_management do let(:table_name) { 'project_statistics' } let(:index_name) { described_class::INDEX_NAME } diff --git a/spec/migrations/20230105172120_sync_new_amount_used_with_amount_used_on_ci_namespace_monthly_usages_table_spec.rb b/spec/migrations/20230105172120_sync_new_amount_used_with_amount_used_on_ci_namespace_monthly_usages_table_spec.rb index aa82ca2661b..70c9c1333b8 100644 --- a/spec/migrations/20230105172120_sync_new_amount_used_with_amount_used_on_ci_namespace_monthly_usages_table_spec.rb +++ b/spec/migrations/20230105172120_sync_new_amount_used_with_amount_used_on_ci_namespace_monthly_usages_table_spec.rb @@ -5,7 +5,7 @@ require 'spec_helper' require_migration! RSpec.describe SyncNewAmountUsedWithAmountUsedOnCiNamespaceMonthlyUsagesTable, migration: :gitlab_ci, -feature_category: :continuous_integration do + feature_category: :continuous_integration do let(:namespace_usages) { table(:ci_namespace_monthly_usages) } let(:migration) { described_class.new } diff --git a/spec/migrations/20230201171450_finalize_backfill_environment_tier_migration_spec.rb b/spec/migrations/20230201171450_finalize_backfill_environment_tier_migration_spec.rb index 3fc9c7d8af7..e7a78f11f16 100644 --- a/spec/migrations/20230201171450_finalize_backfill_environment_tier_migration_spec.rb +++ b/spec/migrations/20230201171450_finalize_backfill_environment_tier_migration_spec.rb @@ -18,6 +18,10 @@ RSpec.describe FinalizeBackfillEnvironmentTierMigration, :migration, feature_cat end context 'when migration is missing' do + before do + batched_migrations.where(job_class_name: migration).delete_all + end + it 'warns migration not found' do expect(Gitlab::AppLogger) .to receive(:warn).with(/Could not find batched background migration for the given configuration:/) @@ -29,7 +33,7 @@ RSpec.describe 
FinalizeBackfillEnvironmentTierMigration, :migration, feature_cat context 'with migration present' do let!(:group_member_namespace_id_backfill) do batched_migrations.create!( - job_class_name: 'BackfillEnvironmentTiers', + job_class_name: migration, table_name: :environments, column_name: :id, job_arguments: [], diff --git a/spec/migrations/20230202131928_encrypt_ci_trigger_token_spec.rb b/spec/migrations/20230202131928_encrypt_ci_trigger_token_spec.rb index a8896e7d3cf..597cd7c1581 100644 --- a/spec/migrations/20230202131928_encrypt_ci_trigger_token_spec.rb +++ b/spec/migrations/20230202131928_encrypt_ci_trigger_token_spec.rb @@ -9,14 +9,6 @@ RSpec.describe EncryptCiTriggerToken, migration: :gitlab_ci, feature_category: : let!(:migration) { described_class::MIGRATION } describe '#up' do - shared_examples 'finalizes the migration' do - it 'finalizes the migration' do - allow_next_instance_of(Gitlab::Database::BackgroundMigration::BatchedMigrationRunner) do |runner| - expect(runner).to receive(:finalize).with('EncryptCiTriggerToken', :ci_triggers, :id, []) - end - end - end - context 'with migration present' do let!(:ci_trigger_token_encryption_migration) do batched_migrations.create!( @@ -51,25 +43,6 @@ RSpec.describe EncryptCiTriggerToken, migration: :gitlab_ci, feature_category: : ) end end - - context 'with different migration statuses' do - using RSpec::Parameterized::TableSyntax - - where(:status, :description) do - 0 | 'paused' - 1 | 'active' - 4 | 'failed' - 5 | 'finalizing' - end - - with_them do - before do - ci_trigger_token_encryption_migration.update!(status: status) - end - - it_behaves_like 'finalizes the migration' - end - end end end diff --git a/spec/migrations/20230208125736_schedule_migration_for_links_spec.rb b/spec/migrations/20230208125736_schedule_migration_for_links_spec.rb index dd1c30415a4..035f13b8309 100644 --- a/spec/migrations/20230208125736_schedule_migration_for_links_spec.rb +++ 
b/spec/migrations/20230208125736_schedule_migration_for_links_spec.rb @@ -10,13 +10,7 @@ RSpec.describe ScheduleMigrationForLinks, :migration, feature_category: :vulnera it 'schedules a batched background migration' do migrate! - expect(migration).to have_scheduled_batched_migration( - table_name: :vulnerability_occurrences, - column_name: :id, - interval: described_class::DELAY_INTERVAL, - batch_size: described_class::BATCH_SIZE, - sub_batch_size: described_class::SUB_BATCH_SIZE - ) + expect(migration).not_to have_scheduled_batched_migration end end diff --git a/spec/migrations/20230209222452_schedule_remove_project_group_link_with_missing_groups_spec.rb b/spec/migrations/20230209222452_schedule_remove_project_group_link_with_missing_groups_spec.rb new file mode 100644 index 00000000000..13ae12b2774 --- /dev/null +++ b/spec/migrations/20230209222452_schedule_remove_project_group_link_with_missing_groups_spec.rb @@ -0,0 +1,32 @@ +# frozen_string_literal: true + +require 'spec_helper' +require_migration! + +RSpec.describe ScheduleRemoveProjectGroupLinkWithMissingGroups, feature_category: :subgroups do + let!(:migration) { described_class::MIGRATION } + + describe '#up' do + it 'schedules background migration' do + migrate! + + expect(migration).to have_scheduled_batched_migration( + table_name: :project_group_links, + column_name: :id, + interval: described_class::DELAY_INTERVAL, + batch_size: described_class::BATCH_SIZE, + max_batch_size: described_class::MAX_BATCH_SIZE, + sub_batch_size: described_class::SUB_BATCH_SIZE + ) + end + end + + describe '#down' do + it 'removes scheduled background migrations' do + migrate! + schema_migrate_down! 
+ + expect(migration).not_to have_scheduled_batched_migration + end + end +end diff --git a/spec/migrations/20230223065753_finalize_nullify_creator_id_of_orphaned_projects_spec.rb b/spec/migrations/20230223065753_finalize_nullify_creator_id_of_orphaned_projects_spec.rb index 9163c30fe30..e4adf3ca540 100644 --- a/spec/migrations/20230223065753_finalize_nullify_creator_id_of_orphaned_projects_spec.rb +++ b/spec/migrations/20230223065753_finalize_nullify_creator_id_of_orphaned_projects_spec.rb @@ -26,6 +26,10 @@ RSpec.describe FinalizeNullifyCreatorIdOfOrphanedProjects, :migration, feature_c end context 'when migration is missing' do + before do + batched_migrations.where(job_class_name: migration).delete_all + end + it 'warns migration not found' do expect(Gitlab::AppLogger) .to receive(:warn).with(/Could not find batched background migration for the given configuration:/) @@ -37,7 +41,7 @@ RSpec.describe FinalizeNullifyCreatorIdOfOrphanedProjects, :migration, feature_c context 'with migration present' do let!(:migration_record) do batched_migrations.create!( - job_class_name: 'NullifyCreatorIdColumnOfOrphanedProjects', + job_class_name: migration, table_name: :projects, column_name: :id, job_arguments: [], diff --git a/spec/migrations/20230224144233_migrate_evidences_from_raw_metadata_spec.rb b/spec/migrations/20230224144233_migrate_evidences_from_raw_metadata_spec.rb index 9b38557c8c3..6610f70be2b 100644 --- a/spec/migrations/20230224144233_migrate_evidences_from_raw_metadata_spec.rb +++ b/spec/migrations/20230224144233_migrate_evidences_from_raw_metadata_spec.rb @@ -10,13 +10,7 @@ RSpec.describe MigrateEvidencesFromRawMetadata, :migration, feature_category: :v it 'schedules a batched background migration' do migrate! 
- expect(migration).to have_scheduled_batched_migration( - table_name: :vulnerability_occurrences, - column_name: :id, - interval: described_class::DELAY_INTERVAL, - batch_size: described_class::BATCH_SIZE, - sub_batch_size: described_class::SUB_BATCH_SIZE - ) + expect(migration).not_to have_scheduled_batched_migration end end diff --git a/spec/migrations/20230228142350_add_notifications_work_item_widget_spec.rb b/spec/migrations/20230228142350_add_notifications_work_item_widget_spec.rb index 065b6d00ddb..7161ca35edd 100644 --- a/spec/migrations/20230228142350_add_notifications_work_item_widget_spec.rb +++ b/spec/migrations/20230228142350_add_notifications_work_item_widget_spec.rb @@ -4,24 +4,5 @@ require 'spec_helper' require_migration! RSpec.describe AddNotificationsWorkItemWidget, :migration, feature_category: :team_planning do - let(:migration) { described_class.new } - let(:work_item_definitions) { table(:work_item_widget_definitions) } - - describe '#up' do - it 'creates notifications widget definition in all types' do - work_item_definitions.where(name: 'Notifications').delete_all - - expect { migrate! }.to change { work_item_definitions.count }.by(7) - expect(work_item_definitions.all.pluck(:name)).to include('Notifications') - end - end - - describe '#down' do - it 'removes definitions for notifications widget' do - migrate! 
- - expect { migration.down }.to change { work_item_definitions.count }.by(-7) - expect(work_item_definitions.all.pluck(:name)).not_to include('Notifications') - end - end + it_behaves_like 'migration that adds widget to work items definitions', widget_name: 'Notifications' end diff --git a/spec/migrations/20230306195007_queue_backfill_project_wiki_repositories_spec.rb b/spec/migrations/20230306195007_queue_backfill_project_wiki_repositories_spec.rb deleted file mode 100644 index 07f501a3f98..00000000000 --- a/spec/migrations/20230306195007_queue_backfill_project_wiki_repositories_spec.rb +++ /dev/null @@ -1,26 +0,0 @@ -# frozen_string_literal: true - -require 'spec_helper' -require_migration! - -RSpec.describe QueueBackfillProjectWikiRepositories, feature_category: :geo_replication do - let!(:batched_migration) { described_class::MIGRATION } - - it 'schedules a new batched migration' do - reversible_migration do |migration| - migration.before -> { - expect(batched_migration).not_to have_scheduled_batched_migration - } - - migration.after -> { - expect(batched_migration).to have_scheduled_batched_migration( - table_name: :projects, - column_name: :id, - interval: described_class::DELAY_INTERVAL, - batch_size: described_class::BATCH_SIZE, - sub_batch_size: described_class::SUB_BATCH_SIZE - ) - } - end - end -end diff --git a/spec/migrations/20230313142631_backfill_ml_candidates_package_id_spec.rb b/spec/migrations/20230313142631_backfill_ml_candidates_package_id_spec.rb new file mode 100644 index 00000000000..57ddb0504ec --- /dev/null +++ b/spec/migrations/20230313142631_backfill_ml_candidates_package_id_spec.rb @@ -0,0 +1,61 @@ +# frozen_string_literal: true + +require 'spec_helper' + +require_migration! 
+ +RSpec.describe BackfillMlCandidatesPackageId, feature_category: :mlops do + let(:migration) { described_class.new } + + let(:projects) { table(:projects) } + let(:namespaces) { table(:namespaces) } + let(:ml_experiments) { table(:ml_experiments) } + let(:ml_candidates) { table(:ml_candidates) } + let(:packages_packages) { table(:packages_packages) } + + let(:namespace) { namespaces.create!(name: 'foo', path: 'foo') } + let(:project) { projects.create!(project_namespace_id: namespace.id, namespace_id: namespace.id) } + let(:experiment) { ml_experiments.create!(project_id: project.id, iid: 1, name: 'experiment') } + let!(:candidate1) { ml_candidates.create!(experiment_id: experiment.id, iid: SecureRandom.uuid) } + let!(:candidate2) { ml_candidates.create!(experiment_id: experiment.id, iid: SecureRandom.uuid) } + let!(:package1) do + packages_packages.create!( + project_id: project.id, + name: "ml_candidate_#{candidate1.id}", + version: "-", + package_type: 7 + ) + end + + let!(:package2) do + packages_packages.create!( + project_id: project.id, + name: "ml_candidate_1000", + version: "-", + package_type: 7) + end + + let!(:package3) do + packages_packages.create!( + project_id: project.id, + name: "ml_candidate_abcde", + version: "-", + package_type: 7 + ) + end + + describe '#up' do + it 'sets the correct package_ids with idempotency', :aggregate_failures do + migration.up + + expect(candidate1.reload.package_id).to be(package1.id) + expect(candidate2.reload.package_id).to be(nil) + + migration.down + migration.up + + expect(candidate1.reload.package_id).to be(package1.id) + expect(candidate2.reload.package_id).to be(nil) + end + end +end diff --git a/spec/migrations/20230314144640_reschedule_migration_for_links_spec.rb b/spec/migrations/20230314144640_reschedule_migration_for_links_spec.rb new file mode 100644 index 00000000000..45c00416bcc --- /dev/null +++ b/spec/migrations/20230314144640_reschedule_migration_for_links_spec.rb @@ -0,0 +1,31 @@ +# 
frozen_string_literal: true + +require 'spec_helper' +require_migration! + +RSpec.describe RescheduleMigrationForLinks, :migration, feature_category: :vulnerability_management do + let(:migration) { described_class::MIGRATION } + + describe '#up' do + it 'schedules a batched background migration' do + migrate! + + expect(migration).to have_scheduled_batched_migration( + table_name: :vulnerability_occurrences, + column_name: :id, + interval: described_class::DELAY_INTERVAL, + batch_size: described_class::BATCH_SIZE, + sub_batch_size: described_class::SUB_BATCH_SIZE + ) + end + end + + describe '#down' do + it 'deletes all batched migration records' do + migrate! + schema_migrate_down! + + expect(migration).not_to have_scheduled_batched_migration + end + end +end diff --git a/spec/migrations/20230317162059_add_current_user_todos_work_item_widget_spec.rb b/spec/migrations/20230317162059_add_current_user_todos_work_item_widget_spec.rb new file mode 100644 index 00000000000..1df80a519f2 --- /dev/null +++ b/spec/migrations/20230317162059_add_current_user_todos_work_item_widget_spec.rb @@ -0,0 +1,8 @@ +# frozen_string_literal: true + +require 'spec_helper' +require_migration! + +RSpec.describe AddCurrentUserTodosWorkItemWidget, :migration, feature_category: :team_planning do + it_behaves_like 'migration that adds widget to work items definitions', widget_name: 'Current user todos' +end diff --git a/spec/migrations/20230321153035_add_package_id_created_at_desc_index_to_package_files_spec.rb b/spec/migrations/20230321153035_add_package_id_created_at_desc_index_to_package_files_spec.rb new file mode 100644 index 00000000000..68f3b1f23a9 --- /dev/null +++ b/spec/migrations/20230321153035_add_package_id_created_at_desc_index_to_package_files_spec.rb @@ -0,0 +1,20 @@ +# frozen_string_literal: true + +require 'spec_helper' +require_migration! 
+ +RSpec.describe AddPackageIdCreatedAtDescIndexToPackageFiles, feature_category: :package_registry do + it 'correctly migrates up and down' do + reversible_migration do |migration| + migration.before -> { + expect(ActiveRecord::Base.connection.indexes('packages_package_files').map(&:name)) + .not_to include('index_packages_package_files_on_package_id_and_created_at_desc') + } + + migration.after -> { + expect(ActiveRecord::Base.connection.indexes('packages_package_files').map(&:name)) + .to include('index_packages_package_files_on_package_id_and_created_at_desc') + } + end + end +end diff --git a/spec/migrations/20230321163947_backfill_ml_candidates_project_id_spec.rb b/spec/migrations/20230321163947_backfill_ml_candidates_project_id_spec.rb new file mode 100644 index 00000000000..da76794a74c --- /dev/null +++ b/spec/migrations/20230321163947_backfill_ml_candidates_project_id_spec.rb @@ -0,0 +1,50 @@ +# frozen_string_literal: true + +require 'spec_helper' + +require_migration! + +RSpec.describe BackfillMlCandidatesProjectId, feature_category: :mlops do + let(:migration) { described_class.new } + + let(:projects) { table(:projects) } + let(:namespaces) { table(:namespaces) } + let(:ml_experiments) { table(:ml_experiments) } + let(:ml_candidates) { table(:ml_candidates) } + + let(:namespace1) { namespaces.create!(name: 'foo', path: 'foo') } + let(:namespace2) { namespaces.create!(name: 'bar', path: 'bar') } + let(:project1) { projects.create!(project_namespace_id: namespace1.id, namespace_id: namespace1.id) } + let(:project2) { projects.create!(project_namespace_id: namespace2.id, namespace_id: namespace2.id) } + let(:experiment1) { ml_experiments.create!(project_id: project1.id, iid: 1, name: 'experiment') } + let(:experiment2) { ml_experiments.create!(project_id: project2.id, iid: 1, name: 'experiment') } + let!(:candidate1) do + ml_candidates.create!(experiment_id: experiment1.id, project_id: nil, eid: SecureRandom.uuid) + end + + let!(:candidate2) do + 
ml_candidates.create!(experiment_id: experiment2.id, project_id: nil, eid: SecureRandom.uuid) + end + + let!(:candidate3) do + ml_candidates.create!(experiment_id: experiment1.id, project_id: project1.id, eid: SecureRandom.uuid) + end + + describe '#up' do + it 'sets the correct project_id with idempotency', :aggregate_failures do + migration.up + + expect(candidate1.reload.project_id).to be(project1.id) + expect(candidate2.reload.project_id).to be(project2.id) + # in case we have candidates added between the column addition and the migration + expect(candidate3.reload.project_id).to be(project1.id) + + migration.down + migration.up + + expect(candidate1.reload.project_id).to be(project1.id) + expect(candidate2.reload.project_id).to be(project2.id) + expect(candidate3.reload.project_id).to be(project1.id) + end + end +end diff --git a/spec/migrations/20230321170823_backfill_ml_candidates_internal_id_spec.rb b/spec/migrations/20230321170823_backfill_ml_candidates_internal_id_spec.rb new file mode 100644 index 00000000000..c8f7b19490a --- /dev/null +++ b/spec/migrations/20230321170823_backfill_ml_candidates_internal_id_spec.rb @@ -0,0 +1,64 @@ +# frozen_string_literal: true + +require 'spec_helper' + +require_migration! 
+ +RSpec.describe BackfillMlCandidatesInternalId, feature_category: :mlops do + let(:migration) { described_class.new } + + let(:projects) { table(:projects) } + let(:namespaces) { table(:namespaces) } + let(:ml_experiments) { table(:ml_experiments) } + let(:ml_candidates) { table(:ml_candidates) } + + let(:namespace1) { namespaces.create!(name: 'foo', path: 'foo') } + let(:namespace2) { namespaces.create!(name: 'bar', path: 'bar') } + let(:project1) { projects.create!(project_namespace_id: namespace1.id, namespace_id: namespace1.id) } + let(:project2) { projects.create!(project_namespace_id: namespace2.id, namespace_id: namespace2.id) } + let(:experiment1) { ml_experiments.create!(project_id: project1.id, iid: 1, name: 'experiment1') } + let(:experiment2) { ml_experiments.create!(project_id: project1.id, iid: 2, name: 'experiment2') } + let(:experiment3) { ml_experiments.create!(project_id: project2.id, iid: 1, name: 'experiment3') } + + let!(:candidate1) do + ml_candidates.create!(experiment_id: experiment1.id, project_id: project1.id, eid: SecureRandom.uuid) + end + + let!(:candidate2) do + ml_candidates.create!(experiment_id: experiment2.id, project_id: project1.id, eid: SecureRandom.uuid) + end + + let!(:candidate3) do + ml_candidates.create!(experiment_id: experiment1.id, project_id: project1.id, eid: SecureRandom.uuid) + end + + let!(:candidate4) do + ml_candidates.create!(experiment_id: experiment1.id, project_id: project1.id, internal_id: 1, + eid: SecureRandom.uuid) + end + + let!(:candidate5) do + ml_candidates.create!(experiment_id: experiment3.id, project_id: project2.id, eid: SecureRandom.uuid) + end + + describe '#up' do + it 'sets the correct project_id with idempotency', :aggregate_failures do + migration.up + + expect(candidate4.reload.internal_id).to be(1) # candidate 4 already has an internal_id + expect(candidate1.reload.internal_id).to be(2) + expect(candidate2.reload.internal_id).to be(3) + expect(candidate3.reload.internal_id).to be(4) + 
expect(candidate5.reload.internal_id).to be(1) # candidate 5 is a different project + + migration.down + migration.up + + expect(candidate4.reload.internal_id).to be(1) + expect(candidate1.reload.internal_id).to be(2) + expect(candidate2.reload.internal_id).to be(3) + expect(candidate3.reload.internal_id).to be(4) + expect(candidate5.reload.internal_id).to be(1) + end + end +end diff --git a/spec/migrations/20230322085041_remove_user_namespace_records_from_vsa_aggregation_spec.rb b/spec/migrations/20230322085041_remove_user_namespace_records_from_vsa_aggregation_spec.rb new file mode 100644 index 00000000000..e5f64ef2e70 --- /dev/null +++ b/spec/migrations/20230322085041_remove_user_namespace_records_from_vsa_aggregation_spec.rb @@ -0,0 +1,41 @@ +# frozen_string_literal: true + +require 'spec_helper' +require_migration! + +RSpec.describe RemoveUserNamespaceRecordsFromVsaAggregation, + migration: :gitlab_main, + feature_category: :value_stream_management do + let(:migration) { described_class::MIGRATION } + let!(:namespaces) { table(:namespaces) } + let!(:aggregations) { table(:analytics_cycle_analytics_aggregations) } + + let!(:group) { namespaces.create!(name: 'aaa', path: 'aaa', type: 'Group') } + let!(:user_namespace) { namespaces.create!(name: 'ccc', path: 'ccc', type: 'User') } + let!(:project_namespace) { namespaces.create!(name: 'bbb', path: 'bbb', type: 'Project') } + + let!(:group_aggregation) { aggregations.create!(group_id: group.id) } + let!(:user_namespace_aggregation) { aggregations.create!(group_id: user_namespace.id) } + let!(:project_namespace_aggregation) { aggregations.create!(group_id: project_namespace.id) } + + describe '#up' do + it 'deletes the non-group namespace aggregation records' do + stub_const('RemoveUserNamespaceRecordsFromVsaAggregation::BATCH_SIZE', 1) + + expect { migrate! 
}.to change { + aggregations.order(:group_id) + }.from([group_aggregation, user_namespace_aggregation, + project_namespace_aggregation]).to([group_aggregation]) + end + end + + describe '#down' do + it 'does nothing' do + migrate! + + expect { schema_migrate_down! }.not_to change { + aggregations.order(:group_id).pluck(:group_id) + }.from([group_aggregation.id]) + end + end +end diff --git a/spec/migrations/20230322145403_add_project_id_foreign_key_to_packages_npm_metadata_caches_spec.rb b/spec/migrations/20230322145403_add_project_id_foreign_key_to_packages_npm_metadata_caches_spec.rb new file mode 100644 index 00000000000..647c583aa39 --- /dev/null +++ b/spec/migrations/20230322145403_add_project_id_foreign_key_to_packages_npm_metadata_caches_spec.rb @@ -0,0 +1,24 @@ +# frozen_string_literal: true + +require 'spec_helper' + +require_migration! + +RSpec.describe AddProjectIdForeignKeyToPackagesNpmMetadataCaches, + feature_category: :package_registry do + let(:table) { described_class::SOURCE_TABLE } + let(:column) { described_class::COLUMN } + let(:foreign_key) { -> { described_class.new.foreign_keys_for(table, column).first } } + + it 'creates and drops the foreign key' do + reversible_migration do |migration| + migration.before -> do + expect(foreign_key.call).to be(nil) + end + + migration.after -> do + expect(foreign_key.call).to have_attributes(column: column.to_s) + end + end + end +end diff --git a/spec/migrations/20230323101138_add_award_emoji_work_item_widget_spec.rb b/spec/migrations/20230323101138_add_award_emoji_work_item_widget_spec.rb new file mode 100644 index 00000000000..16a205c5da5 --- /dev/null +++ b/spec/migrations/20230323101138_add_award_emoji_work_item_widget_spec.rb @@ -0,0 +1,8 @@ +# frozen_string_literal: true + +require 'spec_helper' +require_migration! 
+ +RSpec.describe AddAwardEmojiWorkItemWidget, :migration, feature_category: :team_planning do + it_behaves_like 'migration that adds widget to work items definitions', widget_name: 'Award emoji' +end diff --git a/spec/migrations/20230327123333_backfill_product_analytics_data_collector_host_spec.rb b/spec/migrations/20230327123333_backfill_product_analytics_data_collector_host_spec.rb new file mode 100644 index 00000000000..253512c9194 --- /dev/null +++ b/spec/migrations/20230327123333_backfill_product_analytics_data_collector_host_spec.rb @@ -0,0 +1,47 @@ +# frozen_string_literal: true + +require "spec_helper" +require_migration! + +RSpec.describe BackfillProductAnalyticsDataCollectorHost, feature_category: :product_analytics do + let!(:application_settings) { table(:application_settings) } + + describe '#up' do + before do + create_application_settings!(id: 1, jitsu_host: "https://configurator.testing.my-product-analytics.com", + product_analytics_data_collector_host: nil) + create_application_settings!(id: 2, jitsu_host: "https://config-urator_1.testing.my-product-analytics.com", + product_analytics_data_collector_host: nil) + create_application_settings!(id: 3, jitsu_host: "https://configurator.testing.my-product-analytics.com", + product_analytics_data_collector_host: "https://existingcollector.my-product-analytics.com") + create_application_settings!(id: 4, jitsu_host: nil, product_analytics_data_collector_host: nil) + migrate! 
+ end + + describe 'when jitsu host is present' do + it 'backfills missing product_analytics_data_collector_host' do + expect(application_settings.find(1).product_analytics_data_collector_host).to eq("https://collector.testing.my-product-analytics.com") + expect(application_settings.find(2).product_analytics_data_collector_host).to eq("https://collector.testing.my-product-analytics.com") + end + + it 'does not modify existing product_analytics_data_collector_host' do + expect(application_settings.find(3).product_analytics_data_collector_host).to eq("https://existingcollector.my-product-analytics.com") + end + end + + describe 'when jitsu host is not present' do + it 'does not backfill product_analytics_data_collector_host' do + expect(application_settings.find(4).product_analytics_data_collector_host).to be_nil + end + end + end + + def create_application_settings!(id:, jitsu_host:, product_analytics_data_collector_host:) + params = { + id: id, + jitsu_host: jitsu_host, + product_analytics_data_collector_host: product_analytics_data_collector_host + } + application_settings.create!(params) + end +end diff --git a/spec/migrations/20230328100534_truncate_error_tracking_tables_spec.rb b/spec/migrations/20230328100534_truncate_error_tracking_tables_spec.rb new file mode 100644 index 00000000000..efbbe22fd1b --- /dev/null +++ b/spec/migrations/20230328100534_truncate_error_tracking_tables_spec.rb @@ -0,0 +1,56 @@ +# frozen_string_literal: true + +require 'spec_helper' +require_migration! 
+
+RSpec.describe TruncateErrorTrackingTables, :migration, feature_category: :redis do
+  let(:migration) { described_class.new }
+
+  context 'when on GitLab.com' do
+    before do
+      allow(Gitlab).to receive(:com?).and_return(true)
+    end
+
+    context 'when using Main db' do
+      it 'truncates the table' do
+        expect(described_class.connection).to receive(:execute).with('TRUNCATE table error_tracking_errors CASCADE')
+
+        migration.up
+      end
+    end
+
+    context 'when using CI db', migration: :gitlab_ci do
+      before do
+        skip_if_multiple_databases_not_setup(:ci)
+      end
+
+      it 'does not truncate the table' do
+        expect(described_class.connection).not_to receive(:execute).with('TRUNCATE table error_tracking_errors CASCADE')
+
+        migration.up
+      end
+    end
+  end
+
+  context 'when on self-managed' do
+    before do
+      allow(Gitlab).to receive(:com?).and_return(false)
+    end
+
+    context 'when using Main db' do
+      it 'does not truncate the table' do
+        expect(described_class.connection).not_to receive(:execute).with('TRUNCATE table error_tracking_errors CASCADE')
+
+        migration.up
+      end
+    end
+
+    context 'when using CI db', migration: :gitlab_ci do
+      it 'does not truncate the table' do
+        expect(described_class.connection).not_to receive(:execute).with('TRUNCATE table error_tracking_errors CASCADE')
+
+        migration.up
+      end
+    end
+  end
+end
diff --git a/spec/migrations/20230329100222_drop_software_licenses_temp_index_spec.rb b/spec/migrations/20230329100222_drop_software_licenses_temp_index_spec.rb
new file mode 100644
index 00000000000..d4d276980f8
--- /dev/null
+++ b/spec/migrations/20230329100222_drop_software_licenses_temp_index_spec.rb
@@ -0,0 +1,20 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+require_migration!
+ +RSpec.describe DropSoftwareLicensesTempIndex, feature_category: :security_policy_management do + it 'correctly migrates up and down' do + reversible_migration do |migration| + migration.before -> { + expect(ActiveRecord::Base.connection.indexes('software_licenses').map(&:name)) + .to include(described_class::INDEX_NAME) + } + + migration.after -> { + expect(ActiveRecord::Base.connection.indexes('software_licenses').map(&:name)) + .not_to include(described_class::INDEX_NAME) + } + end + end +end diff --git a/spec/migrations/20230330103104_reschedule_migrate_evidences_spec.rb b/spec/migrations/20230330103104_reschedule_migrate_evidences_spec.rb new file mode 100644 index 00000000000..4f490ec90cb --- /dev/null +++ b/spec/migrations/20230330103104_reschedule_migrate_evidences_spec.rb @@ -0,0 +1,31 @@ +# frozen_string_literal: true + +require 'spec_helper' +require_migration! + +RSpec.describe RescheduleMigrateEvidences, :migration, feature_category: :vulnerability_management do + let(:migration) { described_class::MIGRATION } + + describe '#up' do + it 'schedules a batched background migration' do + migrate! + + expect(migration).to have_scheduled_batched_migration( + table_name: :vulnerability_occurrences, + column_name: :id, + interval: described_class::DELAY_INTERVAL, + batch_size: described_class::BATCH_SIZE, + sub_batch_size: described_class::SUB_BATCH_SIZE + ) + end + end + + describe '#down' do + it 'deletes all batched migration records' do + migrate! + schema_migrate_down! 
+ + expect(migration).not_to have_scheduled_batched_migration + end + end +end diff --git a/spec/migrations/20230403085957_add_tmp_partial_index_on_vulnerability_report_types2_spec.rb b/spec/migrations/20230403085957_add_tmp_partial_index_on_vulnerability_report_types2_spec.rb new file mode 100644 index 00000000000..5203e772d15 --- /dev/null +++ b/spec/migrations/20230403085957_add_tmp_partial_index_on_vulnerability_report_types2_spec.rb @@ -0,0 +1,49 @@ +# frozen_string_literal: true + +require "spec_helper" + +require_migration! + +RSpec.describe AddTmpPartialIndexOnVulnerabilityReportTypes2, feature_category: :vulnerability_management do + let(:async_index) { Gitlab::Database::AsyncIndexes::PostgresAsyncIndex } + let(:index_name) { described_class::INDEX_NAME } + + before do + allow_any_instance_of(ActiveRecord::ConnectionAdapters::SchemaStatements) # rubocop:disable RSpec/AnyInstanceOf + .to receive(:index_exists?) + .with("vulnerability_occurrences", :id, hash_including(name: index_name)) + .and_return(index_exists) + end + + context "with index absent" do + let(:index_exists) { false } + + it "schedules the index" do + reversible_migration do |migration| + migration.before -> do + expect(async_index.where(name: index_name).count).to be(0) + end + + migration.after -> do + expect(async_index.where(name: index_name).count).to be(1) + end + end + end + end + + context "with index present" do + let(:index_exists) { true } + + it "does not schedule the index" do + reversible_migration do |migration| + migration.before -> do + expect(async_index.where(name: index_name).count).to be(0) + end + + migration.after -> do + expect(async_index.where(name: index_name).count).to be(0) + end + end + end + end +end diff --git a/spec/migrations/20230405200858_requeue_backfill_project_wiki_repositories_spec.rb b/spec/migrations/20230405200858_requeue_backfill_project_wiki_repositories_spec.rb new file mode 100644 index 00000000000..cf42818152f --- /dev/null +++ 
b/spec/migrations/20230405200858_requeue_backfill_project_wiki_repositories_spec.rb @@ -0,0 +1,26 @@ +# frozen_string_literal: true + +require 'spec_helper' +require_migration! + +RSpec.describe RequeueBackfillProjectWikiRepositories, feature_category: :geo_replication do + let!(:batched_migration) { described_class::MIGRATION } + + it 'schedules a new batched migration' do + reversible_migration do |migration| + migration.before -> { + expect(batched_migration).not_to have_scheduled_batched_migration + } + + migration.after -> { + expect(batched_migration).to have_scheduled_batched_migration( + table_name: :projects, + column_name: :id, + interval: described_class::DELAY_INTERVAL, + batch_size: described_class::BATCH_SIZE, + sub_batch_size: described_class::SUB_BATCH_SIZE + ) + } + end + end +end diff --git a/spec/migrations/20230411153310_cleanup_bigint_conversion_for_sent_notifications_spec.rb b/spec/migrations/20230411153310_cleanup_bigint_conversion_for_sent_notifications_spec.rb new file mode 100644 index 00000000000..5780aa365da --- /dev/null +++ b/spec/migrations/20230411153310_cleanup_bigint_conversion_for_sent_notifications_spec.rb @@ -0,0 +1,21 @@ +# frozen_string_literal: true + +require 'spec_helper' +require_migration!('cleanup_bigint_conversion_for_sent_notifications') + +RSpec.describe CleanupBigintConversionForSentNotifications, feature_category: :database do + let(:sent_notifications) { table(:sent_notifications) } + + it 'correctly migrates up and down' do + reversible_migration do |migration| + migration.before -> { + expect(sent_notifications.column_names).to include('id_convert_to_bigint') + } + + migration.after -> { + sent_notifications.reset_column_information + expect(sent_notifications.column_names).not_to include('id_convert_to_bigint') + } + end + end +end diff --git a/spec/migrations/20230412214119_finalize_encrypt_ci_trigger_token_spec.rb b/spec/migrations/20230412214119_finalize_encrypt_ci_trigger_token_spec.rb new file mode 100644 
index 00000000000..c30cafc915d --- /dev/null +++ b/spec/migrations/20230412214119_finalize_encrypt_ci_trigger_token_spec.rb @@ -0,0 +1,96 @@ +# frozen_string_literal: true + +require 'spec_helper' + +require_migration! + +RSpec.describe FinalizeEncryptCiTriggerToken, migration: :gitlab_ci, feature_category: :continuous_integration do + let(:batched_migrations) { table(:batched_background_migrations) } + let(:batch_failed_status) { 2 } + let(:batch_finalized_status) { 3 } + + let!(:migration) { described_class::MIGRATION } + + describe '#up' do + context 'when migration is missing' do + before do + batched_migrations.where(job_class_name: migration).delete_all + end + + it 'warns migration not found' do + expect(Gitlab::AppLogger) + .to receive(:warn).with(/Could not find batched background migration for the given configuration:/) + + migrate! + end + end + + context 'with migration present' do + let!(:migration_record) do + batched_migrations.create!( + job_class_name: migration, + table_name: :ci_triggers, + column_name: :id, + job_arguments: [], + interval: 2.minutes, + min_value: 1, + max_value: 2, + batch_size: 1000, + sub_batch_size: 100, + max_batch_size: 2000, + gitlab_schema: :gitlab_ci, + status: batch_finalized_status + ) + end + + context 'when migration finished successfully' do + it 'does not raise exception' do + expect { migrate! 
}.not_to raise_error + end + end + + context 'with different migration statuses', :redis do + using RSpec::Parameterized::TableSyntax + + where(:status, :description) do + 0 | 'paused' + 1 | 'active' + 4 | 'failed' + 5 | 'finalizing' + end + + with_them do + let!(:failed_job) do + table(:batched_background_migration_jobs).create!( + batched_background_migration_id: migration_record.id, + status: batch_failed_status, + min_value: 1, + max_value: 10, + attempts: 2, + batch_size: 100, + sub_batch_size: 10 + ) + end + + before do + migration_record.update!(status: status) + end + + it 'finalizes the migration' do + expect do + migrate! + + migration_record.reload + failed_job.reload + end.to( + change { migration_record.status }.from(status).to(batch_finalized_status) + .and( + change { failed_job.status }.from(batch_failed_status).to(batch_finalized_status) + ) + ) + end + end + end + end + end +end diff --git a/spec/migrations/add_open_source_plan_spec.rb b/spec/migrations/add_open_source_plan_spec.rb deleted file mode 100644 index f5d68f455e6..00000000000 --- a/spec/migrations/add_open_source_plan_spec.rb +++ /dev/null @@ -1,86 +0,0 @@ -# frozen_string_literal: true - -require 'spec_helper' - -require_migration! - -RSpec.describe AddOpenSourcePlan, :migration, feature_category: :purchase do - describe '#up' do - before do - allow(Gitlab).to receive(:com?).and_return true - end - - it 'creates 1 entry within the plans table' do - expect { migrate! }.to change { AddOpenSourcePlan::Plan.count }.by 1 - expect(AddOpenSourcePlan::Plan.last.name).to eql('opensource') - end - - it 'creates 1 entry for plan limits' do - expect { migrate! 
}.to change { AddOpenSourcePlan::PlanLimits.count }.by 1 - end - - context 'when the plan limits for gold and silver exists' do - before do - table(:plans).create!(id: 1, name: 'ultimate', title: 'Ultimate') - table(:plan_limits).create!(id: 1, plan_id: 1, storage_size_limit: 2000) - end - - it 'duplicates the gold and silvers plan limits entries' do - migrate! - - opensource_limits = AddOpenSourcePlan::Plan.find_by(name: 'opensource').limits - expect(opensource_limits.storage_size_limit).to be 2000 - end - end - - context 'when the instance is not SaaS' do - before do - allow(Gitlab).to receive(:com?).and_return false - end - - it 'does not create plans and plan limits and returns' do - expect { migrate! }.not_to change { AddOpenSourcePlan::Plan.count } - end - end - end - - describe '#down' do - before do - table(:plans).create!(id: 3, name: 'other') - table(:plan_limits).create!(plan_id: 3) - end - - context 'when the instance is SaaS' do - before do - allow(Gitlab).to receive(:com?).and_return true - end - - it 'removes the newly added opensource entry' do - migrate! - - expect { described_class.new.down }.to change { AddOpenSourcePlan::Plan.count }.by(-1) - expect(AddOpenSourcePlan::Plan.find_by(name: 'opensource')).to be_nil - - other_plan = AddOpenSourcePlan::Plan.find_by(name: 'other') - expect(other_plan).to be_persisted - expect(AddOpenSourcePlan::PlanLimits.count).to eq(1) - expect(AddOpenSourcePlan::PlanLimits.first.plan_id).to eq(other_plan.id) - end - end - - context 'when the instance is not SaaS' do - before do - allow(Gitlab).to receive(:com?).and_return false - table(:plans).create!(id: 1, name: 'opensource', title: 'Open Source Program') - table(:plan_limits).create!(id: 1, plan_id: 1) - end - - it 'does not delete plans and plan limits and returns' do - migrate! 
- - expect { described_class.new.down }.not_to change { AddOpenSourcePlan::Plan.count } - expect(AddOpenSourcePlan::PlanLimits.count).to eq(2) - end - end - end -end diff --git a/spec/migrations/backfill_all_project_namespaces_spec.rb b/spec/migrations/backfill_all_project_namespaces_spec.rb deleted file mode 100644 index 52fa46eea57..00000000000 --- a/spec/migrations/backfill_all_project_namespaces_spec.rb +++ /dev/null @@ -1,37 +0,0 @@ -# frozen_string_literal: true - -require 'spec_helper' -require_migration! - -RSpec.describe BackfillAllProjectNamespaces, :migration, feature_category: :subgroups do - let!(:migration) { described_class::MIGRATION } - - let(:projects) { table(:projects) } - let(:namespaces) { table(:namespaces) } - let(:user_namespace) { namespaces.create!(name: 'user1', path: 'user1', visibility_level: 20, type: 'User') } - let(:parent_group1) { namespaces.create!(name: 'parent_group1', path: 'parent_group1', visibility_level: 20, type: 'Group') } - let!(:parent_group1_project) { projects.create!(name: 'parent_group1_project', path: 'parent_group1_project', namespace_id: parent_group1.id, visibility_level: 20) } - let!(:user_namespace_project) { projects.create!(name: 'user1_project', path: 'user1_project', namespace_id: user_namespace.id, visibility_level: 20) } - - describe '#up' do - it 'schedules background jobs for each batch of namespaces' do - migrate! - - expect(migration).to have_scheduled_batched_migration( - table_name: :projects, - column_name: :id, - job_arguments: [nil, 'up'], - interval: described_class::DELAY_INTERVAL - ) - end - end - - describe '#down' do - it 'deletes all batched migration records' do - migrate! - schema_migrate_down! 
- - expect(migration).not_to have_scheduled_batched_migration - end - end -end diff --git a/spec/migrations/backfill_cycle_analytics_aggregations_spec.rb b/spec/migrations/backfill_cycle_analytics_aggregations_spec.rb deleted file mode 100644 index 47950f918c3..00000000000 --- a/spec/migrations/backfill_cycle_analytics_aggregations_spec.rb +++ /dev/null @@ -1,36 +0,0 @@ -# frozen_string_literal: true - -require 'spec_helper' -require_migration! - -RSpec.describe BackfillCycleAnalyticsAggregations, :migration, feature_category: :value_stream_management do - let(:migration) { described_class.new } - - let(:aggregations) { table(:analytics_cycle_analytics_aggregations) } - let(:namespaces) { table(:namespaces) } - let(:group_value_streams) { table(:analytics_cycle_analytics_group_value_streams) } - - context 'when there are value stream records' do - it 'inserts a record for each top-level namespace' do - group1 = namespaces.create!(path: 'aaa', name: 'aaa') - subgroup1 = namespaces.create!(path: 'bbb', name: 'bbb', parent_id: group1.id) - group2 = namespaces.create!(path: 'ccc', name: 'ccc') - - namespaces.create!(path: 'ddd', name: 'ddd') # not used - - group_value_streams.create!(name: 'for top level group', group_id: group2.id) - group_value_streams.create!(name: 'another for top level group', group_id: group2.id) - - group_value_streams.create!(name: 'for subgroup', group_id: subgroup1.id) - group_value_streams.create!(name: 'another for subgroup', group_id: subgroup1.id) - - migrate! - - expect(aggregations.pluck(:group_id)).to match_array([group1.id, group2.id]) - end - end - - it 'does nothing' do - expect { migrate! 
}.not_to change { aggregations.count } - end -end diff --git a/spec/migrations/backfill_group_features_spec.rb b/spec/migrations/backfill_group_features_spec.rb deleted file mode 100644 index 1e7729a97d8..00000000000 --- a/spec/migrations/backfill_group_features_spec.rb +++ /dev/null @@ -1,31 +0,0 @@ -# frozen_string_literal: true - -require 'spec_helper' -require_migration! - -RSpec.describe BackfillGroupFeatures, :migration, feature_category: :feature_flags do - let(:migration) { described_class::MIGRATION } - - describe '#up' do - it 'schedules background jobs for each batch of namespaces' do - migrate! - - expect(migration).to have_scheduled_batched_migration( - table_name: :namespaces, - column_name: :id, - job_arguments: [described_class::BATCH_SIZE], - interval: described_class::INTERVAL, - batch_size: described_class::BATCH_SIZE - ) - end - end - - describe '#down' do - it 'deletes all batched migration records' do - migrate! - schema_migrate_down! - - expect(migration).not_to have_scheduled_batched_migration - end - end -end diff --git a/spec/migrations/backfill_member_namespace_id_for_group_members_spec.rb b/spec/migrations/backfill_member_namespace_id_for_group_members_spec.rb deleted file mode 100644 index 892589dd770..00000000000 --- a/spec/migrations/backfill_member_namespace_id_for_group_members_spec.rb +++ /dev/null @@ -1,29 +0,0 @@ -# frozen_string_literal: true - -require 'spec_helper' -require_migration! - -RSpec.describe BackfillMemberNamespaceIdForGroupMembers, feature_category: :subgroups do - let!(:migration) { described_class::MIGRATION } - - describe '#up' do - it 'schedules background jobs for each batch of group members' do - migrate! - - expect(migration).to have_scheduled_batched_migration( - table_name: :members, - column_name: :id, - interval: described_class::INTERVAL - ) - end - end - - describe '#down' do - it 'deletes all batched migration records' do - migrate! - schema_migrate_down! 
- - expect(migration).not_to have_scheduled_batched_migration - end - end -end diff --git a/spec/migrations/backfill_namespace_id_for_namespace_routes_spec.rb b/spec/migrations/backfill_namespace_id_for_namespace_routes_spec.rb deleted file mode 100644 index 627b18cd889..00000000000 --- a/spec/migrations/backfill_namespace_id_for_namespace_routes_spec.rb +++ /dev/null @@ -1,29 +0,0 @@ -# frozen_string_literal: true - -require 'spec_helper' -require_migration! - -RSpec.describe BackfillNamespaceIdForNamespaceRoutes, feature_category: :projects do - let!(:migration) { described_class::MIGRATION } - - describe '#up' do - it 'schedules background jobs for each batch of routes' do - migrate! - - expect(migration).to have_scheduled_batched_migration( - table_name: :routes, - column_name: :id, - interval: described_class::INTERVAL - ) - end - end - - describe '#down' do - it 'deletes all batched migration records' do - migrate! - schema_migrate_down! - - expect(migration).not_to have_scheduled_batched_migration - end - end -end diff --git a/spec/migrations/backfill_project_namespaces_for_group_spec.rb b/spec/migrations/backfill_project_namespaces_for_group_spec.rb deleted file mode 100644 index b21ed6e1aa2..00000000000 --- a/spec/migrations/backfill_project_namespaces_for_group_spec.rb +++ /dev/null @@ -1,43 +0,0 @@ -# frozen_string_literal: true - -require 'spec_helper' -require_migration! 
- -RSpec.describe BackfillProjectNamespacesForGroup, feature_category: :subgroups do - let!(:migration) { described_class::MIGRATION } - - let(:projects) { table(:projects) } - let(:namespaces) { table(:namespaces) } - let(:parent_group1) { namespaces.create!(name: 'parent_group1', path: 'parent_group1', visibility_level: 20, type: 'Group') } - let!(:parent_group1_project) { projects.create!(name: 'parent_group1_project', path: 'parent_group1_project', namespace_id: parent_group1.id, visibility_level: 20) } - - before do - allow(Gitlab).to receive(:com?).and_return(true) - end - - describe '#up' do - before do - stub_const("BackfillProjectNamespacesForGroup::GROUP_ID", parent_group1.id) - end - - it 'schedules background jobs for each batch of namespaces' do - migrate! - - expect(migration).to have_scheduled_batched_migration( - table_name: :projects, - column_name: :id, - job_arguments: [described_class::GROUP_ID, 'up'], - interval: described_class::DELAY_INTERVAL - ) - end - end - - describe '#down' do - it 'deletes all batched migration records' do - migrate! - schema_migrate_down! - - expect(migration).not_to have_scheduled_batched_migration - end - end -end diff --git a/spec/migrations/backfill_user_namespace_spec.rb b/spec/migrations/backfill_user_namespace_spec.rb deleted file mode 100644 index a58030803b1..00000000000 --- a/spec/migrations/backfill_user_namespace_spec.rb +++ /dev/null @@ -1,29 +0,0 @@ -# frozen_string_literal: true - -require 'spec_helper' -require_migration! - -RSpec.describe BackfillUserNamespace, feature_category: :subgroups do - let!(:migration) { described_class::MIGRATION } - - describe '#up' do - it 'schedules background jobs for each batch of namespaces' do - migrate! - - expect(migration).to have_scheduled_batched_migration( - table_name: :namespaces, - column_name: :id, - interval: described_class::INTERVAL - ) - end - end - - describe '#down' do - it 'deletes all batched migration records' do - migrate! - schema_migrate_down! 
- - expect(migration).not_to have_scheduled_batched_migration - end - end -end diff --git a/spec/migrations/bulk_insert_cluster_enabled_grants_spec.rb b/spec/migrations/bulk_insert_cluster_enabled_grants_spec.rb index e85489198ee..71ffdd66d62 100644 --- a/spec/migrations/bulk_insert_cluster_enabled_grants_spec.rb +++ b/spec/migrations/bulk_insert_cluster_enabled_grants_spec.rb @@ -3,7 +3,7 @@ require 'spec_helper' require_migration! -RSpec.describe BulkInsertClusterEnabledGrants, :migration, feature_category: :kubernetes_management do +RSpec.describe BulkInsertClusterEnabledGrants, :migration, feature_category: :deployment_management do let(:migration) { described_class.new } let(:cluster_enabled_grants) { table(:cluster_enabled_grants) } diff --git a/spec/migrations/cleanup_backfill_integrations_enable_ssl_verification_spec.rb b/spec/migrations/cleanup_backfill_integrations_enable_ssl_verification_spec.rb index 5854dcd3cb0..01c85f85e0b 100644 --- a/spec/migrations/cleanup_backfill_integrations_enable_ssl_verification_spec.rb +++ b/spec/migrations/cleanup_backfill_integrations_enable_ssl_verification_spec.rb @@ -4,7 +4,7 @@ require 'spec_helper' require_migration! RSpec.describe CleanupBackfillIntegrationsEnableSslVerification, :migration, -feature_category: :system_access do + feature_category: :system_access do let(:job_class_name) { 'BackfillIntegrationsEnableSslVerification' } before do diff --git a/spec/migrations/cleanup_vulnerability_state_transitions_with_same_from_state_to_state_spec.rb b/spec/migrations/cleanup_vulnerability_state_transitions_with_same_from_state_to_state_spec.rb index b808f03428d..b270f2b100f 100644 --- a/spec/migrations/cleanup_vulnerability_state_transitions_with_same_from_state_to_state_spec.rb +++ b/spec/migrations/cleanup_vulnerability_state_transitions_with_same_from_state_to_state_spec.rb @@ -4,7 +4,7 @@ require 'spec_helper' require_migration! 
RSpec.describe CleanupVulnerabilityStateTransitionsWithSameFromStateToState, :migration, -feature_category: :vulnerability_management do + feature_category: :vulnerability_management do let!(:namespace) { table(:namespaces).create!(name: 'namespace', type: 'Group', path: 'namespace') } let!(:user) { table(:users).create!(email: 'author@example.com', username: 'author', projects_limit: 10) } let!(:project) do diff --git a/spec/migrations/delete_migrate_shared_vulnerability_scanners_spec.rb b/spec/migrations/delete_migrate_shared_vulnerability_scanners_spec.rb index 562b1e25db4..8a0c0250cdf 100644 --- a/spec/migrations/delete_migrate_shared_vulnerability_scanners_spec.rb +++ b/spec/migrations/delete_migrate_shared_vulnerability_scanners_spec.rb @@ -9,37 +9,41 @@ RSpec.describe DeleteMigrateSharedVulnerabilityScanners, :migration, feature_cat let(:batched_background_migration_jobs) { table(:batched_background_migration_jobs) } let(:migration) do - batched_background_migrations.create!(created_at: Time.zone.now, - updated_at: Time.zone.now, - min_value: 1, - max_value: 1, - batch_size: described_class::BATCH_SIZE, - sub_batch_size: 100, - interval: 300, - status: 3, - job_class_name: described_class::MIGRATION, - batch_class_name: "PrimaryKeyBatchingStrategy", - table_name: described_class::TABLE_NAME, - column_name: described_class::BATCH_COLUMN, - job_arguments: [], - pause_ms: 100, - max_batch_size: 1000, - gitlab_schema: "gitlab_main") + batched_background_migrations.create!( + created_at: Time.zone.now, + updated_at: Time.zone.now, + min_value: 1, + max_value: 1, + batch_size: described_class::BATCH_SIZE, + sub_batch_size: 100, + interval: 300, + status: 3, + job_class_name: described_class::MIGRATION, + batch_class_name: "PrimaryKeyBatchingStrategy", + table_name: described_class::TABLE_NAME, + column_name: described_class::BATCH_COLUMN, + job_arguments: [], + pause_ms: 100, + max_batch_size: 1000, + gitlab_schema: "gitlab_main" + ) end let(:jobs) do 
Array.new(10) do - batched_background_migration_jobs.create!(batched_background_migration_id: migration.id, - created_at: Time.zone.now, - updated_at: Time.zone.now, - min_value: 1, - max_value: 1, - batch_size: 1, - sub_batch_size: 1, - status: 0, - attempts: 0, - metrics: {}, - pause_ms: 100) + batched_background_migration_jobs.create!( + batched_background_migration_id: migration.id, + created_at: Time.zone.now, + updated_at: Time.zone.now, + min_value: 1, + max_value: 1, + batch_size: 1, + sub_batch_size: 1, + status: 0, + attempts: 0, + metrics: {}, + pause_ms: 100 + ) end end diff --git a/spec/migrations/disable_job_token_scope_when_unused_spec.rb b/spec/migrations/disable_job_token_scope_when_unused_spec.rb deleted file mode 100644 index fddf3594e2b..00000000000 --- a/spec/migrations/disable_job_token_scope_when_unused_spec.rb +++ /dev/null @@ -1,10 +0,0 @@ -# frozen_string_literal: true - -require 'spec_helper' -require_migration! - -RSpec.describe DisableJobTokenScopeWhenUnused, feature_category: :continuous_integration do - it 'is a no-op' do - migrate! - end -end diff --git a/spec/migrations/drop_packages_events_table_spec.rb b/spec/migrations/drop_packages_events_table_spec.rb new file mode 100644 index 00000000000..539a3b88196 --- /dev/null +++ b/spec/migrations/drop_packages_events_table_spec.rb @@ -0,0 +1,24 @@ +# frozen_string_literal: true + +require "spec_helper" + +require_migration! 
+ +RSpec.describe DropPackagesEventsTable, feature_category: :package_registry do + let(:table) { described_class::SOURCE_TABLE } + let(:column) { described_class::COLUMN } + + subject { described_class.new } + + it 'drops and creates the packages_events table' do + reversible_migration do |migration| + migration.before -> do + expect(subject.table_exists?(:packages_events)).to eq(true) + end + + migration.after -> do + expect(subject.table_exists?(:packages_events)).to eq(false) + end + end + end +end diff --git a/spec/migrations/ensure_commit_user_mentions_note_id_bigint_backfill_is_finished_for_gitlab_dot_com_spec.rb b/spec/migrations/ensure_commit_user_mentions_note_id_bigint_backfill_is_finished_for_gitlab_dot_com_spec.rb new file mode 100644 index 00000000000..89e14650034 --- /dev/null +++ b/spec/migrations/ensure_commit_user_mentions_note_id_bigint_backfill_is_finished_for_gitlab_dot_com_spec.rb @@ -0,0 +1,35 @@ +# frozen_string_literal: true + +require 'spec_helper' +require_migration! + +RSpec.describe EnsureCommitUserMentionsNoteIdBigintBackfillIsFinishedForGitlabDotCom, feature_category: :database do + describe '#up' do + let(:migration_arguments) do + { + job_class_name: 'CopyColumnUsingBackgroundMigrationJob', + table_name: 'commit_user_mentions', + column_name: 'id', + job_arguments: [['note_id'], ['note_id_convert_to_bigint']] + } + end + + it 'ensures the migration is completed for GitLab.com, dev, or test' do + expect_next_instance_of(described_class) do |instance| + expect(instance).to receive(:com_or_dev_or_test_but_not_jh?).and_return(true) + expect(instance).to receive(:ensure_batched_background_migration_is_finished).with(migration_arguments) + end + + migrate! 
+ end + + it 'skips the check for other instances' do + expect_next_instance_of(described_class) do |instance| + expect(instance).to receive(:com_or_dev_or_test_but_not_jh?).and_return(false) + expect(instance).not_to receive(:ensure_batched_background_migration_is_finished) + end + + migrate! + end + end +end diff --git a/spec/migrations/ensure_design_user_mentions_note_id_bigint_backfill_is_finished_for_gitlab_dot_com_spec.rb b/spec/migrations/ensure_design_user_mentions_note_id_bigint_backfill_is_finished_for_gitlab_dot_com_spec.rb new file mode 100644 index 00000000000..ac763af1a70 --- /dev/null +++ b/spec/migrations/ensure_design_user_mentions_note_id_bigint_backfill_is_finished_for_gitlab_dot_com_spec.rb @@ -0,0 +1,35 @@ +# frozen_string_literal: true + +require 'spec_helper' +require_migration! + +RSpec.describe EnsureDesignUserMentionsNoteIdBigintBackfillIsFinishedForGitlabDotCom, feature_category: :database do + describe '#up' do + let(:migration_arguments) do + { + job_class_name: 'CopyColumnUsingBackgroundMigrationJob', + table_name: 'design_user_mentions', + column_name: 'id', + job_arguments: [['note_id'], ['note_id_convert_to_bigint']] + } + end + + it 'ensures the migration is completed for GitLab.com, dev, or test' do + expect_next_instance_of(described_class) do |instance| + expect(instance).to receive(:com_or_dev_or_test_but_not_jh?).and_return(true) + expect(instance).to receive(:ensure_batched_background_migration_is_finished).with(migration_arguments) + end + + migrate! + end + + it 'skips the check for other instances' do + expect_next_instance_of(described_class) do |instance| + expect(instance).to receive(:com_or_dev_or_test_but_not_jh?).and_return(false) + expect(instance).not_to receive(:ensure_batched_background_migration_is_finished) + end + + migrate! 
+ end + end +end diff --git a/spec/migrations/ensure_epic_user_mentions_bigint_backfill_is_finished_for_gitlab_dot_com_spec.rb b/spec/migrations/ensure_epic_user_mentions_bigint_backfill_is_finished_for_gitlab_dot_com_spec.rb new file mode 100644 index 00000000000..a6b2f751b3b --- /dev/null +++ b/spec/migrations/ensure_epic_user_mentions_bigint_backfill_is_finished_for_gitlab_dot_com_spec.rb @@ -0,0 +1,35 @@ +# frozen_string_literal: true + +require 'spec_helper' +require_migration! + +RSpec.describe EnsureEpicUserMentionsBigintBackfillIsFinishedForGitlabDotCom, feature_category: :database do + describe '#up' do + let(:migration_arguments) do + { + job_class_name: 'CopyColumnUsingBackgroundMigrationJob', + table_name: 'epic_user_mentions', + column_name: 'id', + job_arguments: [['note_id'], ['note_id_convert_to_bigint']] + } + end + + it 'ensures the migration is completed for GitLab.com, dev, or test' do + expect_next_instance_of(described_class) do |instance| + expect(instance).to receive(:com_or_dev_or_test_but_not_jh?).and_return(true) + expect(instance).to receive(:ensure_batched_background_migration_is_finished).with(migration_arguments) + end + + migrate! + end + + it 'skips the check for other instances' do + expect_next_instance_of(described_class) do |instance| + expect(instance).to receive(:com_or_dev_or_test_but_not_jh?).and_return(false) + expect(instance).not_to receive(:ensure_batched_background_migration_is_finished) + end + + migrate! + end + end +end diff --git a/spec/migrations/ensure_issue_user_mentions_bigint_backfill_is_finished_for_gitlab_dot_com_spec.rb b/spec/migrations/ensure_issue_user_mentions_bigint_backfill_is_finished_for_gitlab_dot_com_spec.rb new file mode 100644 index 00000000000..602dd87c593 --- /dev/null +++ b/spec/migrations/ensure_issue_user_mentions_bigint_backfill_is_finished_for_gitlab_dot_com_spec.rb @@ -0,0 +1,35 @@ +# frozen_string_literal: true + +require 'spec_helper' +require_migration! 
+ +RSpec.describe EnsureIssueUserMentionsBigintBackfillIsFinishedForGitlabDotCom, feature_category: :database do + describe '#up' do + let(:migration_arguments) do + { + job_class_name: 'CopyColumnUsingBackgroundMigrationJob', + table_name: 'issue_user_mentions', + column_name: 'id', + job_arguments: [['note_id'], ['note_id_convert_to_bigint']] + } + end + + it 'ensures the migration is completed for GitLab.com, dev, or test' do + expect_next_instance_of(described_class) do |instance| + expect(instance).to receive(:com_or_dev_or_test_but_not_jh?).and_return(true) + expect(instance).to receive(:ensure_batched_background_migration_is_finished).with(migration_arguments) + end + + migrate! + end + + it 'skips the check for other instances' do + expect_next_instance_of(described_class) do |instance| + expect(instance).to receive(:com_or_dev_or_test_but_not_jh?).and_return(false) + expect(instance).not_to receive(:ensure_batched_background_migration_is_finished) + end + + migrate! + end + end +end diff --git a/spec/migrations/ensure_mr_user_mentions_note_id_bigint_backfill_is_finished_for_gitlab_dot_com_spec.rb b/spec/migrations/ensure_mr_user_mentions_note_id_bigint_backfill_is_finished_for_gitlab_dot_com_spec.rb new file mode 100644 index 00000000000..af9fc3f3b07 --- /dev/null +++ b/spec/migrations/ensure_mr_user_mentions_note_id_bigint_backfill_is_finished_for_gitlab_dot_com_spec.rb @@ -0,0 +1,35 @@ +# frozen_string_literal: true + +require 'spec_helper' +require_migration! 
+ +RSpec.describe EnsureMrUserMentionsNoteIdBigintBackfillIsFinishedForGitlabDotCom, feature_category: :database do + describe '#up' do + let(:migration_arguments) do + { + job_class_name: 'CopyColumnUsingBackgroundMigrationJob', + table_name: 'merge_request_user_mentions', + column_name: 'id', + job_arguments: [['note_id'], ['note_id_convert_to_bigint']] + } + end + + it 'ensures the migration is completed for GitLab.com, dev, or test' do + expect_next_instance_of(described_class) do |instance| + expect(instance).to receive(:com_or_dev_or_test_but_not_jh?).and_return(true) + expect(instance).to receive(:ensure_batched_background_migration_is_finished).with(migration_arguments) + end + + migrate! + end + + it 'skips the check for other instances' do + expect_next_instance_of(described_class) do |instance| + expect(instance).to receive(:com_or_dev_or_test_but_not_jh?).and_return(false) + expect(instance).not_to receive(:ensure_batched_background_migration_is_finished) + end + + migrate! + end + end +end diff --git a/spec/migrations/ensure_note_diff_files_bigint_backfill_is_finished_for_gitlab_dot_com_spec.rb b/spec/migrations/ensure_note_diff_files_bigint_backfill_is_finished_for_gitlab_dot_com_spec.rb new file mode 100644 index 00000000000..acafc211e8c --- /dev/null +++ b/spec/migrations/ensure_note_diff_files_bigint_backfill_is_finished_for_gitlab_dot_com_spec.rb @@ -0,0 +1,35 @@ +# frozen_string_literal: true + +require 'spec_helper' +require_migration! 
+ +RSpec.describe EnsureNoteDiffFilesBigintBackfillIsFinishedForGitlabDotCom, feature_category: :database do + describe '#up' do + let(:migration_arguments) do + { + job_class_name: 'CopyColumnUsingBackgroundMigrationJob', + table_name: 'note_diff_files', + column_name: 'id', + job_arguments: [['diff_note_id'], ['diff_note_id_convert_to_bigint']] + } + end + + it 'ensures the migration is completed for GitLab.com, dev, or test' do + expect_next_instance_of(described_class) do |instance| + expect(instance).to receive(:com_or_dev_or_test_but_not_jh?).and_return(true) + expect(instance).to receive(:ensure_batched_background_migration_is_finished).with(migration_arguments) + end + + migrate! + end + + it 'skips the check for other instances' do + expect_next_instance_of(described_class) do |instance| + expect(instance).to receive(:com_or_dev_or_test_but_not_jh?).and_return(false) + expect(instance).not_to receive(:ensure_batched_background_migration_is_finished) + end + + migrate! + end + end +end diff --git a/spec/migrations/ensure_snippet_user_mentions_bigint_backfill_is_finished_for_gitlab_dot_com_spec.rb b/spec/migrations/ensure_snippet_user_mentions_bigint_backfill_is_finished_for_gitlab_dot_com_spec.rb new file mode 100644 index 00000000000..b942a9a67a3 --- /dev/null +++ b/spec/migrations/ensure_snippet_user_mentions_bigint_backfill_is_finished_for_gitlab_dot_com_spec.rb @@ -0,0 +1,35 @@ +# frozen_string_literal: true + +require 'spec_helper' +require_migration! 
+ +RSpec.describe EnsureSnippetUserMentionsBigintBackfillIsFinishedForGitlabDotCom, feature_category: :database do + describe '#up' do + let(:migration_arguments) do + { + job_class_name: 'CopyColumnUsingBackgroundMigrationJob', + table_name: 'snippet_user_mentions', + column_name: 'id', + job_arguments: [['note_id'], ['note_id_convert_to_bigint']] + } + end + + it 'ensures the migration is completed for GitLab.com, dev, or test' do + expect_next_instance_of(described_class) do |instance| + expect(instance).to receive(:com_or_dev_or_test_but_not_jh?).and_return(true) + expect(instance).to receive(:ensure_batched_background_migration_is_finished).with(migration_arguments) + end + + migrate! + end + + it 'skips the check for other instances' do + expect_next_instance_of(described_class) do |instance| + expect(instance).to receive(:com_or_dev_or_test_but_not_jh?).and_return(false) + expect(instance).not_to receive(:ensure_batched_background_migration_is_finished) + end + + migrate! + end + end +end diff --git a/spec/migrations/ensure_suggestions_note_id_bigint_backfill_is_finished_for_gitlab_dot_com_spec.rb b/spec/migrations/ensure_suggestions_note_id_bigint_backfill_is_finished_for_gitlab_dot_com_spec.rb new file mode 100644 index 00000000000..f8dd700b160 --- /dev/null +++ b/spec/migrations/ensure_suggestions_note_id_bigint_backfill_is_finished_for_gitlab_dot_com_spec.rb @@ -0,0 +1,35 @@ +# frozen_string_literal: true + +require 'spec_helper' +require_migration! 
+ +RSpec.describe EnsureSuggestionsNoteIdBigintBackfillIsFinishedForGitlabDotCom, feature_category: :database do + describe '#up' do + let(:migration_arguments) do + { + job_class_name: 'CopyColumnUsingBackgroundMigrationJob', + table_name: 'suggestions', + column_name: 'id', + job_arguments: [['note_id'], ['note_id_convert_to_bigint']] + } + end + + it 'ensures the migration is completed for GitLab.com, dev, or test' do + expect_next_instance_of(described_class) do |instance| + expect(instance).to receive(:com_or_dev_or_test_but_not_jh?).and_return(true) + expect(instance).to receive(:ensure_batched_background_migration_is_finished).with(migration_arguments) + end + + migrate! + end + + it 'skips the check for other instances' do + expect_next_instance_of(described_class) do |instance| + expect(instance).to receive(:com_or_dev_or_test_but_not_jh?).and_return(false) + expect(instance).not_to receive(:ensure_batched_background_migration_is_finished) + end + + migrate! + end + end +end diff --git a/spec/migrations/ensure_system_note_metadata_bigint_backfill_is_finished_for_gitlab_dot_com_spec.rb b/spec/migrations/ensure_system_note_metadata_bigint_backfill_is_finished_for_gitlab_dot_com_spec.rb new file mode 100644 index 00000000000..11e087b63e2 --- /dev/null +++ b/spec/migrations/ensure_system_note_metadata_bigint_backfill_is_finished_for_gitlab_dot_com_spec.rb @@ -0,0 +1,35 @@ +# frozen_string_literal: true + +require 'spec_helper' +require_migration! 
+ +RSpec.describe EnsureSystemNoteMetadataBigintBackfillIsFinishedForGitlabDotCom, feature_category: :database do + describe '#up' do + let(:migration_arguments) do + { + job_class_name: 'CopyColumnUsingBackgroundMigrationJob', + table_name: 'system_note_metadata', + column_name: 'id', + job_arguments: [['note_id'], ['note_id_convert_to_bigint']] + } + end + + it 'ensures the migration is completed for GitLab.com, dev, or test' do + expect_next_instance_of(described_class) do |instance| + expect(instance).to receive(:com_or_dev_or_test_but_not_jh?).and_return(true) + expect(instance).to receive(:ensure_batched_background_migration_is_finished).with(migration_arguments) + end + + migrate! + end + + it 'skips the check for other instances' do + expect_next_instance_of(described_class) do |instance| + expect(instance).to receive(:com_or_dev_or_test_but_not_jh?).and_return(false) + expect(instance).not_to receive(:ensure_batched_background_migration_is_finished) + end + + migrate! + end + end +end diff --git a/spec/migrations/ensure_timelogs_note_id_bigint_backfill_is_finished_for_gitlab_dot_com_spec.rb b/spec/migrations/ensure_timelogs_note_id_bigint_backfill_is_finished_for_gitlab_dot_com_spec.rb index 9066413ce68..9f733f1e1f4 100644 --- a/spec/migrations/ensure_timelogs_note_id_bigint_backfill_is_finished_for_gitlab_dot_com_spec.rb +++ b/spec/migrations/ensure_timelogs_note_id_bigint_backfill_is_finished_for_gitlab_dot_com_spec.rb @@ -5,8 +5,6 @@ require_migration! 
RSpec.describe EnsureTimelogsNoteIdBigintBackfillIsFinishedForGitlabDotCom, feature_category: :database do describe '#up' do - using RSpec::Parameterized::TableSyntax - let(:migration_arguments) do { job_class_name: 'CopyColumnUsingBackgroundMigrationJob', diff --git a/spec/migrations/ensure_unique_debian_packages_spec.rb b/spec/migrations/ensure_unique_debian_packages_spec.rb new file mode 100644 index 00000000000..eaa87ebd45e --- /dev/null +++ b/spec/migrations/ensure_unique_debian_packages_spec.rb @@ -0,0 +1,56 @@ +# frozen_string_literal: true + +require 'spec_helper' + +require_migration! +require_migration! 'add_unique_packages_index_when_debian' +require_migration! 'add_tmp_unique_packages_index_when_debian' + +RSpec.describe EnsureUniqueDebianPackages, feature_category: :package_registry do + let(:namespaces) { table(:namespaces) } + let(:projects) { table(:projects) } + let(:packages) { table(:packages_packages) } + + let!(:group) { namespaces.create!(name: 'group', path: 'group_path') } + let!(:project_namespace1) { namespaces.create!(name: 'name1', path: 'path1') } + let!(:project_namespace2) { namespaces.create!(name: 'name2', path: 'path2') } + + let!(:project1) { projects.create!(namespace_id: group.id, project_namespace_id: project_namespace1.id) } + let!(:project2) { projects.create!(namespace_id: group.id, project_namespace_id: project_namespace2.id) } + + let!(:debian_package1_1) do + packages.create!(project_id: project1.id, package_type: 9, name: FFaker::Lorem.word, version: 'v1.0') + end + + let(:debian_package1_2) do + packages.create!(project_id: project1.id, package_type: 9, name: debian_package1_1.name, + version: debian_package1_1.version) + end + + let!(:pypi_package1_3) do + packages.create!(project_id: project1.id, package_type: 5, name: debian_package1_1.name, + version: debian_package1_1.version) + end + + let!(:debian_package2_1) do + packages.create!(project_id: project2.id, package_type: 9, name: debian_package1_1.name, + version: 
debian_package1_1.version) + end + + before do + # Remove unique indices + AddUniquePackagesIndexWhenDebian.new.down + AddTmpUniquePackagesIndexWhenDebian.new.down + # Then create the duplicate packages + debian_package1_2 + end + + it 'marks as pending destruction the duplicated packages', :aggregate_failures do + expect { migrate! } + .to change { packages.where(status: 0).count }.from(4).to(3) + .and not_change { packages.where(status: 1).count } + .and not_change { packages.where(status: 2).count } + .and not_change { packages.where(status: 3).count } + .and change { packages.where(status: 4).count }.from(0).to(1) + end +end diff --git a/spec/migrations/ensure_vum_bigint_backfill_is_finished_for_gl_dot_com_spec.rb b/spec/migrations/ensure_vum_bigint_backfill_is_finished_for_gl_dot_com_spec.rb new file mode 100644 index 00000000000..d582a8a9460 --- /dev/null +++ b/spec/migrations/ensure_vum_bigint_backfill_is_finished_for_gl_dot_com_spec.rb @@ -0,0 +1,35 @@ +# frozen_string_literal: true + +require 'spec_helper' +require_migration! + +RSpec.describe EnsureVumBigintBackfillIsFinishedForGlDotCom, feature_category: :database do + describe '#up' do + let(:migration_arguments) do + { + job_class_name: 'CopyColumnUsingBackgroundMigrationJob', + table_name: 'vulnerability_user_mentions', + column_name: 'id', + job_arguments: [['note_id'], ['note_id_convert_to_bigint']] + } + end + + it 'ensures the migration is completed for GitLab.com, dev, or test' do + expect_next_instance_of(described_class) do |instance| + expect(instance).to receive(:com_or_dev_or_test_but_not_jh?).and_return(true) + expect(instance).to receive(:ensure_batched_background_migration_is_finished).with(migration_arguments) + end + + migrate! 
+ end + + it 'skips the check for other instances' do + expect_next_instance_of(described_class) do |instance| + expect(instance).to receive(:com_or_dev_or_test_but_not_jh?).and_return(false) + expect(instance).not_to receive(:ensure_batched_background_migration_is_finished) + end + + migrate! + end + end +end diff --git a/spec/migrations/finalize_invalid_member_cleanup_spec.rb b/spec/migrations/finalize_invalid_member_cleanup_spec.rb index 29d03f8983c..c039edcc319 100644 --- a/spec/migrations/finalize_invalid_member_cleanup_spec.rb +++ b/spec/migrations/finalize_invalid_member_cleanup_spec.rb @@ -18,6 +18,10 @@ RSpec.describe FinalizeInvalidMemberCleanup, :migration, feature_category: :subg end context 'when migration is missing' do + before do + batched_migrations.where(job_class_name: migration).delete_all + end + it 'warns migration not found' do expect(Gitlab::AppLogger) .to receive(:warn).with(/Could not find batched background migration for the given configuration:/) diff --git a/spec/migrations/finalize_issues_iid_scoping_to_namespace_spec.rb b/spec/migrations/finalize_issues_iid_scoping_to_namespace_spec.rb new file mode 100644 index 00000000000..1834e8c6e0e --- /dev/null +++ b/spec/migrations/finalize_issues_iid_scoping_to_namespace_spec.rb @@ -0,0 +1,72 @@ +# frozen_string_literal: true + +require 'spec_helper' +require_migration! 
+ +RSpec.describe FinalizeIssuesIidScopingToNamespace, :migration, feature_category: :team_planning do + let(:batched_migrations) { table(:batched_background_migrations) } + + let!(:migration) { described_class::MIGRATION } + + describe '#up' do + shared_examples 'finalizes the migration' do + it 'finalizes the migration' do + allow_next_instance_of(Gitlab::Database::BackgroundMigration::BatchedMigrationRunner) do |runner| + expect(runner).to receive(:finalize).with('"IssuesInternalIdScopeUpdater"', :internal_ids, :id, [nil, "up"]) + end + end + end + + context 'when migration is missing' do + it 'warns migration not found' do + expect(Gitlab::AppLogger) + .to receive(:warn).with(/Could not find batched background migration for the given configuration:/) + + migrate! + end + end + + context 'with migration present' do + let!(:migration) do + batched_migrations.create!( + job_class_name: 'IssuesInternalIdScopeUpdater', + table_name: :internal_ids, + column_name: :id, + job_arguments: [nil, 'up'], + interval: 2.minutes, + min_value: 1, + max_value: 2, + batch_size: 1000, + sub_batch_size: 200, + gitlab_schema: :gitlab_main, + status: 3 # finished + ) + end + + context 'when migration finished successfully' do + it 'does not raise exception' do + expect { migrate! 
}.not_to raise_error + end + end + + context 'with different migration statuses' do + using RSpec::Parameterized::TableSyntax + + where(:status, :description) do + 0 | 'paused' + 1 | 'active' + 4 | 'failed' + 5 | 'finalizing' + end + + with_them do + before do + migration.update!(status: status) + end + + it_behaves_like 'finalizes the migration' + end + end + end + end +end diff --git a/spec/migrations/finalize_issues_namespace_id_backfilling_spec.rb b/spec/migrations/finalize_issues_namespace_id_backfilling_spec.rb index d0c25fb3dd6..0800a049767 100644 --- a/spec/migrations/finalize_issues_namespace_id_backfilling_spec.rb +++ b/spec/migrations/finalize_issues_namespace_id_backfilling_spec.rb @@ -12,12 +12,16 @@ RSpec.describe FinalizeIssuesNamespaceIdBackfilling, :migration, feature_categor shared_examples 'finalizes the migration' do it 'finalizes the migration' do allow_next_instance_of(Gitlab::Database::BackgroundMigration::BatchedMigrationRunner) do |runner| - expect(runner).to receive(:finalize).with('BackfillProjectNamespaceOnIssues', :projects, :id, []) + expect(runner).to receive(:finalize).with(migration, :projects, :id, []) end end end context 'when routes backfilling migration is missing' do + before do + batched_migrations.where(job_class_name: migration).delete_all + end + it 'warns migration not found' do expect(Gitlab::AppLogger) .to receive(:warn).with(/Could not find batched background migration for the given configuration:/) @@ -29,7 +33,7 @@ RSpec.describe FinalizeIssuesNamespaceIdBackfilling, :migration, feature_categor context 'with backfilling migration present' do let!(:project_namespace_backfill) do batched_migrations.create!( - job_class_name: 'BackfillProjectNamespaceOnIssues', + job_class_name: migration, table_name: :routes, column_name: :id, job_arguments: [], diff --git a/spec/migrations/finalize_orphaned_routes_cleanup_spec.rb b/spec/migrations/finalize_orphaned_routes_cleanup_spec.rb index 78546806039..215fdbb05ad 100644 --- 
a/spec/migrations/finalize_orphaned_routes_cleanup_spec.rb +++ b/spec/migrations/finalize_orphaned_routes_cleanup_spec.rb @@ -12,12 +12,16 @@ RSpec.describe FinalizeOrphanedRoutesCleanup, :migration, feature_category: :pro shared_examples 'finalizes the migration' do it 'finalizes the migration' do allow_next_instance_of(Gitlab::Database::BackgroundMigration::BatchedMigrationRunner) do |runner| - expect(runner).to receive(:finalize).with('CleanupOrphanedRoutes', :projects, :id, []) + expect(runner).to receive(:finalize).with(migration, :projects, :id, []) end end end context 'when migration is missing' do + before do + batched_migrations.where(job_class_name: migration).delete_all + end + it 'warns migration not found' do expect(Gitlab::AppLogger) .to receive(:warn).with(/Could not find batched background migration for the given configuration:/) @@ -29,7 +33,7 @@ RSpec.describe FinalizeOrphanedRoutesCleanup, :migration, feature_category: :pro context 'with migration present' do let!(:project_namespace_backfill) do batched_migrations.create!( - job_class_name: 'CleanupOrphanedRoutes', + job_class_name: migration, table_name: :routes, column_name: :id, job_arguments: [], diff --git a/spec/migrations/finalize_project_namespaces_backfill_spec.rb b/spec/migrations/finalize_project_namespaces_backfill_spec.rb index 6cc3a694de8..880bb6661a4 100644 --- a/spec/migrations/finalize_project_namespaces_backfill_spec.rb +++ b/spec/migrations/finalize_project_namespaces_backfill_spec.rb @@ -12,12 +12,16 @@ RSpec.describe FinalizeProjectNamespacesBackfill, :migration, feature_category: shared_examples 'finalizes the migration' do it 'finalizes the migration' do allow_next_instance_of(Gitlab::Database::BackgroundMigration::BatchedMigrationRunner) do |runner| - expect(runner).to receive(:finalize).with('"ProjectNamespaces::BackfillProjectNamespaces"', :projects, :id, [nil, "up"]) + expect(runner).to receive(:finalize).with(migration, :projects, :id, [nil, "up"]) end end end context 
'when project namespace backfilling migration is missing' do + before do + batched_migrations.where(job_class_name: migration).delete_all + end + it 'warns migration not found' do expect(Gitlab::AppLogger) .to receive(:warn).with(/Could not find batched background migration for the given configuration:/) @@ -29,7 +33,7 @@ RSpec.describe FinalizeProjectNamespacesBackfill, :migration, feature_category: context 'with backfilling migration present' do let!(:project_namespace_backfill) do batched_migrations.create!( - job_class_name: 'ProjectNamespaces::BackfillProjectNamespaces', + job_class_name: migration, table_name: :projects, column_name: :id, job_arguments: [nil, 'up'], diff --git a/spec/migrations/finalize_routes_backfilling_for_projects_spec.rb b/spec/migrations/finalize_routes_backfilling_for_projects_spec.rb index b79fdc98425..7618957d2f7 100644 --- a/spec/migrations/finalize_routes_backfilling_for_projects_spec.rb +++ b/spec/migrations/finalize_routes_backfilling_for_projects_spec.rb @@ -12,12 +12,16 @@ RSpec.describe FinalizeRoutesBackfillingForProjects, :migration, feature_categor shared_examples 'finalizes the migration' do it 'finalizes the migration' do allow_next_instance_of(Gitlab::Database::BackgroundMigration::BatchedMigrationRunner) do |runner| - expect(runner).to receive(:finalize).with('BackfillNamespaceIdForProjectRoute', :projects, :id, []) + expect(runner).to receive(:finalize).with(migration, :projects, :id, []) end end end context 'when routes backfilling migration is missing' do + before do + batched_migrations.where(job_class_name: migration).delete_all + end + it 'warns migration not found' do expect(Gitlab::AppLogger) .to receive(:warn).with(/Could not find batched background migration for the given configuration:/) @@ -29,7 +33,7 @@ RSpec.describe FinalizeRoutesBackfillingForProjects, :migration, feature_categor context 'with backfilling migration present' do let!(:project_namespace_backfill) do batched_migrations.create!( - 
job_class_name: 'BackfillNamespaceIdForProjectRoute', + job_class_name: migration, table_name: :routes, column_name: :id, job_arguments: [], diff --git a/spec/migrations/finalize_traversal_ids_background_migrations_spec.rb b/spec/migrations/finalize_traversal_ids_background_migrations_spec.rb deleted file mode 100644 index 0cebe7b9f91..00000000000 --- a/spec/migrations/finalize_traversal_ids_background_migrations_spec.rb +++ /dev/null @@ -1,60 +0,0 @@ -# frozen_string_literal: true - -require 'spec_helper' -require_migration!('finalize_traversal_ids_background_migrations') - -RSpec.describe FinalizeTraversalIdsBackgroundMigrations, :migration, feature_category: :database do - shared_context 'incomplete background migration' do - before do - # Jobs enqueued in Sidekiq. - Sidekiq::Testing.disable! do - BackgroundMigrationWorker.perform_in(10, job_class_name, [1, 2, 100]) - BackgroundMigrationWorker.perform_in(20, job_class_name, [3, 4, 100]) - end - - # Jobs tracked in the database. - # table(:background_migration_jobs).create!( - Gitlab::Database::BackgroundMigrationJob.create!( - class_name: job_class_name, - arguments: [5, 6, 100], - status: Gitlab::Database::BackgroundMigrationJob.statuses['pending'] - ) - # table(:background_migration_jobs).create!( - Gitlab::Database::BackgroundMigrationJob.create!( - class_name: job_class_name, - arguments: [7, 8, 100], - status: Gitlab::Database::BackgroundMigrationJob.statuses['succeeded'] - ) - end - end - - context 'BackfillNamespaceTraversalIdsRoots background migration' do - let(:job_class_name) { 'BackfillNamespaceTraversalIdsRoots' } - - include_context 'incomplete background migration' - - before do - migrate! 
- end - - it_behaves_like( - 'finalized tracked background migration', - Gitlab::BackgroundMigration::BackfillNamespaceTraversalIdsRoots - ) - end - - context 'BackfillNamespaceTraversalIdsChildren background migration' do - let(:job_class_name) { 'BackfillNamespaceTraversalIdsChildren' } - - include_context 'incomplete background migration' - - before do - migrate! - end - - it_behaves_like( - 'finalized tracked background migration', - Gitlab::BackgroundMigration::BackfillNamespaceTraversalIdsChildren - ) - end -end diff --git a/spec/migrations/insert_daily_invites_trial_plan_limits_spec.rb b/spec/migrations/insert_daily_invites_trial_plan_limits_spec.rb new file mode 100644 index 00000000000..ea1476b94a9 --- /dev/null +++ b/spec/migrations/insert_daily_invites_trial_plan_limits_spec.rb @@ -0,0 +1,51 @@ +# frozen_string_literal: true + +require 'spec_helper' +require_migration! + +RSpec.describe InsertDailyInvitesTrialPlanLimits, feature_category: :subgroups do + let(:plans) { table(:plans) } + let(:plan_limits) { table(:plan_limits) } + let!(:premium_trial_plan) { plans.create!(name: 'premium_trial') } + let!(:ultimate_trial_plan) { plans.create!(name: 'ultimate_trial') } + + context 'when on gitlab.com' do + before do + allow(Gitlab).to receive(:com?).and_return(true) + end + + it 'correctly migrates up and down' do + reversible_migration do |migration| + migration.before -> { + trial_plan_ids = [premium_trial_plan.id, ultimate_trial_plan.id] + expect(plan_limits.where(plan_id: trial_plan_ids).where.not(daily_invites: 0)).to be_empty + } + + migration.after -> { + expect(plan_limits.pluck(:plan_id, :daily_invites)) + .to contain_exactly([premium_trial_plan.id, 50], [ultimate_trial_plan.id, 50]) + } + end + end + end + + context 'when on self-managed' do + before do + allow(Gitlab).to receive(:com?).and_return(false) + end + + it 'correctly migrates up and down' do + reversible_migration do |migration| + trial_plan_ids = [premium_trial_plan.id, 
ultimate_trial_plan.id] + + migration.before -> { + expect(plan_limits.where(plan_id: trial_plan_ids).where.not(daily_invites: 0)).to be_empty + } + + migration.after -> { + expect(plan_limits.where(plan_id: trial_plan_ids).where.not(daily_invites: 0)).to be_empty + } + end + end + end +end diff --git a/spec/migrations/populate_audit_event_streaming_verification_token_spec.rb b/spec/migrations/populate_audit_event_streaming_verification_token_spec.rb deleted file mode 100644 index e2c117903d4..00000000000 --- a/spec/migrations/populate_audit_event_streaming_verification_token_spec.rb +++ /dev/null @@ -1,22 +0,0 @@ -# frozen_string_literal: true - -require 'spec_helper' -require_migration! - -RSpec.describe PopulateAuditEventStreamingVerificationToken, feature_category: :audit_events do - let(:groups) { table(:namespaces) } - let(:destinations) { table(:audit_events_external_audit_event_destinations) } - let(:migration) { described_class.new } - - let!(:group) { groups.create!(name: 'test-group', path: 'test-group') } - let!(:destination) { destinations.create!(namespace_id: group.id, destination_url: 'https://example.com/destination', verification_token: nil) } - - describe '#up' do - it 'adds verification tokens to records created before the migration' do - expect do - migrate! - destination.reload - end.to change { destination.verification_token }.from(nil).to(a_string_matching(/\w{24}/)) - end - end -end diff --git a/spec/migrations/queue_backfill_admin_mode_scope_for_personal_access_tokens_spec.rb b/spec/migrations/queue_backfill_admin_mode_scope_for_personal_access_tokens_spec.rb deleted file mode 100644 index 068da23113d..00000000000 --- a/spec/migrations/queue_backfill_admin_mode_scope_for_personal_access_tokens_spec.rb +++ /dev/null @@ -1,18 +0,0 @@ -# frozen_string_literal: true - -require 'spec_helper' -require_migration! 
- -RSpec.describe QueueBackfillAdminModeScopeForPersonalAccessTokens, - feature_category: :system_access do - describe '#up' do - it 'schedules background migration' do - migrate! - - expect(described_class::MIGRATION).to have_scheduled_batched_migration( - table_name: :personal_access_tokens, - column_name: :id, - interval: described_class::DELAY_INTERVAL) - end - end -end diff --git a/spec/migrations/recreate_index_security_ci_builds_on_name_and_id_parser_features_spec.rb b/spec/migrations/recreate_index_security_ci_builds_on_name_and_id_parser_features_spec.rb deleted file mode 100644 index 80ecc23dfbe..00000000000 --- a/spec/migrations/recreate_index_security_ci_builds_on_name_and_id_parser_features_spec.rb +++ /dev/null @@ -1,28 +0,0 @@ -# frozen_string_literal: true - -require 'spec_helper' - -require_migration! - -RSpec.describe RecreateIndexSecurityCiBuildsOnNameAndIdParserFeatures, :migration, feature_category: :database do - let(:db) { described_class.new } - let(:pg_class) { table(:pg_class) } - let(:pg_index) { table(:pg_index) } - let(:async_indexes) { table(:postgres_async_indexes) } - - it "recreates index" do - reversible_migration do |migration| - migration.before -> { - expect(async_indexes.where(name: described_class::OLD_INDEX_NAME).exists?).to be false - expect(db.index_exists?(described_class::TABLE, described_class::COLUMNS, name: described_class::OLD_INDEX_NAME)).to be true - expect(db.index_exists?(described_class::TABLE, described_class::COLUMNS, name: described_class::NEW_INDEX_NAME)).to be false - } - - migration.after -> { - expect(async_indexes.where(name: described_class::OLD_INDEX_NAME).exists?).to be true - expect(db.index_exists?(described_class::TABLE, described_class::COLUMNS, name: described_class::OLD_INDEX_NAME)).to be false - expect(db.index_exists?(described_class::TABLE, described_class::COLUMNS, name: described_class::NEW_INDEX_NAME)).to be true - } - end - end -end diff --git 
a/spec/migrations/recreate_index_security_ci_builds_on_name_and_id_parser_with_new_features_spec.rb b/spec/migrations/recreate_index_security_ci_builds_on_name_and_id_parser_with_new_features_spec.rb deleted file mode 100644 index c7709764727..00000000000 --- a/spec/migrations/recreate_index_security_ci_builds_on_name_and_id_parser_with_new_features_spec.rb +++ /dev/null @@ -1,28 +0,0 @@ -# frozen_string_literal: true - -require 'spec_helper' - -require_migration! - -RSpec.describe RecreateIndexSecurityCiBuildsOnNameAndIdParserWithNewFeatures, :migration, feature_category: :continuous_integration do - let(:db) { described_class.new } - let(:pg_class) { table(:pg_class) } - let(:pg_index) { table(:pg_index) } - let(:async_indexes) { table(:postgres_async_indexes) } - - it 'recreates index' do - reversible_migration do |migration| - migration.before -> { - expect(async_indexes.where(name: described_class::OLD_INDEX_NAME).exists?).to be false - expect(db.index_exists?(described_class::TABLE, described_class::COLUMNS, name: described_class::OLD_INDEX_NAME)).to be true - expect(db.index_exists?(described_class::TABLE, described_class::COLUMNS, name: described_class::NEW_INDEX_NAME)).to be false - } - - migration.after -> { - expect(async_indexes.where(name: described_class::OLD_INDEX_NAME).exists?).to be true - expect(db.index_exists?(described_class::TABLE, described_class::COLUMNS, name: described_class::OLD_INDEX_NAME)).to be false - expect(db.index_exists?(described_class::TABLE, described_class::COLUMNS, name: described_class::NEW_INDEX_NAME)).to be true - } - end - end -end diff --git a/spec/migrations/remove_invalid_deploy_access_level_spec.rb b/spec/migrations/remove_invalid_deploy_access_level_spec.rb deleted file mode 100644 index cc0f5679dda..00000000000 --- a/spec/migrations/remove_invalid_deploy_access_level_spec.rb +++ /dev/null @@ -1,48 +0,0 @@ -# frozen_string_literal: true - -require 'spec_helper' - -require_migration! 
- -RSpec.describe RemoveInvalidDeployAccessLevel, :migration, feature_category: :continuous_integration do - let(:users) { table(:users) } - let(:groups) { table(:namespaces) } - let(:protected_environments) { table(:protected_environments) } - let(:deploy_access_levels) { table(:protected_environment_deploy_access_levels) } - - let(:user) { users.create!(email: 'email@email.com', name: 'foo', username: 'foo', projects_limit: 0) } - let(:group) { groups.create!(name: 'test-group', path: 'test-group') } - let(:pe) do - protected_environments.create!(name: 'test-pe', group_id: group.id) - end - - let!(:invalid_access_level) do - deploy_access_levels.create!( - access_level: 40, - user_id: user.id, - group_id: group.id, - protected_environment_id: pe.id) - end - - let!(:group_access_level) do - deploy_access_levels.create!( - group_id: group.id, - protected_environment_id: pe.id) - end - - let!(:user_access_level) do - deploy_access_levels.create!( - user_id: user.id, - protected_environment_id: pe.id) - end - - it 'removes invalid access_level entries' do - expect { migrate! }.to change { - deploy_access_levels.where( - protected_environment_id: pe.id, - access_level: nil).count - }.from(2).to(3) - - expect(invalid_access_level.reload.access_level).to be_nil - end -end diff --git a/spec/migrations/remove_not_null_contraint_on_title_from_sprints_spec.rb b/spec/migrations/remove_not_null_contraint_on_title_from_sprints_spec.rb deleted file mode 100644 index 91687d8d730..00000000000 --- a/spec/migrations/remove_not_null_contraint_on_title_from_sprints_spec.rb +++ /dev/null @@ -1,29 +0,0 @@ -# frozen_string_literal: true - -require 'spec_helper' - -require_migration! 
- -RSpec.describe RemoveNotNullContraintOnTitleFromSprints, :migration, feature_category: :team_planning do - let(:migration) { described_class.new } - let(:namespaces) { table(:namespaces) } - let(:sprints) { table(:sprints) } - let(:iterations_cadences) { table(:iterations_cadences) } - - let!(:group) { namespaces.create!(name: 'foo', path: 'foo') } - let!(:cadence) { iterations_cadences.create!(group_id: group.id, title: "cadence 1") } - let!(:iteration1) { sprints.create!(id: 1, title: 'a', group_id: group.id, iterations_cadence_id: cadence.id, start_date: Date.new(2021, 11, 1), due_date: Date.new(2021, 11, 5), iid: 1) } - - describe '#down' do - it "removes null titles by setting them with ids" do - migration.up - - iteration2 = sprints.create!(id: 2, title: nil, group_id: group.id, iterations_cadence_id: cadence.id, start_date: Date.new(2021, 12, 1), due_date: Date.new(2021, 12, 5), iid: 2) - - migration.down - - expect(iteration1.reload.title).to eq 'a' - expect(iteration2.reload.title).to eq '2' - end - end -end diff --git a/spec/migrations/remove_packages_events_package_id_fk_spec.rb b/spec/migrations/remove_packages_events_package_id_fk_spec.rb new file mode 100644 index 00000000000..13e73de88bd --- /dev/null +++ b/spec/migrations/remove_packages_events_package_id_fk_spec.rb @@ -0,0 +1,23 @@ +# frozen_string_literal: true + +require "spec_helper" + +require_migration! 
+ +RSpec.describe RemovePackagesEventsPackageIdFk, feature_category: :package_registry do + let(:table) { described_class::SOURCE_TABLE } + let(:column) { described_class::COLUMN } + let(:foreign_key) { -> { described_class.new.foreign_keys_for(table, column).first } } + + it 'drops and creates the foreign key' do + reversible_migration do |migration| + migration.before -> do + expect(foreign_key.call).to have_attributes(column: column.to_s) + end + + migration.after -> do + expect(foreign_key.call).to be(nil) + end + end + end +end diff --git a/spec/migrations/remove_saml_provider_and_identities_non_root_group_spec.rb b/spec/migrations/remove_saml_provider_and_identities_non_root_group_spec.rb new file mode 100644 index 00000000000..07873d0ce79 --- /dev/null +++ b/spec/migrations/remove_saml_provider_and_identities_non_root_group_spec.rb @@ -0,0 +1,53 @@ +# frozen_string_literal: true + +require 'spec_helper' +require_migration! + +RSpec.describe RemoveSamlProviderAndIdentitiesNonRootGroup, feature_category: :system_access do + let(:namespaces) { table(:namespaces) } + let(:saml_providers) { table(:saml_providers) } + let(:identities) { table(:identities) } + let(:root_group) do + namespaces.create!(name: 'root_group', path: 'foo', parent_id: nil, type: 'Group') + end + + let(:non_root_group) do + namespaces.create!(name: 'non_root_group', path: 'non_root', parent_id: root_group.id, type: 'Group') + end + + it 'removes saml_providers that belong to non-root group and related identities' do + provider_root_group = saml_providers.create!( + group_id: root_group.id, + sso_url: 'https://saml.example.com/adfs/ls', + certificate_fingerprint: '55:44:33:22:11:aa:bb:cc:dd:ee:ff:11:22:33:44:55:66:77:88:99', + default_membership_role: ::Gitlab::Access::GUEST, + enabled: true + ) + + identity_root_group = identities.create!( + saml_provider_id: provider_root_group.id, + extern_uid: "12345" + ) + + provider_non_root_group = saml_providers.create!( + group_id: 
non_root_group.id, + sso_url: 'https://saml.example.com/adfs/ls', + certificate_fingerprint: '55:44:33:22:11:aa:bb:cc:dd:ee:ff:11:22:33:44:55:66:77:88:99', + default_membership_role: ::Gitlab::Access::GUEST, + enabled: true + ) + + identity_non_root_group = identities.create!( + saml_provider_id: provider_non_root_group.id, + extern_uid: "12345" + ) + + expect { migrate! }.to change { saml_providers.count }.from(2).to(1) + + expect(identities.find_by_id(identity_non_root_group.id)).to be_nil + expect(saml_providers.find_by_id(provider_non_root_group.id)).to be_nil + + expect(identities.find_by_id(identity_root_group.id)).not_to be_nil + expect(saml_providers.find_by_id(provider_root_group.id)).not_to be_nil + end +end diff --git a/spec/migrations/remove_schedule_and_status_from_pending_alert_escalations_spec.rb b/spec/migrations/remove_schedule_and_status_from_pending_alert_escalations_spec.rb deleted file mode 100644 index 86e161cea43..00000000000 --- a/spec/migrations/remove_schedule_and_status_from_pending_alert_escalations_spec.rb +++ /dev/null @@ -1,37 +0,0 @@ -# frozen_string_literal: true - -require 'spec_helper' -require_migration! 
- -RSpec.describe RemoveScheduleAndStatusFromPendingAlertEscalations, feature_category: :incident_management do - let(:escalations) { table(:incident_management_pending_alert_escalations) } - let(:schedule_index) { 'index_incident_management_pending_alert_escalations_on_schedule' } - let(:schedule_foreign_key) { 'fk_rails_fcbfd9338b' } - - it 'correctly migrates up and down' do - reversible_migration do |migration| - migration.before -> { - expect(escalations.column_names).to include('schedule_id', 'status') - expect(escalations_indexes).to include(schedule_index) - expect(escalations_constraints).to include(schedule_foreign_key) - } - - migration.after -> { - escalations.reset_column_information - expect(escalations.column_names).not_to include('schedule_id', 'status') - expect(escalations_indexes).not_to include(schedule_index) - expect(escalations_constraints).not_to include(schedule_foreign_key) - } - end - end - - private - - def escalations_indexes - ActiveRecord::Base.connection.indexes(:incident_management_pending_alert_escalations).collect(&:name) - end - - def escalations_constraints - ActiveRecord::Base.connection.foreign_keys(:incident_management_pending_alert_escalations).collect(&:name) - end -end diff --git a/spec/migrations/remove_scim_token_and_scim_identity_non_root_group_spec.rb b/spec/migrations/remove_scim_token_and_scim_identity_non_root_group_spec.rb new file mode 100644 index 00000000000..31915365c91 --- /dev/null +++ b/spec/migrations/remove_scim_token_and_scim_identity_non_root_group_spec.rb @@ -0,0 +1,58 @@ +# frozen_string_literal: true + +require 'spec_helper' +require_migration! 
+ +RSpec.describe RemoveScimTokenAndScimIdentityNonRootGroup, feature_category: :system_access do + let(:namespaces) { table(:namespaces) } + let(:scim_oauth_access_tokens) { table(:scim_oauth_access_tokens) } + let(:scim_identities) { table(:scim_identities) } + let(:users) { table(:users) } + let(:root_group) do + namespaces.create!(name: 'root_group', path: 'foo', parent_id: nil, type: 'Group') + end + + let(:non_root_group) do + namespaces.create!(name: 'non_root_group', path: 'non_root', parent_id: root_group.id, type: 'Group') + end + + let(:root_group_user) do + users.create!(name: 'Example User', email: 'user@example.com', projects_limit: 0) + end + + let(:non_root_group_user) do + users.create!(username: 'user2', email: 'user2@example.com', projects_limit: 10) + end + + it 'removes scim_oauth_access_tokens that belong to non-root group and related scim_identities' do + scim_oauth_access_token_root_group = scim_oauth_access_tokens.create!( + group_id: root_group.id, + token_encrypted: Gitlab::CryptoHelper.aes256_gcm_encrypt(SecureRandom.hex(50)) + ) + scim_oauth_access_token_non_root_group = scim_oauth_access_tokens.create!( + group_id: non_root_group.id, + token_encrypted: Gitlab::CryptoHelper.aes256_gcm_encrypt(SecureRandom.hex(50)) + ) + + scim_identity_root_group = scim_identities.create!( + group_id: root_group.id, + extern_uid: "12345", + user_id: root_group_user.id, + active: true + ) + + scim_identity_non_root_group = scim_identities.create!( + group_id: non_root_group.id, + extern_uid: "12345", + user_id: non_root_group_user.id, + active: true + ) + + expect { migrate! 
}.to change { scim_oauth_access_tokens.count }.from(2).to(1) + expect(scim_oauth_access_tokens.find_by_id(scim_oauth_access_token_non_root_group.id)).to be_nil + expect(scim_identities.find_by_id(scim_identity_non_root_group.id)).to be_nil + + expect(scim_oauth_access_tokens.find_by_id(scim_oauth_access_token_root_group.id)).not_to be_nil + expect(scim_identities.find_by_id(scim_identity_root_group.id)).not_to be_nil + end +end diff --git a/spec/migrations/requeue_backfill_admin_mode_scope_for_personal_access_tokens_spec.rb b/spec/migrations/requeue_backfill_admin_mode_scope_for_personal_access_tokens_spec.rb new file mode 100644 index 00000000000..b9af6d98beb --- /dev/null +++ b/spec/migrations/requeue_backfill_admin_mode_scope_for_personal_access_tokens_spec.rb @@ -0,0 +1,19 @@ +# frozen_string_literal: true + +require 'spec_helper' +require_migration! + +RSpec.describe RequeueBackfillAdminModeScopeForPersonalAccessTokens, feature_category: :system_access do + describe '#up' do + it 'schedules background migration' do + migrate! + + expect(described_class::MIGRATION).to( + have_scheduled_batched_migration( + table_name: :personal_access_tokens, + column_name: :id, + interval: described_class::DELAY_INTERVAL) + ) + end + end +end diff --git a/spec/migrations/rerun_remove_invalid_deploy_access_level_spec.rb b/spec/migrations/rerun_remove_invalid_deploy_access_level_spec.rb new file mode 100644 index 00000000000..72663e63996 --- /dev/null +++ b/spec/migrations/rerun_remove_invalid_deploy_access_level_spec.rb @@ -0,0 +1,86 @@ +# frozen_string_literal: true + +require 'spec_helper' + +require_migration! 
+ +RSpec.describe RerunRemoveInvalidDeployAccessLevel, :migration, feature_category: :continuous_integration do + let(:users) { table(:users) } + let(:groups) { table(:namespaces) } + let(:protected_environments) { table(:protected_environments) } + let(:deploy_access_levels) { table(:protected_environment_deploy_access_levels) } + + let(:user) { users.create!(email: 'email@email.com', name: 'foo', username: 'foo', projects_limit: 0) } + let(:group) { groups.create!(name: 'test-group', path: 'test-group') } + let(:pe) do + protected_environments.create!(name: 'test-pe', group_id: group.id) + end + + let!(:invalid_access_level) do + deploy_access_levels.create!( + access_level: 40, + user_id: user.id, + group_id: group.id, + protected_environment_id: pe.id) + end + + let!(:access_level) do + deploy_access_levels.create!( + access_level: 40, + user_id: nil, + group_id: nil, + protected_environment_id: pe.id) + end + + let!(:group_access_level) do + deploy_access_levels.create!( + group_id: group.id, + protected_environment_id: pe.id) + end + + let!(:user_access_level) do + deploy_access_levels.create!( + user_id: user.id, + protected_environment_id: pe.id) + end + + let!(:user_and_group_access_level) do + deploy_access_levels.create!( + user_id: user.id, + group_id: group.id, + protected_environment_id: pe.id) + end + + it 'fixes invalid access_level entries and does not affect others' do + expect { migrate! 
}.to change { + deploy_access_levels.where(protected_environment_id: pe.id) + .where("num_nonnulls(user_id, group_id, access_level) = 1").count + }.from(3).to(5) + + invalid_access_level.reload + access_level.reload + group_access_level.reload + user_access_level.reload + user_and_group_access_level.reload + + expect(invalid_access_level.access_level).to be_nil + expect(invalid_access_level.user_id).to eq(user.id) + expect(invalid_access_level.group_id).to be_nil + + expect(access_level.access_level).to eq(40) + expect(access_level.user_id).to be_nil + expect(access_level.group_id).to be_nil + + expect(group_access_level.access_level).to be_nil + expect(group_access_level.user_id).to be_nil + expect(group_access_level.group_id).to eq(group.id) + + expect(user_access_level.access_level).to be_nil + expect(user_access_level.user_id).to eq(user.id) + expect(user_access_level.group_id).to be_nil + + expect(user_and_group_access_level.access_level).to be_nil + expect(user_and_group_access_level.user_id).to eq(user.id) + expect(user_and_group_access_level.group_id).to be_nil + end +end diff --git a/spec/migrations/schedule_backfill_draft_status_on_merge_requests_corrected_regex_spec.rb b/spec/migrations/schedule_backfill_draft_status_on_merge_requests_corrected_regex_spec.rb index a3bec40c3f0..abcdde7f075 100644 --- a/spec/migrations/schedule_backfill_draft_status_on_merge_requests_corrected_regex_spec.rb +++ b/spec/migrations/schedule_backfill_draft_status_on_merge_requests_corrected_regex_spec.rb @@ -5,7 +5,7 @@ require 'spec_helper' require_migration! 
RSpec.describe ScheduleBackfillDraftStatusOnMergeRequestsCorrectedRegex, - :sidekiq, feature_category: :code_review_workflow do + :sidekiq, feature_category: :code_review_workflow do let(:namespaces) { table(:namespaces) } let(:projects) { table(:projects) } let(:merge_requests) { table(:merge_requests) } diff --git a/spec/migrations/schedule_fix_incorrect_max_seats_used2_spec.rb b/spec/migrations/schedule_fix_incorrect_max_seats_used2_spec.rb deleted file mode 100644 index 26764f855b7..00000000000 --- a/spec/migrations/schedule_fix_incorrect_max_seats_used2_spec.rb +++ /dev/null @@ -1,34 +0,0 @@ -# frozen_string_literal: true - -require 'spec_helper' -require_migration! - -RSpec.describe ScheduleFixIncorrectMaxSeatsUsed2, :migration, feature_category: :purchase do - let(:migration_name) { described_class::MIGRATION.to_s.demodulize } - - describe '#up' do - it 'schedules a job on Gitlab.com' do - allow(Gitlab).to receive(:com?).and_return(true) - - Sidekiq::Testing.fake! do - freeze_time do - migrate! - - expect(migration_name).to be_scheduled_delayed_migration(1.hour, 'batch_2_for_start_date_before_02_aug_2021') - expect(BackgroundMigrationWorker.jobs.size).to eq(1) - end - end - end - - it 'does not schedule any jobs when not Gitlab.com' do - allow(Gitlab).to receive(:com?).and_return(false) - - Sidekiq::Testing.fake! do - migrate! - - expect(migration_name).not_to be_scheduled_delayed_migration - expect(BackgroundMigrationWorker.jobs.size).to eq(0) - end - end - end -end diff --git a/spec/migrations/schedule_fix_incorrect_max_seats_used_spec.rb b/spec/migrations/schedule_fix_incorrect_max_seats_used_spec.rb deleted file mode 100644 index 194a1d39ad1..00000000000 --- a/spec/migrations/schedule_fix_incorrect_max_seats_used_spec.rb +++ /dev/null @@ -1,26 +0,0 @@ -# frozen_string_literal: true - -require 'spec_helper' -require_migration! 
- -RSpec.describe ScheduleFixIncorrectMaxSeatsUsed, :migration, feature_category: :purchase do - let(:migration) { described_class.new } - - describe '#up' do - it 'schedules a job on Gitlab.com' do - allow(Gitlab).to receive(:com?).and_return(true) - - expect(migration).to receive(:migrate_in).with(1.hour, 'FixIncorrectMaxSeatsUsed') - - migration.up - end - - it 'does not schedule any jobs when not Gitlab.com' do - allow(Gitlab::CurrentSettings).to receive(:com?).and_return(false) - - expect(migration).not_to receive(:migrate_in) - - migration.up - end - end -end diff --git a/spec/migrations/schedule_fixing_security_scan_statuses_spec.rb b/spec/migrations/schedule_fixing_security_scan_statuses_spec.rb index c4c7819bda7..56d30e71676 100644 --- a/spec/migrations/schedule_fixing_security_scan_statuses_spec.rb +++ b/spec/migrations/schedule_fixing_security_scan_statuses_spec.rb @@ -3,8 +3,8 @@ require 'spec_helper' require_migration! -RSpec.describe ScheduleFixingSecurityScanStatuses, :suppress_gitlab_schemas_validate_connection, - feature_category: :vulnerability_management do +RSpec.describe ScheduleFixingSecurityScanStatuses, + :suppress_gitlab_schemas_validate_connection, feature_category: :vulnerability_management do let!(:namespaces) { table(:namespaces) } let!(:projects) { table(:projects) } let!(:pipelines) { table(:ci_pipelines) } diff --git a/spec/migrations/schedule_migrate_shared_vulnerability_identifiers_spec.rb b/spec/migrations/schedule_migrate_shared_vulnerability_identifiers_spec.rb new file mode 100644 index 00000000000..c1802a1a339 --- /dev/null +++ b/spec/migrations/schedule_migrate_shared_vulnerability_identifiers_spec.rb @@ -0,0 +1,32 @@ +# frozen_string_literal: true + +require "spec_helper" + +require_migration! + +RSpec.describe ScheduleMigrateSharedVulnerabilityIdentifiers, :migration, feature_category: :vulnerability_management do + describe "#up" do + before do + migrate! 
+ end + + it "schedules" do + Gitlab::Database::BackgroundMigration::BatchedMigration.find_by!( + job_class_name: described_class::MIGRATION, + table_name: described_class::TABLE_NAME, + column_name: described_class::BATCH_COLUMN, + batch_size: described_class::BATCH_SIZE, + sub_batch_size: described_class::SUB_BATCH_SIZE) + end + end + + describe '#down' do + before do + schema_migrate_down! + end + + it "deletes" do + expect(described_class::MIGRATION).not_to have_scheduled_batched_migration + end + end +end diff --git a/spec/migrations/schedule_purging_stale_security_scans_spec.rb b/spec/migrations/schedule_purging_stale_security_scans_spec.rb index b39baa145ff..906dc90bcc4 100644 --- a/spec/migrations/schedule_purging_stale_security_scans_spec.rb +++ b/spec/migrations/schedule_purging_stale_security_scans_spec.rb @@ -4,7 +4,7 @@ require 'spec_helper' require_migration! RSpec.describe SchedulePurgingStaleSecurityScans, :suppress_gitlab_schemas_validate_connection, -feature_category: :vulnerability_management do + feature_category: :vulnerability_management do let!(:namespaces) { table(:namespaces) } let!(:projects) { table(:projects) } let!(:pipelines) { table(:ci_pipelines) } diff --git a/spec/migrations/schedule_recalculate_vulnerability_finding_signatures_for_findings_spec.rb b/spec/migrations/schedule_recalculate_vulnerability_finding_signatures_for_findings_spec.rb deleted file mode 100644 index 8903a32285e..00000000000 --- a/spec/migrations/schedule_recalculate_vulnerability_finding_signatures_for_findings_spec.rb +++ /dev/null @@ -1,90 +0,0 @@ -# frozen_string_literal: true - -require 'spec_helper' -require_migration! - -RSpec.describe ScheduleRecalculateVulnerabilityFindingSignaturesForFindings, :migration, -feature_category: :vulnerability_management do - before do - allow(Gitlab).to receive(:ee?).and_return(ee?) - stub_const("#{described_class.name}::BATCH_SIZE", 2) - end - - context 'when the Gitlab instance is FOSS' do - let(:ee?) 
{ false } - - it 'does not run the migration' do - expect { migrate! }.not_to change { BackgroundMigrationWorker.jobs.size } - end - end - - context 'when the Gitlab instance is EE' do - let(:ee?) { true } - - let!(:namespaces) { table(:namespaces) } - let!(:projects) { table(:projects) } - let!(:findings) { table(:vulnerability_occurrences) } - let!(:scanners) { table(:vulnerability_scanners) } - let!(:identifiers) { table(:vulnerability_identifiers) } - let!(:vulnerability_finding_signatures) { table(:vulnerability_finding_signatures) } - - let!(:namespace) { namespaces.create!(name: 'test', path: 'test') } - let!(:project) { projects.create!(namespace_id: namespace.id, name: 'gitlab', path: 'gitlab') } - - let!(:scanner) do - scanners.create!(project_id: project.id, external_id: 'trivy', name: 'Security Scanner') - end - - let!(:identifier) do - identifiers.create!(project_id: project.id, - fingerprint: 'd432c2ad2953e8bd587a3a43b3ce309b5b0154c123', - external_type: 'SECURITY_ID', - external_id: 'SECURITY_0', - name: 'SECURITY_IDENTIFIER 0') - end - - let!(:finding1) { findings.create!(finding_params) } - let!(:signature1) { vulnerability_finding_signatures.create!(finding_id: finding1.id, algorithm_type: 0, signature_sha: ::Digest::SHA1.digest(SecureRandom.hex(50))) } - - let!(:finding2) { findings.create!(finding_params) } - let!(:signature2) { vulnerability_finding_signatures.create!(finding_id: finding2.id, algorithm_type: 0, signature_sha: ::Digest::SHA1.digest(SecureRandom.hex(50))) } - - let!(:finding3) { findings.create!(finding_params) } - let!(:signature3) { vulnerability_finding_signatures.create!(finding_id: finding3.id, algorithm_type: 0, signature_sha: ::Digest::SHA1.digest(SecureRandom.hex(50))) } - - # this migration is now a no-op - it 'does not schedule the background jobs', :aggregate_failure do - Sidekiq::Testing.fake! do - freeze_time do - migrate! 
- - expect(BackgroundMigrationWorker.jobs.size).to eq(0) - expect(described_class::MIGRATION) - .not_to be_scheduled_migration_with_multiple_args(signature1.id, signature2.id) - expect(described_class::MIGRATION) - .not_to be_scheduled_migration_with_multiple_args(signature3.id, signature3.id) - end - end - end - - def finding_params - uuid = SecureRandom.uuid - - { - severity: 0, - confidence: 5, - report_type: 2, - project_id: project.id, - scanner_id: scanner.id, - primary_identifier_id: identifier.id, - location: nil, - project_fingerprint: SecureRandom.hex(20), - location_fingerprint: Digest::SHA1.hexdigest(SecureRandom.hex(10)), - uuid: uuid, - name: "Vulnerability Finding #{uuid}", - metadata_version: '1.3', - raw_metadata: '{}' - } - end - end -end diff --git a/spec/migrations/schedule_update_timelogs_null_spent_at_spec.rb b/spec/migrations/schedule_update_timelogs_null_spent_at_spec.rb deleted file mode 100644 index 99ee9e58f4e..00000000000 --- a/spec/migrations/schedule_update_timelogs_null_spent_at_spec.rb +++ /dev/null @@ -1,44 +0,0 @@ -# frozen_string_literal: true - -require 'spec_helper' -require_migration! 
- -RSpec.describe ScheduleUpdateTimelogsNullSpentAt, feature_category: :team_planning do - let!(:namespace) { table(:namespaces).create!(name: 'namespace', path: 'namespace') } - let!(:project) { table(:projects).create!(namespace_id: namespace.id) } - let!(:issue) { table(:issues).create!(project_id: project.id) } - let!(:merge_request) { table(:merge_requests).create!(target_project_id: project.id, source_branch: 'master', target_branch: 'feature') } - let!(:timelog1) { create_timelog!(merge_request_id: merge_request.id) } - let!(:timelog2) { create_timelog!(merge_request_id: merge_request.id) } - let!(:timelog3) { create_timelog!(merge_request_id: merge_request.id) } - let!(:timelog4) { create_timelog!(issue_id: issue.id) } - let!(:timelog5) { create_timelog!(issue_id: issue.id) } - - before do - table(:timelogs).where.not(id: timelog3.id).update_all(spent_at: nil) - end - - it 'correctly schedules background migrations' do - stub_const("#{described_class}::BATCH_SIZE", 2) - - Sidekiq::Testing.fake! do - freeze_time do - migrate! 
- - expect(described_class::MIGRATION) - .to be_scheduled_delayed_migration(2.minutes, timelog1.id, timelog2.id) - - expect(described_class::MIGRATION) - .to be_scheduled_delayed_migration(4.minutes, timelog4.id, timelog5.id) - - expect(BackgroundMigrationWorker.jobs.size).to eq(2) - end - end - end - - private - - def create_timelog!(**args) - table(:timelogs).create!(**args, time_spent: 1) - end -end diff --git a/spec/migrations/set_email_confirmation_setting_before_removing_send_user_confirmation_email_column_spec.rb b/spec/migrations/set_email_confirmation_setting_before_removing_send_user_confirmation_email_column_spec.rb index 8e00fbe4b89..02ecbe90ee0 100644 --- a/spec/migrations/set_email_confirmation_setting_before_removing_send_user_confirmation_email_column_spec.rb +++ b/spec/migrations/set_email_confirmation_setting_before_removing_send_user_confirmation_email_column_spec.rb @@ -4,7 +4,7 @@ require 'spec_helper' require_migration! RSpec.describe SetEmailConfirmationSettingBeforeRemovingSendUserConfirmationEmailColumn, - feature_category: :user_profile do + feature_category: :user_profile do let(:migration) { described_class.new } let(:application_settings_table) { table(:application_settings) } diff --git a/spec/migrations/set_email_confirmation_setting_from_soft_email_confirmation_ff_spec.rb b/spec/migrations/set_email_confirmation_setting_from_soft_email_confirmation_ff_spec.rb new file mode 100644 index 00000000000..202baebf1da --- /dev/null +++ b/spec/migrations/set_email_confirmation_setting_from_soft_email_confirmation_ff_spec.rb @@ -0,0 +1,62 @@ +# frozen_string_literal: true + +require 'spec_helper' +require_migration! 
+ +RSpec.describe SetEmailConfirmationSettingFromSoftEmailConfirmationFf, feature_category: :feature_flags do + let(:migration) { described_class.new } + let(:application_settings_table) { table(:application_settings) } + let(:feature_gates_table) { table(:feature_gates) } + + describe '#up' do + context 'when feature gate for `soft_email_confirmation` does not exist' do + it 'does not update `email_confirmation_setting`' do + application_settings_table.create!(email_confirmation_setting: 0) + + migration.up + + expect(application_settings_table.last.email_confirmation_setting).to eq 0 + end + end + + context 'when feature gate for `soft_email_confirmation` does exist' do + context 'when feature gate value is `false`' do + before do + feature_gates_table.create!(feature_key: 'soft_email_confirmation', key: 'boolean', value: 'false') + end + + it 'does not update `email_confirmation_setting`' do + application_settings_table.create!(email_confirmation_setting: 0) + + migration.up + + expect(application_settings_table.last.email_confirmation_setting).to eq 0 + end + end + + context 'when feature gate value is `true`' do + before do + feature_gates_table.create!(feature_key: 'soft_email_confirmation', key: 'boolean', value: 'true') + end + + it "updates `email_confirmation_setting` to '1' (soft)" do + application_settings_table.create!(email_confirmation_setting: 0) + + migration.up + + expect(application_settings_table.last.email_confirmation_setting).to eq 1 + end + end + end + end + + describe '#down' do + it "updates 'email_confirmation_setting' to default value: '0' (off)" do + application_settings_table.create!(email_confirmation_setting: 1) + + migration.down + + expect(application_settings_table.last.email_confirmation_setting).to eq 0 + end + end +end diff --git a/spec/migrations/slice_merge_request_diff_commit_migrations_spec.rb b/spec/migrations/slice_merge_request_diff_commit_migrations_spec.rb deleted file mode 100644 index ffd25152a45..00000000000 --- 
a/spec/migrations/slice_merge_request_diff_commit_migrations_spec.rb +++ /dev/null @@ -1,70 +0,0 @@ -# frozen_string_literal: true - -require 'spec_helper' -require_migration! - -RSpec.describe SliceMergeRequestDiffCommitMigrations, :migration, feature_category: :code_review_workflow do - let(:migration) { described_class.new } - - describe '#up' do - context 'when there are no jobs to process' do - it 'does nothing' do - expect(migration).not_to receive(:migrate_in) - expect(Gitlab::Database::BackgroundMigrationJob).not_to receive(:create!) - - migration.up - end - end - - context 'when there are pending jobs' do - let!(:job1) do - Gitlab::Database::BackgroundMigrationJob.create!( - class_name: described_class::MIGRATION_CLASS, - arguments: [1, 10_001] - ) - end - - let!(:job2) do - Gitlab::Database::BackgroundMigrationJob.create!( - class_name: described_class::MIGRATION_CLASS, - arguments: [10_001, 20_001] - ) - end - - it 'marks the old jobs as finished' do - migration.up - - job1.reload - job2.reload - - expect(job1).to be_succeeded - expect(job2).to be_succeeded - end - - it 'the jobs are slices into smaller ranges' do - migration.up - - new_jobs = Gitlab::Database::BackgroundMigrationJob - .for_migration_class(described_class::MIGRATION_CLASS) - .pending - .to_a - - expect(new_jobs.map(&:arguments)).to eq( - [ - [1, 5_001], - [5_001, 10_001], - [10_001, 15_001], - [15_001, 20_001] - ]) - end - - it 'schedules a background migration for the first job' do - expect(migration) - .to receive(:migrate_in) - .with(1.hour, described_class::STEAL_MIGRATION_CLASS, [1, 5_001]) - - migration.up - end - end - end -end diff --git a/spec/migrations/start_backfill_ci_queuing_tables_spec.rb b/spec/migrations/start_backfill_ci_queuing_tables_spec.rb deleted file mode 100644 index c308a16d5b8..00000000000 --- a/spec/migrations/start_backfill_ci_queuing_tables_spec.rb +++ /dev/null @@ -1,49 +0,0 @@ -# frozen_string_literal: true - -require 'spec_helper' -require_migration! 
- -RSpec.describe StartBackfillCiQueuingTables, :suppress_gitlab_schemas_validate_connection, -feature_category: :continuous_integration do - let(:namespaces) { table(:namespaces) } - let(:projects) { table(:projects) } - let(:builds) { table(:ci_builds) } - - let!(:namespace) do - namespaces.create!(name: 'namespace1', path: 'namespace1') - end - - let!(:project) do - projects.create!(namespace_id: namespace.id, name: 'test1', path: 'test1') - end - - let!(:pending_build_1) do - builds.create!(status: :pending, name: 'test1', type: 'Ci::Build', project_id: project.id) - end - - let!(:running_build) do - builds.create!(status: :running, name: 'test2', type: 'Ci::Build', project_id: project.id) - end - - let!(:pending_build_2) do - builds.create!(status: :pending, name: 'test3', type: 'Ci::Build', project_id: project.id) - end - - before do - stub_const("#{described_class.name}::BATCH_SIZE", 1) - end - - it 'schedules jobs for builds that are pending' do - Sidekiq::Testing.fake! do - freeze_time do - migrate! - - expect(described_class::MIGRATION).to be_scheduled_delayed_migration( - 2.minutes, pending_build_1.id, pending_build_1.id) - expect(described_class::MIGRATION).to be_scheduled_delayed_migration( - 4.minutes, pending_build_2.id, pending_build_2.id) - expect(BackgroundMigrationWorker.jobs.size).to eq(2) - end - end - end -end diff --git a/spec/migrations/swap_commit_user_mentions_note_id_to_bigint_for_gitlab_dot_com_spec.rb b/spec/migrations/swap_commit_user_mentions_note_id_to_bigint_for_gitlab_dot_com_spec.rb new file mode 100644 index 00000000000..d219d544033 --- /dev/null +++ b/spec/migrations/swap_commit_user_mentions_note_id_to_bigint_for_gitlab_dot_com_spec.rb @@ -0,0 +1,66 @@ +# frozen_string_literal: true + +require 'spec_helper' +require_migration! 
+ +RSpec.describe SwapCommitUserMentionsNoteIdToBigintForGitlabDotCom, feature_category: :database do + describe '#up' do + before do + # A we call `schema_migrate_down!` before each example, and for this migration + # `#down` is same as `#up`, we need to ensure we start from the expected state. + connection = described_class.new.connection + connection.execute('ALTER TABLE commit_user_mentions ALTER COLUMN note_id TYPE integer') + connection.execute('ALTER TABLE commit_user_mentions ALTER COLUMN note_id_convert_to_bigint TYPE bigint') + end + + # rubocop: disable RSpec/AnyInstanceOf + it 'swaps the integer and bigint columns for GitLab.com, dev, or test' do + allow_any_instance_of(described_class).to receive(:com_or_dev_or_test_but_not_jh?).and_return(true) + + user_mentions = table(:commit_user_mentions) + + disable_migrations_output do + reversible_migration do |migration| + migration.before -> { + user_mentions.reset_column_information + + expect(user_mentions.columns.find { |c| c.name == 'note_id' }.sql_type).to eq('integer') + expect(user_mentions.columns.find { |c| c.name == 'note_id_convert_to_bigint' }.sql_type).to eq('bigint') + } + + migration.after -> { + user_mentions.reset_column_information + + expect(user_mentions.columns.find { |c| c.name == 'note_id' }.sql_type).to eq('bigint') + expect(user_mentions.columns.find { |c| c.name == 'note_id_convert_to_bigint' }.sql_type).to eq('integer') + } + end + end + end + + it 'is a no-op for other instances' do + allow_any_instance_of(described_class).to receive(:com_or_dev_or_test_but_not_jh?).and_return(false) + + user_mentions = table(:commit_user_mentions) + + disable_migrations_output do + reversible_migration do |migration| + migration.before -> { + user_mentions.reset_column_information + + expect(user_mentions.columns.find { |c| c.name == 'note_id' }.sql_type).to eq('integer') + expect(user_mentions.columns.find { |c| c.name == 'note_id_convert_to_bigint' }.sql_type).to eq('bigint') + } + + 
migration.after -> {
+ user_mentions.reset_column_information
+
+ expect(user_mentions.columns.find { |c| c.name == 'note_id' }.sql_type).to eq('integer')
+ expect(user_mentions.columns.find { |c| c.name == 'note_id_convert_to_bigint' }.sql_type).to eq('bigint')
+ }
+ end
+ end
+ end
+ # rubocop: enable RSpec/AnyInstanceOf
+ end
+end diff --git a/spec/migrations/swap_design_user_mentions_note_id_to_bigint_for_gitlab_dot_com_spec.rb b/spec/migrations/swap_design_user_mentions_note_id_to_bigint_for_gitlab_dot_com_spec.rb new file mode 100644 index 00000000000..c7cbf7bfe2a --- /dev/null +++ b/spec/migrations/swap_design_user_mentions_note_id_to_bigint_for_gitlab_dot_com_spec.rb @@ -0,0 +1,66 @@ +# frozen_string_literal: true + +require 'spec_helper' +require_migration! + +RSpec.describe SwapDesignUserMentionsNoteIdToBigintForGitlabDotCom, feature_category: :database do + describe '#up' do + before do + # As we call `schema_migrate_down!` before each example, and for this migration + # `#down` is the same as `#up`, we need to ensure we start from the expected state.
+ connection = described_class.new.connection + connection.execute('ALTER TABLE design_user_mentions ALTER COLUMN note_id TYPE integer') + connection.execute('ALTER TABLE design_user_mentions ALTER COLUMN note_id_convert_to_bigint TYPE bigint') + end + + # rubocop: disable RSpec/AnyInstanceOf + it 'swaps the integer and bigint columns for GitLab.com, dev, or test' do + allow_any_instance_of(described_class).to receive(:com_or_dev_or_test_but_not_jh?).and_return(true) + + user_mentions = table(:design_user_mentions) + + disable_migrations_output do + reversible_migration do |migration| + migration.before -> { + user_mentions.reset_column_information + + expect(user_mentions.columns.find { |c| c.name == 'note_id' }.sql_type).to eq('integer') + expect(user_mentions.columns.find { |c| c.name == 'note_id_convert_to_bigint' }.sql_type).to eq('bigint') + } + + migration.after -> { + user_mentions.reset_column_information + + expect(user_mentions.columns.find { |c| c.name == 'note_id' }.sql_type).to eq('bigint') + expect(user_mentions.columns.find { |c| c.name == 'note_id_convert_to_bigint' }.sql_type).to eq('integer') + } + end + end + end + + it 'is a no-op for other instances' do + allow_any_instance_of(described_class).to receive(:com_or_dev_or_test_but_not_jh?).and_return(false) + + user_mentions = table(:design_user_mentions) + + disable_migrations_output do + reversible_migration do |migration| + migration.before -> { + user_mentions.reset_column_information + + expect(user_mentions.columns.find { |c| c.name == 'note_id' }.sql_type).to eq('integer') + expect(user_mentions.columns.find { |c| c.name == 'note_id_convert_to_bigint' }.sql_type).to eq('bigint') + } + + migration.after -> { + user_mentions.reset_column_information + + expect(user_mentions.columns.find { |c| c.name == 'note_id' }.sql_type).to eq('integer') + expect(user_mentions.columns.find { |c| c.name == 'note_id_convert_to_bigint' }.sql_type).to eq('bigint') + } + end + end + end + # rubocop: enable 
RSpec/AnyInstanceOf + end +end diff --git a/spec/migrations/swap_epic_user_mentions_note_id_to_bigint_for_gitlab_dot_com_spec.rb b/spec/migrations/swap_epic_user_mentions_note_id_to_bigint_for_gitlab_dot_com_spec.rb new file mode 100644 index 00000000000..41cc75672e1 --- /dev/null +++ b/spec/migrations/swap_epic_user_mentions_note_id_to_bigint_for_gitlab_dot_com_spec.rb @@ -0,0 +1,66 @@ +# frozen_string_literal: true + +require 'spec_helper' +require_migration! + +RSpec.describe SwapEpicUserMentionsNoteIdToBigintForGitlabDotCom, feature_category: :database do + describe '#up' do + before do + # A we call `schema_migrate_down!` before each example, and for this migration + # `#down` is same as `#up`, we need to ensure we start from the expected state. + connection = described_class.new.connection + connection.execute('ALTER TABLE epic_user_mentions ALTER COLUMN note_id TYPE integer') + connection.execute('ALTER TABLE epic_user_mentions ALTER COLUMN note_id_convert_to_bigint TYPE bigint') + end + + # rubocop: disable RSpec/AnyInstanceOf + it 'swaps the integer and bigint columns for GitLab.com, dev, or test' do + allow_any_instance_of(described_class).to receive(:com_or_dev_or_test_but_not_jh?).and_return(true) + + user_mentions = table(:epic_user_mentions) + + disable_migrations_output do + reversible_migration do |migration| + migration.before -> { + user_mentions.reset_column_information + + expect(user_mentions.columns.find { |c| c.name == 'note_id' }.sql_type).to eq('integer') + expect(user_mentions.columns.find { |c| c.name == 'note_id_convert_to_bigint' }.sql_type).to eq('bigint') + } + + migration.after -> { + user_mentions.reset_column_information + + expect(user_mentions.columns.find { |c| c.name == 'note_id' }.sql_type).to eq('bigint') + expect(user_mentions.columns.find { |c| c.name == 'note_id_convert_to_bigint' }.sql_type).to eq('integer') + } + end + end + end + + it 'is a no-op for other instances' do + allow_any_instance_of(described_class).to 
receive(:com_or_dev_or_test_but_not_jh?).and_return(false) + + user_mentions = table(:epic_user_mentions) + + disable_migrations_output do + reversible_migration do |migration| + migration.before -> { + user_mentions.reset_column_information + + expect(user_mentions.columns.find { |c| c.name == 'note_id' }.sql_type).to eq('integer') + expect(user_mentions.columns.find { |c| c.name == 'note_id_convert_to_bigint' }.sql_type).to eq('bigint') + } + + migration.after -> { + user_mentions.reset_column_information + + expect(user_mentions.columns.find { |c| c.name == 'note_id' }.sql_type).to eq('integer') + expect(user_mentions.columns.find { |c| c.name == 'note_id_convert_to_bigint' }.sql_type).to eq('bigint') + } + end + end + end + # rubocop: enable RSpec/AnyInstanceOf + end +end diff --git a/spec/migrations/swap_issue_user_mentions_note_id_to_bigint_for_gitlab_dot_com_2_spec.rb b/spec/migrations/swap_issue_user_mentions_note_id_to_bigint_for_gitlab_dot_com_2_spec.rb new file mode 100644 index 00000000000..2c561730d95 --- /dev/null +++ b/spec/migrations/swap_issue_user_mentions_note_id_to_bigint_for_gitlab_dot_com_2_spec.rb @@ -0,0 +1,84 @@ +# frozen_string_literal: true + +require 'spec_helper' +require_migration! + +# rubocop: disable RSpec/FilePath +RSpec.describe SwapIssueUserMentionsNoteIdToBigintForGitlabDotCom2, feature_category: :database do + describe '#up' do + before do + # As we call `schema_migrate_down!` before each example, and for this migration + # `#down` is the same as `#up`, we need to ensure we start from the expected state.
+ connection = described_class.new.connection + connection.execute('ALTER TABLE issue_user_mentions ALTER COLUMN note_id TYPE integer') + connection.execute('ALTER TABLE issue_user_mentions ALTER COLUMN note_id_convert_to_bigint TYPE bigint') + end + + # rubocop: disable RSpec/AnyInstanceOf + it 'swaps the integer and bigint columns for GitLab.com, dev, or test' do + allow_any_instance_of(described_class).to receive(:com_or_dev_or_test_but_not_jh?).and_return(true) + + user_mentions = table(:issue_user_mentions) + + disable_migrations_output do + reversible_migration do |migration| + migration.before -> { + user_mentions.reset_column_information + + expect(user_mentions.columns.find { |c| c.name == 'note_id' }.sql_type).to eq('integer') + expect(user_mentions.columns.find { |c| c.name == 'note_id_convert_to_bigint' }.sql_type).to eq('bigint') + } + + migration.after -> { + user_mentions.reset_column_information + + expect(user_mentions.columns.find { |c| c.name == 'note_id' }.sql_type).to eq('bigint') + expect(user_mentions.columns.find { |c| c.name == 'note_id_convert_to_bigint' }.sql_type).to eq('integer') + } + end + end + end + + it 'is a no-op for other instances' do + allow_any_instance_of(described_class).to receive(:com_or_dev_or_test_but_not_jh?).and_return(false) + + user_mentions = table(:issue_user_mentions) + + disable_migrations_output do + reversible_migration do |migration| + migration.before -> { + user_mentions.reset_column_information + + expect(user_mentions.columns.find { |c| c.name == 'note_id' }.sql_type).to eq('integer') + expect(user_mentions.columns.find { |c| c.name == 'note_id_convert_to_bigint' }.sql_type).to eq('bigint') + } + + migration.after -> { + user_mentions.reset_column_information + + expect(user_mentions.columns.find { |c| c.name == 'note_id' }.sql_type).to eq('integer') + expect(user_mentions.columns.find { |c| c.name == 'note_id_convert_to_bigint' }.sql_type).to eq('bigint') + } + end + end + end + + it 'is a no-op if 
columns are already swapped' do + connection = described_class.new.connection + connection.execute('ALTER TABLE issue_user_mentions ALTER COLUMN note_id TYPE bigint') + connection.execute('ALTER TABLE issue_user_mentions ALTER COLUMN note_id_convert_to_bigint TYPE integer') + + allow_any_instance_of(described_class).to receive(:com_or_dev_or_test_but_not_jh?).and_return(true) + + migrate! + + user_mentions = table(:issue_user_mentions) + user_mentions.reset_column_information + + expect(user_mentions.columns.find { |c| c.name == 'note_id' }.sql_type).to eq('bigint') + expect(user_mentions.columns.find { |c| c.name == 'note_id_convert_to_bigint' }.sql_type).to eq('integer') + end + # rubocop: enable RSpec/AnyInstanceOf + end +end +# rubocop: enable RSpec/FilePath diff --git a/spec/migrations/swap_merge_request_user_mentions_note_id_to_bigint_spec.rb b/spec/migrations/swap_merge_request_user_mentions_note_id_to_bigint_spec.rb new file mode 100644 index 00000000000..15b21d34714 --- /dev/null +++ b/spec/migrations/swap_merge_request_user_mentions_note_id_to_bigint_spec.rb @@ -0,0 +1,66 @@ +# frozen_string_literal: true + +require 'spec_helper' +require_migration! + +RSpec.describe SwapMergeRequestUserMentionsNoteIdToBigint, feature_category: :database do + describe '#up' do + before do + # As we call `schema_migrate_down!` before each example, and for this migration + # `#down` is the same as `#up`, we need to ensure we start from the expected state.
+ connection = described_class.new.connection + connection.execute('ALTER TABLE merge_request_user_mentions ALTER COLUMN note_id TYPE integer') + connection.execute('ALTER TABLE merge_request_user_mentions ALTER COLUMN note_id_convert_to_bigint TYPE bigint') + end + + # rubocop: disable RSpec/AnyInstanceOf + it 'swaps the integer and bigint columns for GitLab.com, dev, or test' do + allow_any_instance_of(described_class).to receive(:com_or_dev_or_test_but_not_jh?).and_return(true) + + user_mentions = table(:merge_request_user_mentions) + + disable_migrations_output do + reversible_migration do |migration| + migration.before -> { + user_mentions.reset_column_information + + expect(user_mentions.columns.find { |c| c.name == 'note_id' }.sql_type).to eq('integer') + expect(user_mentions.columns.find { |c| c.name == 'note_id_convert_to_bigint' }.sql_type).to eq('bigint') + } + + migration.after -> { + user_mentions.reset_column_information + + expect(user_mentions.columns.find { |c| c.name == 'note_id' }.sql_type).to eq('bigint') + expect(user_mentions.columns.find { |c| c.name == 'note_id_convert_to_bigint' }.sql_type).to eq('integer') + } + end + end + end + + it 'is a no-op for other instances' do + allow_any_instance_of(described_class).to receive(:com_or_dev_or_test_but_not_jh?).and_return(false) + + user_mentions = table(:merge_request_user_mentions) + + disable_migrations_output do + reversible_migration do |migration| + migration.before -> { + user_mentions.reset_column_information + + expect(user_mentions.columns.find { |c| c.name == 'note_id' }.sql_type).to eq('integer') + expect(user_mentions.columns.find { |c| c.name == 'note_id_convert_to_bigint' }.sql_type).to eq('bigint') + } + + migration.after -> { + user_mentions.reset_column_information + + expect(user_mentions.columns.find { |c| c.name == 'note_id' }.sql_type).to eq('integer') + expect(user_mentions.columns.find { |c| c.name == 'note_id_convert_to_bigint' }.sql_type).to eq('bigint') + } + end + end + 
end + # rubocop: enable RSpec/AnyInstanceOf + end +end diff --git a/spec/migrations/swap_note_diff_files_note_id_to_bigint_for_gitlab_dot_com_spec.rb b/spec/migrations/swap_note_diff_files_note_id_to_bigint_for_gitlab_dot_com_spec.rb new file mode 100644 index 00000000000..b0147f3ef58 --- /dev/null +++ b/spec/migrations/swap_note_diff_files_note_id_to_bigint_for_gitlab_dot_com_spec.rb @@ -0,0 +1,66 @@ +# frozen_string_literal: true + +require 'spec_helper' +require_migration! + +RSpec.describe SwapNoteDiffFilesNoteIdToBigintForGitlabDotCom, feature_category: :database do + describe '#up' do + before do + # A we call `schema_migrate_down!` before each example, and for this migration + # `#down` is same as `#up`, we need to ensure we start from the expected state. + connection = described_class.new.connection + connection.execute('ALTER TABLE note_diff_files ALTER COLUMN diff_note_id TYPE integer') + connection.execute('ALTER TABLE note_diff_files ALTER COLUMN diff_note_id_convert_to_bigint TYPE bigint') + end + + # rubocop: disable RSpec/AnyInstanceOf + it 'swaps the integer and bigint columns for GitLab.com, dev, or test' do + allow_any_instance_of(described_class).to receive(:com_or_dev_or_test_but_not_jh?).and_return(true) + + ndf = table(:note_diff_files) + + disable_migrations_output do + reversible_migration do |migration| + migration.before -> { + ndf.reset_column_information + + expect(ndf.columns.find { |c| c.name == 'diff_note_id' }.sql_type).to eq('integer') + expect(ndf.columns.find { |c| c.name == 'diff_note_id_convert_to_bigint' }.sql_type).to eq('bigint') + } + + migration.after -> { + ndf.reset_column_information + + expect(ndf.columns.find { |c| c.name == 'diff_note_id' }.sql_type).to eq('bigint') + expect(ndf.columns.find { |c| c.name == 'diff_note_id_convert_to_bigint' }.sql_type).to eq('integer') + } + end + end + end + + it 'is a no-op for other instances' do + allow_any_instance_of(described_class).to 
receive(:com_or_dev_or_test_but_not_jh?).and_return(false) + + ndf = table(:note_diff_files) + + disable_migrations_output do + reversible_migration do |migration| + migration.before -> { + ndf.reset_column_information + + expect(ndf.columns.find { |c| c.name == 'diff_note_id' }.sql_type).to eq('integer') + expect(ndf.columns.find { |c| c.name == 'diff_note_id_convert_to_bigint' }.sql_type).to eq('bigint') + } + + migration.after -> { + ndf.reset_column_information + + expect(ndf.columns.find { |c| c.name == 'diff_note_id' }.sql_type).to eq('integer') + expect(ndf.columns.find { |c| c.name == 'diff_note_id_convert_to_bigint' }.sql_type).to eq('bigint') + } + end + end + end + # rubocop: enable RSpec/AnyInstanceOf + end +end diff --git a/spec/migrations/swap_sent_notifications_id_columns_spec.rb b/spec/migrations/swap_sent_notifications_id_columns_spec.rb new file mode 100644 index 00000000000..2f681a2a587 --- /dev/null +++ b/spec/migrations/swap_sent_notifications_id_columns_spec.rb @@ -0,0 +1,71 @@ +# frozen_string_literal: true + +require 'spec_helper' +require_migration! + +RSpec.describe SwapSentNotificationsIdColumns, feature_category: :database do + describe '#up' do + before do + # A we call `schema_migrate_down!` before each example, and for this migration + # `#down` is same as `#up`, we need to ensure we start from the expected state. + connection = described_class.new.connection + connection.execute('ALTER TABLE sent_notifications ALTER COLUMN id TYPE integer') + connection.execute('ALTER TABLE sent_notifications ALTER COLUMN id_convert_to_bigint TYPE bigint') + # rubocop: disable RSpec/AnyInstanceOf + allow_any_instance_of(described_class).to receive(:com_or_dev_or_test_but_not_jh?).and_return(run_migration?) + # rubocop: enable RSpec/AnyInstanceOf + end + + context 'when we are GitLab.com, dev, or test' do + let(:run_migration?) 
{ true } + + it 'swaps the integer and bigint columns' do + sent_notifications = table(:sent_notifications) + + disable_migrations_output do + reversible_migration do |migration| + migration.before -> { + sent_notifications.reset_column_information + + expect(sent_notifications.columns.find { |c| c.name == 'id' }.sql_type).to eq('integer') + expect(sent_notifications.columns.find { |c| c.name == 'id_convert_to_bigint' }.sql_type).to eq('bigint') + } + + migration.after -> { + sent_notifications.reset_column_information + + expect(sent_notifications.columns.find { |c| c.name == 'id' }.sql_type).to eq('bigint') + expect(sent_notifications.columns.find { |c| c.name == 'id_convert_to_bigint' }.sql_type).to eq('integer') + } + end + end + end + end + + context 'when we are NOT GitLab.com, dev, or test' do + let(:run_migration?) { false } + + it 'does not swap the integer and bigint columns' do + sent_notifications = table(:sent_notifications) + + disable_migrations_output do + reversible_migration do |migration| + migration.before -> { + sent_notifications.reset_column_information + + expect(sent_notifications.columns.find { |c| c.name == 'id' }.sql_type).to eq('integer') + expect(sent_notifications.columns.find { |c| c.name == 'id_convert_to_bigint' }.sql_type).to eq('bigint') + } + + migration.after -> { + sent_notifications.reset_column_information + + expect(sent_notifications.columns.find { |c| c.name == 'id' }.sql_type).to eq('integer') + expect(sent_notifications.columns.find { |c| c.name == 'id_convert_to_bigint' }.sql_type).to eq('bigint') + } + end + end + end + end + end +end diff --git a/spec/migrations/swap_snippet_user_mentions_note_id_to_bigint_for_gitlab_dot_com_spec.rb b/spec/migrations/swap_snippet_user_mentions_note_id_to_bigint_for_gitlab_dot_com_spec.rb new file mode 100644 index 00000000000..628c0fba528 --- /dev/null +++ b/spec/migrations/swap_snippet_user_mentions_note_id_to_bigint_for_gitlab_dot_com_spec.rb @@ -0,0 +1,66 @@ +# 
frozen_string_literal: true + +require 'spec_helper' +require_migration! + +RSpec.describe SwapSnippetUserMentionsNoteIdToBigintForGitlabDotCom, feature_category: :database do + describe '#up' do + before do + # A we call `schema_migrate_down!` before each example, and for this migration + # `#down` is same as `#up`, we need to ensure we start from the expected state. + connection = described_class.new.connection + connection.execute('ALTER TABLE snippet_user_mentions ALTER COLUMN note_id TYPE integer') + connection.execute('ALTER TABLE snippet_user_mentions ALTER COLUMN note_id_convert_to_bigint TYPE bigint') + end + + # rubocop: disable RSpec/AnyInstanceOf + it 'swaps the integer and bigint columns for GitLab.com, dev, or test' do + allow_any_instance_of(described_class).to receive(:com_or_dev_or_test_but_not_jh?).and_return(true) + + user_mentions = table(:snippet_user_mentions) + + disable_migrations_output do + reversible_migration do |migration| + migration.before -> { + user_mentions.reset_column_information + + expect(user_mentions.columns.find { |c| c.name == 'note_id' }.sql_type).to eq('integer') + expect(user_mentions.columns.find { |c| c.name == 'note_id_convert_to_bigint' }.sql_type).to eq('bigint') + } + + migration.after -> { + user_mentions.reset_column_information + + expect(user_mentions.columns.find { |c| c.name == 'note_id' }.sql_type).to eq('bigint') + expect(user_mentions.columns.find { |c| c.name == 'note_id_convert_to_bigint' }.sql_type).to eq('integer') + } + end + end + end + + it 'is a no-op for other instances' do + allow_any_instance_of(described_class).to receive(:com_or_dev_or_test_but_not_jh?).and_return(false) + + user_mentions = table(:snippet_user_mentions) + + disable_migrations_output do + reversible_migration do |migration| + migration.before -> { + user_mentions.reset_column_information + + expect(user_mentions.columns.find { |c| c.name == 'note_id' }.sql_type).to eq('integer') + expect(user_mentions.columns.find { |c| c.name 
== 'note_id_convert_to_bigint' }.sql_type).to eq('bigint') + } + + migration.after -> { + user_mentions.reset_column_information + + expect(user_mentions.columns.find { |c| c.name == 'note_id' }.sql_type).to eq('integer') + expect(user_mentions.columns.find { |c| c.name == 'note_id_convert_to_bigint' }.sql_type).to eq('bigint') + } + end + end + end + # rubocop: enable RSpec/AnyInstanceOf + end +end diff --git a/spec/migrations/swap_suggestions_note_id_to_bigint_for_gitlab_dot_com_spec.rb b/spec/migrations/swap_suggestions_note_id_to_bigint_for_gitlab_dot_com_spec.rb new file mode 100644 index 00000000000..48d72ec151e --- /dev/null +++ b/spec/migrations/swap_suggestions_note_id_to_bigint_for_gitlab_dot_com_spec.rb @@ -0,0 +1,66 @@ +# frozen_string_literal: true + +require 'spec_helper' +require_migration! + +RSpec.describe SwapSuggestionsNoteIdToBigintForGitlabDotCom, feature_category: :database do + describe '#up' do + before do + # As we call `schema_migrate_down!` before each example, and for this migration + # `#down` is the same as `#up`, we need to ensure we start from the expected state.
+ connection = described_class.new.connection + connection.execute('ALTER TABLE suggestions ALTER COLUMN note_id TYPE integer') + connection.execute('ALTER TABLE suggestions ALTER COLUMN note_id_convert_to_bigint TYPE bigint') + end + + # rubocop: disable RSpec/AnyInstanceOf + it 'swaps the integer and bigint columns for GitLab.com, dev, or test' do + allow_any_instance_of(described_class).to receive(:com_or_dev_or_test_but_not_jh?).and_return(true) + + suggestions = table(:suggestions) + + disable_migrations_output do + reversible_migration do |migration| + migration.before -> { + suggestions.reset_column_information + + expect(suggestions.columns.find { |c| c.name == 'note_id' }.sql_type).to eq('integer') + expect(suggestions.columns.find { |c| c.name == 'note_id_convert_to_bigint' }.sql_type).to eq('bigint') + } + + migration.after -> { + suggestions.reset_column_information + + expect(suggestions.columns.find { |c| c.name == 'note_id' }.sql_type).to eq('bigint') + expect(suggestions.columns.find { |c| c.name == 'note_id_convert_to_bigint' }.sql_type).to eq('integer') + } + end + end + end + + it 'is a no-op for other instances' do + allow_any_instance_of(described_class).to receive(:com_or_dev_or_test_but_not_jh?).and_return(false) + + suggestions = table(:suggestions) + + disable_migrations_output do + reversible_migration do |migration| + migration.before -> { + suggestions.reset_column_information + + expect(suggestions.columns.find { |c| c.name == 'note_id' }.sql_type).to eq('integer') + expect(suggestions.columns.find { |c| c.name == 'note_id_convert_to_bigint' }.sql_type).to eq('bigint') + } + + migration.after -> { + suggestions.reset_column_information + + expect(suggestions.columns.find { |c| c.name == 'note_id' }.sql_type).to eq('integer') + expect(suggestions.columns.find { |c| c.name == 'note_id_convert_to_bigint' }.sql_type).to eq('bigint') + } + end + end + end + # rubocop: enable RSpec/AnyInstanceOf + end +end diff --git 
a/spec/migrations/swap_vulnerability_user_mentions_note_id_to_bigint_for_gitlab_dot_com_spec.rb b/spec/migrations/swap_vulnerability_user_mentions_note_id_to_bigint_for_gitlab_dot_com_spec.rb new file mode 100644 index 00000000000..1e358387536 --- /dev/null +++ b/spec/migrations/swap_vulnerability_user_mentions_note_id_to_bigint_for_gitlab_dot_com_spec.rb @@ -0,0 +1,66 @@ +# frozen_string_literal: true + +require 'spec_helper' +require_migration! + +RSpec.describe SwapVulnerabilityUserMentionsNoteIdToBigintForGitlabDotCom, feature_category: :database do + describe '#up' do + before do + # A we call `schema_migrate_down!` before each example, and for this migration + # `#down` is same as `#up`, we need to ensure we start from the expected state. + connection = described_class.new.connection + connection.execute('ALTER TABLE vulnerability_user_mentions ALTER COLUMN note_id TYPE integer') + connection.execute('ALTER TABLE vulnerability_user_mentions ALTER COLUMN note_id_convert_to_bigint TYPE bigint') + end + + # rubocop: disable RSpec/AnyInstanceOf + it 'swaps the integer and bigint columns for GitLab.com, dev, or test' do + allow_any_instance_of(described_class).to receive(:com_or_dev_or_test_but_not_jh?).and_return(true) + + user_mentions = table(:vulnerability_user_mentions) + + disable_migrations_output do + reversible_migration do |migration| + migration.before -> { + user_mentions.reset_column_information + + expect(user_mentions.columns.find { |c| c.name == 'note_id' }.sql_type).to eq('integer') + expect(user_mentions.columns.find { |c| c.name == 'note_id_convert_to_bigint' }.sql_type).to eq('bigint') + } + + migration.after -> { + user_mentions.reset_column_information + + expect(user_mentions.columns.find { |c| c.name == 'note_id' }.sql_type).to eq('bigint') + expect(user_mentions.columns.find { |c| c.name == 'note_id_convert_to_bigint' }.sql_type).to eq('integer') + } + end + end + end + + it 'is a no-op for other instances' do + 
allow_any_instance_of(described_class).to receive(:com_or_dev_or_test_but_not_jh?).and_return(false) + + user_mentions = table(:vulnerability_user_mentions) + + disable_migrations_output do + reversible_migration do |migration| + migration.before -> { + user_mentions.reset_column_information + + expect(user_mentions.columns.find { |c| c.name == 'note_id' }.sql_type).to eq('integer') + expect(user_mentions.columns.find { |c| c.name == 'note_id_convert_to_bigint' }.sql_type).to eq('bigint') + } + + migration.after -> { + user_mentions.reset_column_information + + expect(user_mentions.columns.find { |c| c.name == 'note_id' }.sql_type).to eq('integer') + expect(user_mentions.columns.find { |c| c.name == 'note_id_convert_to_bigint' }.sql_type).to eq('bigint') + } + end + end + end + # rubocop: enable RSpec/AnyInstanceOf + end +end diff --git a/spec/migrations/sync_new_amount_used_for_ci_namespace_monthly_usages_spec.rb b/spec/migrations/sync_new_amount_used_for_ci_namespace_monthly_usages_spec.rb index da8790f4450..c60447d04a1 100644 --- a/spec/migrations/sync_new_amount_used_for_ci_namespace_monthly_usages_spec.rb +++ b/spec/migrations/sync_new_amount_used_for_ci_namespace_monthly_usages_spec.rb @@ -5,7 +5,7 @@ require 'spec_helper' require_migration! RSpec.describe SyncNewAmountUsedForCiNamespaceMonthlyUsages, migration: :gitlab_ci, - feature_category: :continuous_integration do + feature_category: :continuous_integration do let(:namespace_usages) { table(:ci_namespace_monthly_usages) } before do diff --git a/spec/migrations/sync_new_amount_used_for_ci_project_monthly_usages_spec.rb b/spec/migrations/sync_new_amount_used_for_ci_project_monthly_usages_spec.rb index 1c9b2711687..d7add66a97f 100644 --- a/spec/migrations/sync_new_amount_used_for_ci_project_monthly_usages_spec.rb +++ b/spec/migrations/sync_new_amount_used_for_ci_project_monthly_usages_spec.rb @@ -5,7 +5,7 @@ require 'spec_helper' require_migration! 
RSpec.describe SyncNewAmountUsedForCiProjectMonthlyUsages, migration: :gitlab_ci, - feature_category: :continuous_integration do + feature_category: :continuous_integration do let(:project_usages) { table(:ci_project_monthly_usages) } before do diff --git a/spec/migrations/update_application_settings_container_registry_exp_pol_worker_capacity_default_spec.rb b/spec/migrations/update_application_settings_container_registry_exp_pol_worker_capacity_default_spec.rb deleted file mode 100644 index d249fcecf66..00000000000 --- a/spec/migrations/update_application_settings_container_registry_exp_pol_worker_capacity_default_spec.rb +++ /dev/null @@ -1,41 +0,0 @@ -# frozen_string_literal: true - -require 'spec_helper' -require_migration! - -RSpec.describe UpdateApplicationSettingsContainerRegistryExpPolWorkerCapacityDefault, -feature_category: :container_registry do - let(:settings) { table(:application_settings) } - - context 'with no rows in the application_settings table' do - it 'does not insert a row' do - expect { migrate! }.to not_change { settings.count } - end - end - - context 'with a row in the application_settings table' do - before do - settings.create!(container_registry_expiration_policies_worker_capacity: capacity) - end - - context 'with container_registry_expiration_policy_worker_capacity set to a value different than 0' do - let(:capacity) { 1 } - - it 'does not update the row' do - expect { migrate! } - .to not_change { settings.count } - .and not_change { settings.first.container_registry_expiration_policies_worker_capacity } - end - end - - context 'with container_registry_expiration_policy_worker_capacity set to 0' do - let(:capacity) { 0 } - - it 'updates the existing row' do - expect { migrate! 
} - .to not_change { settings.count } - .and change { settings.first.container_registry_expiration_policies_worker_capacity }.from(0).to(4) - end - end - end -end diff --git a/spec/migrations/update_application_settings_protected_paths_spec.rb b/spec/migrations/update_application_settings_protected_paths_spec.rb deleted file mode 100644 index 055955c56f1..00000000000 --- a/spec/migrations/update_application_settings_protected_paths_spec.rb +++ /dev/null @@ -1,47 +0,0 @@ -# frozen_string_literal: true - -require 'spec_helper' -require_migration! - -RSpec.describe UpdateApplicationSettingsProtectedPaths, :aggregate_failures, -feature_category: :system_access do - subject(:migration) { described_class.new } - - let!(:application_settings) { table(:application_settings) } - let!(:oauth_paths) { %w[/oauth/authorize /oauth/token] } - let!(:custom_paths) { %w[/foo /bar] } - - let(:default_paths) { application_settings.column_defaults.fetch('protected_paths') } - - before do - application_settings.create!(protected_paths: custom_paths) - application_settings.create!(protected_paths: custom_paths + oauth_paths) - application_settings.create!(protected_paths: custom_paths + oauth_paths.take(1)) - end - - describe '#up' do - before do - migrate! - application_settings.reset_column_information - end - - it 'removes the OAuth paths from the default value and persisted records' do - expect(default_paths).not_to include(*oauth_paths) - expect(default_paths).to eq(described_class::NEW_DEFAULT_PROTECTED_PATHS) - expect(application_settings.all).to all(have_attributes(protected_paths: custom_paths)) - end - end - - describe '#down' do - before do - migrate! - schema_migrate_down! 
- end - - it 'adds the OAuth paths to the default value and persisted records' do - expect(default_paths).to include(*oauth_paths) - expect(default_paths).to eq(described_class::OLD_DEFAULT_PROTECTED_PATHS) - expect(application_settings.all).to all(have_attributes(protected_paths: custom_paths + oauth_paths)) - end - end -end diff --git a/spec/migrations/update_default_scan_method_of_dast_site_profile_spec.rb b/spec/migrations/update_default_scan_method_of_dast_site_profile_spec.rb deleted file mode 100644 index ac7a4171063..00000000000 --- a/spec/migrations/update_default_scan_method_of_dast_site_profile_spec.rb +++ /dev/null @@ -1,32 +0,0 @@ -# frozen_string_literal: true - -require 'spec_helper' -require_migration! - -RSpec.describe UpdateDefaultScanMethodOfDastSiteProfile, feature_category: :dynamic_application_security_testing do - let(:namespaces) { table(:namespaces) } - let(:projects) { table(:projects) } - let(:dast_sites) { table(:dast_sites) } - let(:dast_site_profiles) { table(:dast_site_profiles) } - - before do - namespace = namespaces.create!(name: 'test', path: 'test') - project = projects.create!(id: 12, namespace_id: namespace.id, name: 'gitlab', path: 'gitlab') - dast_site = dast_sites.create!(id: 1, url: 'https://www.gitlab.com', project_id: project.id) - - dast_site_profiles.create!(id: 1, project_id: project.id, dast_site_id: dast_site.id, - name: "#{FFaker::Product.product_name.truncate(192)} #{SecureRandom.hex(4)} - 0", - scan_method: 0, target_type: 0) - - dast_site_profiles.create!(id: 2, project_id: project.id, dast_site_id: dast_site.id, - name: "#{FFaker::Product.product_name.truncate(192)} #{SecureRandom.hex(4)} - 1", - scan_method: 0, target_type: 1) - end - - it 'updates the scan_method to 1 for profiles with target_type 1' do - migrate! 
- - expect(dast_site_profiles.where(scan_method: 1).count).to eq 1 - expect(dast_site_profiles.where(scan_method: 0).count).to eq 1 - end -end diff --git a/spec/migrations/update_invalid_member_states_spec.rb b/spec/migrations/update_invalid_member_states_spec.rb deleted file mode 100644 index 6ae4b9f3c0f..00000000000 --- a/spec/migrations/update_invalid_member_states_spec.rb +++ /dev/null @@ -1,30 +0,0 @@ -# frozen_string_literal: true - -require 'spec_helper' - -require_migration! - -RSpec.describe UpdateInvalidMemberStates, feature_category: :subgroups do - let(:members) { table(:members) } - let(:groups) { table(:namespaces) } - let(:projects) { table(:projects) } - let(:users) { table(:users) } - - before do - user = users.create!(first_name: 'Test', last_name: 'User', email: 'test@user.com', projects_limit: 1) - group = groups.create!(name: 'gitlab', path: 'gitlab-org') - project = projects.create!(namespace_id: group.id) - - members.create!(state: 2, source_id: group.id, source_type: 'Group', type: 'GroupMember', user_id: user.id, access_level: 50, notification_level: 0) - members.create!(state: 2, source_id: project.id, source_type: 'Project', type: 'ProjectMember', user_id: user.id, access_level: 50, notification_level: 0) - members.create!(state: 1, source_id: group.id, source_type: 'Group', type: 'GroupMember', user_id: user.id, access_level: 50, notification_level: 0) - members.create!(state: 0, source_id: group.id, source_type: 'Group', type: 'GroupMember', user_id: user.id, access_level: 50, notification_level: 0) - end - - it 'updates matching member record states' do - expect { migrate! 
} - .to change { members.where(state: 0).count }.from(1).to(3) - .and change { members.where(state: 2).count }.from(2).to(0) - .and change { members.where(state: 1).count }.by(0) - end -end diff --git a/spec/models/abuse/trust_score_spec.rb b/spec/models/abuse/trust_score_spec.rb new file mode 100644 index 00000000000..755309ac699 --- /dev/null +++ b/spec/models/abuse/trust_score_spec.rb @@ -0,0 +1,57 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe Abuse::TrustScore, feature_category: :instance_resiliency do + let_it_be(:user) { create(:user) } + + let(:correlation_id) { nil } + + let(:abuse_trust_score) do + create(:abuse_trust_score, user: user, correlation_id_value: correlation_id) + end + + describe 'associations' do + it { is_expected.to belong_to(:user) } + end + + describe 'validations' do + it { is_expected.to validate_presence_of(:user) } + it { is_expected.to validate_presence_of(:score) } + it { is_expected.to validate_presence_of(:source) } + end + + describe 'create' do + subject { abuse_trust_score } + + before do + allow(Labkit::Correlation::CorrelationId).to receive(:current_id).and_return('123abc') + stub_const('Abuse::TrustScore::MAX_EVENTS', 2) + end + + context 'if correlation ID is nil' do + it 'adds the correlation id' do + expect(subject.correlation_id_value).to eq('123abc') + end + end + + context 'if correlation ID is set' do + let(:correlation_id) { 'already-set' } + + it 'does not change the correlation id' do + expect(subject.correlation_id_value).to eq('already-set') + end + end + + context 'if max events is exceeded' do + it 'removes the oldest events' do + first = create(:abuse_trust_score, user: user) + create(:abuse_trust_score, user: user) + create(:abuse_trust_score, user: user) + + expect(user.abuse_trust_scores.count).to eq(2) + expect(described_class.find_by_id(first.id)).to eq(nil) + end + end + end +end diff --git a/spec/models/abuse_report_spec.rb b/spec/models/abuse_report_spec.rb index 
9026a870138..8a9ac618e00 100644 --- a/spec/models/abuse_report_spec.rb +++ b/spec/models/abuse_report_spec.rb @@ -68,6 +68,17 @@ RSpec.describe AbuseReport, feature_category: :insider_threat do "https://gitlab.com/#{SecureRandom.alphanumeric(494)}" ]).for(:links_to_spam) } + + context 'for screenshot' do + let(:txt_file) { fixture_file_upload('spec/fixtures/doc_sample.txt', 'text/plain') } + let(:img_file) { fixture_file_upload('spec/fixtures/rails_sample.jpg', 'image/jpg') } + + it { is_expected.not_to allow_value(txt_file).for(:screenshot) } + it { is_expected.to allow_value(img_file).for(:screenshot) } + + it { is_expected.to allow_value(nil).for(:screenshot) } + it { is_expected.to allow_value('').for(:screenshot) } + end end describe 'scopes' do @@ -145,6 +156,30 @@ RSpec.describe AbuseReport, feature_category: :insider_threat do end end + describe '#screenshot_path' do + let(:report) { create(:abuse_report, :with_screenshot) } + + context 'with asset host configured' do + let(:asset_host) { 'https://gitlab-assets.example.com' } + + before do + allow(ActionController::Base).to receive(:asset_host) { asset_host } + end + + it 'returns a full URL with the asset host and system path' do + expect(report.screenshot_path).to eq("#{asset_host}#{report.screenshot.url}") + end + end + + context 'when no asset path configured' do + let(:base_url) { Gitlab.config.gitlab.base_url } + + it 'returns a full URL with the base url and system path' do + expect(report.screenshot_path).to eq("#{base_url}#{report.screenshot.url}") + end + end + end + describe 'enums' do let(:categories) do { diff --git a/spec/models/active_session_spec.rb b/spec/models/active_session_spec.rb index 3665f13015e..8717b2a1075 100644 --- a/spec/models/active_session_spec.rb +++ b/spec/models/active_session_spec.rb @@ -190,8 +190,7 @@ RSpec.describe ActiveSession, :clean_gitlab_redis_sessions do Gitlab::Redis::Sessions.with do |redis| expect(redis.scan_each.to_a).to include( - 
described_class.key_name(user.id, session_id), # current session - described_class.key_name_v1(user.id, session_id), # support for mixed deployment + described_class.key_name(user.id, session_id), # current session lookup_key ) end @@ -217,19 +216,6 @@ RSpec.describe ActiveSession, :clean_gitlab_redis_sessions do end end - it 'is possible to log in only using the old session key' do - session_id = "2::418729c72310bbf349a032f0bb6e3fce9f5a69df8f000d8ae0ac5d159d8f21ae" - ActiveSession.set(user, request) - - Gitlab::Redis::SharedState.with do |redis| - redis.del(described_class.key_name(user.id, session_id)) - end - - sessions = ActiveSession.list(user) - - expect(sessions).to be_present - end - it 'keeps the created_at from the login on consecutive requests' do created_at = Time.zone.parse('2018-03-12 09:06') updated_at = created_at + 1.minute @@ -593,7 +579,7 @@ RSpec.describe ActiveSession, :clean_gitlab_redis_sessions do let(:active_count) { 3 } before do - Gitlab::Redis::SharedState.with do |redis| + Gitlab::Redis::Sessions.with do |redis| active_count.times do |number| redis.set( key_name(user.id, number), @@ -608,13 +594,13 @@ RSpec.describe ActiveSession, :clean_gitlab_redis_sessions do end it 'removes obsolete lookup entries' do - active = Gitlab::Redis::SharedState.with do |redis| + active = Gitlab::Redis::Sessions.with do |redis| ActiveSession.cleaned_up_lookup_entries(redis, user) end expect(active.count).to eq(active_count) - Gitlab::Redis::SharedState.with do |redis| + Gitlab::Redis::Sessions.with do |redis| lookup_entries = redis.smembers(lookup_key) expect(lookup_entries.count).to eq(active_count) @@ -627,7 +613,7 @@ RSpec.describe ActiveSession, :clean_gitlab_redis_sessions do it 'reports the removed entries' do removed = [] - Gitlab::Redis::SharedState.with do |redis| + Gitlab::Redis::Sessions.with do |redis| ActiveSession.cleaned_up_lookup_entries(redis, user, removed) end @@ -663,4 +649,26 @@ RSpec.describe ActiveSession, 
:clean_gitlab_redis_sessions do it_behaves_like 'cleaning up lookup entries' end end + + describe '.set_active_user_cookie' do + let(:auth) { double(cookies: {}) } + + it 'sets marketing cookie' do + ActiveSession.set_active_user_cookie(auth) + expect(auth.cookies[:about_gitlab_active_user][:value]).to be_truthy + end + end + + describe '.unset_active_user_cookie' do + let(:auth) { double(cookies: {}) } + + before do + ActiveSession.set_active_user_cookie(auth) + end + + it 'unsets marketing cookie' do + ActiveSession.unset_active_user_cookie(auth) + expect(auth.cookies[:about_gitlab_active_user]).to be_nil + end + end end diff --git a/spec/models/analytics/cycle_analytics/stage_spec.rb b/spec/models/analytics/cycle_analytics/stage_spec.rb index 57748f8942e..44c0be68fe3 100644 --- a/spec/models/analytics/cycle_analytics/stage_spec.rb +++ b/spec/models/analytics/cycle_analytics/stage_spec.rb @@ -105,30 +105,36 @@ RSpec.describe Analytics::CycleAnalytics::Stage, feature_category: :value_stream } end - describe '#create' do - it_behaves_like 'Snowplow event tracking' do - let(:property) { 'create' } - let(:extra) { record_tracked_attributes } + context 'with database event tracking' do + before do + allow(Gitlab::Tracking).to receive(:database_event).and_call_original + end + + describe '#create' do + it_behaves_like 'Snowplow event tracking', overrides: { tracking_method: :database_event } do + let(:property) { 'create' } + let(:extra) { record_tracked_attributes } - subject(:new_group_stage) { stage } + subject(:new_group_stage) { stage } + end end - end - describe '#update', :freeze_time do - it_behaves_like 'Snowplow event tracking' do - subject(:create_group_stage) { stage.update!(name: 'st 2') } + describe '#update', :freeze_time do + it_behaves_like 'Snowplow event tracking', overrides: { tracking_method: :database_event } do + subject(:create_group_stage) { stage.update!(name: 'st 2') } - let(:extra) { record_tracked_attributes.merge('name' => 'st 2') } - 
let(:property) { 'update' } + let(:extra) { record_tracked_attributes.merge('name' => 'st 2') } + let(:property) { 'update' } + end end - end - describe '#destroy' do - it_behaves_like 'Snowplow event tracking' do - subject(:delete_stage_group) { stage.destroy! } + describe '#destroy' do + it_behaves_like 'Snowplow event tracking', overrides: { tracking_method: :database_event } do + subject(:delete_stage_group) { stage.destroy! } - let(:extra) { record_tracked_attributes } - let(:property) { 'destroy' } + let(:extra) { record_tracked_attributes } + let(:property) { 'destroy' } + end end end end diff --git a/spec/models/application_setting_spec.rb b/spec/models/application_setting_spec.rb index 8387f4021b6..16a1ce9ccaa 100644 --- a/spec/models/application_setting_spec.rb +++ b/spec/models/application_setting_spec.rb @@ -45,6 +45,20 @@ RSpec.describe ApplicationSetting, feature_category: :shared, type: :model do let(:ftp) { 'ftp://example.com' } let(:javascript) { 'javascript:alert(window.opener.document.location)' } + let_it_be(:valid_database_apdex_settings) do + { + prometheus_api_url: 'Prometheus URL', + apdex_sli_query: { + main: 'Apdex SLI query main', + ci: 'Apdex SLI query ci' + }, + apdex_slo: { + main: 0.99, + ci: 0.98 + } + } + end + it { is_expected.to allow_value(nil).for(:home_page_url) } it { is_expected.to allow_value(http).for(:home_page_url) } it { is_expected.to allow_value(https).for(:home_page_url) } @@ -256,6 +270,13 @@ RSpec.describe ApplicationSetting, feature_category: :shared, type: :model do it { is_expected.to allow_value(false).for(:gitlab_dedicated_instance) } it { is_expected.not_to allow_value(nil).for(:gitlab_dedicated_instance) } + it { is_expected.not_to allow_value(random: :value).for(:database_apdex_settings) } + it { is_expected.to allow_value(nil).for(:database_apdex_settings) } + it { is_expected.to allow_value(valid_database_apdex_settings).for(:database_apdex_settings) } + + it { is_expected.to allow_value([true, 
false]).for(:silent_mode_enabled) } + it { is_expected.not_to allow_value(nil).for(:silent_mode_enabled) } + context 'when deactivate_dormant_users is enabled' do before do stub_application_setting(deactivate_dormant_users: true) @@ -534,6 +555,13 @@ RSpec.describe ApplicationSetting, feature_category: :shared, type: :model do .is_less_than(65536) end + specify do + is_expected.to validate_numericality_of(:archive_builds_in_seconds) + .only_integer + .is_greater_than_or_equal_to(1.day.seconds.to_i) + .with_message('must be at least 1 day') + end + describe 'usage_ping_enabled setting' do shared_examples 'usage ping enabled' do it do diff --git a/spec/models/awareness_session_spec.rb b/spec/models/awareness_session_spec.rb deleted file mode 100644 index 854ce5957f7..00000000000 --- a/spec/models/awareness_session_spec.rb +++ /dev/null @@ -1,163 +0,0 @@ -# frozen_string_literal: true - -require 'spec_helper' - -RSpec.describe AwarenessSession, :clean_gitlab_redis_shared_state do - subject { AwarenessSession.for(session_id) } - - let!(:user) { create(:user) } - let(:session_id) { 1 } - - describe "when initiating a session" do - it "provides a string representation of the model instance" do - expected = "awareness_session=6b86b273ff34fce" - - expect(subject.to_s).to eql(expected) - end - - it "provides a parameterized version of the session identifier" do - expected = "6b86b273ff34fce" - - expect(subject.to_param).to eql(expected) - end - end - - describe "when a user joins a session" do - let(:user2) { create(:user) } - - let(:presence_ttl) { 15.minutes } - - it "changes number of session members" do - expect { subject.join(user) }.to change(subject, :size).by(1) - end - - it "returns user as member of session with last_activity timestamp" do - freeze_time do - subject.join(user) - - session_users = subject.users_with_last_activity - session_user, last_activity = session_users.first - - expect(session_user.id).to be(user.id) - expect(last_activity).to 
be_eql(Time.now.utc) - end - end - - it "maintains user ID and last_activity pairs" do - now = Time.zone.now - - travel_to now - 1.minute do - subject.join(user2) - end - - travel_to now do - subject.join(user) - end - - session_users = subject.users_with_last_activity - - expect(session_users[0].first.id).to eql(user.id) - expect(session_users[0].last.to_i).to eql(now.to_i) - - expect(session_users[1].first.id).to eql(user2.id) - expect(session_users[1].last.to_i).to eql((now - 1.minute).to_i) - end - - it "reports user as present" do - freeze_time do - subject.join(user) - - expect(subject.present?(user, threshold: presence_ttl)).to be true - end - end - - it "reports user as away after a certain time on inactivity" do - subject.join(user) - - travel_to((presence_ttl + 1.minute).from_now) do - expect(subject.away?(user, threshold: presence_ttl)).to be true - end - end - - it "reports user as present still when there was some activity" do - subject.join(user) - - travel_to((presence_ttl - 1.minute).from_now) do - subject.touch!(user) - end - - travel_to((presence_ttl + 1.minute).from_now) do - expect(subject.present?(user, threshold: presence_ttl)).to be true - end - end - - it "creates user and session awareness keys in store" do - subject.join(user) - - Gitlab::Redis::SharedState.with do |redis| - keys = redis.scan_each(match: "gitlab:awareness:*").to_a - - expect(keys.size).to be(2) - end - end - - it "sets a timeout for user and session key" do - subject.join(user) - subject_id = Digest::SHA256.hexdigest(session_id.to_s)[0, 15] - - Gitlab::Redis::SharedState.with do |redis| - ttl_session = redis.ttl("gitlab:awareness:session:#{subject_id}:users") - ttl_user = redis.ttl("gitlab:awareness:user:#{user.id}:sessions") - - expect(ttl_session).to be > 0 - expect(ttl_user).to be > 0 - end - end - - it "fetches user(s) from database" do - subject.join(user) - - expect(subject.users.first).to eql(user) - end - - it "fetches and filters online user(s) from database" do - 
subject.join(user) - - travel 2.hours do - subject.join(user2) - - online_users = subject.online_users_with_last_activity - online_user, _ = online_users.first - - expect(online_users.size).to be 1 - expect(online_user).to eql(user2) - end - end - end - - describe "when a user leaves a session" do - it "changes number of session members" do - subject.join(user) - - expect { subject.leave(user) }.to change(subject, :size).by(-1) - end - - it "destroys the session when it was the last user" do - subject.join(user) - - expect { subject.leave(user) }.to change(subject, :id).to(nil) - end - end - - describe "when last user leaves a session" do - it "session and user keys are removed" do - subject.join(user) - - Gitlab::Redis::SharedState.with do |redis| - expect { subject.leave(user) } - .to change { redis.scan_each(match: "gitlab:awareness:*").to_a.size } - .to(0) - end - end - end -end diff --git a/spec/models/blob_viewer/package_json_spec.rb b/spec/models/blob_viewer/package_json_spec.rb index 1dcba3bcb4f..781623c0d3d 100644 --- a/spec/models/blob_viewer/package_json_spec.rb +++ b/spec/models/blob_viewer/package_json_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe BlobViewer::PackageJson do +RSpec.describe BlobViewer::PackageJson, feature_category: :source_code_management do include FakeBlobHelpers let(:project) { build_stubbed(:project) } @@ -59,6 +59,17 @@ RSpec.describe BlobViewer::PackageJson do expect(subject.manager_url).to eq("https://yarnpkg.com/") end end + + context 'when json is an array' do + let(:data) { '[]' } + + it 'does not raise an error', :aggregate_failures do + expect(subject).to receive(:prepare!) + + expect { subject.yarn? 
}.not_to raise_error + expect(subject.yarn?).to be_falsey + end + end end context 'npm' do diff --git a/spec/models/bulk_imports/entity_spec.rb b/spec/models/bulk_imports/entity_spec.rb index 45f120e6773..c7ace3d2b78 100644 --- a/spec/models/bulk_imports/entity_spec.rb +++ b/spec/models/bulk_imports/entity_spec.rb @@ -3,6 +3,8 @@ require 'spec_helper' RSpec.describe BulkImports::Entity, type: :model, feature_category: :importers do + subject { described_class.new(group: Group.new) } + describe 'associations' do it { is_expected.to belong_to(:bulk_import).required } it { is_expected.to belong_to(:parent) } @@ -23,35 +25,8 @@ RSpec.describe BulkImports::Entity, type: :model, feature_category: :importers d it { is_expected.to define_enum_for(:source_type).with_values(%i[group_entity project_entity]) } context 'when formatting with regexes' do - subject { described_class.new(group: Group.new) } - - it { is_expected.to allow_values('namespace', 'parent/namespace', 'parent/group/subgroup', '').for(:destination_namespace) } - it { is_expected.not_to allow_values('parent/namespace/', '/namespace', 'parent group/subgroup', '@namespace').for(:destination_namespace) } - it { is_expected.to allow_values('source', 'source/path', 'source/full/path').for(:source_full_path) } it { is_expected.not_to allow_values('/source', 'http://source/path', 'sou rce/full/path', '').for(:source_full_path) } - - it { is_expected.to allow_values('destination', 'destination-slug', 'new-destination-slug').for(:destination_slug) } - - # it { is_expected.not_to allow_values('destination/slug', '/destination-slug', 'destination slug').for(:destination_slug) } <-- this test should - # succeed but it's failing possibly due to rspec caching. 
To ensure this case is covered see the more cumbersome test below: - context 'when destination_slug is invalid' do - let(:invalid_slugs) { ['destination/slug', '/destination-slug', 'destination slug'] } - let(:error_message) do - 'cannot start with a non-alphanumeric character except for periods or underscores, ' \ - 'can contain only alphanumeric characters, periods, and underscores, ' \ - 'cannot end with a period or forward slash, and has no ' \ - 'leading or trailing forward slashes' - end - - it 'raises an error' do - invalid_slugs.each do |slug| - entity = build(:bulk_import_entity, :group_entity, group: build(:group), project: nil, destination_slug: slug) - expect(entity).not_to be_valid - expect(entity.errors.errors[0].message).to include(error_message) - end - end - end end context 'when associated with a group and project' do diff --git a/spec/models/ci/bridge_spec.rb b/spec/models/ci/bridge_spec.rb index 7b307de87c7..49b32c6f6b8 100644 --- a/spec/models/ci/bridge_spec.rb +++ b/spec/models/ci/bridge_spec.rb @@ -12,9 +12,7 @@ RSpec.describe Ci::Bridge, feature_category: :continuous_integration do end let(:bridge) do - create(:ci_bridge, :variables, status: :created, - options: options, - pipeline: pipeline) + create(:ci_bridge, :variables, status: :created, options: options, pipeline: pipeline) end let(:options) do @@ -40,16 +38,6 @@ RSpec.describe Ci::Bridge, feature_category: :continuous_integration do it 'returns true' do expect(bridge.retryable?).to eq(true) end - - context 'without ci_recreate_downstream_pipeline ff' do - before do - stub_feature_flags(ci_recreate_downstream_pipeline: false) - end - - it 'returns false' do - expect(bridge.retryable?).to eq(false) - end - end end context 'when there is a pipeline loop detected' do @@ -564,11 +552,13 @@ RSpec.describe Ci::Bridge, feature_category: :continuous_integration do let!(:prepare2) { create(:ci_build, name: 'prepare2', pipeline: pipeline, stage_idx: 0) } let!(:prepare3) { create(:ci_build, name: 
'prepare3', pipeline: pipeline, stage_idx: 0) } let!(:bridge) do - create(:ci_bridge, pipeline: pipeline, - stage_idx: 1, - scheduling_type: 'dag', - needs_attributes: [{ name: 'prepare1', artifacts: true }, - { name: 'prepare2', artifacts: false }]) + create( + :ci_bridge, + pipeline: pipeline, + stage_idx: 1, + scheduling_type: 'dag', + needs_attributes: [{ name: 'prepare1', artifacts: true }, { name: 'prepare2', artifacts: false }] + ) end let!(:job_variable_1) { create(:ci_job_variable, :dotenv_source, job: prepare1) } @@ -581,7 +571,7 @@ RSpec.describe Ci::Bridge, feature_category: :continuous_integration do end end - describe 'metadata partitioning', :ci_partitioning do + describe 'metadata partitioning', :ci_partitionable do let(:pipeline) { create(:ci_pipeline, project: project, partition_id: ci_testing_partition_id) } let(:bridge) do diff --git a/spec/models/ci/build_dependencies_spec.rb b/spec/models/ci/build_dependencies_spec.rb index 1dd0386060d..0709aa47ff1 100644 --- a/spec/models/ci/build_dependencies_spec.rb +++ b/spec/models/ci/build_dependencies_spec.rb @@ -7,10 +7,13 @@ RSpec.describe Ci::BuildDependencies do let_it_be(:project, reload: true) { create(:project, :repository) } let_it_be(:pipeline, reload: true) do - create(:ci_pipeline, project: project, - sha: project.commit.id, - ref: project.default_branch, - status: 'success') + create( + :ci_pipeline, + project: project, + sha: project.commit.id, + ref: project.default_branch, + status: 'success' + ) end let(:build_stage) { create(:ci_stage, name: 'build', pipeline: pipeline) } diff --git a/spec/models/ci/build_metadata_spec.rb b/spec/models/ci/build_metadata_spec.rb index c3b445cbbe5..8ed0e50e4b0 100644 --- a/spec/models/ci/build_metadata_spec.rb +++ b/spec/models/ci/build_metadata_spec.rb @@ -7,10 +7,13 @@ RSpec.describe Ci::BuildMetadata do let_it_be(:group) { create(:group) } let_it_be(:project) { create(:project, :repository, group: group, build_timeout: 2000) } let_it_be(:pipeline) do - 
create(:ci_pipeline, project: project, - sha: project.commit.id, - ref: project.default_branch, - status: 'success') + create( + :ci_pipeline, + project: project, + sha: project.commit.id, + ref: project.default_branch, + status: 'success' + ) end let_it_be_with_reload(:runner) { create(:ci_runner) } diff --git a/spec/models/ci/build_need_spec.rb b/spec/models/ci/build_need_spec.rb index aa1c57d1788..e46a2b8cf85 100644 --- a/spec/models/ci/build_need_spec.rb +++ b/spec/models/ci/build_need_spec.rb @@ -74,7 +74,7 @@ RSpec.describe Ci::BuildNeed, model: true, feature_category: :continuous_integra stub_current_partition_id end - it 'creates build needs successfully', :aggregate_failures do + it 'creates build needs successfully', :aggregate_failures, :ci_partitionable do ci_build.needs_attributes = [ { name: "build", artifacts: true }, { name: "build2", artifacts: true }, diff --git a/spec/models/ci/build_report_result_spec.rb b/spec/models/ci/build_report_result_spec.rb index 90b23d3e824..90426f60c73 100644 --- a/spec/models/ci/build_report_result_spec.rb +++ b/spec/models/ci/build_report_result_spec.rb @@ -33,6 +33,19 @@ RSpec.describe Ci::BuildReportResult do expect(build_report_result.errors.full_messages).to eq(["Data must be a valid json schema"]) end end + + context 'when data tests is invalid' do + it 'returns errors' do + build_report_result.data = { + 'tests' => { + 'invalid' => 'invalid' + } + } + + expect(build_report_result).to be_invalid + expect(build_report_result.errors.full_messages).to eq(["Data must be a valid json schema"]) + end + end end describe '#tests_name' do diff --git a/spec/models/ci/build_runner_session_spec.rb b/spec/models/ci/build_runner_session_spec.rb index 5e1a489ed8b..002aff25593 100644 --- a/spec/models/ci/build_runner_session_spec.rb +++ b/spec/models/ci/build_runner_session_spec.rb @@ -175,7 +175,7 @@ RSpec.describe Ci::BuildRunnerSession, model: true, feature_category: :continuou end end - describe 'partitioning' do + describe 
'partitioning', :ci_partitionable do include Ci::PartitioningHelpers let(:new_pipeline) { create(:ci_pipeline) } diff --git a/spec/models/ci/build_spec.rb b/spec/models/ci/build_spec.rb index ca7f4794a0c..f2c713c22a7 100644 --- a/spec/models/ci/build_spec.rb +++ b/spec/models/ci/build_spec.rb @@ -11,10 +11,13 @@ RSpec.describe Ci::Build, feature_category: :continuous_integration, factory_def let_it_be(:project, reload: true) { create_default(:project, :repository, group: group) } let_it_be(:pipeline, reload: true) do - create_default(:ci_pipeline, project: project, - sha: project.commit.id, - ref: project.default_branch, - status: 'success') + create_default( + :ci_pipeline, + project: project, + sha: project.commit.id, + ref: project.default_branch, + status: 'success' + ) end let_it_be(:build, refind: true) { create(:ci_build, pipeline: pipeline) } @@ -36,7 +39,7 @@ RSpec.describe Ci::Build, feature_category: :continuous_integration, factory_def it { is_expected.to have_many(:pages_deployments).with_foreign_key(:ci_build_id) } it { is_expected.to have_one(:deployment) } - it { is_expected.to have_one(:runner_machine).through(:runner_machine_build) } + it { is_expected.to have_one(:runner_manager).through(:runner_manager_build) } it { is_expected.to have_one(:runner_session).with_foreign_key(:build_id) } it { is_expected.to have_one(:trace_metadata).with_foreign_key(:build_id) } it { is_expected.to have_one(:runtime_metadata).with_foreign_key(:build_id) } @@ -1699,10 +1702,12 @@ RSpec.describe Ci::Build, feature_category: :continuous_integration, factory_def context 'when environment uses $CI_COMMIT_REF_NAME' do let(:build) do - create(:ci_build, - ref: 'master', - environment: 'review/$CI_COMMIT_REF_NAME', - pipeline: pipeline) + create( + :ci_build, + ref: 'master', + environment: 'review/$CI_COMMIT_REF_NAME', + pipeline: pipeline + ) end it { is_expected.to eq('review/master') } @@ -1710,10 +1715,12 @@ RSpec.describe Ci::Build, feature_category: 
:continuous_integration, factory_def context 'when environment uses yaml_variables containing symbol keys' do let(:build) do - create(:ci_build, - yaml_variables: [{ key: :APP_HOST, value: 'host' }], - environment: 'review/$APP_HOST', - pipeline: pipeline) + create( + :ci_build, + yaml_variables: [{ key: :APP_HOST, value: 'host' }], + environment: 'review/$APP_HOST', + pipeline: pipeline + ) end it 'returns an expanded environment name with a list of variables' do @@ -1735,12 +1742,26 @@ RSpec.describe Ci::Build, feature_category: :continuous_integration, factory_def context 'when using persisted variables' do let(:build) do - create(:ci_build, environment: 'review/x$CI_BUILD_ID', pipeline: pipeline) + create(:ci_build, environment: 'review/x$CI_JOB_ID', pipeline: pipeline) end it { is_expected.to eq('review/x') } end + context 'when FF `ci_remove_legacy_predefined_variables` is disabled' do + before do + stub_feature_flags(ci_remove_legacy_predefined_variables: false) + end + + context 'when using persisted variables' do + let(:build) do + create(:ci_build, environment: 'review/x$CI_BUILD_ID', pipeline: pipeline) + end + + it { is_expected.to eq('review/x') } + end + end + context 'when environment name uses a nested variable' do let(:yaml_variables) do [ @@ -1749,11 +1770,13 @@ RSpec.describe Ci::Build, feature_category: :continuous_integration, factory_def end let(:build) do - create(:ci_build, - ref: 'master', - yaml_variables: yaml_variables, - environment: 'review/$ENVIRONMENT_NAME', - pipeline: pipeline) + create( + :ci_build, + ref: 'master', + yaml_variables: yaml_variables, + environment: 'review/$ENVIRONMENT_NAME', + pipeline: pipeline + ) end it { is_expected.to eq('review/master') } @@ -2013,14 +2036,14 @@ RSpec.describe Ci::Build, feature_category: :continuous_integration, factory_def end end - describe '#runner_machine' do + describe '#runner_manager' do let_it_be(:runner) { create(:ci_runner) } - let_it_be(:runner_machine) { 
create(:ci_runner_machine, runner: runner) } - let_it_be(:build) { create(:ci_build, runner_machine: runner_machine) } + let_it_be(:runner_manager) { create(:ci_runner_machine, runner: runner) } + let_it_be(:build) { create(:ci_build, runner_manager: runner_manager) } - subject(:build_runner_machine) { described_class.find(build.id).runner_machine } + subject(:build_runner_manager) { described_class.find(build.id).runner_manager } - it { is_expected.to eq(runner_machine) } + it { is_expected.to eq(runner_manager) } end describe '#tag_list' do @@ -2085,8 +2108,14 @@ RSpec.describe Ci::Build, feature_category: :continuous_integration, factory_def describe 'build auto retry feature' do context 'with deployment job' do let(:build) do - create(:ci_build, :deploy_to_production, :with_deployment, - user: user, pipeline: pipeline, project: project) + create( + :ci_build, + :deploy_to_production, + :with_deployment, + user: user, + pipeline: pipeline, + project: project + ) end before do @@ -2710,6 +2739,89 @@ RSpec.describe Ci::Build, feature_category: :continuous_integration, factory_def context 'returns variables' do let(:predefined_variables) do + [ + { key: 'CI_PIPELINE_ID', value: pipeline.id.to_s, public: true, masked: false }, + { key: 'CI_PIPELINE_URL', value: project.web_url + "/-/pipelines/#{pipeline.id}", public: true, masked: false }, + { key: 'CI_JOB_ID', value: build.id.to_s, public: true, masked: false }, + { key: 'CI_JOB_URL', value: project.web_url + "/-/jobs/#{build.id}", public: true, masked: false }, + { key: 'CI_JOB_TOKEN', value: 'my-token', public: false, masked: true }, + { key: 'CI_JOB_STARTED_AT', value: build.started_at&.iso8601, public: true, masked: false }, + { key: 'CI_REGISTRY_USER', value: 'gitlab-ci-token', public: true, masked: false }, + { key: 'CI_REGISTRY_PASSWORD', value: 'my-token', public: false, masked: true }, + { key: 'CI_REPOSITORY_URL', value: build.repo_url, public: false, masked: false }, + { key: 'CI_DEPENDENCY_PROXY_USER', 
value: 'gitlab-ci-token', public: true, masked: false }, + { key: 'CI_DEPENDENCY_PROXY_PASSWORD', value: 'my-token', public: false, masked: true }, + { key: 'CI_JOB_JWT', value: 'ci.job.jwt', public: false, masked: true }, + { key: 'CI_JOB_JWT_V1', value: 'ci.job.jwt', public: false, masked: true }, + { key: 'CI_JOB_JWT_V2', value: 'ci.job.jwtv2', public: false, masked: true }, + { key: 'CI_JOB_NAME', value: 'test', public: true, masked: false }, + { key: 'CI_JOB_NAME_SLUG', value: 'test', public: true, masked: false }, + { key: 'CI_JOB_STAGE', value: 'test', public: true, masked: false }, + { key: 'CI_NODE_TOTAL', value: '1', public: true, masked: false }, + { key: 'CI', value: 'true', public: true, masked: false }, + { key: 'GITLAB_CI', value: 'true', public: true, masked: false }, + { key: 'CI_SERVER_URL', value: Gitlab.config.gitlab.url, public: true, masked: false }, + { key: 'CI_SERVER_HOST', value: Gitlab.config.gitlab.host, public: true, masked: false }, + { key: 'CI_SERVER_PORT', value: Gitlab.config.gitlab.port.to_s, public: true, masked: false }, + { key: 'CI_SERVER_PROTOCOL', value: Gitlab.config.gitlab.protocol, public: true, masked: false }, + { key: 'CI_SERVER_SHELL_SSH_HOST', value: Gitlab.config.gitlab_shell.ssh_host.to_s, public: true, masked: false }, + { key: 'CI_SERVER_SHELL_SSH_PORT', value: Gitlab.config.gitlab_shell.ssh_port.to_s, public: true, masked: false }, + { key: 'CI_SERVER_NAME', value: 'GitLab', public: true, masked: false }, + { key: 'CI_SERVER_VERSION', value: Gitlab::VERSION, public: true, masked: false }, + { key: 'CI_SERVER_VERSION_MAJOR', value: Gitlab.version_info.major.to_s, public: true, masked: false }, + { key: 'CI_SERVER_VERSION_MINOR', value: Gitlab.version_info.minor.to_s, public: true, masked: false }, + { key: 'CI_SERVER_VERSION_PATCH', value: Gitlab.version_info.patch.to_s, public: true, masked: false }, + { key: 'CI_SERVER_REVISION', value: Gitlab.revision, public: true, masked: false }, + { key: 'GITLAB_FEATURES', 
value: project.licensed_features.join(','), public: true, masked: false }, + { key: 'CI_PROJECT_ID', value: project.id.to_s, public: true, masked: false }, + { key: 'CI_PROJECT_NAME', value: project.path, public: true, masked: false }, + { key: 'CI_PROJECT_TITLE', value: project.title, public: true, masked: false }, + { key: 'CI_PROJECT_DESCRIPTION', value: project.description, public: true, masked: false }, + { key: 'CI_PROJECT_PATH', value: project.full_path, public: true, masked: false }, + { key: 'CI_PROJECT_PATH_SLUG', value: project.full_path_slug, public: true, masked: false }, + { key: 'CI_PROJECT_NAMESPACE', value: project.namespace.full_path, public: true, masked: false }, + { key: 'CI_PROJECT_NAMESPACE_ID', value: project.namespace.id.to_s, public: true, masked: false }, + { key: 'CI_PROJECT_ROOT_NAMESPACE', value: project.namespace.root_ancestor.path, public: true, masked: false }, + { key: 'CI_PROJECT_URL', value: project.web_url, public: true, masked: false }, + { key: 'CI_PROJECT_VISIBILITY', value: 'private', public: true, masked: false }, + { key: 'CI_PROJECT_REPOSITORY_LANGUAGES', value: project.repository_languages.map(&:name).join(',').downcase, public: true, masked: false }, + { key: 'CI_PROJECT_CLASSIFICATION_LABEL', value: project.external_authorization_classification_label, public: true, masked: false }, + { key: 'CI_DEFAULT_BRANCH', value: project.default_branch, public: true, masked: false }, + { key: 'CI_CONFIG_PATH', value: project.ci_config_path_or_default, public: true, masked: false }, + { key: 'CI_PAGES_DOMAIN', value: Gitlab.config.pages.host, public: true, masked: false }, + { key: 'CI_PAGES_URL', value: project.pages_url, public: true, masked: false }, + { key: 'CI_DEPENDENCY_PROXY_SERVER', value: Gitlab.host_with_port, public: true, masked: false }, + { key: 'CI_DEPENDENCY_PROXY_GROUP_IMAGE_PREFIX', + value: "#{Gitlab.host_with_port}/#{project.namespace.root_ancestor.path.downcase}#{DependencyProxy::URL_SUFFIX}", + public: true, 
+ masked: false }, + { key: 'CI_DEPENDENCY_PROXY_DIRECT_GROUP_IMAGE_PREFIX', + value: "#{Gitlab.host_with_port}/#{project.namespace.full_path.downcase}#{DependencyProxy::URL_SUFFIX}", + public: true, + masked: false }, + { key: 'CI_API_V4_URL', value: 'http://localhost/api/v4', public: true, masked: false }, + { key: 'CI_API_GRAPHQL_URL', value: 'http://localhost/api/graphql', public: true, masked: false }, + { key: 'CI_TEMPLATE_REGISTRY_HOST', value: template_registry_host, public: true, masked: false }, + { key: 'CI_PIPELINE_IID', value: pipeline.iid.to_s, public: true, masked: false }, + { key: 'CI_PIPELINE_SOURCE', value: pipeline.source, public: true, masked: false }, + { key: 'CI_PIPELINE_CREATED_AT', value: pipeline.created_at.iso8601, public: true, masked: false }, + { key: 'CI_COMMIT_SHA', value: build.sha, public: true, masked: false }, + { key: 'CI_COMMIT_SHORT_SHA', value: build.short_sha, public: true, masked: false }, + { key: 'CI_COMMIT_BEFORE_SHA', value: build.before_sha, public: true, masked: false }, + { key: 'CI_COMMIT_REF_NAME', value: build.ref, public: true, masked: false }, + { key: 'CI_COMMIT_REF_SLUG', value: build.ref_slug, public: true, masked: false }, + { key: 'CI_COMMIT_BRANCH', value: build.ref, public: true, masked: false }, + { key: 'CI_COMMIT_MESSAGE', value: pipeline.git_commit_message, public: true, masked: false }, + { key: 'CI_COMMIT_TITLE', value: pipeline.git_commit_title, public: true, masked: false }, + { key: 'CI_COMMIT_DESCRIPTION', value: pipeline.git_commit_description, public: true, masked: false }, + { key: 'CI_COMMIT_REF_PROTECTED', value: (!!pipeline.protected_ref?).to_s, public: true, masked: false }, + { key: 'CI_COMMIT_TIMESTAMP', value: pipeline.git_commit_timestamp, public: true, masked: false }, + { key: 'CI_COMMIT_AUTHOR', value: pipeline.git_author_full_text, public: true, masked: false } + ] + end + + # Remove this definition when FF `ci_remove_legacy_predefined_variables` is removed + 
let(:predefined_with_legacy_variables) do [ { key: 'CI_PIPELINE_ID', value: pipeline.id.to_s, public: true, masked: false }, { key: 'CI_PIPELINE_URL', value: project.web_url + "/-/pipelines/#{pipeline.id}", public: true, masked: false }, @@ -2739,6 +2851,8 @@ RSpec.describe Ci::Build, feature_category: :continuous_integration, factory_def { key: 'CI_SERVER_HOST', value: Gitlab.config.gitlab.host, public: true, masked: false }, { key: 'CI_SERVER_PORT', value: Gitlab.config.gitlab.port.to_s, public: true, masked: false }, { key: 'CI_SERVER_PROTOCOL', value: Gitlab.config.gitlab.protocol, public: true, masked: false }, + { key: 'CI_SERVER_SHELL_SSH_HOST', value: Gitlab.config.gitlab_shell.ssh_host.to_s, public: true, masked: false }, + { key: 'CI_SERVER_SHELL_SSH_PORT', value: Gitlab.config.gitlab_shell.ssh_port.to_s, public: true, masked: false }, { key: 'CI_SERVER_NAME', value: 'GitLab', public: true, masked: false }, { key: 'CI_SERVER_VERSION', value: Gitlab::VERSION, public: true, masked: false }, { key: 'CI_SERVER_VERSION_MAJOR', value: Gitlab.version_info.major.to_s, public: true, masked: false }, @@ -2773,6 +2887,7 @@ RSpec.describe Ci::Build, feature_category: :continuous_integration, factory_def public: true, masked: false }, { key: 'CI_API_V4_URL', value: 'http://localhost/api/v4', public: true, masked: false }, + { key: 'CI_API_GRAPHQL_URL', value: 'http://localhost/api/graphql', public: true, masked: false }, { key: 'CI_TEMPLATE_REGISTRY_HOST', value: template_registry_host, public: true, masked: false }, { key: 'CI_PIPELINE_IID', value: pipeline.iid.to_s, public: true, masked: false }, { key: 'CI_PIPELINE_SOURCE', value: pipeline.source, public: true, masked: false }, @@ -2806,6 +2921,14 @@ RSpec.describe Ci::Build, feature_category: :continuous_integration, factory_def it { is_expected.to be_instance_of(Gitlab::Ci::Variables::Collection) } it { expect(subject.to_runner_variables).to eq(predefined_variables) } + context 'when FF 
`ci_remove_legacy_predefined_variables` is disabled' do + before do + stub_feature_flags(ci_remove_legacy_predefined_variables: false) + end + + it { expect(subject.to_runner_variables).to eq(predefined_with_legacy_variables) } + end + it 'excludes variables that require an environment or user' do environment_based_variables_collection = subject.filter do |variable| %w[ @@ -2896,16 +3019,13 @@ RSpec.describe Ci::Build, feature_category: :continuous_integration, factory_def end before do - create(:environment, project: build.project, - name: 'staging') + create(:environment, project: build.project, name: 'staging') - build.yaml_variables = [{ key: 'YAML_VARIABLE', - value: 'var', - public: true }] + build.yaml_variables = [{ key: 'YAML_VARIABLE', value: 'var', public: true }] build.environment = 'staging' # CI_ENVIRONMENT_NAME is set in predefined_variables when job environment is provided - predefined_variables.insert(20, { key: 'CI_ENVIRONMENT_NAME', value: 'staging', public: true, masked: false }) + predefined_variables.insert(18, { key: 'CI_ENVIRONMENT_NAME', value: 'staging', public: true, masked: false }) end it 'matches explicit variables ordering' do @@ -2958,6 +3078,80 @@ RSpec.describe Ci::Build, feature_category: :continuous_integration, factory_def end end end + + context 'when FF `ci_remove_legacy_predefined_variables` is disabled' do + before do + stub_feature_flags(ci_remove_legacy_predefined_variables: false) + end + + context 'when build has environment and user-provided variables' do + let(:expected_variables) do + predefined_with_legacy_variables.map { |variable| variable.fetch(:key) } + + %w[YAML_VARIABLE CI_ENVIRONMENT_NAME CI_ENVIRONMENT_SLUG + CI_ENVIRONMENT_ACTION CI_ENVIRONMENT_TIER CI_ENVIRONMENT_URL] + end + + before do + create(:environment, project: build.project, name: 'staging') + + build.yaml_variables = [{ key: 'YAML_VARIABLE', value: 'var', public: true }] + build.environment = 'staging' + + # CI_ENVIRONMENT_NAME is set in 
predefined_variables when job environment is provided + predefined_with_legacy_variables.insert(20, { key: 'CI_ENVIRONMENT_NAME', value: 'staging', public: true, masked: false }) + end + + it 'matches explicit variables ordering' do + received_variables = subject.map { |variable| variable[:key] } + + expect(received_variables).to eq expected_variables + end + + describe 'CI_ENVIRONMENT_ACTION' do + let(:enviroment_action_variable) { subject.find { |variable| variable[:key] == 'CI_ENVIRONMENT_ACTION' } } + + shared_examples 'defaults value' do + it 'value matches start' do + expect(enviroment_action_variable[:value]).to eq('start') + end + end + + it_behaves_like 'defaults value' + + context 'when options is set' do + before do + build.update!(options: options) + end + + context 'when options is empty' do + let(:options) { {} } + + it_behaves_like 'defaults value' + end + + context 'when options is nil' do + let(:options) { nil } + + it_behaves_like 'defaults value' + end + + context 'when options environment is specified' do + let(:options) { { environment: {} } } + + it_behaves_like 'defaults value' + end + + context 'when options environment action specified' do + let(:options) { { environment: { action: 'stop' } } } + + it 'matches the specified action' do + expect(enviroment_action_variable[:value]).to eq('stop') + end + end + end + end + end + end end end @@ -3001,12 +3195,14 @@ RSpec.describe Ci::Build, feature_category: :continuous_integration, factory_def end let!(:environment) do - create(:environment, - project: build.project, - name: 'production', - slug: 'prod-slug', - tier: 'production', - external_url: '') + create( + :environment, + project: build.project, + name: 'production', + slug: 'prod-slug', + tier: 'production', + external_url: '' + ) end before do @@ -3139,10 +3335,13 @@ RSpec.describe Ci::Build, feature_category: :continuous_integration, factory_def let(:tag_message) { project.repository.tags.first.message } let!(:pipeline) do - 
create(:ci_pipeline, project: project, - sha: project.commit.id, - ref: tag_name, - status: 'success') + create( + :ci_pipeline, + project: project, + sha: project.commit.id, + ref: tag_name, + status: 'success' + ) end let!(:build) { create(:ci_build, pipeline: pipeline, ref: tag_name) } @@ -3173,8 +3372,7 @@ RSpec.describe Ci::Build, feature_category: :continuous_integration, factory_def end before do - create(:ci_variable, - ci_variable.slice(:key, :value).merge(project: project)) + create(:ci_variable, ci_variable.slice(:key, :value).merge(project: project)) end it { is_expected.to include(ci_variable) } @@ -3188,9 +3386,7 @@ RSpec.describe Ci::Build, feature_category: :continuous_integration, factory_def end before do - create(:ci_variable, - :protected, - protected_variable.slice(:key, :value).merge(project: project)) + create(:ci_variable, :protected, protected_variable.slice(:key, :value).merge(project: project)) end context 'when the branch is protected' do @@ -3220,8 +3416,7 @@ RSpec.describe Ci::Build, feature_category: :continuous_integration, factory_def end before do - create(:ci_group_variable, - ci_variable.slice(:key, :value).merge(group: group)) + create(:ci_group_variable, ci_variable.slice(:key, :value).merge(group: group)) end it { is_expected.to include(ci_variable) } @@ -3235,9 +3430,7 @@ RSpec.describe Ci::Build, feature_category: :continuous_integration, factory_def end before do - create(:ci_group_variable, - :protected, - protected_variable.slice(:key, :value).merge(group: group)) + create(:ci_group_variable, :protected, protected_variable.slice(:key, :value).merge(group: group)) end context 'when the branch is protected' do @@ -3290,9 +3483,7 @@ RSpec.describe Ci::Build, feature_category: :continuous_integration, factory_def let(:pipeline_schedule) { create(:ci_pipeline_schedule, project: project) } let!(:pipeline_schedule_variable) do - create(:ci_pipeline_schedule_variable, - key: 'SCHEDULE_VARIABLE_KEY', - pipeline_schedule: 
pipeline_schedule) + create(:ci_pipeline_schedule_variable, key: 'SCHEDULE_VARIABLE_KEY', pipeline_schedule: pipeline_schedule) end before do @@ -3307,10 +3498,7 @@ RSpec.describe Ci::Build, feature_category: :continuous_integration, factory_def let_it_be_with_reload(:project) { create(:project, :public, :repository, group: group) } let_it_be_with_reload(:pipeline) do - create(:ci_pipeline, project: project, - sha: project.commit.id, - ref: project.default_branch, - status: 'success') + create(:ci_pipeline, project: project, sha: project.commit.id, ref: project.default_branch, status: 'success') end let_it_be_with_refind(:build) { create(:ci_build, pipeline: pipeline) } @@ -3745,8 +3933,6 @@ RSpec.describe Ci::Build, feature_category: :continuous_integration, factory_def keys = %w[CI_JOB_ID CI_JOB_URL CI_JOB_TOKEN - CI_BUILD_ID - CI_BUILD_TOKEN CI_REGISTRY_USER CI_REGISTRY_PASSWORD CI_REPOSITORY_URL @@ -3758,6 +3944,30 @@ RSpec.describe Ci::Build, feature_category: :continuous_integration, factory_def expect(names).not_to include(*keys) end end + + context 'when FF `ci_remove_legacy_predefined_variables` is disabled' do + before do + stub_feature_flags(ci_remove_legacy_predefined_variables: false) + end + + it 'does not return prohibited variables' do + keys = %w[CI_JOB_ID + CI_JOB_URL + CI_JOB_TOKEN + CI_BUILD_ID + CI_BUILD_TOKEN + CI_REGISTRY_USER + CI_REGISTRY_PASSWORD + CI_REPOSITORY_URL + CI_ENVIRONMENT_URL + CI_DEPLOY_USER + CI_DEPLOY_PASSWORD] + + build.scoped_variables.map { |env| env[:key] }.tap do |names| + expect(names).not_to include(*keys) + end + end + end end context 'with dependency variables' do @@ -5715,9 +5925,11 @@ RSpec.describe Ci::Build, feature_category: :continuous_integration, factory_def describe '#runtime_hooks' do let(:build1) do - FactoryBot.build(:ci_build, - options: { hooks: { pre_get_sources_script: ["echo 'hello pre_get_sources_script'"] } }, - pipeline: pipeline) + FactoryBot.build( + :ci_build, + options: { hooks: { 
pre_get_sources_script: ["echo 'hello pre_get_sources_script'"] } }, + pipeline: pipeline + ) end subject(:runtime_hooks) { build1.runtime_hooks } @@ -5781,7 +5993,7 @@ RSpec.describe Ci::Build, feature_category: :continuous_integration, factory_def end end - describe 'metadata partitioning', :ci_partitioning do + describe 'metadata partitioning', :ci_partitionable do let(:pipeline) { create(:ci_pipeline, project: project, partition_id: ci_testing_partition_id) } let(:build) do diff --git a/spec/models/ci/build_trace_chunk_spec.rb b/spec/models/ci/build_trace_chunk_spec.rb index f338c2727ad..355905cdabd 100644 --- a/spec/models/ci/build_trace_chunk_spec.rb +++ b/spec/models/ci/build_trace_chunk_spec.rb @@ -639,8 +639,7 @@ RSpec.describe Ci::BuildTraceChunk, :clean_gitlab_redis_shared_state, :clean_git build_trace_chunk.checksum = '12345' expect { build_trace_chunk.persist_data! } - .to raise_error(described_class::FailedToPersistDataError, - /Modifed build trace chunk detected/) + .to raise_error(described_class::FailedToPersistDataError, /Modifed build trace chunk detected/) end end diff --git a/spec/models/ci/build_trace_metadata_spec.rb b/spec/models/ci/build_trace_metadata_spec.rb index 2ab300e4054..866d94b4cbe 100644 --- a/spec/models/ci/build_trace_metadata_spec.rb +++ b/spec/models/ci/build_trace_metadata_spec.rb @@ -159,7 +159,7 @@ RSpec.describe Ci::BuildTraceMetadata, feature_category: :continuous_integration end end - describe 'partitioning' do + describe 'partitioning', :ci_partitionable do include Ci::PartitioningHelpers let_it_be(:pipeline) { create(:ci_pipeline) } diff --git a/spec/models/ci/build_trace_spec.rb b/spec/models/ci/build_trace_spec.rb index 907b49dc180..54b4f02fb91 100644 --- a/spec/models/ci/build_trace_spec.rb +++ b/spec/models/ci/build_trace_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe Ci::BuildTrace do +RSpec.describe Ci::BuildTrace, feature_category: :continuous_integration do let(:build) { build_stubbed(:ci_build) 
} let(:state) { nil } let(:data) { StringIO.new('the-stream') } @@ -13,7 +13,7 @@ RSpec.describe Ci::BuildTrace do subject { described_class.new(build: build, stream: stream, state: state) } - shared_examples 'delegates methods' do + describe 'delegated methods' do it { is_expected.to delegate_method(:state).to(:trace) } it { is_expected.to delegate_method(:append).to(:trace) } it { is_expected.to delegate_method(:truncated).to(:trace) } @@ -25,7 +25,25 @@ RSpec.describe Ci::BuildTrace do it { is_expected.to delegate_method(:complete?).to(:build).with_prefix } end - it_behaves_like 'delegates methods' + describe 'FF sign_and_verify_ansi2json_state' do + before do + stub_feature_flags(sign_and_verify_ansi2json_state: false) + end + + it 'calls convert with verify_state: true when enabled for project' do + build.project = create(:project) + stub_feature_flags(sign_and_verify_ansi2json_state: build.project) + + expect(Gitlab::Ci::Ansi2json).to receive(:convert).with(stream.stream, state, verify_state: true) + + described_class.new(build: build, stream: stream, state: state) + end + + it 'calls convert with verify_state: false when disabled' do + expect(Gitlab::Ci::Ansi2json).to receive(:convert).with(stream.stream, state, verify_state: false) + described_class.new(build: build, stream: stream, state: state) + end + end it 'returns formatted trace' do expect(subject.lines).to eq( diff --git a/spec/models/ci/catalog/listing_spec.rb b/spec/models/ci/catalog/listing_spec.rb index c9ccecbc9fe..93d70a3f63e 100644 --- a/spec/models/ci/catalog/listing_spec.rb +++ b/spec/models/ci/catalog/listing_spec.rb @@ -49,6 +49,7 @@ RSpec.describe Ci::Catalog::Listing, feature_category: :pipeline_composition do before do project_1.add_developer(user) + project_2.add_guest(user) end it 'only returns catalog resources for projects the user has access to' do diff --git a/spec/models/ci/catalog/resource_spec.rb b/spec/models/ci/catalog/resource_spec.rb new file mode 100644 index 
00000000000..a239bbad857 --- /dev/null +++ b/spec/models/ci/catalog/resource_spec.rb @@ -0,0 +1,42 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe Ci::Catalog::Resource, feature_category: :pipeline_composition do + let_it_be(:project) { create(:project) } + let_it_be(:resource) { create(:catalog_resource, project: project) } + + let_it_be(:releases) do + [ + create(:release, project: project, released_at: Time.zone.now - 2.days), + create(:release, project: project, released_at: Time.zone.now - 1.day), + create(:release, project: project, released_at: Time.zone.now) + ] + end + + it { is_expected.to belong_to(:project) } + + it { is_expected.to delegate_method(:avatar_path).to(:project) } + it { is_expected.to delegate_method(:description).to(:project) } + it { is_expected.to delegate_method(:name).to(:project) } + + describe '.for_projects' do + it 'returns catalog resources for the given project IDs' do + resources_for_projects = described_class.for_projects(project.id) + + expect(resources_for_projects).to contain_exactly(resource) + end + end + + describe '#versions' do + it 'returns releases ordered by released date descending' do + expect(resource.versions).to eq(releases.reverse) + end + end + + describe '#latest_version' do + it 'returns the latest release' do + expect(resource.latest_version).to eq(releases.last) + end + end +end diff --git a/spec/models/ci/group_spec.rb b/spec/models/ci/group_spec.rb index 4900bc792af..82e4f7ce8fc 100644 --- a/spec/models/ci/group_spec.rb +++ b/spec/models/ci/group_spec.rb @@ -111,11 +111,13 @@ RSpec.describe Ci::Group do end def create_build(type, status: 'success', **opts) - create(type, pipeline: pipeline, - stage: stage.name, - status: status, - stage_id: stage.id, - **opts) + create( + type, pipeline: pipeline, + stage: stage.name, + status: status, + stage_id: stage.id, + **opts + ) end end end diff --git a/spec/models/ci/group_variable_spec.rb b/spec/models/ci/group_variable_spec.rb 
index f8f184c63a1..0e249ec528f 100644 --- a/spec/models/ci/group_variable_spec.rb +++ b/spec/models/ci/group_variable_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe Ci::GroupVariable, feature_category: :pipeline_composition do +RSpec.describe Ci::GroupVariable, feature_category: :secrets_management do let_it_be_with_refind(:group) { create(:group) } subject { build(:ci_group_variable, group: group) } diff --git a/spec/models/ci/job_token/allowlist_spec.rb b/spec/models/ci/job_token/allowlist_spec.rb index 3a2673c7c26..3d29a637d68 100644 --- a/spec/models/ci/job_token/allowlist_spec.rb +++ b/spec/models/ci/job_token/allowlist_spec.rb @@ -16,10 +16,12 @@ RSpec.describe Ci::JobToken::Allowlist, feature_category: :continuous_integratio context 'when no projects are added to the scope' do [:inbound, :outbound].each do |d| - let(:direction) { d } + context "with #{d}" do + let(:direction) { d } - it 'returns the project defining the scope' do - expect(projects).to contain_exactly(source_project) + it 'returns the project defining the scope' do + expect(projects).to contain_exactly(source_project) + end end end end @@ -47,15 +49,17 @@ RSpec.describe Ci::JobToken::Allowlist, feature_category: :continuous_integratio subject { allowlist.add!(added_project, user: user) } [:inbound, :outbound].each do |d| - let(:direction) { d } + context "with #{d}" do + let(:direction) { d } - it 'adds the project' do - subject + it 'adds the project' do + subject - expect(allowlist.projects).to contain_exactly(source_project, added_project) - expect(subject.added_by_id).to eq(user.id) - expect(subject.source_project_id).to eq(source_project.id) - expect(subject.target_project_id).to eq(added_project.id) + expect(allowlist.projects).to contain_exactly(source_project, added_project) + expect(subject.added_by_id).to eq(user.id) + expect(subject.source_project_id).to eq(source_project.id) + expect(subject.target_project_id).to eq(added_project.id) + end end end end diff --git 
a/spec/models/ci/job_token/scope_spec.rb b/spec/models/ci/job_token/scope_spec.rb index 51f0f4878e7..7aa861a3dab 100644 --- a/spec/models/ci/job_token/scope_spec.rb +++ b/spec/models/ci/job_token/scope_spec.rb @@ -63,12 +63,14 @@ RSpec.describe Ci::JobToken::Scope, feature_category: :continuous_integration, f subject { scope.add!(new_project, direction: direction, user: user) } [:inbound, :outbound].each do |d| - let(:direction) { d } + context "with #{d}" do + let(:direction) { d } - it 'adds the project' do - subject + it 'adds the project' do + subject - expect(scope.send("#{direction}_projects")).to contain_exactly(current_project, new_project) + expect(scope.send("#{direction}_projects")).to contain_exactly(current_project, new_project) + end end end diff --git a/spec/models/ci/pipeline_spec.rb b/spec/models/ci/pipeline_spec.rb index d50672da8e5..5b67cbbc86b 100644 --- a/spec/models/ci/pipeline_spec.rb +++ b/spec/models/ci/pipeline_spec.rb @@ -444,6 +444,16 @@ RSpec.describe Ci::Pipeline, :mailer, factory_default: :keep, feature_category: end end + describe '.preload_pipeline_metadata' do + let_it_be(:pipeline) { create(:ci_empty_pipeline, project: project, user: user, name: 'Chatops pipeline') } + + it 'loads associations' do + result = described_class.preload_pipeline_metadata.first + + expect(result.association(:pipeline_metadata).loaded?).to be(true) + end + end + describe '.ci_sources' do subject { described_class.ci_sources } @@ -497,11 +507,13 @@ RSpec.describe Ci::Pipeline, :mailer, factory_default: :keep, feature_category: let!(:other_pipeline) { create(:ci_pipeline, project: project) } before do - create(:ci_sources_pipeline, - source_job: create(:ci_build, pipeline: upstream_pipeline), - source_project: project, - pipeline: child_pipeline, - project: project) + create( + :ci_sources_pipeline, + source_job: create(:ci_build, pipeline: upstream_pipeline), + source_project: project, + pipeline: child_pipeline, + project: project + ) end it 'only 
returns pipelines outside pipeline family' do @@ -520,11 +532,13 @@ RSpec.describe Ci::Pipeline, :mailer, factory_default: :keep, feature_category: let!(:other_pipeline) { create(:ci_pipeline, project: project) } before do - create(:ci_sources_pipeline, - source_job: create(:ci_build, pipeline: upstream_pipeline), - source_project: project, - pipeline: child_pipeline, - project: project) + create( + :ci_sources_pipeline, + source_job: create(:ci_build, pipeline: upstream_pipeline), + source_project: project, + pipeline: child_pipeline, + project: project + ) end it 'only returns older pipelines outside pipeline family' do @@ -532,6 +546,17 @@ RSpec.describe Ci::Pipeline, :mailer, factory_default: :keep, feature_category: end end + describe '.order_id_desc' do + subject(:pipelines_ordered_by_id) { described_class.order_id_desc } + + let(:older_pipeline) { create(:ci_pipeline, id: 99, project: project) } + let(:newest_pipeline) { create(:ci_pipeline, id: 100, project: project) } + + it 'only returns the pipelines ordered by id' do + expect(pipelines_ordered_by_id).to eq([newest_pipeline, older_pipeline]) + end + end + describe '.jobs_count_in_alive_pipelines' do before do ::Ci::HasStatus::ALIVE_STATUSES.each do |status| @@ -1186,29 +1211,41 @@ RSpec.describe Ci::Pipeline, :mailer, factory_default: :keep, feature_category: describe 'legacy stages' do before do - create(:commit_status, pipeline: pipeline, - stage: 'build', - name: 'linux', - stage_idx: 0, - status: 'success') - - create(:commit_status, pipeline: pipeline, - stage: 'build', - name: 'mac', - stage_idx: 0, - status: 'failed') - - create(:commit_status, pipeline: pipeline, - stage: 'deploy', - name: 'staging', - stage_idx: 2, - status: 'running') - - create(:commit_status, pipeline: pipeline, - stage: 'test', - name: 'rspec', - stage_idx: 1, - status: 'success') + create( + :commit_status, + pipeline: pipeline, + stage: 'build', + name: 'linux', + stage_idx: 0, + status: 'success' + ) + + create( + 
:commit_status, + pipeline: pipeline, + stage: 'build', + name: 'mac', + stage_idx: 0, + status: 'failed' + ) + + create( + :commit_status, + pipeline: pipeline, + stage: 'deploy', + name: 'staging', + stage_idx: 2, + status: 'running' + ) + + create( + :commit_status, + pipeline: pipeline, + stage: 'test', + name: 'rspec', + stage_idx: 1, + status: 'success' + ) end describe '#stages_count' do @@ -1659,8 +1696,7 @@ RSpec.describe Ci::Pipeline, :mailer, factory_default: :keep, feature_category: before do upstream_bridge = create(:ci_bridge, :strategy_depend, pipeline: upstream_of_upstream_pipeline) - create(:ci_sources_pipeline, pipeline: upstream_pipeline, - source_job: upstream_bridge) + create(:ci_sources_pipeline, pipeline: upstream_pipeline, source_job: upstream_bridge) end context 'when the downstream pipeline first fails then retries and succeeds' do @@ -1865,12 +1901,14 @@ RSpec.describe Ci::Pipeline, :mailer, factory_default: :keep, feature_category: end def create_build(name, *traits, queued_at: current, started_from: 0, **opts) - create(:ci_build, *traits, - name: name, - pipeline: pipeline, - queued_at: queued_at, - started_at: queued_at + started_from, - **opts) + create( + :ci_build, *traits, + name: name, + pipeline: pipeline, + queued_at: queued_at, + started_at: queued_at + started_from, + **opts + ) end end @@ -1918,9 +1956,7 @@ RSpec.describe Ci::Pipeline, :mailer, factory_default: :keep, feature_category: let(:pipeline) { build(:ci_pipeline, merge_request: merge_request) } let(:merge_request) do - create(:merge_request, :simple, - source_project: project, - target_project: project) + create(:merge_request, :simple, source_project: project, target_project: project) end it 'returns false' do @@ -1961,17 +1997,17 @@ RSpec.describe Ci::Pipeline, :mailer, factory_default: :keep, feature_category: context 'when ref is merge request' do let(:pipeline) do - create(:ci_pipeline, - source: :merge_request_event, - merge_request: merge_request) + 
create(:ci_pipeline, source: :merge_request_event, merge_request: merge_request) end let(:merge_request) do - create(:merge_request, - source_project: project, - source_branch: 'feature', - target_project: project, - target_branch: 'master') + create( + :merge_request, + source_project: project, + source_branch: 'feature', + target_project: project, + target_branch: 'master' + ) end it 'returns branch ref' do @@ -2015,35 +2051,63 @@ RSpec.describe Ci::Pipeline, :mailer, factory_default: :keep, feature_category: context 'with non-empty project' do let(:pipeline) do - create(:ci_pipeline, - ref: project.default_branch, - sha: project.commit.sha) + create( + :ci_pipeline, + project: project, + ref: project.default_branch, + sha: project.commit.sha + ) end describe '#lazy_ref_commit' do let(:another) do - create(:ci_pipeline, - ref: 'feature', - sha: project.commit('feature').sha) + create( + :ci_pipeline, + project: project, + ref: 'feature', + sha: project.commit('feature').sha + ) end let(:unicode) do - create(:ci_pipeline, - ref: 'ü/unicode/multi-byte') + create( + :ci_pipeline, + project: project, + ref: 'ü/unicode/multi-byte' + ) + end + + let(:in_another_project) do + other_project = create(:project, :repository) + create( + :ci_pipeline, + project: other_project, + ref: other_project.default_branch, + sha: other_project.commit.sha + ) end - it 'returns the latest commit for a ref lazily' do + it 'returns the latest commit for a ref lazily', :aggregate_failures do expect(project.repository) .to receive(:list_commits_by_ref_name).once .and_call_original + requests_before = Gitlab::GitalyClient.get_request_count pipeline.lazy_ref_commit another.lazy_ref_commit unicode.lazy_ref_commit + in_another_project.lazy_ref_commit + requests_after = Gitlab::GitalyClient.get_request_count + + expect(requests_after - requests_before).to eq(0) expect(pipeline.lazy_ref_commit.id).to eq pipeline.sha expect(another.lazy_ref_commit.id).to eq another.sha - 
expect(unicode.lazy_ref_commit).to be_nil + expect(unicode.lazy_ref_commit.itself).to be_nil + expect(in_another_project.lazy_ref_commit.id).to eq in_another_project.sha + + expect(pipeline.lazy_ref_commit.repository.container).to eq project + expect(in_another_project.lazy_ref_commit.repository.container).to eq in_another_project.project end end @@ -2172,9 +2236,7 @@ RSpec.describe Ci::Pipeline, :mailer, factory_default: :keep, feature_category: end let(:merge_request) do - create(:merge_request, :simple, - source_project: project, - target_project: project) + create(:merge_request, :simple, source_project: project, target_project: project) end it 'returns merge request modified paths' do @@ -2199,8 +2261,7 @@ RSpec.describe Ci::Pipeline, :mailer, factory_default: :keep, feature_category: describe '#modified_paths_since' do let(:project) do - create(:project, :custom_repo, - files: { 'file1.txt' => 'file 1' }) + create(:project, :custom_repo, files: { 'file1.txt' => 'file 1' }) end let(:user) { project.owner } @@ -3473,19 +3534,23 @@ RSpec.describe Ci::Pipeline, :mailer, factory_default: :keep, feature_category: let(:target_branch) { 'master' } let!(:pipeline) do - create(:ci_pipeline, - source: :merge_request_event, - project: pipeline_project, - ref: source_branch, - merge_request: merge_request) + create( + :ci_pipeline, + source: :merge_request_event, + project: pipeline_project, + ref: source_branch, + merge_request: merge_request + ) end let(:merge_request) do - create(:merge_request, - source_project: pipeline_project, - source_branch: source_branch, - target_project: project, - target_branch: target_branch) + create( + :merge_request, + source_project: pipeline_project, + source_branch: source_branch, + target_project: project, + target_branch: target_branch + ) end it 'returns an associated merge request' do @@ -3496,19 +3561,23 @@ RSpec.describe Ci::Pipeline, :mailer, factory_default: :keep, feature_category: let(:target_branch_2) { 'merge-test' } 
let!(:pipeline_2) do - create(:ci_pipeline, - source: :merge_request_event, - project: pipeline_project, - ref: source_branch, - merge_request: merge_request_2) + create( + :ci_pipeline, + source: :merge_request_event, + project: pipeline_project, + ref: source_branch, + merge_request: merge_request_2 + ) end let(:merge_request_2) do - create(:merge_request, - source_project: pipeline_project, - source_branch: source_branch, - target_project: project, - target_branch: target_branch_2) + create( + :merge_request, + source_project: pipeline_project, + source_branch: source_branch, + target_project: project, + target_branch: target_branch_2 + ) end it 'does not return an associated merge request' do @@ -3904,10 +3973,12 @@ RSpec.describe Ci::Pipeline, :mailer, factory_default: :keep, feature_category: let(:project) { create(:project, :repository, namespace: namespace) } let(:pipeline) do - create(:ci_pipeline, - project: project, - sha: project.commit('master').sha, - user: project.first_owner) + create( + :ci_pipeline, + project: project, + sha: project.commit('master').sha, + user: project.first_owner + ) end before do @@ -4691,10 +4762,7 @@ RSpec.describe Ci::Pipeline, :mailer, factory_default: :keep, feature_category: let(:stage_name) { 'test' } let(:stage) do - create(:ci_stage, - pipeline: pipeline, - project: pipeline.project, - name: 'test') + create(:ci_stage, pipeline: pipeline, project: pipeline.project, name: 'test') end before do @@ -5411,11 +5479,11 @@ RSpec.describe Ci::Pipeline, :mailer, factory_default: :keep, feature_category: describe '#cluster_agent_authorizations' do let(:pipeline) { create(:ci_empty_pipeline, :created) } - let(:authorization) { instance_double(Clusters::Agents::GroupAuthorization) } + let(:authorization) { instance_double(Clusters::Agents::Authorizations::CiAccess::GroupAuthorization) } let(:finder) { double(execute: [authorization]) } it 'retrieves authorization records from the finder and caches the result' do - 
expect(Clusters::AgentAuthorizationsFinder).to receive(:new).once + expect(Clusters::Agents::Authorizations::CiAccess::Finder).to receive(:new).once .with(pipeline.project) .and_return(finder) diff --git a/spec/models/ci/processable_spec.rb b/spec/models/ci/processable_spec.rb index cf2c176816d..34a56162dd9 100644 --- a/spec/models/ci/processable_spec.rb +++ b/spec/models/ci/processable_spec.rb @@ -43,11 +43,13 @@ RSpec.describe Ci::Processable, feature_category: :continuous_integration do let_it_be(:another_pipeline) { create(:ci_empty_pipeline, project: project) } let_it_be_with_refind(:processable) do - create(:ci_build, :failed, :picked, :expired, :erased, :queued, :coverage, :tags, - :allowed_to_fail, :on_tag, :triggered, :teardown_environment, :resource_group, - description: 'my-job', stage: 'test', stage_id: stage.id, - pipeline: pipeline, auto_canceled_by: another_pipeline, - scheduled_at: 10.seconds.since) + create( + :ci_build, :failed, :picked, :expired, :erased, :queued, :coverage, :tags, + :allowed_to_fail, :on_tag, :triggered, :teardown_environment, :resource_group, + description: 'my-job', stage: 'test', stage_id: stage.id, + pipeline: pipeline, auto_canceled_by: another_pipeline, + scheduled_at: 10.seconds.since + ) end let_it_be(:internal_job_variable) { create(:ci_job_variable, job: processable) } @@ -83,7 +85,7 @@ RSpec.describe Ci::Processable, feature_category: :continuous_integration do runner_id tag_taggings taggings tags trigger_request_id user_id auto_canceled_by_id retried failure_reason sourced_pipelines sourced_pipeline artifacts_file_store artifacts_metadata_store - metadata runner_machine_build runner_machine runner_session trace_chunks upstream_pipeline_id + metadata runner_manager_build runner_manager runner_session trace_chunks upstream_pipeline_id artifacts_file artifacts_metadata artifacts_size commands resource resource_group_id processed security_scans author pipeline_id report_results pending_state pages_deployments @@ -95,8 
+97,7 @@ RSpec.describe Ci::Processable, feature_category: :continuous_integration do before_all do # Create artifacts to check that the associations are rejected when cloning Ci::JobArtifact::TYPE_AND_FORMAT_PAIRS.each do |file_type, file_format| - create(:ci_job_artifact, file_format, - file_type: file_type, job: processable, expire_at: processable.artifacts_expire_at) + create(:ci_job_artifact, file_format, file_type: file_type, job: processable, expire_at: processable.artifacts_expire_at) end create(:ci_job_variable, :dotenv_source, job: processable) @@ -193,8 +194,7 @@ RSpec.describe Ci::Processable, feature_category: :continuous_integration do context 'when it has a deployment' do let!(:processable) do - create(:ci_build, :with_deployment, :deploy_to_production, - pipeline: pipeline, stage_id: stage.id, project: project) + create(:ci_build, :with_deployment, :deploy_to_production, pipeline: pipeline, stage_id: stage.id, project: project) end it 'persists the expanded environment name' do diff --git a/spec/models/ci/ref_spec.rb b/spec/models/ci/ref_spec.rb index ffbda4b459f..eab5a40bc30 100644 --- a/spec/models/ci/ref_spec.rb +++ b/spec/models/ci/ref_spec.rb @@ -105,8 +105,11 @@ RSpec.describe Ci::Ref do context 'when pipeline is a detached merge request pipeline' do let(:merge_request) do - create(:merge_request, target_project: project, target_branch: 'master', - source_project: project, source_branch: 'feature') + create( + :merge_request, + target_project: project, target_branch: 'master', + source_project: project, source_branch: 'feature' + ) end let!(:pipeline) do diff --git a/spec/models/ci/resource_group_spec.rb b/spec/models/ci/resource_group_spec.rb index 01acf5194f0..87537f36311 100644 --- a/spec/models/ci/resource_group_spec.rb +++ b/spec/models/ci/resource_group_spec.rb @@ -165,4 +165,23 @@ RSpec.describe Ci::ResourceGroup do end end end + + describe '#current_processable' do + subject { resource_group.current_processable } + + let(:build) { 
create(:ci_build) } + let(:resource_group) { create(:ci_resource_group) } + + context 'when resource is retained by a build' do + before do + resource_group.assign_resource_to(build) + end + + it { is_expected.to eq(build) } + end + + context 'when resource is not retained by a build' do + it { is_expected.to be_nil } + end + end end diff --git a/spec/models/ci/runner_machine_build_spec.rb b/spec/models/ci/runner_machine_build_spec.rb deleted file mode 100644 index b43ff535477..00000000000 --- a/spec/models/ci/runner_machine_build_spec.rb +++ /dev/null @@ -1,100 +0,0 @@ -# frozen_string_literal: true - -require 'spec_helper' - -RSpec.describe Ci::RunnerMachineBuild, model: true, feature_category: :runner_fleet do - let_it_be(:runner) { create(:ci_runner) } - let_it_be(:runner_machine) { create(:ci_runner_machine, runner: runner) } - let_it_be(:build) { create(:ci_build, runner_machine: runner_machine) } - - it { is_expected.to belong_to(:build) } - it { is_expected.to belong_to(:runner_machine) } - - describe 'partitioning' do - context 'with build' do - let(:build) { FactoryBot.build(:ci_build, partition_id: ci_testing_partition_id) } - let(:runner_machine_build) { FactoryBot.build(:ci_runner_machine_build, build: build) } - - it 'sets partition_id to the current partition value' do - expect { runner_machine_build.valid? }.to change { runner_machine_build.partition_id } - .to(ci_testing_partition_id) - end - - context 'when it is already set' do - let(:runner_machine_build) { FactoryBot.build(:ci_runner_machine_build, partition_id: 125) } - - it 'does not change the partition_id value' do - expect { runner_machine_build.valid? }.not_to change { runner_machine_build.partition_id } - end - end - end - - context 'without build' do - let(:runner_machine_build) { FactoryBot.build(:ci_runner_machine_build, build: nil) } - - it { is_expected.to validate_presence_of(:partition_id) } - - it 'does not change the partition_id value' do - expect { runner_machine_build.valid? 
}.not_to change { runner_machine_build.partition_id } - end - end - end - - describe 'ci_sliding_list partitioning' do - let(:connection) { described_class.connection } - let(:partition_manager) { Gitlab::Database::Partitioning::PartitionManager.new(described_class) } - - let(:partitioning_strategy) { described_class.partitioning_strategy } - - it { expect(partitioning_strategy.missing_partitions).to be_empty } - it { expect(partitioning_strategy.extra_partitions).to be_empty } - it { expect(partitioning_strategy.current_partitions).to include partitioning_strategy.initial_partition } - it { expect(partitioning_strategy.active_partition).to be_present } - end - - context 'loose foreign key on p_ci_runner_machine_builds.runner_machine_id' do # rubocop:disable RSpec/ContextWording - it_behaves_like 'cleanup by a loose foreign key' do - let!(:parent) { create(:ci_runner_machine) } - let!(:model) { create(:ci_runner_machine_build, runner_machine: parent) } - end - end - - describe '.for_build' do - subject(:for_build) { described_class.for_build(build_id) } - - context 'with valid build_id' do - let(:build_id) { build.id } - - it { is_expected.to contain_exactly(described_class.find_by_build_id(build_id)) } - end - - context 'with valid build_ids' do - let(:build2) { create(:ci_build, runner_machine: runner_machine) } - let(:build_id) { [build, build2] } - - it { is_expected.to eq(described_class.where(build_id: build_id)) } - end - - context 'with non-existeng build_id' do - let(:build_id) { non_existing_record_id } - - it { is_expected.to be_empty } - end - end - - describe '.pluck_runner_machine_id_and_build_id' do - subject { scope.pluck_build_id_and_runner_machine_id } - - context 'with default scope' do - let(:scope) { described_class } - - it { is_expected.to eq({ build.id => runner_machine.id }) } - end - - context 'with scope excluding build' do - let(:scope) { described_class.where(build_id: non_existing_record_id) } - - it { is_expected.to be_empty } - end - 
end -end diff --git a/spec/models/ci/runner_machine_spec.rb b/spec/models/ci/runner_machine_spec.rb deleted file mode 100644 index 0989477cd21..00000000000 --- a/spec/models/ci/runner_machine_spec.rb +++ /dev/null @@ -1,291 +0,0 @@ -# frozen_string_literal: true - -require 'spec_helper' - -RSpec.describe Ci::RunnerMachine, feature_category: :runner_fleet, type: :model do - it_behaves_like 'having unique enum values' - - it_behaves_like 'it has loose foreign keys' do - let(:factory_name) { :ci_runner_machine } - end - - it { is_expected.to belong_to(:runner) } - it { is_expected.to belong_to(:runner_version).with_foreign_key(:version) } - it { is_expected.to have_many(:runner_machine_builds) } - it { is_expected.to have_many(:builds).through(:runner_machine_builds) } - - describe 'validation' do - it { is_expected.to validate_presence_of(:runner) } - it { is_expected.to validate_presence_of(:system_xid) } - it { is_expected.to validate_length_of(:system_xid).is_at_most(64) } - it { is_expected.to validate_length_of(:version).is_at_most(2048) } - it { is_expected.to validate_length_of(:revision).is_at_most(255) } - it { is_expected.to validate_length_of(:platform).is_at_most(255) } - it { is_expected.to validate_length_of(:architecture).is_at_most(255) } - it { is_expected.to validate_length_of(:ip_address).is_at_most(1024) } - - context 'when runner has config' do - it 'is valid' do - runner_machine = build(:ci_runner_machine, config: { gpus: "all" }) - - expect(runner_machine).to be_valid - end - end - - context 'when runner has an invalid config' do - it 'is invalid' do - runner_machine = build(:ci_runner_machine, config: { test: 1 }) - - expect(runner_machine).not_to be_valid - end - end - end - - describe '.stale', :freeze_time do - subject { described_class.stale.ids } - - let!(:runner_machine1) { create(:ci_runner_machine, :stale) } - let!(:runner_machine2) { create(:ci_runner_machine, :stale, contacted_at: nil) } - let!(:runner_machine3) { 
create(:ci_runner_machine, created_at: 6.months.ago, contacted_at: Time.current) } - let!(:runner_machine4) { create(:ci_runner_machine, created_at: 5.days.ago) } - let!(:runner_machine5) do - create(:ci_runner_machine, created_at: (7.days - 1.second).ago, contacted_at: (7.days - 1.second).ago) - end - - it 'returns stale runner machines' do - is_expected.to match_array([runner_machine1.id, runner_machine2.id]) - end - end - - describe '.online_contact_time_deadline', :freeze_time do - subject { described_class.online_contact_time_deadline } - - it { is_expected.to eq(2.hours.ago) } - end - - describe '.stale_deadline', :freeze_time do - subject { described_class.stale_deadline } - - it { is_expected.to eq(7.days.ago) } - end - - describe '#status', :freeze_time do - let(:runner_machine) { build(:ci_runner_machine, created_at: 8.days.ago) } - - subject { runner_machine.status } - - context 'if never connected' do - before do - runner_machine.contacted_at = nil - end - - it { is_expected.to eq(:stale) } - - context 'if created recently' do - before do - runner_machine.created_at = 1.day.ago - end - - it { is_expected.to eq(:never_contacted) } - end - end - - context 'if contacted 1s ago' do - before do - runner_machine.contacted_at = 1.second.ago - end - - it { is_expected.to eq(:online) } - end - - context 'if contacted recently' do - before do - runner_machine.contacted_at = 2.hours.ago - end - - it { is_expected.to eq(:offline) } - end - - context 'if contacted long time ago' do - before do - runner_machine.contacted_at = 7.days.ago - end - - it { is_expected.to eq(:stale) } - end - end - - describe '#heartbeat', :freeze_time do - let(:runner_machine) { create(:ci_runner_machine, version: '15.0.0') } - let(:executor) { 'shell' } - let(:values) do - { - ip_address: '8.8.8.8', - architecture: '18-bit', - config: { gpus: "all" }, - executor: executor, - version: version - } - end - - subject(:heartbeat) do - runner_machine.heartbeat(values) - end - - context 'when 
database was updated recently' do - before do - runner_machine.contacted_at = Time.current - end - - context 'when version is changed' do - let(:version) { '15.0.1' } - - before do - allow(Ci::Runners::ProcessRunnerVersionUpdateWorker).to receive(:perform_async).with(version) - end - - it 'schedules version information update' do - heartbeat - - expect(Ci::Runners::ProcessRunnerVersionUpdateWorker).to have_received(:perform_async).with(version).once - end - - it 'updates cache' do - expect_redis_update - - heartbeat - - expect(runner_machine.runner_version).to be_nil - end - - context 'when fetching runner releases is disabled' do - before do - stub_application_setting(update_runner_versions_enabled: false) - end - - it 'does not schedule version information update' do - heartbeat - - expect(Ci::Runners::ProcessRunnerVersionUpdateWorker).not_to have_received(:perform_async) - end - end - end - - context 'with only ip_address specified' do - let(:values) do - { ip_address: '1.1.1.1' } - end - - it 'updates only ip_address' do - expect_redis_update(values.merge(contacted_at: Time.current)) - - heartbeat - end - - context 'with new version having been cached' do - let(:version) { '15.0.1' } - - before do - runner_machine.cache_attributes(version: version) - end - - it 'does not lose cached version value' do - expect { heartbeat }.not_to change { runner_machine.version }.from(version) - end - end - end - end - - context 'when database was not updated recently' do - before do - runner_machine.contacted_at = 2.hours.ago - - allow(Ci::Runners::ProcessRunnerVersionUpdateWorker).to receive(:perform_async).with(version) - end - - context 'when version is changed' do - let(:version) { '15.0.1' } - - context 'with invalid runner_machine' do - before do - runner_machine.runner = nil - end - - it 'still updates redis cache and database' do - expect(runner_machine).to be_invalid - - expect_redis_update - does_db_update - - expect(Ci::Runners::ProcessRunnerVersionUpdateWorker).to 
have_received(:perform_async) - .with(version).once - end - end - - it 'updates redis cache and database' do - expect_redis_update - does_db_update - - expect(Ci::Runners::ProcessRunnerVersionUpdateWorker).to have_received(:perform_async) - .with(version).once - end - end - - context 'with unchanged runner_machine version' do - let(:version) { runner_machine.version } - - it 'does not schedule ci_runner_versions update' do - heartbeat - - expect(Ci::Runners::ProcessRunnerVersionUpdateWorker).not_to have_received(:perform_async) - end - - Ci::Runner::EXECUTOR_NAME_TO_TYPES.each_key do |executor| - context "with #{executor} executor" do - let(:executor) { executor } - - it 'updates with expected executor type' do - expect_redis_update - - heartbeat - - expect(runner_machine.reload.read_attribute(:executor_type)).to eq(expected_executor_type) - end - - def expected_executor_type - executor.gsub(/[+-]/, '_') - end - end - end - - context 'with an unknown executor type' do - let(:executor) { 'some-unknown-type' } - - it 'updates with unknown executor type' do - expect_redis_update - - heartbeat - - expect(runner_machine.reload.read_attribute(:executor_type)).to eq('unknown') - end - end - end - end - - def expect_redis_update(values = anything) - values_json = values == anything ? 
anything : Gitlab::Json.dump(values) - - Gitlab::Redis::Cache.with do |redis| - redis_key = runner_machine.send(:cache_attribute_key) - expect(redis).to receive(:set).with(redis_key, values_json, any_args).and_call_original - end - end - - def does_db_update - expect { heartbeat }.to change { runner_machine.reload.read_attribute(:contacted_at) } - .and change { runner_machine.reload.read_attribute(:architecture) } - .and change { runner_machine.reload.read_attribute(:config) } - .and change { runner_machine.reload.read_attribute(:executor_type) } - end - end -end diff --git a/spec/models/ci/runner_manager_build_spec.rb b/spec/models/ci/runner_manager_build_spec.rb new file mode 100644 index 00000000000..3a381313b76 --- /dev/null +++ b/spec/models/ci/runner_manager_build_spec.rb @@ -0,0 +1,100 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe Ci::RunnerManagerBuild, model: true, feature_category: :runner_fleet do + let_it_be(:runner) { create(:ci_runner) } + let_it_be(:runner_manager) { create(:ci_runner_machine, runner: runner) } + let_it_be(:build) { create(:ci_build, runner_manager: runner_manager) } + + it { is_expected.to belong_to(:build) } + it { is_expected.to belong_to(:runner_manager) } + + describe 'partitioning' do + context 'with build' do + let(:build) { FactoryBot.build(:ci_build, partition_id: ci_testing_partition_id) } + let(:runner_manager_build) { FactoryBot.build(:ci_runner_machine_build, build: build) } + + it 'sets partition_id to the current partition value' do + expect { runner_manager_build.valid? }.to change { runner_manager_build.partition_id } + .to(ci_testing_partition_id) + end + + context 'when it is already set' do + let(:runner_manager_build) { FactoryBot.build(:ci_runner_machine_build, partition_id: 125) } + + it 'does not change the partition_id value' do + expect { runner_manager_build.valid? 
}.not_to change { runner_manager_build.partition_id } + end + end + end + + context 'without build' do + let(:runner_manager_build) { FactoryBot.build(:ci_runner_machine_build, build: nil) } + + it { is_expected.to validate_presence_of(:partition_id) } + + it 'does not change the partition_id value' do + expect { runner_manager_build.valid? }.not_to change { runner_manager_build.partition_id } + end + end + end + + describe 'ci_sliding_list partitioning' do + let(:connection) { described_class.connection } + let(:partition_manager) { Gitlab::Database::Partitioning::PartitionManager.new(described_class) } + + let(:partitioning_strategy) { described_class.partitioning_strategy } + + it { expect(partitioning_strategy.missing_partitions).to be_empty } + it { expect(partitioning_strategy.extra_partitions).to be_empty } + it { expect(partitioning_strategy.current_partitions).to include partitioning_strategy.initial_partition } + it { expect(partitioning_strategy.active_partition).to be_present } + end + + context 'loose foreign key on p_ci_runner_manager_builds.runner_manager_id' do # rubocop:disable RSpec/ContextWording + it_behaves_like 'cleanup by a loose foreign key' do + let!(:parent) { create(:ci_runner_machine) } + let!(:model) { create(:ci_runner_machine_build, runner_manager: parent) } + end + end + + describe '.for_build' do + subject(:for_build) { described_class.for_build(build_id) } + + context 'with valid build_id' do + let(:build_id) { build.id } + + it { is_expected.to contain_exactly(described_class.find_by_build_id(build_id)) } + end + + context 'with valid build_ids' do + let(:build2) { create(:ci_build, runner_manager: runner_manager) } + let(:build_id) { [build, build2] } + + it { is_expected.to eq(described_class.where(build_id: build_id)) } + end + + context 'with non-existent build_id' do + let(:build_id) { non_existing_record_id } + + it { is_expected.to be_empty } + end + end + + describe '.pluck_runner_manager_id_and_build_id' do + subject {
scope.pluck_build_id_and_runner_manager_id } + + context 'with default scope' do + let(:scope) { described_class } + + it { is_expected.to eq({ build.id => runner_manager.id }) } + end + + context 'with scope excluding build' do + let(:scope) { described_class.where(build_id: non_existing_record_id) } + + it { is_expected.to be_empty } + end + end +end diff --git a/spec/models/ci/runner_manager_spec.rb b/spec/models/ci/runner_manager_spec.rb new file mode 100644 index 00000000000..d69c9ef845e --- /dev/null +++ b/spec/models/ci/runner_manager_spec.rb @@ -0,0 +1,291 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe Ci::RunnerManager, feature_category: :runner_fleet, type: :model do + it_behaves_like 'having unique enum values' + + it_behaves_like 'it has loose foreign keys' do + let(:factory_name) { :ci_runner_machine } + end + + it { is_expected.to belong_to(:runner) } + it { is_expected.to belong_to(:runner_version).with_foreign_key(:version) } + it { is_expected.to have_many(:runner_manager_builds) } + it { is_expected.to have_many(:builds).through(:runner_manager_builds) } + + describe 'validation' do + it { is_expected.to validate_presence_of(:runner) } + it { is_expected.to validate_presence_of(:system_xid) } + it { is_expected.to validate_length_of(:system_xid).is_at_most(64) } + it { is_expected.to validate_length_of(:version).is_at_most(2048) } + it { is_expected.to validate_length_of(:revision).is_at_most(255) } + it { is_expected.to validate_length_of(:platform).is_at_most(255) } + it { is_expected.to validate_length_of(:architecture).is_at_most(255) } + it { is_expected.to validate_length_of(:ip_address).is_at_most(1024) } + + context 'when runner has config' do + it 'is valid' do + runner_manager = build(:ci_runner_machine, config: { gpus: "all" }) + + expect(runner_manager).to be_valid + end + end + + context 'when runner has an invalid config' do + it 'is invalid' do + runner_manager = build(:ci_runner_machine, config: { 
test: 1 }) + + expect(runner_manager).not_to be_valid + end + end + end + + describe '.stale', :freeze_time do + subject { described_class.stale.ids } + + let!(:runner_manager1) { create(:ci_runner_machine, :stale) } + let!(:runner_manager2) { create(:ci_runner_machine, :stale, contacted_at: nil) } + let!(:runner_manager3) { create(:ci_runner_machine, created_at: 6.months.ago, contacted_at: Time.current) } + let!(:runner_manager4) { create(:ci_runner_machine, created_at: 5.days.ago) } + let!(:runner_manager5) do + create(:ci_runner_machine, created_at: (7.days - 1.second).ago, contacted_at: (7.days - 1.second).ago) + end + + it 'returns stale runner managers' do + is_expected.to match_array([runner_manager1.id, runner_manager2.id]) + end + end + + describe '.online_contact_time_deadline', :freeze_time do + subject { described_class.online_contact_time_deadline } + + it { is_expected.to eq(2.hours.ago) } + end + + describe '.stale_deadline', :freeze_time do + subject { described_class.stale_deadline } + + it { is_expected.to eq(7.days.ago) } + end + + describe '#status', :freeze_time do + let(:runner_manager) { build(:ci_runner_machine, created_at: 8.days.ago) } + + subject { runner_manager.status } + + context 'if never connected' do + before do + runner_manager.contacted_at = nil + end + + it { is_expected.to eq(:stale) } + + context 'if created recently' do + before do + runner_manager.created_at = 1.day.ago + end + + it { is_expected.to eq(:never_contacted) } + end + end + + context 'if contacted 1s ago' do + before do + runner_manager.contacted_at = 1.second.ago + end + + it { is_expected.to eq(:online) } + end + + context 'if contacted recently' do + before do + runner_manager.contacted_at = 2.hours.ago + end + + it { is_expected.to eq(:offline) } + end + + context 'if contacted long time ago' do + before do + runner_manager.contacted_at = 7.days.ago + end + + it { is_expected.to eq(:stale) } + end + end + + describe '#heartbeat', :freeze_time do + 
let(:runner_manager) { create(:ci_runner_machine, version: '15.0.0') } + let(:executor) { 'shell' } + let(:values) do + { + ip_address: '8.8.8.8', + architecture: '18-bit', + config: { gpus: "all" }, + executor: executor, + version: version + } + end + + subject(:heartbeat) do + runner_manager.heartbeat(values) + end + + context 'when database was updated recently' do + before do + runner_manager.contacted_at = Time.current + end + + context 'when version is changed' do + let(:version) { '15.0.1' } + + before do + allow(Ci::Runners::ProcessRunnerVersionUpdateWorker).to receive(:perform_async).with(version) + end + + it 'schedules version information update' do + heartbeat + + expect(Ci::Runners::ProcessRunnerVersionUpdateWorker).to have_received(:perform_async).with(version).once + end + + it 'updates cache' do + expect_redis_update + + heartbeat + + expect(runner_manager.runner_version).to be_nil + end + + context 'when fetching runner releases is disabled' do + before do + stub_application_setting(update_runner_versions_enabled: false) + end + + it 'does not schedule version information update' do + heartbeat + + expect(Ci::Runners::ProcessRunnerVersionUpdateWorker).not_to have_received(:perform_async) + end + end + end + + context 'with only ip_address specified' do + let(:values) do + { ip_address: '1.1.1.1' } + end + + it 'updates only ip_address' do + expect_redis_update(values.merge(contacted_at: Time.current)) + + heartbeat + end + + context 'with new version having been cached' do + let(:version) { '15.0.1' } + + before do + runner_manager.cache_attributes(version: version) + end + + it 'does not lose cached version value' do + expect { heartbeat }.not_to change { runner_manager.version }.from(version) + end + end + end + end + + context 'when database was not updated recently' do + before do + runner_manager.contacted_at = 2.hours.ago + + allow(Ci::Runners::ProcessRunnerVersionUpdateWorker).to receive(:perform_async).with(version) + end + + context 'when 
version is changed' do + let(:version) { '15.0.1' } + + context 'with invalid runner_manager' do + before do + runner_manager.runner = nil + end + + it 'still updates redis cache and database' do + expect(runner_manager).to be_invalid + + expect_redis_update + does_db_update + + expect(Ci::Runners::ProcessRunnerVersionUpdateWorker).to have_received(:perform_async) + .with(version).once + end + end + + it 'updates redis cache and database' do + expect_redis_update + does_db_update + + expect(Ci::Runners::ProcessRunnerVersionUpdateWorker).to have_received(:perform_async) + .with(version).once + end + end + + context 'with unchanged runner_manager version' do + let(:version) { runner_manager.version } + + it 'does not schedule ci_runner_versions update' do + heartbeat + + expect(Ci::Runners::ProcessRunnerVersionUpdateWorker).not_to have_received(:perform_async) + end + + Ci::Runner::EXECUTOR_NAME_TO_TYPES.each_key do |executor| + context "with #{executor} executor" do + let(:executor) { executor } + + it 'updates with expected executor type' do + expect_redis_update + + heartbeat + + expect(runner_manager.reload.read_attribute(:executor_type)).to eq(expected_executor_type) + end + + def expected_executor_type + executor.gsub(/[+-]/, '_') + end + end + end + + context 'with an unknown executor type' do + let(:executor) { 'some-unknown-type' } + + it 'updates with unknown executor type' do + expect_redis_update + + heartbeat + + expect(runner_manager.reload.read_attribute(:executor_type)).to eq('unknown') + end + end + end + end + + def expect_redis_update(values = anything) + values_json = values == anything ? 
anything : Gitlab::Json.dump(values) + + Gitlab::Redis::Cache.with do |redis| + redis_key = runner_manager.send(:cache_attribute_key) + expect(redis).to receive(:set).with(redis_key, values_json, any_args).and_call_original + end + end + + def does_db_update + expect { heartbeat }.to change { runner_manager.reload.read_attribute(:contacted_at) } + .and change { runner_manager.reload.read_attribute(:architecture) } + .and change { runner_manager.reload.read_attribute(:config) } + .and change { runner_manager.reload.read_attribute(:executor_type) } + end + end +end diff --git a/spec/models/ci/runner_spec.rb b/spec/models/ci/runner_spec.rb index fe49b2c2c7f..53a6a0e6a88 100644 --- a/spec/models/ci/runner_spec.rb +++ b/spec/models/ci/runner_spec.rb @@ -273,7 +273,7 @@ RSpec.describe Ci::Runner, type: :model, feature_category: :runner do end end - shared_examples '.belonging_to_parent_group_of_project' do + shared_examples '.belonging_to_parent_groups_of_project' do let_it_be(:group1) { create(:group) } let_it_be(:project1) { create(:project, group: group1) } let_it_be(:runner1) { create(:ci_runner, :group, groups: [group1]) } @@ -284,7 +284,7 @@ RSpec.describe Ci::Runner, type: :model, feature_category: :runner do let(:project_id) { project1.id } - subject(:result) { described_class.belonging_to_parent_group_of_project(project_id) } + subject(:result) { described_class.belonging_to_parent_groups_of_project(project_id) } it 'returns the group runner' do expect(result).to contain_exactly(runner1) @@ -310,7 +310,7 @@ RSpec.describe Ci::Runner, type: :model, feature_category: :runner do end context 'when use_traversal_ids* are enabled' do - it_behaves_like '.belonging_to_parent_group_of_project' + it_behaves_like '.belonging_to_parent_groups_of_project' end context 'when use_traversal_ids* are disabled' do @@ -322,7 +322,7 @@ RSpec.describe Ci::Runner, type: :model, feature_category: :runner do ) end - it_behaves_like '.belonging_to_parent_group_of_project' + 
it_behaves_like '.belonging_to_parent_groups_of_project' end context 'with instance runners sharing enabled' do diff --git a/spec/models/ci/runner_version_spec.rb b/spec/models/ci/runner_version_spec.rb index 511d120ab7f..bce1f2a6c39 100644 --- a/spec/models/ci/runner_version_spec.rb +++ b/spec/models/ci/runner_version_spec.rb @@ -11,7 +11,7 @@ RSpec.describe Ci::RunnerVersion, feature_category: :runner_fleet do create(:ci_runner_version, version: 'abc123', status: :unavailable) end - it { is_expected.to have_many(:runner_machines).with_foreign_key(:version) } + it { is_expected.to have_many(:runner_managers).with_foreign_key(:version) } it_behaves_like 'having unique enum values' diff --git a/spec/models/ci/secure_file_spec.rb b/spec/models/ci/secure_file_spec.rb index 38ae908fb00..e526e90e250 100644 --- a/spec/models/ci/secure_file_spec.rb +++ b/spec/models/ci/secure_file_spec.rb @@ -144,8 +144,11 @@ RSpec.describe Ci::SecureFile do describe '#update_metadata!' do it 'assigns the expected metadata when a parsable .cer file is supplied' do - file = create(:ci_secure_file, name: 'file1.cer', - file: CarrierWaveStringFile.new(fixture_file('ci_secure_files/sample.cer'))) + file = create( + :ci_secure_file, + name: 'file1.cer', + file: CarrierWaveStringFile.new(fixture_file('ci_secure_files/sample.cer')) + ) file.update_metadata! file.reload @@ -157,8 +160,11 @@ RSpec.describe Ci::SecureFile do end it 'assigns the expected metadata when a parsable .p12 file is supplied' do - file = create(:ci_secure_file, name: 'file1.p12', - file: CarrierWaveStringFile.new(fixture_file('ci_secure_files/sample.p12'))) + file = create( + :ci_secure_file, + name: 'file1.p12', + file: CarrierWaveStringFile.new(fixture_file('ci_secure_files/sample.p12')) + ) file.update_metadata! 
file.reload @@ -170,10 +176,11 @@ RSpec.describe Ci::SecureFile do end it 'assigns the expected metadata when a parsable .mobileprovision file is supplied' do - file = create(:ci_secure_file, name: 'file1.mobileprovision', - file: CarrierWaveStringFile.new( - fixture_file('ci_secure_files/sample.mobileprovision') - )) + file = create( + :ci_secure_file, + name: 'file1.mobileprovision', + file: CarrierWaveStringFile.new(fixture_file('ci_secure_files/sample.mobileprovision')) + ) file.update_metadata! file.reload diff --git a/spec/models/ci/sources/pipeline_spec.rb b/spec/models/ci/sources/pipeline_spec.rb index 47f32353fef..036708ed61e 100644 --- a/spec/models/ci/sources/pipeline_spec.rb +++ b/spec/models/ci/sources/pipeline_spec.rb @@ -37,7 +37,7 @@ RSpec.describe Ci::Sources::Pipeline, feature_category: :continuous_integration end end - describe 'partitioning', :ci_partitioning do + describe 'partitioning', :ci_partitionable do include Ci::PartitioningHelpers let(:new_pipeline) { create(:ci_pipeline) } diff --git a/spec/models/ci/stage_spec.rb b/spec/models/ci/stage_spec.rb index b392ab4ed11..79e92082ee1 100644 --- a/spec/models/ci/stage_spec.rb +++ b/spec/models/ci/stage_spec.rb @@ -223,10 +223,13 @@ RSpec.describe Ci::Stage, :models do with_them do before do statuses.each do |status| - create(:commit_status, project: stage.project, - pipeline: stage.pipeline, - stage_id: stage.id, - status: status) + create( + :commit_status, + project: stage.project, + pipeline: stage.pipeline, + stage_id: stage.id, + status: status + ) stage.update_legacy_status end @@ -239,11 +242,14 @@ RSpec.describe Ci::Stage, :models do context 'when stage has warnings' do before do - create(:ci_build, project: stage.project, - pipeline: stage.pipeline, - stage_id: stage.id, - status: :failed, - allow_failure: true) + create( + :ci_build, + project: stage.project, + pipeline: stage.pipeline, + stage_id: stage.id, + status: :failed, + allow_failure: true + ) stage.update_legacy_status end 
diff --git a/spec/models/ci/variable_spec.rb b/spec/models/ci/variable_spec.rb index 7a313115965..85327dbeb34 100644 --- a/spec/models/ci/variable_spec.rb +++ b/spec/models/ci/variable_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe Ci::Variable, feature_category: :pipeline_composition do +RSpec.describe Ci::Variable, feature_category: :secrets_management do let_it_be_with_reload(:project) { create(:project) } subject { build(:ci_variable, project: project) } diff --git a/spec/models/clusters/agent_spec.rb b/spec/models/clusters/agent_spec.rb index de67bdb32aa..df8ad861aff 100644 --- a/spec/models/clusters/agent_spec.rb +++ b/spec/models/clusters/agent_spec.rb @@ -8,10 +8,10 @@ RSpec.describe Clusters::Agent do it { is_expected.to belong_to(:created_by_user).class_name('User').optional } it { is_expected.to belong_to(:project).class_name('::Project') } it { is_expected.to have_many(:agent_tokens).class_name('Clusters::AgentToken').order(Clusters::AgentToken.arel_table[:last_used_at].desc.nulls_last) } - it { is_expected.to have_many(:group_authorizations).class_name('Clusters::Agents::GroupAuthorization') } - it { is_expected.to have_many(:authorized_groups).through(:group_authorizations) } - it { is_expected.to have_many(:project_authorizations).class_name('Clusters::Agents::ProjectAuthorization') } - it { is_expected.to have_many(:authorized_projects).through(:project_authorizations).class_name('::Project') } + it { is_expected.to have_many(:ci_access_group_authorizations).class_name('Clusters::Agents::Authorizations::CiAccess::GroupAuthorization') } + it { is_expected.to have_many(:ci_access_authorized_groups).through(:ci_access_group_authorizations) } + it { is_expected.to have_many(:ci_access_project_authorizations).class_name('Clusters::Agents::Authorizations::CiAccess::ProjectAuthorization') } + it { is_expected.to have_many(:ci_access_authorized_projects).through(:ci_access_project_authorizations).class_name('::Project') } it { is_expected.to 
validate_presence_of(:name) } it { is_expected.to validate_length_of(:name).is_at_most(63) } diff --git a/spec/models/clusters/agents/authorizations/ci_access/group_authorization_spec.rb b/spec/models/clusters/agents/authorizations/ci_access/group_authorization_spec.rb new file mode 100644 index 00000000000..deabebde760 --- /dev/null +++ b/spec/models/clusters/agents/authorizations/ci_access/group_authorization_spec.rb @@ -0,0 +1,16 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe Clusters::Agents::Authorizations::CiAccess::GroupAuthorization, feature_category: :deployment_management do + it { is_expected.to belong_to(:agent).class_name('Clusters::Agent').required } + it { is_expected.to belong_to(:group).class_name('::Group').required } + + it { expect(described_class).to validate_jsonb_schema(['config']) } + + describe '#config_project' do + let(:record) { create(:agent_ci_access_group_authorization) } + + it { expect(record.config_project).to eq(record.agent.project) } + end +end diff --git a/spec/models/clusters/agents/authorizations/ci_access/implicit_authorization_spec.rb b/spec/models/clusters/agents/authorizations/ci_access/implicit_authorization_spec.rb new file mode 100644 index 00000000000..427858c7529 --- /dev/null +++ b/spec/models/clusters/agents/authorizations/ci_access/implicit_authorization_spec.rb @@ -0,0 +1,14 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe Clusters::Agents::Authorizations::CiAccess::ImplicitAuthorization, feature_category: :deployment_management do + let_it_be(:agent) { create(:cluster_agent) } + + subject { described_class.new(agent: agent) } + + it { expect(subject.agent).to eq(agent) } + it { expect(subject.agent_id).to eq(agent.id) } + it { expect(subject.config_project).to eq(agent.project) } + it { expect(subject.config).to eq({}) } +end diff --git a/spec/models/clusters/agents/authorizations/ci_access/project_authorization_spec.rb 
b/spec/models/clusters/agents/authorizations/ci_access/project_authorization_spec.rb new file mode 100644 index 00000000000..fe5f3cb10ea --- /dev/null +++ b/spec/models/clusters/agents/authorizations/ci_access/project_authorization_spec.rb @@ -0,0 +1,16 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe Clusters::Agents::Authorizations::CiAccess::ProjectAuthorization, feature_category: :deployment_management do + it { is_expected.to belong_to(:agent).class_name('Clusters::Agent').required } + it { is_expected.to belong_to(:project).class_name('Project').required } + + it { expect(described_class).to validate_jsonb_schema(['config']) } + + describe '#config_project' do + let(:record) { create(:agent_ci_access_project_authorization) } + + it { expect(record.config_project).to eq(record.agent.project) } + end +end diff --git a/spec/models/clusters/agents/authorizations/user_access/group_authorization_spec.rb b/spec/models/clusters/agents/authorizations/user_access/group_authorization_spec.rb new file mode 100644 index 00000000000..9ed4da474c0 --- /dev/null +++ b/spec/models/clusters/agents/authorizations/user_access/group_authorization_spec.rb @@ -0,0 +1,16 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe Clusters::Agents::Authorizations::UserAccess::GroupAuthorization, feature_category: :deployment_management do + it { is_expected.to belong_to(:agent).class_name('Clusters::Agent').required } + it { is_expected.to belong_to(:group).class_name('::Group').required } + + it { expect(described_class).to validate_jsonb_schema(['config']) } + + describe '#config_project' do + let(:record) { create(:agent_user_access_group_authorization) } + + it { expect(record.config_project).to eq(record.agent.project) } + end +end diff --git a/spec/models/clusters/agents/authorizations/user_access/project_authorization_spec.rb b/spec/models/clusters/agents/authorizations/user_access/project_authorization_spec.rb new file mode 100644 
index 00000000000..79b039684ad --- /dev/null +++ b/spec/models/clusters/agents/authorizations/user_access/project_authorization_spec.rb @@ -0,0 +1,16 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe Clusters::Agents::Authorizations::UserAccess::ProjectAuthorization, feature_category: :deployment_management do + it { is_expected.to belong_to(:agent).class_name('Clusters::Agent').required } + it { is_expected.to belong_to(:project).class_name('Project').required } + + it { expect(described_class).to validate_jsonb_schema(['config']) } + + describe '#config_project' do + let(:record) { create(:agent_user_access_project_authorization) } + + it { expect(record.config_project).to eq(record.agent.project) } + end +end diff --git a/spec/models/clusters/agents/group_authorization_spec.rb b/spec/models/clusters/agents/group_authorization_spec.rb deleted file mode 100644 index baeb8f5464e..00000000000 --- a/spec/models/clusters/agents/group_authorization_spec.rb +++ /dev/null @@ -1,16 +0,0 @@ -# frozen_string_literal: true - -require 'spec_helper' - -RSpec.describe Clusters::Agents::GroupAuthorization do - it { is_expected.to belong_to(:agent).class_name('Clusters::Agent').required } - it { is_expected.to belong_to(:group).class_name('::Group').required } - - it { expect(described_class).to validate_jsonb_schema(['config']) } - - describe '#config_project' do - let(:record) { create(:agent_group_authorization) } - - it { expect(record.config_project).to eq(record.agent.project) } - end -end diff --git a/spec/models/clusters/agents/implicit_authorization_spec.rb b/spec/models/clusters/agents/implicit_authorization_spec.rb deleted file mode 100644 index 1f4c5b1ac9e..00000000000 --- a/spec/models/clusters/agents/implicit_authorization_spec.rb +++ /dev/null @@ -1,14 +0,0 @@ -# frozen_string_literal: true - -require 'spec_helper' - -RSpec.describe Clusters::Agents::ImplicitAuthorization do - let_it_be(:agent) { create(:cluster_agent) } - - subject { 
described_class.new(agent: agent) } - - it { expect(subject.agent).to eq(agent) } - it { expect(subject.agent_id).to eq(agent.id) } - it { expect(subject.config_project).to eq(agent.project) } - it { expect(subject.config).to eq({}) } -end diff --git a/spec/models/clusters/agents/project_authorization_spec.rb b/spec/models/clusters/agents/project_authorization_spec.rb deleted file mode 100644 index 9ba259356c7..00000000000 --- a/spec/models/clusters/agents/project_authorization_spec.rb +++ /dev/null @@ -1,16 +0,0 @@ -# frozen_string_literal: true - -require 'spec_helper' - -RSpec.describe Clusters::Agents::ProjectAuthorization do - it { is_expected.to belong_to(:agent).class_name('Clusters::Agent').required } - it { is_expected.to belong_to(:project).class_name('Project').required } - - it { expect(described_class).to validate_jsonb_schema(['config']) } - - describe '#config_project' do - let(:record) { create(:agent_project_authorization) } - - it { expect(record.config_project).to eq(record.agent.project) } - end -end diff --git a/spec/models/clusters/applications/helm_spec.rb b/spec/models/clusters/applications/helm_spec.rb deleted file mode 100644 index 1b8be92475a..00000000000 --- a/spec/models/clusters/applications/helm_spec.rb +++ /dev/null @@ -1,116 +0,0 @@ -# frozen_string_literal: true - -require 'spec_helper' - -RSpec.describe Clusters::Applications::Helm do - include_examples 'cluster application core specs', :clusters_applications_helm - - describe 'default values' do - it { expect(subject.version).to eq(Gitlab::Kubernetes::Helm::V2::BaseCommand::HELM_VERSION) } - end - - describe '.available' do - subject { described_class.available } - - let!(:installed_cluster) { create(:clusters_applications_helm, :installed) } - let!(:updated_cluster) { create(:clusters_applications_helm, :updated) } - - before do - create(:clusters_applications_helm, :errored) - end - - it { is_expected.to contain_exactly(installed_cluster, updated_cluster) } - end - - describe 
'#can_uninstall?' do - subject(:application) { build(:clusters_applications_helm).can_uninstall? } - - it { is_expected.to eq true } - end - - describe '#issue_client_cert' do - let(:application) { create(:clusters_applications_helm) } - - subject { application.issue_client_cert } - - it 'returns a new cert' do - is_expected.to be_kind_of(Gitlab::Kubernetes::Helm::V2::Certificate) - expect(subject.cert_string).not_to eq(application.ca_cert) - expect(subject.key_string).not_to eq(application.ca_key) - end - end - - describe '#install_command' do - let(:helm) { create(:clusters_applications_helm) } - - subject { helm.install_command } - - it { is_expected.to be_an_instance_of(Gitlab::Kubernetes::Helm::V2::InitCommand) } - - it 'is initialized with 1 arguments' do - expect(subject.name).to eq('helm') - end - - it 'has cert files' do - expect(subject.files[:'ca.pem']).to be_present - expect(subject.files[:'ca.pem']).to eq(helm.ca_cert) - - expect(subject.files[:'cert.pem']).to be_present - expect(subject.files[:'key.pem']).to be_present - - cert = OpenSSL::X509::Certificate.new(subject.files[:'cert.pem']) - expect(cert.not_after).to be > 999.years.from_now - end - - describe 'rbac' do - context 'rbac cluster' do - it { expect(subject).to be_rbac } - end - - context 'non rbac cluster' do - before do - helm.cluster.platform_kubernetes.abac! 
- end - - it { expect(subject).not_to be_rbac } - end - end - end - - describe '#uninstall_command' do - let(:helm) { create(:clusters_applications_helm) } - - subject { helm.uninstall_command } - - it { is_expected.to be_an_instance_of(Gitlab::Kubernetes::Helm::V2::ResetCommand) } - - it 'has name' do - expect(subject.name).to eq('helm') - end - - it 'has cert files' do - expect(subject.files[:'ca.pem']).to be_present - expect(subject.files[:'ca.pem']).to eq(helm.ca_cert) - - expect(subject.files[:'cert.pem']).to be_present - expect(subject.files[:'key.pem']).to be_present - - cert = OpenSSL::X509::Certificate.new(subject.files[:'cert.pem']) - expect(cert.not_after).to be > 999.years.from_now - end - - describe 'rbac' do - context 'rbac cluster' do - it { expect(subject).to be_rbac } - end - - context 'non rbac cluster' do - before do - helm.cluster.platform_kubernetes.abac! - end - - it { expect(subject).not_to be_rbac } - end - end - end -end diff --git a/spec/models/clusters/applications/ingress_spec.rb b/spec/models/clusters/applications/ingress_spec.rb deleted file mode 100644 index 2be59e5f515..00000000000 --- a/spec/models/clusters/applications/ingress_spec.rb +++ /dev/null @@ -1,180 +0,0 @@ -# frozen_string_literal: true - -require 'spec_helper' - -RSpec.describe Clusters::Applications::Ingress do - let(:ingress) { create(:clusters_applications_ingress) } - - before do - allow(ClusterWaitForIngressIpAddressWorker).to receive(:perform_in) - allow(ClusterWaitForIngressIpAddressWorker).to receive(:perform_async) - end - - it_behaves_like 'having unique enum values' - - include_examples 'cluster application core specs', :clusters_applications_ingress - include_examples 'cluster application status specs', :clusters_applications_ingress - include_examples 'cluster application version specs', :clusters_applications_ingress - include_examples 'cluster application helm specs', :clusters_applications_ingress - include_examples 'cluster application initial status 
specs' - - describe 'default values' do - it { expect(subject.ingress_type).to eq("nginx") } - it { expect(subject.version).to eq(described_class::VERSION) } - end - - describe '#can_uninstall?' do - subject { ingress.can_uninstall? } - - context 'with jupyter installed' do - before do - create(:clusters_applications_jupyter, :installed, cluster: ingress.cluster) - end - - it 'returns false if external_ip_or_hostname? is true' do - ingress.external_ip = 'IP' - - is_expected.to be_falsey - end - - it 'returns false if external_ip_or_hostname? is false' do - is_expected.to be_falsey - end - end - - context 'with jupyter installable' do - before do - create(:clusters_applications_jupyter, :installable, cluster: ingress.cluster) - end - - it 'returns true if external_ip_or_hostname? is true' do - ingress.external_ip = 'IP' - - is_expected.to be_truthy - end - - it 'returns false if external_ip_or_hostname? is false' do - is_expected.to be_falsey - end - end - - context 'with jupyter nil' do - it 'returns false if external_ip_or_hostname? is false' do - is_expected.to be_falsey - end - - context 'if external_ip_or_hostname? is true' do - context 'with IP' do - before do - ingress.external_ip = 'IP' - end - - it { is_expected.to be_truthy } - end - - context 'with hostname' do - before do - ingress.external_hostname = 'example.com' - end - - it { is_expected.to be_truthy } - end - end - end - end - - describe '#make_installed!' do - before do - application.make_installed! 
- end - - let(:application) { create(:clusters_applications_ingress, :installing) } - - it 'schedules a ClusterWaitForIngressIpAddressWorker' do - expect(ClusterWaitForIngressIpAddressWorker).to have_received(:perform_in) - .with(Clusters::Applications::Ingress::FETCH_IP_ADDRESS_DELAY, 'ingress', application.id) - end - end - - describe '#schedule_status_update' do - let(:application) { create(:clusters_applications_ingress, :installed) } - - before do - application.schedule_status_update - end - - it 'schedules a ClusterWaitForIngressIpAddressWorker' do - expect(ClusterWaitForIngressIpAddressWorker).to have_received(:perform_async) - .with('ingress', application.id) - end - - context 'when the application is not installed' do - let(:application) { create(:clusters_applications_ingress, :installing) } - - it 'does not schedule a ClusterWaitForIngressIpAddressWorker' do - expect(ClusterWaitForIngressIpAddressWorker).not_to have_received(:perform_async) - end - end - - context 'when there is already an external_ip' do - let(:application) { create(:clusters_applications_ingress, :installed, external_ip: '111.222.222.111') } - - it 'does not schedule a ClusterWaitForIngressIpAddressWorker' do - expect(ClusterWaitForIngressIpAddressWorker).not_to have_received(:perform_in) - end - end - - context 'when there is already an external_hostname' do - let(:application) { create(:clusters_applications_ingress, :installed, external_hostname: 'localhost.localdomain') } - - it 'does not schedule a ClusterWaitForIngressIpAddressWorker' do - expect(ClusterWaitForIngressIpAddressWorker).not_to have_received(:perform_in) - end - end - end - - describe '#install_command' do - subject { ingress.install_command } - - it { is_expected.to be_an_instance_of(Gitlab::Kubernetes::Helm::V3::InstallCommand) } - - it 'is initialized with ingress arguments' do - expect(subject.name).to eq('ingress') - expect(subject.chart).to eq('ingress/nginx-ingress') - expect(subject.version).to eq('1.40.2') - 
expect(subject).to be_rbac - expect(subject.files).to eq(ingress.files) - end - - context 'on a non rbac enabled cluster' do - before do - ingress.cluster.platform_kubernetes.abac! - end - - it { is_expected.not_to be_rbac } - end - - context 'application failed to install previously' do - let(:ingress) { create(:clusters_applications_ingress, :errored, version: 'nginx') } - - it 'is initialized with the locked version' do - expect(subject.version).to eq('1.40.2') - end - end - end - - describe '#files' do - let(:application) { ingress } - let(:values) { subject[:'values.yaml'] } - - subject { application.files } - - it 'includes ingress valid keys in values' do - expect(values).to include('image') - expect(values).to include('repository') - expect(values).to include('stats') - expect(values).to include('podAnnotations') - expect(values).to include('clusterIP') - end - end -end diff --git a/spec/models/clusters/applications/jupyter_spec.rb b/spec/models/clusters/applications/jupyter_spec.rb deleted file mode 100644 index 9336d2352f8..00000000000 --- a/spec/models/clusters/applications/jupyter_spec.rb +++ /dev/null @@ -1,130 +0,0 @@ -# frozen_string_literal: true - -require 'spec_helper' - -RSpec.describe Clusters::Applications::Jupyter do - include_examples 'cluster application core specs', :clusters_applications_jupyter - include_examples 'cluster application status specs', :clusters_applications_jupyter - include_examples 'cluster application version specs', :clusters_applications_jupyter - include_examples 'cluster application helm specs', :clusters_applications_jupyter - - it { is_expected.to belong_to(:oauth_application) } - - describe 'default values' do - it { expect(subject.version).to eq(described_class::VERSION) } - end - - describe '#can_uninstall?' 
do - let(:ingress) { create(:clusters_applications_ingress, :installed, external_hostname: 'localhost.localdomain') } - let(:jupyter) { create(:clusters_applications_jupyter, cluster: ingress.cluster) } - - subject { jupyter.can_uninstall? } - - it { is_expected.to be_truthy } - end - - describe '#set_initial_status' do - before do - jupyter.set_initial_status - end - - context 'when ingress is not installed' do - let(:cluster) { create(:cluster, :provided_by_gcp) } - let(:jupyter) { create(:clusters_applications_jupyter, cluster: cluster) } - - it { expect(jupyter).to be_not_installable } - end - - context 'when ingress is installed and external_ip is assigned' do - let(:ingress) { create(:clusters_applications_ingress, :installed, external_ip: '127.0.0.1') } - let(:jupyter) { create(:clusters_applications_jupyter, cluster: ingress.cluster) } - - it { expect(jupyter).to be_installable } - end - - context 'when ingress is installed and external_hostname is assigned' do - let(:ingress) { create(:clusters_applications_ingress, :installed, external_hostname: 'localhost.localdomain') } - let(:jupyter) { create(:clusters_applications_jupyter, cluster: ingress.cluster) } - - it { expect(jupyter).to be_installable } - end - end - - describe '#install_command' do - let!(:ingress) { create(:clusters_applications_ingress, :installed, external_ip: '127.0.0.1') } - let!(:jupyter) { create(:clusters_applications_jupyter, cluster: ingress.cluster) } - - subject { jupyter.install_command } - - it { is_expected.to be_an_instance_of(Gitlab::Kubernetes::Helm::V3::InstallCommand) } - - it 'is initialized with 4 arguments' do - expect(subject.name).to eq('jupyter') - expect(subject.chart).to eq('jupyter/jupyterhub') - expect(subject.version).to eq('0.9.0') - - expect(subject).to be_rbac - expect(subject.repository).to eq('https://jupyterhub.github.io/helm-chart/') - expect(subject.files).to eq(jupyter.files) - end - - context 'on a non rbac enabled cluster' do - before do - 
jupyter.cluster.platform_kubernetes.abac! - end - - it { is_expected.not_to be_rbac } - end - - context 'application failed to install previously' do - let(:jupyter) { create(:clusters_applications_jupyter, :errored, version: '0.0.1') } - - it 'is initialized with the locked version' do - expect(subject.version).to eq('0.9.0') - end - end - end - - describe '#files' do - let(:cluster) { create(:cluster, :with_installed_helm, :provided_by_gcp, :project) } - let(:application) { create(:clusters_applications_jupyter, cluster: cluster) } - let(:values) { subject[:'values.yaml'] } - - subject { application.files } - - context 'when cluster belongs to a project' do - it 'includes valid values' do - expect(values).to include('ingress') - expect(values).to include('hub') - expect(values).to include('proxy') - expect(values).to include('auth') - expect(values).to include('singleuser') - expect(values).to match(/clientId: '?#{application.oauth_application.uid}/) - expect(values).to match(/callbackUrl: '?#{application.callback_url}/) - expect(values).to include("gitlabProjectIdWhitelist:\n - #{application.cluster.project.id}") - expect(values).to include("c.GitLabOAuthenticator.scope = ['api read_repository write_repository']") - expect(values).to match(/GITLAB_HOST: '?#{Gitlab.config.gitlab.host}/) - expect(values).to match(/GITLAB_CLUSTER_ID: '?#{application.cluster.id}/) - end - end - - context 'when cluster belongs to a group' do - let(:group) { create(:group) } - let(:cluster) { create(:cluster, :with_installed_helm, :provided_by_gcp, :group, groups: [group]) } - - it 'includes valid values' do - expect(values).to include('ingress') - expect(values).to include('hub') - expect(values).to include('proxy') - expect(values).to include('auth') - expect(values).to include('singleuser') - expect(values).to match(/clientId: '?#{application.oauth_application.uid}/) - expect(values).to match(/callbackUrl: '?#{application.callback_url}/) - expect(values).to 
include("gitlabGroupWhitelist:\n - #{group.to_param}") - expect(values).to include("c.GitLabOAuthenticator.scope = ['api read_repository write_repository']") - expect(values).to match(/GITLAB_HOST: '?#{Gitlab.config.gitlab.host}/) - expect(values).to match(/GITLAB_CLUSTER_ID: '?#{application.cluster.id}/) - end - end - end -end diff --git a/spec/models/clusters/applications/knative_spec.rb b/spec/models/clusters/applications/knative_spec.rb deleted file mode 100644 index 91e90de02c0..00000000000 --- a/spec/models/clusters/applications/knative_spec.rb +++ /dev/null @@ -1,235 +0,0 @@ -# frozen_string_literal: true - -require 'spec_helper' - -RSpec.describe Clusters::Applications::Knative do - let(:knative) { create(:clusters_applications_knative) } - - before do - allow(ClusterWaitForIngressIpAddressWorker).to receive(:perform_in) - allow(ClusterWaitForIngressIpAddressWorker).to receive(:perform_async) - allow(ClusterConfigureIstioWorker).to receive(:perform_async) - end - - include_examples 'cluster application core specs', :clusters_applications_knative - include_examples 'cluster application status specs', :clusters_applications_knative - include_examples 'cluster application helm specs', :clusters_applications_knative - include_examples 'cluster application version specs', :clusters_applications_knative - include_examples 'cluster application initial status specs' - - describe 'default values' do - it { expect(subject.version).to eq(described_class::VERSION) } - end - - describe 'when cloud run is enabled' do - let(:cluster) { create(:cluster, :provided_by_gcp, :cloud_run_enabled) } - let(:knative_cloud_run) { create(:clusters_applications_knative, cluster: cluster) } - - it { expect(knative_cloud_run).to be_not_installable } - end - - describe 'when rbac is not enabled' do - let(:cluster) { create(:cluster, :provided_by_gcp, :rbac_disabled) } - let(:knative_no_rbac) { create(:clusters_applications_knative, cluster: cluster) } - - it { expect(knative_no_rbac).to 
be_not_installable } - end - - describe 'make_installed with external_ip' do - before do - application.make_installed! - end - - let(:application) { create(:clusters_applications_knative, :installing) } - - it 'schedules a ClusterWaitForIngressIpAddressWorker' do - expect(ClusterWaitForIngressIpAddressWorker).to have_received(:perform_in) - .with(Clusters::Applications::Knative::FETCH_IP_ADDRESS_DELAY, 'knative', application.id) - end - end - - describe 'configuring istio ingress gateway' do - context 'after installed' do - let(:application) { create(:clusters_applications_knative, :installing) } - - before do - application.make_installed! - end - - it 'schedules a ClusterConfigureIstioWorker' do - expect(ClusterConfigureIstioWorker).to have_received(:perform_async).with(application.cluster_id) - end - end - - context 'after updated' do - let(:application) { create(:clusters_applications_knative, :updating) } - - before do - application.make_installed! - end - - it 'schedules a ClusterConfigureIstioWorker' do - expect(ClusterConfigureIstioWorker).to have_received(:perform_async).with(application.cluster_id) - end - end - end - - describe '#can_uninstall?' do - subject { knative.can_uninstall? 
} - - it { is_expected.to be_truthy } - end - - describe '#schedule_status_update with external_ip' do - let(:application) { create(:clusters_applications_knative, :installed) } - - before do - application.schedule_status_update - end - - it 'schedules a ClusterWaitForIngressIpAddressWorker' do - expect(ClusterWaitForIngressIpAddressWorker).to have_received(:perform_async) - .with('knative', application.id) - end - - context 'when the application is not installed' do - let(:application) { create(:clusters_applications_knative, :installing) } - - it 'does not schedule a ClusterWaitForIngressIpAddressWorker' do - expect(ClusterWaitForIngressIpAddressWorker).not_to have_received(:perform_async) - end - end - - context 'when there is already an external_ip' do - let(:application) { create(:clusters_applications_knative, :installed, external_ip: '111.222.222.111') } - - it 'does not schedule a ClusterWaitForIngressIpAddressWorker' do - expect(ClusterWaitForIngressIpAddressWorker).not_to have_received(:perform_in) - end - end - - context 'when there is already an external_hostname' do - let(:application) { create(:clusters_applications_knative, :installed, external_hostname: 'localhost.localdomain') } - - it 'does not schedule a ClusterWaitForIngressIpAddressWorker' do - expect(ClusterWaitForIngressIpAddressWorker).not_to have_received(:perform_in) - end - end - end - - shared_examples 'a command' do - it 'is an instance of Helm::InstallCommand' do - expect(subject).to be_an_instance_of(Gitlab::Kubernetes::Helm::V3::InstallCommand) - end - - it 'is initialized with knative arguments' do - expect(subject.name).to eq('knative') - expect(subject.chart).to eq('knative/knative') - expect(subject.files).to eq(knative.files) - end - - it 'does not install metrics for prometheus' do - expect(subject.postinstall).to be_empty - end - end - - describe '#install_command' do - subject { knative.install_command } - - it 'is initialized with latest version' do - 
expect(subject.version).to eq('0.10.0') - end - - it_behaves_like 'a command' - end - - describe '#update_command' do - let!(:current_installed_version) { knative.version = '0.1.0' } - - subject { knative.update_command } - - it 'is initialized with current version' do - expect(subject.version).to eq(current_installed_version) - end - - it_behaves_like 'a command' - end - - describe '#uninstall_command' do - subject { knative.uninstall_command } - - it { is_expected.to be_an_instance_of(Gitlab::Kubernetes::Helm::V3::DeleteCommand) } - - it "removes knative deployed services before uninstallation" do - 2.times do |i| - cluster_project = create(:cluster_project, cluster: knative.cluster) - - create(:cluster_kubernetes_namespace, - cluster: cluster_project.cluster, - cluster_project: cluster_project, - project: cluster_project.project, - namespace: "namespace_#{i}") - end - - remove_namespaced_services_script = [ - "kubectl delete ksvc --all -n #{knative.cluster.kubernetes_namespaces.first.namespace}", - "kubectl delete ksvc --all -n #{knative.cluster.kubernetes_namespaces.second.namespace}" - ] - - expect(subject.predelete).to match_array(remove_namespaced_services_script) - end - - it "initializes command with all necessary postdelete script" do - api_groups = YAML.safe_load(File.read(Rails.root.join(Clusters::Applications::Knative::API_GROUPS_PATH))) - - remove_knative_istio_leftovers_script = [ - "kubectl delete --ignore-not-found ns knative-serving", - "kubectl delete --ignore-not-found ns knative-build" - ] - - full_delete_commands_size = api_groups.size + remove_knative_istio_leftovers_script.size - - expect(subject.postdelete).to include(*remove_knative_istio_leftovers_script) - expect(subject.postdelete.size).to eq(full_delete_commands_size) - expect(subject.postdelete[2]).to include("kubectl api-resources -o name --api-group #{api_groups[0]} | xargs -r kubectl delete --ignore-not-found crd") - expect(subject.postdelete[3]).to include("kubectl api-resources 
-o name --api-group #{api_groups[1]} | xargs -r kubectl delete --ignore-not-found crd") - end - end - - describe '#files' do - let(:application) { knative } - let(:values) { subject[:'values.yaml'] } - - subject { application.files } - - it 'includes knative specific keys in the values.yaml file' do - expect(values).to include('domain') - end - end - - describe 'validations' do - it { is_expected.to validate_presence_of(:hostname) } - end - - describe '#available_domains' do - let!(:domain) { create(:pages_domain, :instance_serverless) } - - it 'returns all instance serverless domains' do - expect(PagesDomain).to receive(:instance_serverless).and_call_original - - domains = subject.available_domains - - expect(domains.length).to eq(1) - expect(domains).to include(domain) - end - end - - describe '#find_available_domain' do - let!(:domain) { create(:pages_domain, :instance_serverless) } - - it 'returns the domain scoped to available domains' do - expect(subject).to receive(:available_domains).and_call_original - expect(subject.find_available_domain(domain.id)).to eq(domain) - end - end -end diff --git a/spec/models/clusters/applications/runner_spec.rb b/spec/models/clusters/applications/runner_spec.rb deleted file mode 100644 index 04b5ae9641d..00000000000 --- a/spec/models/clusters/applications/runner_spec.rb +++ /dev/null @@ -1,127 +0,0 @@ -# frozen_string_literal: true - -require 'spec_helper' - -RSpec.describe Clusters::Applications::Runner do - let(:ci_runner) { create(:ci_runner) } - - include_examples 'cluster application core specs', :clusters_applications_runner - include_examples 'cluster application status specs', :clusters_applications_runner - include_examples 'cluster application version specs', :clusters_applications_runner - include_examples 'cluster application helm specs', :clusters_applications_runner - include_examples 'cluster application initial status specs' - - it { is_expected.to belong_to(:runner) } - - describe 'default values' do - it { 
expect(subject.version).to eq(described_class::VERSION) } - end - - describe '#can_uninstall?' do - let(:gitlab_runner) { create(:clusters_applications_runner, runner: ci_runner) } - - subject { gitlab_runner.can_uninstall? } - - it { is_expected.to be_truthy } - end - - describe '#install_command' do - let(:kubeclient) { double('kubernetes client') } - let(:gitlab_runner) { create(:clusters_applications_runner, runner: ci_runner) } - - subject { gitlab_runner.install_command } - - it { is_expected.to be_an_instance_of(Gitlab::Kubernetes::Helm::V3::InstallCommand) } - - it 'is initialized with 4 arguments' do - expect(subject.name).to eq('runner') - expect(subject.chart).to eq('runner/gitlab-runner') - expect(subject.version).to eq(Clusters::Applications::Runner::VERSION) - expect(subject).to be_rbac - expect(subject.repository).to eq('https://charts.gitlab.io') - expect(subject.files).to eq(gitlab_runner.files) - end - - context 'on a non rbac enabled cluster' do - before do - gitlab_runner.cluster.platform_kubernetes.abac! 
- end - - it { is_expected.not_to be_rbac } - end - - context 'application failed to install previously' do - let(:gitlab_runner) { create(:clusters_applications_runner, :errored, runner: ci_runner, version: '0.1.13') } - - it 'is initialized with the locked version' do - expect(subject.version).to eq(Clusters::Applications::Runner::VERSION) - end - end - end - - describe '#files' do - let(:application) { create(:clusters_applications_runner, runner: ci_runner) } - let(:values) { subject[:'values.yaml'] } - - subject { application.files } - - it 'includes runner valid values' do - expect(values).to include('concurrent') - expect(values).to include('checkInterval') - expect(values).to include('rbac') - expect(values).to include('runners') - expect(values).to include('privileged: true') - expect(values).to include('image: ubuntu:16.04') - expect(values).to include('resources') - expect(values).to match(/gitlabUrl: ['"]?#{Regexp.escape(Gitlab::Routing.url_helpers.root_url)}/) - end - - context 'with duplicated values on vendor/runner/values.yaml' do - let(:stub_values) do - { - "concurrent" => 4, - "checkInterval" => 3, - "rbac" => { - "create" => false - }, - "clusterWideAccess" => false, - "runners" => { - "privileged" => false, - "image" => "ubuntu:16.04", - "builds" => {}, - "services" => {}, - "helpers" => {} - } - } - end - - before do - allow(application).to receive(:chart_values).and_return(stub_values) - end - - it 'overwrites values.yaml' do - expect(values).to match(/privileged: '?#{application.privileged}/) - end - end - end - - describe '#make_uninstalling!' do - subject { create(:clusters_applications_runner, :scheduled, runner: ci_runner) } - - it 'calls prepare_uninstall' do - expect_next_instance_of(described_class) do |instance| - expect(instance).to receive(:prepare_uninstall).and_call_original - end - - subject.make_uninstalling! 
- end - end - - describe '#post_uninstall' do - it 'destroys its runner' do - application_runner = create(:clusters_applications_runner, :scheduled, runner: ci_runner) - - expect { application_runner.post_uninstall }.to change { Ci::Runner.count }.by(-1) - end - end -end diff --git a/spec/models/clusters/cluster_spec.rb b/spec/models/clusters/cluster_spec.rb index 2d46714eb22..189affa3c88 100644 --- a/spec/models/clusters/cluster_spec.rb +++ b/spec/models/clusters/cluster_spec.rb @@ -3,7 +3,7 @@ require 'spec_helper' RSpec.describe Clusters::Cluster, :use_clean_rails_memory_store_caching, -feature_category: :kubernetes_management do +feature_category: :deployment_management do include ReactiveCachingHelpers include KubernetesHelpers @@ -23,9 +23,6 @@ feature_category: :kubernetes_management do it { is_expected.to have_one(:provider_aws) } it { is_expected.to have_one(:platform_kubernetes) } it { is_expected.to have_one(:integration_prometheus) } - it { is_expected.to have_one(:application_helm) } - it { is_expected.to have_one(:application_ingress) } - it { is_expected.to have_one(:application_runner) } it { is_expected.to have_many(:kubernetes_namespaces) } it { is_expected.to have_one(:cluster_project) } it { is_expected.to have_many(:deployment_clusters) } @@ -35,8 +32,6 @@ feature_category: :kubernetes_management do it { is_expected.to delegate_method(:status).to(:provider) } it { is_expected.to delegate_method(:status_reason).to(:provider) } - it { is_expected.to delegate_method(:external_ip).to(:application_ingress).with_prefix } - it { is_expected.to delegate_method(:external_hostname).to(:application_ingress).with_prefix } it { is_expected.to respond_to :project } it { is_expected.to be_namespace_per_environment } @@ -49,15 +44,6 @@ feature_category: :kubernetes_management do let(:factory_name) { :cluster } end - describe 'applications have inverse_of: :cluster option' do - let(:cluster) { create(:cluster) } - let!(:helm) { 
create(:clusters_applications_helm, cluster: cluster) } - - it 'does not do a third query when referencing cluster again' do - expect { cluster.application_helm.cluster }.not_to exceed_query_limit(2) - end - end - describe '.enabled' do subject { described_class.enabled } @@ -692,102 +678,6 @@ feature_category: :kubernetes_management do end end - describe '.with_persisted_applications' do - let(:cluster) { create(:cluster) } - let!(:helm) { create(:clusters_applications_helm, :installed, cluster: cluster) } - - it 'preloads persisted applications' do - query_rec = ActiveRecord::QueryRecorder.new do - described_class.with_persisted_applications.find_by_id(cluster.id).application_helm - end - - expect(query_rec.count).to eq(1) - end - end - - describe '#persisted_applications' do - let(:cluster) { create(:cluster) } - - subject { cluster.persisted_applications } - - context 'when all applications are created' do - let!(:helm) { create(:clusters_applications_helm, cluster: cluster) } - let!(:ingress) { create(:clusters_applications_ingress, cluster: cluster) } - let!(:runner) { create(:clusters_applications_runner, cluster: cluster) } - let!(:jupyter) { create(:clusters_applications_jupyter, cluster: cluster) } - let!(:knative) { create(:clusters_applications_knative, cluster: cluster) } - - it 'returns a list of created applications' do - is_expected.to contain_exactly(helm, ingress, runner, jupyter, knative) - end - end - - context 'when not all were created' do - let!(:helm) { create(:clusters_applications_helm, cluster: cluster) } - let!(:ingress) { create(:clusters_applications_ingress, cluster: cluster) } - - it 'returns a list of created applications' do - is_expected.to contain_exactly(helm, ingress) - end - end - end - - describe '#applications' do - let_it_be(:cluster, reload: true) { create(:cluster) } - - subject { cluster.applications } - - context 'when none of applications are created' do - it 'returns a list of a new objects' do - is_expected.not_to 
be_empty - end - end - - context 'when applications are created' do - let(:cluster) { create(:cluster, :with_all_applications) } - - it 'returns a list of created applications', :aggregate_failures do - is_expected.to have_attributes(size: described_class::APPLICATIONS.size) - is_expected.to all(be_kind_of(::Clusters::Concerns::ApplicationCore)) - is_expected.to all(be_persisted) - end - end - end - - describe '#find_or_build_application' do - let_it_be(:cluster, reload: true) { create(:cluster) } - - it 'rejects classes that are not applications' do - expect do - cluster.find_or_build_application(Project) - end.to raise_error(ArgumentError) - end - - context 'when none of applications are created' do - it 'returns the new application', :aggregate_failures do - described_class::APPLICATIONS.values.each do |application_class| - application = cluster.find_or_build_application(application_class) - - expect(application).to be_a(application_class) - expect(application).not_to be_persisted - end - end - end - - context 'when application is persisted' do - let(:cluster) { create(:cluster, :with_all_applications) } - - it 'returns the persisted application', :aggregate_failures do - described_class::APPLICATIONS.each_value do |application_class| - application = cluster.find_or_build_application(application_class) - - expect(application).to be_kind_of(::Clusters::Concerns::ApplicationCore) - expect(application).to be_persisted - end - end - end - end - describe '#allow_user_defined_namespace?' do subject { cluster.allow_user_defined_namespace? 
} @@ -837,7 +727,7 @@ feature_category: :kubernetes_management do describe '#all_projects' do context 'cluster_type is project_type' do let(:project) { create(:project) } - let(:cluster) { create(:cluster, :with_installed_helm, projects: [project]) } + let(:cluster) { create(:cluster, projects: [project]) } it 'returns projects' do expect(cluster.all_projects).to match_array [project] @@ -847,7 +737,7 @@ feature_category: :kubernetes_management do context 'cluster_type is group_type' do let(:group) { create(:group) } let!(:project) { create(:project, group: group) } - let(:cluster) { create(:cluster_for_group, :with_installed_helm, groups: [group]) } + let(:cluster) { create(:cluster_for_group, groups: [group]) } it 'returns group projects' do expect(cluster.all_projects.ids).to match_array [project.id] @@ -1426,36 +1316,6 @@ feature_category: :kubernetes_management do end end - describe '#knative_pre_installed?' do - subject(:knative_pre_installed?) { cluster.knative_pre_installed? } - - before do - allow(cluster).to receive(:provider).and_return(provider) - end - - context 'without provider' do - let(:provider) {} - - it { is_expected.to eq(false) } - end - - context 'with provider' do - let(:provider) { instance_double(Clusters::Providers::Aws, knative_pre_installed?: knative_pre_installed?) } - - context 'with knative_pre_installed? set to true' do - let(:knative_pre_installed?) { true } - - it { is_expected.to eq(true) } - end - - context 'with knative_pre_installed? set to false' do - let(:knative_pre_installed?) { false } - - it { is_expected.to eq(false) } - end - end - end - describe '#platform_kubernetes_active?' do subject(:platform_kubernetes_active?) { cluster.platform_kubernetes_active? } @@ -1515,94 +1375,4 @@ feature_category: :kubernetes_management do end end end - - describe '#application_helm_available?' do - subject(:application_helm_available?) { cluster.application_helm_available? 
} - - before do - allow(cluster).to receive(:application_helm).and_return(application_helm) - end - - context 'without application_helm' do - let(:application_helm) {} - - it { is_expected.to eq(false) } - end - - context 'with application_helm' do - let(:application_helm) { instance_double(Clusters::Applications::Helm, available?: available?) } - - context 'with available? set to true' do - let(:available?) { true } - - it { is_expected.to eq(true) } - end - - context 'with available? set to false' do - let(:available?) { false } - - it { is_expected.to eq(false) } - end - end - end - - describe '#application_ingress_available?' do - subject(:application_ingress_available?) { cluster.application_ingress_available? } - - before do - allow(cluster).to receive(:application_ingress).and_return(application_ingress) - end - - context 'without application_ingress' do - let(:application_ingress) {} - - it { is_expected.to eq(false) } - end - - context 'with application_ingress' do - let(:application_ingress) { instance_double(Clusters::Applications::Ingress, available?: available?) } - - context 'with available? set to true' do - let(:available?) { true } - - it { is_expected.to eq(true) } - end - - context 'with available? set to false' do - let(:available?) { false } - - it { is_expected.to eq(false) } - end - end - end - - describe '#application_knative_available?' do - subject(:application_knative_available?) { cluster.application_knative_available? } - - before do - allow(cluster).to receive(:application_knative).and_return(application_knative) - end - - context 'without application_knative' do - let(:application_knative) {} - - it { is_expected.to eq(false) } - end - - context 'with application_knative' do - let(:application_knative) { instance_double(Clusters::Applications::Knative, available?: available?) } - - context 'with available? set to true' do - let(:available?) { true } - - it { is_expected.to eq(true) } - end - - context 'with available? 
set to false' do - let(:available?) { false } - - it { is_expected.to eq(false) } - end - end - end end diff --git a/spec/models/clusters/integrations/prometheus_spec.rb b/spec/models/clusters/integrations/prometheus_spec.rb index d6d1105cdb1..f7ab0ae067c 100644 --- a/spec/models/clusters/integrations/prometheus_spec.rb +++ b/spec/models/clusters/integrations/prometheus_spec.rb @@ -28,7 +28,7 @@ RSpec.describe Clusters::Integrations::Prometheus do describe 'after_destroy' do subject(:integration) { create(:clusters_integrations_prometheus, cluster: cluster, enabled: true) } - let(:cluster) { create(:cluster, :with_installed_helm) } + let(:cluster) { create(:cluster) } it 'deactivates prometheus_integration' do expect(Clusters::Applications::DeactivateIntegrationWorker) @@ -41,7 +41,7 @@ RSpec.describe Clusters::Integrations::Prometheus do describe 'after_save' do subject(:integration) { create(:clusters_integrations_prometheus, cluster: cluster, enabled: enabled) } - let(:cluster) { create(:cluster, :with_installed_helm) } + let(:cluster) { create(:cluster) } let(:enabled) { true } context 'when no change to enabled status' do diff --git a/spec/models/compare_spec.rb b/spec/models/compare_spec.rb index dc8429fe77e..2206ed7bfe8 100644 --- a/spec/models/compare_spec.rb +++ b/spec/models/compare_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe Compare do +RSpec.describe Compare, feature_category: :source_code_management do include RepoHelpers let(:project) { create(:project, :public, :repository) } @@ -10,10 +10,11 @@ RSpec.describe Compare do let(:start_commit) { sample_image_commit } let(:head_commit) { sample_commit } + let(:straight) { false } let(:raw_compare) { Gitlab::Git::Compare.new(project.repository.raw_repository, start_commit.id, head_commit.id) } - subject(:compare) { described_class.new(raw_compare, project) } + subject(:compare) { described_class.new(raw_compare, project, straight: straight) } describe '#cache_key' do subject { 
compare.cache_key } @@ -147,4 +148,33 @@ RSpec.describe Compare do end end end + + describe '#to_param' do + subject { compare.to_param } + + let(:start_commit) { another_sample_commit } + let(:base_commit) { head_commit } + + it 'returns the range between base and head commits' do + is_expected.to eq(from: base_commit.id, to: head_commit.id) + end + + context 'when straight mode is on' do + let(:straight) { true } + + it 'returns the range between start and head commits' do + is_expected.to eq(from: start_commit.id, to: head_commit.id) + end + end + + context 'when there are no merge base between commits' do + before do + allow(project).to receive(:merge_base_commit).and_return(nil) + end + + it 'returns the range between start and head commits' do + is_expected.to eq(from: start_commit.id, to: head_commit.id) + end + end + end end diff --git a/spec/models/concerns/awareness_spec.rb b/spec/models/concerns/awareness_spec.rb deleted file mode 100644 index 67acacc7bb1..00000000000 --- a/spec/models/concerns/awareness_spec.rb +++ /dev/null @@ -1,39 +0,0 @@ -# frozen_string_literal: true - -require 'spec_helper' - -RSpec.describe Awareness, :clean_gitlab_redis_shared_state do - subject { create(:user) } - - let(:session) { AwarenessSession.for(1) } - - describe "when joining a session" do - it "increases the number of sessions" do - expect { subject.join(session) } - .to change { subject.session_ids.size } - .by(1) - end - end - - describe "when leaving session" do - it "decreases the number of sessions" do - subject.join(session) - - expect { subject.leave(session) } - .to change { subject.session_ids.size } - .by(-1) - end - end - - describe "when joining multiple sessions" do - let(:session2) { AwarenessSession.for(2) } - - it "increases number of active sessions for user" do - expect do - subject.join(session) - subject.join(session2) - end.to change { subject.session_ids.size } - .by(2) - end - end -end diff --git a/spec/models/concerns/ci/maskable_spec.rb 
b/spec/models/concerns/ci/maskable_spec.rb index 12157867062..6e648a39f8f 100644 --- a/spec/models/concerns/ci/maskable_spec.rb +++ b/spec/models/concerns/ci/maskable_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe Ci::Maskable, feature_category: :pipeline_composition do +RSpec.describe Ci::Maskable, feature_category: :secrets_management do let(:variable) { build(:ci_variable) } describe 'masked value validations' do diff --git a/spec/models/concerns/ci/partitionable/switch_spec.rb b/spec/models/concerns/ci/partitionable/switch_spec.rb index d955ad223f8..551ae111fa4 100644 --- a/spec/models/concerns/ci/partitionable/switch_spec.rb +++ b/spec/models/concerns/ci/partitionable/switch_spec.rb @@ -59,13 +59,14 @@ RSpec.describe Ci::Partitionable::Switch, :aggregate_failures do model.include(Ci::Partitionable) model.partitionable scope: ->(r) { 1 }, - through: { table: :_test_p_ci_jobs_metadata, flag: table_rollout_flag } + through: { table: :_test_p_ci_jobs_metadata, flag: table_rollout_flag } model.belongs_to :job, anonymous_class: jobs_model - jobs_model.has_one :metadata, anonymous_class: model, - foreign_key: :job_id, inverse_of: :job, - dependent: :destroy + jobs_model.has_one :metadata, + anonymous_class: model, + foreign_key: :job_id, inverse_of: :job, + dependent: :destroy allow(Feature::Definition).to receive(:get).and_call_original allow(Feature::Definition).to receive(:get).with(table_rollout_flag) diff --git a/spec/models/concerns/ci/partitionable_spec.rb b/spec/models/concerns/ci/partitionable_spec.rb index 5100f20ed25..d41654e547e 100644 --- a/spec/models/concerns/ci/partitionable_spec.rb +++ b/spec/models/concerns/ci/partitionable_spec.rb @@ -31,7 +31,7 @@ RSpec.describe Ci::Partitionable do ci_model.include(described_class) ci_model.partitionable scope: ->(r) { 1 }, - through: { table: :_test_table_name, flag: :some_flag } + through: { table: :_test_table_name, flag: :some_flag } end it { expect(ci_model.routing_table_name).to 
eq(:_test_table_name) } diff --git a/spec/models/concerns/ci/track_environment_usage_spec.rb b/spec/models/concerns/ci/track_environment_usage_spec.rb index d75972c49b5..ad89973eee5 100644 --- a/spec/models/concerns/ci/track_environment_usage_spec.rb +++ b/spec/models/concerns/ci/track_environment_usage_spec.rb @@ -8,10 +8,12 @@ RSpec.describe Ci::TrackEnvironmentUsage do context 'when build is the verify action for the environment' do let(:build) do - build_stubbed(:ci_build, - ref: 'master', - environment: 'staging', - options: { environment: { action: 'verify' } }) + build_stubbed( + :ci_build, + ref: 'master', + environment: 'staging', + options: { environment: { action: 'verify' } } + ) end it { is_expected.to be_truthy } @@ -19,10 +21,12 @@ RSpec.describe Ci::TrackEnvironmentUsage do context 'when build is not the verify action for the environment' do let(:build) do - build_stubbed(:ci_build, - ref: 'master', - environment: 'staging', - options: { environment: { action: 'start' } }) + build_stubbed( + :ci_build, + ref: 'master', + environment: 'staging', + options: { environment: { action: 'start' } } + ) end it { is_expected.to be_falsey } diff --git a/spec/models/concerns/clusters/agents/authorization_config_scopes_spec.rb b/spec/models/concerns/clusters/agents/authorization_config_scopes_spec.rb deleted file mode 100644 index a4d1a33b3d5..00000000000 --- a/spec/models/concerns/clusters/agents/authorization_config_scopes_spec.rb +++ /dev/null @@ -1,21 +0,0 @@ -# frozen_string_literal: true - -require 'spec_helper' - -RSpec.describe Clusters::Agents::AuthorizationConfigScopes do - describe '.with_available_ci_access_fields' do - let(:project) { create(:project) } - - let!(:agent_authorization_0) { create(:agent_project_authorization, project: project) } - let!(:agent_authorization_1) { create(:agent_project_authorization, project: project, config: { access_as: {} }) } - let!(:agent_authorization_2) { create(:agent_project_authorization, project: project, 
config: { access_as: { agent: {} } }) } - let!(:impersonate_authorization) { create(:agent_project_authorization, project: project, config: { access_as: { impersonate: {} } }) } - let!(:ci_user_authorization) { create(:agent_project_authorization, project: project, config: { access_as: { ci_user: {} } }) } - let!(:ci_job_authorization) { create(:agent_project_authorization, project: project, config: { access_as: { ci_job: {} } }) } - let!(:unexpected_authorization) { create(:agent_project_authorization, project: project, config: { access_as: { unexpected: {} } }) } - - subject { Clusters::Agents::ProjectAuthorization.with_available_ci_access_fields(project) } - - it { is_expected.to contain_exactly(agent_authorization_0, agent_authorization_1, agent_authorization_2) } - end -end diff --git a/spec/models/concerns/clusters/agents/authorizations/ci_access/config_scopes_spec.rb b/spec/models/concerns/clusters/agents/authorizations/ci_access/config_scopes_spec.rb new file mode 100644 index 00000000000..5c69ede11fc --- /dev/null +++ b/spec/models/concerns/clusters/agents/authorizations/ci_access/config_scopes_spec.rb @@ -0,0 +1,21 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe Clusters::Agents::Authorizations::CiAccess::ConfigScopes, feature_category: :deployment_management do + describe '.with_available_ci_access_fields' do + let(:project) { create(:project) } + + let!(:agent_authorization_0) { create(:agent_ci_access_project_authorization, project: project) } + let!(:agent_authorization_1) { create(:agent_ci_access_project_authorization, project: project, config: { access_as: {} }) } + let!(:agent_authorization_2) { create(:agent_ci_access_project_authorization, project: project, config: { access_as: { agent: {} } }) } + let!(:impersonate_authorization) { create(:agent_ci_access_project_authorization, project: project, config: { access_as: { impersonate: {} } }) } + let!(:ci_user_authorization) { 
create(:agent_ci_access_project_authorization, project: project, config: { access_as: { ci_user: {} } }) } + let!(:ci_job_authorization) { create(:agent_ci_access_project_authorization, project: project, config: { access_as: { ci_job: {} } }) } + let!(:unexpected_authorization) { create(:agent_ci_access_project_authorization, project: project, config: { access_as: { unexpected: {} } }) } + + subject { Clusters::Agents::Authorizations::CiAccess::ProjectAuthorization.with_available_ci_access_fields(project) } + + it { is_expected.to contain_exactly(agent_authorization_0, agent_authorization_1, agent_authorization_2) } + end +end diff --git a/spec/models/concerns/database_event_tracking_spec.rb b/spec/models/concerns/database_event_tracking_spec.rb index 976462b4174..cad82f971b3 100644 --- a/spec/models/concerns/database_event_tracking_spec.rb +++ b/spec/models/concerns/database_event_tracking_spec.rb @@ -3,6 +3,10 @@ require 'spec_helper' RSpec.describe DatabaseEventTracking, :snowplow do + before do + allow(Gitlab::Tracking).to receive(:database_event).and_call_original + end + let(:test_class) do Class.new(ActiveRecord::Base) do include DatabaseEventTracking @@ -17,7 +21,7 @@ RSpec.describe DatabaseEventTracking, :snowplow do context 'if event emmiter failed' do before do - allow(Gitlab::Tracking).to receive(:event).and_raise(StandardError) # rubocop:disable RSpec/ExpectGitlabTracking + allow(Gitlab::Tracking).to receive(:database_event).and_raise(StandardError) # rubocop:disable RSpec/ExpectGitlabTracking end it 'tracks the exception' do @@ -35,7 +39,7 @@ RSpec.describe DatabaseEventTracking, :snowplow do it 'does not track the event' do create_test_class_record - expect_no_snowplow_event + expect_no_snowplow_event(tracking_method: :database_event) end end @@ -46,24 +50,45 @@ RSpec.describe DatabaseEventTracking, :snowplow do it 'when created' do create_test_class_record - expect_snowplow_event(category: category, action: "#{event}_create", label: 
'application_setting_terms', - property: 'create', namespace: nil, "id" => 1) + expect_snowplow_event( + tracking_method: :database_event, + category: category, + action: "#{event}_create", + label: 'application_setting_terms', + property: 'create', + namespace: nil, + "id" => 1 + ) end it 'when updated' do create_test_class_record test_class.first.update!(id: 3) - expect_snowplow_event(category: category, action: "#{event}_update", label: 'application_setting_terms', - property: 'update', namespace: nil, "id" => 3) + expect_snowplow_event( + tracking_method: :database_event, + category: category, + action: "#{event}_update", + label: 'application_setting_terms', + property: 'update', + namespace: nil, + "id" => 3 + ) end it 'when destroyed' do create_test_class_record test_class.first.destroy! - expect_snowplow_event(category: category, action: "#{event}_destroy", label: 'application_setting_terms', - property: 'destroy', namespace: nil, "id" => 1) + expect_snowplow_event( + tracking_method: :database_event, + category: category, + action: "#{event}_destroy", + label: 'application_setting_terms', + property: 'destroy', + namespace: nil, + "id" => 1 + ) end end end diff --git a/spec/models/concerns/deployment_platform_spec.rb b/spec/models/concerns/deployment_platform_spec.rb index bd1afe844ac..9b086e9785e 100644 --- a/spec/models/concerns/deployment_platform_spec.rb +++ b/spec/models/concerns/deployment_platform_spec.rb @@ -56,13 +56,23 @@ RSpec.describe DeploymentPlatform do context 'when project does not have a cluster but has group clusters' do let!(:default_cluster) do - create(:cluster, :provided_by_user, - cluster_type: :group_type, groups: [group], environment_scope: '*') + create( + :cluster, + :provided_by_user, + cluster_type: :group_type, + groups: [group], + environment_scope: '*' + ) end let!(:cluster) do - create(:cluster, :provided_by_user, - cluster_type: :group_type, environment_scope: 'review/*', groups: [group]) + create( + :cluster, + 
:provided_by_user, + cluster_type: :group_type, + environment_scope: 'review/*', + groups: [group] + ) end let(:environment) { 'review/name' } @@ -99,8 +109,13 @@ RSpec.describe DeploymentPlatform do context 'when parent_group has a cluster with default scope' do let!(:parent_group_cluster) do - create(:cluster, :provided_by_user, - cluster_type: :group_type, environment_scope: '*', groups: [parent_group]) + create( + :cluster, + :provided_by_user, + cluster_type: :group_type, + environment_scope: '*', + groups: [parent_group] + ) end it_behaves_like 'matching environment scope' @@ -108,8 +123,13 @@ RSpec.describe DeploymentPlatform do context 'when parent_group has a cluster that is an exact match' do let!(:parent_group_cluster) do - create(:cluster, :provided_by_user, - cluster_type: :group_type, environment_scope: 'review/name', groups: [parent_group]) + create( + :cluster, + :provided_by_user, + cluster_type: :group_type, + environment_scope: 'review/name', + groups: [parent_group] + ) end it_behaves_like 'matching environment scope' @@ -160,8 +180,13 @@ RSpec.describe DeploymentPlatform do let!(:cluster) { create(:cluster, :provided_by_user, environment_scope: 'review/*', projects: [project]) } let!(:group_default_cluster) do - create(:cluster, :provided_by_user, - cluster_type: :group_type, groups: [group], environment_scope: '*') + create( + :cluster, + :provided_by_user, + cluster_type: :group_type, + groups: [group], + environment_scope: '*' + ) end let(:environment) { 'review/name' } diff --git a/spec/models/concerns/expirable_spec.rb b/spec/models/concerns/expirable_spec.rb index 50dfb138ac9..68a25917ce1 100644 --- a/spec/models/concerns/expirable_spec.rb +++ b/spec/models/concerns/expirable_spec.rb @@ -3,40 +3,52 @@ require 'spec_helper' RSpec.describe Expirable do - describe 'ProjectMember' do - let_it_be(:no_expire) { create(:project_member) } - let_it_be(:expire_later) { create(:project_member, expires_at: 8.days.from_now) } - let_it_be(:expired) { 
create(:project_member, expires_at: 1.day.from_now) } + let_it_be(:no_expire) { create(:project_member) } + let_it_be(:expire_later) { create(:project_member, expires_at: 8.days.from_now) } + let_it_be(:expired) { create(:project_member, expires_at: 1.day.from_now) } - before do - travel_to(3.days.from_now) - end + before do + travel_to(3.days.from_now) + end - describe '.expired' do - it { expect(ProjectMember.expired).to match_array([expired]) } - end + describe '.expired' do + it { expect(ProjectMember.expired).to match_array([expired]) } - describe '.not_expired' do - it { expect(ProjectMember.not_expired).to include(no_expire, expire_later) } - it { expect(ProjectMember.not_expired).not_to include(expired) } - end + it 'scopes the query when multiple models are expirable' do + expired_access_token = create(:personal_access_token, :expired, user: no_expire.user) - describe '#expired?' do - it { expect(no_expire.expired?).to eq(false) } - it { expect(expire_later.expired?).to eq(false) } - it { expect(expired.expired?).to eq(true) } + expect(PersonalAccessToken.expired.joins(user: :members)).to match_array([expired_access_token]) + expect(PersonalAccessToken.joins(user: :members).merge(ProjectMember.expired)).to eq([]) end - describe '#expires?' do - it { expect(no_expire.expires?).to eq(false) } - it { expect(expire_later.expires?).to eq(true) } - it { expect(expired.expires?).to eq(true) } - end + it 'works with a timestamp expired_at field', time_travel_to: '2022-03-14T11:30:00Z' do + expired_deploy_token = create(:deploy_token, expires_at: 5.minutes.ago.iso8601) - describe '#expires_soon?' 
do - it { expect(no_expire.expires_soon?).to eq(false) } - it { expect(expire_later.expires_soon?).to eq(true) } - it { expect(expired.expires_soon?).to eq(true) } + # Here verify that `expires_at` in the SQL uses `Time.current` instead of `Date.current` + expect(DeployToken.expired).to match_array([expired_deploy_token]) end end + + describe '.not_expired' do + it { expect(ProjectMember.not_expired).to include(no_expire, expire_later) } + it { expect(ProjectMember.not_expired).not_to include(expired) } + end + + describe '#expired?' do + it { expect(no_expire.expired?).to eq(false) } + it { expect(expire_later.expired?).to eq(false) } + it { expect(expired.expired?).to eq(true) } + end + + describe '#expires?' do + it { expect(no_expire.expires?).to eq(false) } + it { expect(expire_later.expires?).to eq(true) } + it { expect(expired.expires?).to eq(true) } + end + + describe '#expires_soon?' do + it { expect(no_expire.expires_soon?).to eq(false) } + it { expect(expire_later.expires_soon?).to eq(true) } + it { expect(expired.expires_soon?).to eq(true) } + end end diff --git a/spec/models/concerns/has_user_type_spec.rb b/spec/models/concerns/has_user_type_spec.rb index 03d2c267098..e7f041296b7 100644 --- a/spec/models/concerns/has_user_type_spec.rb +++ b/spec/models/concerns/has_user_type_spec.rb @@ -5,8 +5,9 @@ require 'spec_helper' RSpec.describe User, feature_category: :system_access do specify 'types consistency checks', :aggregate_failures do expect(described_class::USER_TYPES.keys) - .to match_array(%w[human ghost alert_bot project_bot support_bot service_user security_bot visual_review_bot - migration_bot automation_bot security_policy_bot admin_bot suggested_reviewers_bot service_account]) + .to match_array(%w[human human_deprecated ghost alert_bot project_bot support_bot service_user security_bot + visual_review_bot migration_bot automation_bot security_policy_bot admin_bot suggested_reviewers_bot + service_account llm_bot]) 
expect(described_class::USER_TYPES).to include(*described_class::BOT_USER_TYPES) expect(described_class::USER_TYPES).to include(*described_class::NON_INTERNAL_USER_TYPES) expect(described_class::USER_TYPES).to include(*described_class::INTERNAL_USER_TYPES) @@ -22,7 +23,13 @@ RSpec.describe User, feature_category: :system_access do describe '.humans' do it 'includes humans only' do - expect(described_class.humans).to match_array([human]) + expect(described_class.humans).to match_array([human, human_deprecated]) + end + end + + describe '.human' do + it 'includes humans only' do + expect(described_class.human).to match_array([human, human_deprecated]) end end @@ -69,6 +76,7 @@ RSpec.describe User, feature_category: :system_access do describe '#human?' do it 'is true for humans only' do expect(human).to be_human + expect(human_deprecated).to be_human expect(alert_bot).not_to be_human expect(User.new).to be_human end diff --git a/spec/models/concerns/issuable_spec.rb b/spec/models/concerns/issuable_spec.rb index 206b3ae61cf..ea6baac455f 100644 --- a/spec/models/concerns/issuable_spec.rb +++ b/spec/models/concerns/issuable_spec.rb @@ -150,8 +150,10 @@ RSpec.describe Issuable do end it 'gives preference to state_id if present' do - issuable = MergeRequest.new('state' => 'opened', - 'state_id' => described_class::STATE_ID_MAP['merged']) + issuable = MergeRequest.new( + 'state' => 'opened', + 'state_id' => described_class::STATE_ID_MAP['merged'] + ) expect(issuable.state).to eq('merged') expect(issuable.state_id).to eq(described_class::STATE_ID_MAP['merged']) diff --git a/spec/models/concerns/token_authenticatable_spec.rb b/spec/models/concerns/token_authenticatable_spec.rb index e53fdafe3b1..7367577914c 100644 --- a/spec/models/concerns/token_authenticatable_spec.rb +++ b/spec/models/concerns/token_authenticatable_spec.rb @@ -130,10 +130,7 @@ RSpec.describe PersonalAccessToken, 'TokenAuthenticatable' do let(:token_digest) { Gitlab::CryptoHelper.sha256(token_value) } 
let(:user) { create(:user) } let(:personal_access_token) do - described_class.new(name: 'test-pat-01', - user_id: user.id, - scopes: [:api], - token_digest: token_digest) + described_class.new(name: 'test-pat-01', user_id: user.id, scopes: [:api], token_digest: token_digest) end before do diff --git a/spec/models/container_repository_spec.rb b/spec/models/container_repository_spec.rb index e028a82ff76..7642ef44210 100644 --- a/spec/models/container_repository_spec.rb +++ b/spec/models/container_repository_spec.rb @@ -1722,4 +1722,19 @@ RSpec.describe ContainerRepository, :aggregate_failures, feature_category: :cont it { is_expected.to contain_exactly(*stale_migrations) } end + + describe '#registry' do + it 'caches the client' do + registry = repository.registry + registry1 = repository.registry + registry2 = nil + + travel_to(Time.current + Gitlab::CurrentSettings.container_registry_token_expire_delay.minutes) do + registry2 = repository.registry + end + + expect(registry1.object_id).to be(registry.object_id) + expect(registry2.object_id).not_to be(registry.object_id) + end + end end diff --git a/spec/models/design_management/git_repository_spec.rb b/spec/models/design_management/git_repository_spec.rb new file mode 100644 index 00000000000..1b07e337cde --- /dev/null +++ b/spec/models/design_management/git_repository_spec.rb @@ -0,0 +1,58 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe DesignManagement::GitRepository, feature_category: :design_management do + let_it_be(:project) { create(:project) } + let(:git_repository) { described_class.new(project) } + + shared_examples 'returns parsed git attributes that enable LFS for all file types' do + it do + expect(subject.patterns).to be_a_kind_of(Hash) + expect(subject.patterns).to have_key('/designs/*') + expect(subject.patterns['/designs/*']).to eql( + { "filter" => "lfs", "diff" => "lfs", "merge" => "lfs", "text" => false } + ) + end + end + + describe "#info_attributes" do + subject { 
git_repository.info_attributes } + + include_examples 'returns parsed git attributes that enable LFS for all file types' + end + + describe '#attributes_at' do + subject { git_repository.attributes_at } + + include_examples 'returns parsed git attributes that enable LFS for all file types' + end + + describe '#gitattribute' do + it 'returns a gitattribute when path has gitattributes' do + expect(git_repository.gitattribute('/designs/file.txt', 'filter')).to eq('lfs') + end + + it 'returns nil when path has no gitattributes' do + expect(git_repository.gitattribute('/invalid/file.txt', 'filter')).to be_nil + end + end + + describe '#copy_gitattributes' do + it 'always returns regardless of whether given a valid or invalid ref' do + expect(git_repository.copy_gitattributes('master')).to be true + expect(git_repository.copy_gitattributes('invalid')).to be true + end + end + + describe '#attributes' do + it 'confirms that all files are LFS enabled' do + %w[png zip anything].each do |filetype| + path = "/#{DesignManagement.designs_directory}/file.#{filetype}" + attributes = git_repository.attributes(path) + + expect(attributes['filter']).to eq('lfs') + end + end + end +end diff --git a/spec/models/design_management/repository_spec.rb b/spec/models/design_management/repository_spec.rb index 0115e0c139c..67cdba40f82 100644 --- a/spec/models/design_management/repository_spec.rb +++ b/spec/models/design_management/repository_spec.rb @@ -2,57 +2,16 @@ require 'spec_helper' -RSpec.describe DesignManagement::Repository do - let(:project) { create(:project) } - let(:repository) { described_class.new(project) } +RSpec.describe DesignManagement::Repository, feature_category: :design_management do + let_it_be(:project) { create(:project) } + let(:subject) { ::DesignManagement::Repository.new({ project: project }) } - shared_examples 'returns parsed git attributes that enable LFS for all file types' do - it do - expect(subject.patterns).to be_a_kind_of(Hash) - 
expect(subject.patterns).to have_key('/designs/*') - expect(subject.patterns['/designs/*']).to eql( - { "filter" => "lfs", "diff" => "lfs", "merge" => "lfs", "text" => false } - ) - end + describe 'associations' do + it { is_expected.to belong_to(:project).inverse_of(:design_management_repository) } end - describe "#info_attributes" do - subject { repository.info_attributes } - - include_examples 'returns parsed git attributes that enable LFS for all file types' - end - - describe '#attributes_at' do - subject { repository.attributes_at } - - include_examples 'returns parsed git attributes that enable LFS for all file types' - end - - describe '#gitattribute' do - it 'returns a gitattribute when path has gitattributes' do - expect(repository.gitattribute('/designs/file.txt', 'filter')).to eq('lfs') - end - - it 'returns nil when path has no gitattributes' do - expect(repository.gitattribute('/invalid/file.txt', 'filter')).to be_nil - end - end - - describe '#copy_gitattributes' do - it 'always returns regardless of whether given a valid or invalid ref' do - expect(repository.copy_gitattributes('master')).to be true - expect(repository.copy_gitattributes('invalid')).to be true - end - end - - describe '#attributes' do - it 'confirms that all files are LFS enabled' do - %w(png zip anything).each do |filetype| - path = "/#{DesignManagement.designs_directory}/file.#{filetype}" - attributes = repository.attributes(path) - - expect(attributes['filter']).to eq('lfs') - end - end + describe 'validations' do + it { is_expected.to validate_presence_of(:project) } + it { is_expected.to validate_uniqueness_of(:project) } end end diff --git a/spec/models/group_group_link_spec.rb b/spec/models/group_group_link_spec.rb index eec8fe0ef71..59370cf12d2 100644 --- a/spec/models/group_group_link_spec.rb +++ b/spec/models/group_group_link_spec.rb @@ -5,9 +5,29 @@ require 'spec_helper' RSpec.describe GroupGroupLink do let_it_be(:group) { create(:group) } let_it_be(:shared_group) { 
create(:group) } - let_it_be(:group_group_link) do - create(:group_group_link, shared_group: shared_group, - shared_with_group: group) + + describe 'validation' do + let_it_be(:group_group_link) do + create(:group_group_link, shared_group: shared_group, + shared_with_group: group) + end + + it { is_expected.to validate_presence_of(:shared_group) } + + it do + is_expected.to( + validate_uniqueness_of(:shared_group_id) + .scoped_to(:shared_with_group_id) + .with_message('The group has already been shared with this group')) + end + + it { is_expected.to validate_presence_of(:shared_with_group) } + it { is_expected.to validate_presence_of(:group_access) } + + it do + is_expected.to( + validate_inclusion_of(:group_access).in_array(Gitlab::Access.values)) + end end describe 'relations' do @@ -16,42 +36,51 @@ RSpec.describe GroupGroupLink do end describe 'scopes' do - describe '.non_guests' do - let!(:group_group_link_reporter) { create :group_group_link, :reporter } - let!(:group_group_link_maintainer) { create :group_group_link, :maintainer } - let!(:group_group_link_owner) { create :group_group_link, :owner } - let!(:group_group_link_guest) { create :group_group_link, :guest } - - it 'returns all records which are greater than Guests access' do - expect(described_class.non_guests).to match_array([ - group_group_link_reporter, group_group_link, - group_group_link_maintainer, group_group_link_owner - ]) - end - end - - describe '.with_owner_or_maintainer_access' do + context 'for scopes fetching records based on access levels' do + let_it_be(:group_group_link_guest) { create :group_group_link, :guest } + let_it_be(:group_group_link_reporter) { create :group_group_link, :reporter } + let_it_be(:group_group_link_developer) { create :group_group_link, :developer } let_it_be(:group_group_link_maintainer) { create :group_group_link, :maintainer } let_it_be(:group_group_link_owner) { create :group_group_link, :owner } - let_it_be(:group_group_link_reporter) { create 
:group_group_link, :reporter } - let_it_be(:group_group_link_guest) { create :group_group_link, :guest } - it 'returns all records which have OWNER or MAINTAINER access' do - expect(described_class.with_owner_or_maintainer_access).to match_array([ - group_group_link_maintainer, - group_group_link_owner - ]) + describe '.non_guests' do + it 'returns all records which are greater than Guests access' do + expect(described_class.non_guests).to match_array([ + group_group_link_reporter, group_group_link_developer, + group_group_link_maintainer, group_group_link_owner + ]) + end end - end - describe '.with_owner_access' do - let_it_be(:group_group_link_maintainer) { create :group_group_link, :maintainer } - let_it_be(:group_group_link_owner) { create :group_group_link, :owner } - let_it_be(:group_group_link_reporter) { create :group_group_link, :reporter } - let_it_be(:group_group_link_guest) { create :group_group_link, :guest } + describe '.with_owner_or_maintainer_access' do + it 'returns all records which have OWNER or MAINTAINER access' do + expect(described_class.with_owner_or_maintainer_access).to match_array([ + group_group_link_maintainer, + group_group_link_owner + ]) + end + end - it 'returns all records which have OWNER access' do - expect(described_class.with_owner_access).to match_array([group_group_link_owner]) + describe '.with_owner_access' do + it 'returns all records which have OWNER access' do + expect(described_class.with_owner_access).to match_array([group_group_link_owner]) + end + end + + describe '.with_developer_access' do + it 'returns all records which have DEVELOPER access' do + expect(described_class.with_developer_access).to match_array([group_group_link_developer]) + end + end + + describe '.with_developer_maintainer_owner_access' do + it 'returns all records which have DEVELOPER, MAINTAINER or OWNER access' do + expect(described_class.with_developer_maintainer_owner_access).to match_array([ + group_group_link_developer, + 
group_group_link_owner, + group_group_link_maintainer + ]) + end end end @@ -93,6 +122,15 @@ RSpec.describe GroupGroupLink do let_it_be(:sub_shared_group) { create(:group, parent: shared_group) } let_it_be(:other_group) { create(:group) } + let_it_be(:group_group_link_1) do + create( + :group_group_link, + shared_group: shared_group, + shared_with_group: group, + group_access: Gitlab::Access::DEVELOPER + ) + end + let_it_be(:group_group_link_2) do create( :group_group_link, @@ -125,7 +163,7 @@ RSpec.describe GroupGroupLink do expect(described_class.all.count).to eq(4) expect(distinct_group_group_links.count).to eq(2) - expect(distinct_group_group_links).to include(group_group_link) + expect(distinct_group_group_links).to include(group_group_link_1) expect(distinct_group_group_links).not_to include(group_group_link_2) expect(distinct_group_group_links).not_to include(group_group_link_3) expect(distinct_group_group_links).to include(group_group_link_4) @@ -133,27 +171,9 @@ RSpec.describe GroupGroupLink do end end - describe 'validation' do - it { is_expected.to validate_presence_of(:shared_group) } - - it do - is_expected.to( - validate_uniqueness_of(:shared_group_id) - .scoped_to(:shared_with_group_id) - .with_message('The group has already been shared with this group')) - end - - it { is_expected.to validate_presence_of(:shared_with_group) } - it { is_expected.to validate_presence_of(:group_access) } - - it do - is_expected.to( - validate_inclusion_of(:group_access).in_array(Gitlab::Access.values)) - end - end - describe '#human_access' do it 'delegates to Gitlab::Access' do + group_group_link = create(:group_group_link, :reporter) expect(Gitlab::Access).to receive(:human_access).with(group_group_link.group_access) group_group_link.human_access @@ -161,6 +181,8 @@ RSpec.describe GroupGroupLink do end describe 'search by group name' do + let_it_be(:group_group_link) { create(:group_group_link, :reporter, shared_with_group: group) } + it { 
expect(described_class.search(group.name)).to eq([group_group_link]) } it { expect(described_class.search('not-a-group-name')).to be_empty } end diff --git a/spec/models/group_label_spec.rb b/spec/models/group_label_spec.rb index ec9244d5eb5..6cd03a189e5 100644 --- a/spec/models/group_label_spec.rb +++ b/spec/models/group_label_spec.rb @@ -56,4 +56,39 @@ RSpec.describe GroupLabel do end end end + + describe '#preloaded_parent_container' do + let_it_be(:label) { create(:group_label) } + + before do + label.reload # ensure associations are not loaded + end + + context 'when group is loaded' do + it 'does not invoke a DB query' do + label.group + + count = ActiveRecord::QueryRecorder.new { label.preloaded_parent_container }.count + expect(count).to eq(0) + expect(label.preloaded_parent_container).to eq(label.group) + end + end + + context 'when parent_container is loaded' do + it 'does not invoke a DB query' do + label.parent_container + + count = ActiveRecord::QueryRecorder.new { label.preloaded_parent_container }.count + expect(count).to eq(0) + expect(label.preloaded_parent_container).to eq(label.parent_container) + end + end + + context 'when none of them are loaded' do + it 'invokes a DB query' do + count = ActiveRecord::QueryRecorder.new { label.preloaded_parent_container }.count + expect(count).to eq(1) + end + end + end end diff --git a/spec/models/group_spec.rb b/spec/models/group_spec.rb index 3134f2ba248..0bf4540f535 100644 --- a/spec/models/group_spec.rb +++ b/spec/models/group_spec.rb @@ -969,6 +969,23 @@ RSpec.describe Group, feature_category: :subgroups do end end + describe '.with_project_creation_levels' do + let_it_be(:group_1) { create(:group, project_creation_level: Gitlab::Access::NO_ONE_PROJECT_ACCESS) } + let_it_be(:group_2) { create(:group, project_creation_level: Gitlab::Access::DEVELOPER_MAINTAINER_PROJECT_ACCESS) } + let_it_be(:group_3) { create(:group, project_creation_level: Gitlab::Access::MAINTAINER_PROJECT_ACCESS) } + 
let_it_be(:group_4) { create(:group, project_creation_level: nil) } + + it 'returns groups with the specified project creation levels' do + result = described_class.with_project_creation_levels([ + Gitlab::Access::NO_ONE_PROJECT_ACCESS, + Gitlab::Access::MAINTAINER_PROJECT_ACCESS + ]) + + expect(result).to include(group_1, group_3) + expect(result).not_to include(group_2, group_4) + end + end + describe '.project_creation_allowed' do let_it_be(:group_1) { create(:group, project_creation_level: Gitlab::Access::NO_ONE_PROJECT_ACCESS) } let_it_be(:group_2) { create(:group, project_creation_level: Gitlab::Access::DEVELOPER_MAINTAINER_PROJECT_ACCESS) } @@ -981,6 +998,22 @@ RSpec.describe Group, feature_category: :subgroups do expect(result).to include(group_2, group_3, group_4) expect(result).not_to include(group_1) end + + context 'when the application_setting is set to `NO_ONE_PROJECT_ACCESS`' do + before do + stub_application_setting(default_project_creation: Gitlab::Access::NO_ONE_PROJECT_ACCESS) + end + + it 'only includes groups where project creation is allowed' do + result = described_class.project_creation_allowed + + expect(result).to include(group_2, group_3) + + # group_4 won't be included because it has `project_creation_level: nil`, + # and that means it behaves like the value of the application_setting will inherited. + expect(result).not_to include(group_1, group_4) + end + end end describe 'by_ids_or_paths' do @@ -3598,6 +3631,13 @@ RSpec.describe Group, feature_category: :subgroups do end end + describe '#content_editor_on_issues_feature_flag_enabled?' do + it_behaves_like 'checks self and root ancestor feature flag' do + let(:feature_flag) { :content_editor_on_issues } + let(:feature_flag_method) { :content_editor_on_issues_feature_flag_enabled? } + end + end + describe '#work_items_feature_flag_enabled?' 
do it_behaves_like 'checks self and root ancestor feature flag' do let(:feature_flag) { :work_items } @@ -3710,7 +3750,7 @@ RSpec.describe Group, feature_category: :subgroups do end end - describe '#usage_quotas_enabled?', feature_category: :subscription_cost_management, unless: Gitlab.ee? do + describe '#usage_quotas_enabled?', feature_category: :consumables_cost_management, unless: Gitlab.ee? do using RSpec::Parameterized::TableSyntax where(:feature_enabled, :root_group, :result) do diff --git a/spec/models/hooks/web_hook_spec.rb b/spec/models/hooks/web_hook_spec.rb index 48dfaff74d8..254b8c2520b 100644 --- a/spec/models/hooks/web_hook_spec.rb +++ b/spec/models/hooks/web_hook_spec.rb @@ -267,7 +267,7 @@ RSpec.describe WebHook, feature_category: :integrations do end context 'without url variables' do - subject(:hook) { build_stubbed(:project_hook, project: project, url: 'http://example.com') } + subject(:hook) { build_stubbed(:project_hook, project: project, url: 'http://example.com', url_variables: nil) } it 'does not reset url variables' do hook.url = 'http://example.com/{one}/{two}' diff --git a/spec/models/import_failure_spec.rb b/spec/models/import_failure_spec.rb index 0bdcb6dde31..4c22ed2e10f 100644 --- a/spec/models/import_failure_spec.rb +++ b/spec/models/import_failure_spec.rb @@ -56,7 +56,13 @@ RSpec.describe ImportFailure do it { is_expected.not_to allow_value({ ids: [123] }).for(:external_identifiers) } it 'allows up to 3 fields' do - is_expected.not_to allow_value({ note_id: 234, noteable_id: 345, noteable_type: 'MergeRequest', extra_attribute: 'abc' }).for(:external_identifiers) + is_expected.not_to allow_value({ + note_id: 234, + noteable_id: 345, + noteable_type: 'MergeRequest', + object_type: 'pull_request', + extra_attribute: 'abc' + }).for(:external_identifiers) end end end diff --git a/spec/models/integration_spec.rb b/spec/models/integration_spec.rb index a247881899f..46c30074ae7 100644 --- a/spec/models/integration_spec.rb +++ 
b/spec/models/integration_spec.rb @@ -1092,7 +1092,7 @@ RSpec.describe Integration, feature_category: :integrations do field :foo_dt, storage: :data_fields field :bar, type: 'password' - field :password + field :password, is_secret: true field :webhook diff --git a/spec/models/integrations/every_integration_spec.rb b/spec/models/integrations/every_integration_spec.rb index 8666ef512fc..c39a3486eb4 100644 --- a/spec/models/integrations/every_integration_spec.rb +++ b/spec/models/integrations/every_integration_spec.rb @@ -11,9 +11,9 @@ RSpec.describe 'Every integration' do let(:integration) { integration_class.new } context 'secret fields', :aggregate_failures do - it "uses type: 'password' for all secret fields, except when bypassed" do + it "uses type: 'password' for all secret fields" do integration.fields.each do |field| - next unless Integrations::Field::SECRET_NAME.match?(field[:name]) && field[:is_secret] + next unless field[:is_secret] expect(field[:type]).to eq('password'), "Field '#{field[:name]}' should use type 'password'" diff --git a/spec/models/integrations/ewm_spec.rb b/spec/models/integrations/ewm_spec.rb index dc48a2c982f..4f4ff038b19 100644 --- a/spec/models/integrations/ewm_spec.rb +++ b/spec/models/integrations/ewm_spec.rb @@ -31,27 +31,27 @@ RSpec.describe Integrations::Ewm do describe "ReferencePatternValidation" do it "extracts bug" do - expect(described_class.reference_pattern.match("This is bug 123")[:issue]).to eq("bug 123") + expect(subject.reference_pattern.match("This is bug 123")[:issue]).to eq("bug 123") end it "extracts task" do - expect(described_class.reference_pattern.match("This is task 123.")[:issue]).to eq("task 123") + expect(subject.reference_pattern.match("This is task 123.")[:issue]).to eq("task 123") end it "extracts work item" do - expect(described_class.reference_pattern.match("This is work item 123 now")[:issue]).to eq("work item 123") + expect(subject.reference_pattern.match("This is work item 123 now")[:issue]).to 
eq("work item 123") end it "extracts workitem" do - expect(described_class.reference_pattern.match("workitem 123 at the beginning")[:issue]).to eq("workitem 123") + expect(subject.reference_pattern.match("workitem 123 at the beginning")[:issue]).to eq("workitem 123") end it "extracts defect" do - expect(described_class.reference_pattern.match("This is defect 123 defect")[:issue]).to eq("defect 123") + expect(subject.reference_pattern.match("This is defect 123 defect")[:issue]).to eq("defect 123") end it "extracts rtcwi" do - expect(described_class.reference_pattern.match("This is rtcwi 123")[:issue]).to eq("rtcwi 123") + expect(subject.reference_pattern.match("This is rtcwi 123")[:issue]).to eq("rtcwi 123") end end end diff --git a/spec/models/integrations/field_spec.rb b/spec/models/integrations/field_spec.rb index c30f9ef0d7b..ca71dd0e6d3 100644 --- a/spec/models/integrations/field_spec.rb +++ b/spec/models/integrations/field_spec.rb @@ -15,8 +15,8 @@ RSpec.describe ::Integrations::Field do end describe '#initialize' do - it 'sets type password for secret names' do - attrs[:name] = 'token' + it 'sets type password for secret fields' do + attrs[:is_secret] = true attrs[:type] = 'text' expect(field[:type]).to eq('password') @@ -84,7 +84,7 @@ RSpec.describe ::Integrations::Field do when :type eq 'text' when :is_secret - eq true + eq false else be_nil end @@ -175,16 +175,6 @@ RSpec.describe ::Integrations::Field do it { is_expected.to be_secret } end - %w[token api_token api_key secret_key secret_sauce password passphrase].each do |name| - context "when named #{name}" do - before do - attrs[:name] = name - end - - it { is_expected.to be_secret } - end - end - context "when named url" do before do attrs[:name] = :url diff --git a/spec/models/integrations/google_play_spec.rb b/spec/models/integrations/google_play_spec.rb index ab1aaad24e7..8349ac71bc9 100644 --- a/spec/models/integrations/google_play_spec.rb +++ b/spec/models/integrations/google_play_spec.rb @@ -11,8 
+11,19 @@ RSpec.describe Integrations::GooglePlay, feature_category: :mobile_devops do it { is_expected.to validate_presence_of :service_account_key_file_name } it { is_expected.to validate_presence_of :service_account_key } + it { is_expected.to validate_presence_of :package_name } it { is_expected.to allow_value(File.read('spec/fixtures/service_account.json')).for(:service_account_key) } it { is_expected.not_to allow_value(File.read('spec/fixtures/group.json')).for(:service_account_key) } + it { is_expected.to allow_value('com.example.myapp').for(:package_name) } + it { is_expected.to allow_value('com.example.myorg.myapp').for(:package_name) } + it { is_expected.to allow_value('com_us.example.my_org.my_app').for(:package_name) } + it { is_expected.to allow_value('a.a.a').for(:package_name) } + it { is_expected.to allow_value('com.example').for(:package_name) } + it { is_expected.not_to allow_value('com').for(:package_name) } + it { is_expected.not_to allow_value('com.example.my app').for(:package_name) } + it { is_expected.not_to allow_value('1com.example.myapp').for(:package_name) } + it { is_expected.not_to allow_value('com.1example.myapp').for(:package_name) } + it { is_expected.not_to allow_value('com.example._myapp').for(:package_name) } end end @@ -21,20 +32,23 @@ RSpec.describe Integrations::GooglePlay, feature_category: :mobile_devops do describe '#fields' do it 'returns custom fields' do - expect(google_play_integration.fields.pluck(:name)).to match_array(%w[service_account_key + expect(google_play_integration.fields.pluck(:name)).to match_array(%w[package_name service_account_key service_account_key_file_name]) end end describe '#test' do it 'returns true for a successful request' do - allow(Google::Auth::ServiceAccountCredentials).to receive_message_chain(:make_creds, :fetch_access_token!) 
+ allow_next_instance_of(Google::Apis::AndroidpublisherV3::AndroidPublisherService) do |instance| + allow(instance).to receive(:list_reviews) + end expect(google_play_integration.test[:success]).to be true end it 'returns false for an invalid request' do - allow(Google::Auth::ServiceAccountCredentials).to receive_message_chain(:make_creds, - :fetch_access_token!).and_raise(Signet::AuthorizationError.new('error')) + allow_next_instance_of(Google::Apis::AndroidpublisherV3::AndroidPublisherService) do |instance| + allow(instance).to receive(:list_reviews).and_raise(Google::Apis::ClientError.new('error')) + end expect(google_play_integration.test[:success]).to be false end end @@ -56,6 +70,12 @@ RSpec.describe Integrations::GooglePlay, feature_category: :mobile_devops do it 'returns vars when the integration is activated' do ci_vars = [ + { + key: 'SUPPLY_PACKAGE_NAME', + value: google_play_integration.package_name, + masked: false, + public: false + }, { key: 'SUPPLY_JSON_KEY_DATA', value: google_play_integration.service_account_key, diff --git a/spec/models/integrations/harbor_spec.rb b/spec/models/integrations/harbor_spec.rb index b4580028112..c4da876a0dd 100644 --- a/spec/models/integrations/harbor_spec.rb +++ b/spec/models/integrations/harbor_spec.rb @@ -48,7 +48,7 @@ RSpec.describe Integrations::Harbor do before do allow_next_instance_of(Gitlab::Harbor::Client) do |client| - allow(client).to receive(:ping).and_return(test_response) + allow(client).to receive(:check_project_availability).and_return(test_response) end end diff --git a/spec/models/integrations/jira_spec.rb b/spec/models/integrations/jira_spec.rb index fad8768cba0..ccea8748d13 100644 --- a/spec/models/integrations/jira_spec.rb +++ b/spec/models/integrations/jira_spec.rb @@ -11,6 +11,8 @@ RSpec.describe Integrations::Jira do let(:url) { 'http://jira.example.com' } let(:api_url) { 'http://api-jira.example.com' } let(:username) { 'jira-username' } + let(:jira_issue_prefix) { '' } + 
let(:jira_issue_regex) { '' } let(:password) { 'jira-password' } let(:project_key) { nil } let(:transition_id) { 'test27' } @@ -48,6 +50,8 @@ RSpec.describe Integrations::Jira do it { is_expected.to validate_presence_of(:url) } it { is_expected.to validate_presence_of(:username) } it { is_expected.to validate_presence_of(:password) } + it { is_expected.to validate_length_of(:jira_issue_regex).is_at_most(255) } + it { is_expected.to validate_length_of(:jira_issue_prefix).is_at_most(255) } it_behaves_like 'issue tracker integration URL attribute', :url it_behaves_like 'issue tracker integration URL attribute', :api_url @@ -62,6 +66,8 @@ RSpec.describe Integrations::Jira do it { is_expected.not_to validate_presence_of(:url) } it { is_expected.not_to validate_presence_of(:username) } it { is_expected.not_to validate_presence_of(:password) } + it { is_expected.not_to validate_length_of(:jira_issue_regex).is_at_most(255) } + it { is_expected.not_to validate_length_of(:jira_issue_prefix).is_at_most(255) } end describe 'jira_issue_transition_id' do @@ -167,7 +173,7 @@ RSpec.describe Integrations::Jira do subject(:fields) { integration.fields } it 'returns custom fields' do - expect(fields.pluck(:name)).to eq(%w[url api_url username password jira_issue_transition_id]) + expect(fields.pluck(:name)).to eq(%w[url api_url username password jira_issue_regex jira_issue_prefix jira_issue_transition_id]) end end @@ -202,7 +208,7 @@ RSpec.describe Integrations::Jira do end end - describe '.reference_pattern' do + describe '#reference_pattern' do using RSpec::Parameterized::TableSyntax where(:key, :result) do @@ -216,11 +222,77 @@ RSpec.describe Integrations::Jira do '3EXT_EXT-1234' | '' 'CVE-2022-123' | '' 'CVE-123' | 'CVE-123' + 'abc-JIRA-1234' | 'JIRA-1234' end with_them do specify do - expect(described_class.reference_pattern.match(key).to_s).to eq(result) + expect(jira_integration.reference_pattern.match(key).to_s).to eq(result) + end + end + + context 'with match prefix' do + 
before do + jira_integration.jira_issue_prefix = 'jira#' + end + + where(:key, :result, :issue_key) do + 'jira##123' | '' | '' + 'jira#1#23#12' | '' | '' + 'jira#JIRA-1234A' | 'jira#JIRA-1234' | 'JIRA-1234' + 'jira#JIRA-1234-some_tag' | 'jira#JIRA-1234' | 'JIRA-1234' + 'JIRA-1234A' | '' | '' + 'JIRA-1234-some_tag' | '' | '' + 'myjira#JIRA-1234-some_tag' | '' | '' + 'MYjira#JIRA-1234-some_tag' | '' | '' + 'my-jira#JIRA-1234-some_tag' | 'jira#JIRA-1234' | 'JIRA-1234' + end + + with_them do + specify do + expect(jira_integration.reference_pattern.match(key).to_s).to eq(result) + + expect(jira_integration.reference_pattern.match(key)[:issue]).to eq(issue_key) unless result.empty? + end + end + end + + context 'with trailing space in jira_issue_prefix' do + before do + jira_integration.jira_issue_prefix = 'Jira# ' + end + + it 'leaves the trailing space' do + expect(jira_integration.jira_issue_prefix).to eq('Jira# ') + end + + it 'pulls the issue ID without a prefix' do + expect(jira_integration.reference_pattern.match('Jira# FOO-123')[:issue]).to eq('FOO-123') + end + end + + context 'with custom issue pattern' do + before do + jira_integration.jira_issue_regex = '[A-Z][0-9]-[0-9]+' + end + + where(:key, :result) do + 'J1-123' | 'J1-123' + 'AAbJ J1-123' | 'J1-123' + '#A1-123' | 'A1-123' + 'J1-1234-some_tag' | 'J1-1234' + 'J1-1234A' | 'J1-1234' + 'J1-1234-some_tag' | 'J1-1234' + 'JI1-123' | '' + 'J1I-123' | '' + 'JI-123' | '' + '#123' | '' + end + + with_them do + specify do + expect(jira_integration.reference_pattern.match(key).to_s).to eq(result) + end end end end @@ -252,6 +324,8 @@ RSpec.describe Integrations::Jira do url: url, api_url: api_url, username: username, password: password, + jira_issue_regex: jira_issue_regex, + jira_issue_prefix: jira_issue_prefix, jira_issue_transition_id: transition_id } end @@ -267,6 +341,8 @@ RSpec.describe Integrations::Jira do expect(integration.jira_tracker_data.api_url).to eq(api_url) 
expect(integration.jira_tracker_data.username).to eq(username) expect(integration.jira_tracker_data.password).to eq(password) + expect(integration.jira_tracker_data.jira_issue_regex).to eq(jira_issue_regex) + expect(integration.jira_tracker_data.jira_issue_prefix).to eq(jira_issue_prefix) expect(integration.jira_tracker_data.jira_issue_transition_id).to eq(transition_id) expect(integration.jira_tracker_data.deployment_cloud?).to be_truthy end diff --git a/spec/models/integrations/redmine_spec.rb b/spec/models/integrations/redmine_spec.rb index 59997d2b6f6..8785fc8a1ed 100644 --- a/spec/models/integrations/redmine_spec.rb +++ b/spec/models/integrations/redmine_spec.rb @@ -38,11 +38,11 @@ RSpec.describe Integrations::Redmine do end end - describe '.reference_pattern' do + describe '#reference_pattern' do it_behaves_like 'allows project key on reference pattern' it 'does allow # on the reference' do - expect(described_class.reference_pattern.match('#123')[:issue]).to eq('123') + expect(subject.reference_pattern.match('#123')[:issue]).to eq('123') end end end diff --git a/spec/models/integrations/youtrack_spec.rb b/spec/models/integrations/youtrack_spec.rb index 618ebcbb76a..69dda244413 100644 --- a/spec/models/integrations/youtrack_spec.rb +++ b/spec/models/integrations/youtrack_spec.rb @@ -26,15 +26,15 @@ RSpec.describe Integrations::Youtrack do end end - describe '.reference_pattern' do + describe '#reference_pattern' do it_behaves_like 'allows project key on reference pattern' it 'does allow project prefix on the reference' do - expect(described_class.reference_pattern.match('YT-123')[:issue]).to eq('YT-123') + expect(subject.reference_pattern.match('YT-123')[:issue]).to eq('YT-123') end it 'allows lowercase project key on the reference' do - expect(described_class.reference_pattern.match('yt-123')[:issue]).to eq('yt-123') + expect(subject.reference_pattern.match('yt-123')[:issue]).to eq('yt-123') end end diff --git a/spec/models/issue_spec.rb 
b/spec/models/issue_spec.rb index 8072a60326c..38f50f7627e 100644 --- a/spec/models/issue_spec.rb +++ b/spec/models/issue_spec.rb @@ -12,7 +12,6 @@ RSpec.describe Issue, feature_category: :team_planning do describe "Associations" do it { is_expected.to belong_to(:milestone) } - it { is_expected.to belong_to(:iteration) } it { is_expected.to belong_to(:project) } it { is_expected.to belong_to(:work_item_type).class_name('WorkItems::Type') } it { is_expected.to belong_to(:moved_to).class_name('Issue') } @@ -38,6 +37,7 @@ RSpec.describe Issue, feature_category: :team_planning do it { is_expected.to have_many(:issue_customer_relations_contacts) } it { is_expected.to have_many(:customer_relations_contacts) } it { is_expected.to have_many(:incident_management_timeline_events) } + it { is_expected.to have_many(:assignment_events).class_name('ResourceEvents::IssueAssignmentEvent').inverse_of(:issue) } describe 'versions.most_recent' do it 'returns the most recent version' do @@ -161,7 +161,7 @@ RSpec.describe Issue, feature_category: :team_planning do it 'is possible to change type only between selected types' do issue = create(:issue, old_type, project: reusable_project) - issue.work_item_type_id = WorkItems::Type.default_by_type(new_type).id + issue.assign_attributes(work_item_type: WorkItems::Type.default_by_type(new_type), issue_type: new_type) expect(issue.valid?).to eq(is_valid) end @@ -255,7 +255,7 @@ RSpec.describe Issue, feature_category: :team_planning do describe '#ensure_work_item_type' do let_it_be(:issue_type) { create(:work_item_type, :issue, :default) } - let_it_be(:task_type) { create(:work_item_type, :issue, :default) } + let_it_be(:incident_type) { create(:work_item_type, :incident, :default) } let_it_be(:project) { create(:project) } context 'when a type was already set' do @@ -272,9 +272,9 @@ RSpec.describe Issue, feature_category: :team_planning do expect(issue.work_item_type_id).to eq(issue_type.id) expect(WorkItems::Type).not_to 
receive(:default_by_type) - issue.update!(work_item_type: task_type, issue_type: 'task') + issue.update!(work_item_type: incident_type, issue_type: :incident) - expect(issue.work_item_type_id).to eq(task_type.id) + expect(issue.work_item_type_id).to eq(incident_type.id) end it 'ensures a work item type if updated to nil' do @@ -301,13 +301,23 @@ RSpec.describe Issue, feature_category: :team_planning do expect(issue.work_item_type_id).to be_nil expect(WorkItems::Type).not_to receive(:default_by_type) - issue.update!(work_item_type: task_type, issue_type: 'task') + issue.update!(work_item_type: incident_type, issue_type: :incident) - expect(issue.work_item_type_id).to eq(task_type.id) + expect(issue.work_item_type_id).to eq(incident_type.id) end end end + describe '#check_issue_type_in_sync' do + it 'raises an error if issue_type is out of sync' do + issue = build(:issue, issue_type: :issue, work_item_type: WorkItems::Type.default_by_type(:task)) + + expect do + issue.save! + end.to raise_error(Issue::IssueTypeOutOfSyncError) + end + end + describe '#record_create_action' do it 'records the creation action after saving' do expect(Gitlab::UsageDataCounters::IssueActivityUniqueCounter).to receive(:track_issue_created_action) @@ -1741,7 +1751,8 @@ RSpec.describe Issue, feature_category: :team_planning do end context 'when project in user namespace' do - let(:project) { build_stubbed(:project_empty_repo) } + let(:project_namespace) { build_stubbed(:project_namespace) } + let(:project) { build_stubbed(:project_empty_repo, project_namespace: project_namespace) } let(:project_id) { project.id } let(:namespace_id) { nil } @@ -1750,7 +1761,8 @@ RSpec.describe Issue, feature_category: :team_planning do context 'when project in a group namespace' do let(:group) { create(:group) } - let(:project) { build_stubbed(:project_empty_repo, group: group) } + let(:project_namespace) { build_stubbed(:project_namespace) } + let(:project) { build_stubbed(:project_empty_repo, group: group, 
project_namespace: project_namespace) } let(:project_id) { nil } let(:namespace_id) { group.id } @@ -1772,6 +1784,36 @@ RSpec.describe Issue, feature_category: :team_planning do it 'raises error when feature is invalid' do expect { issue.issue_type_supports?(:unkown_feature) }.to raise_error(ArgumentError) end + + context 'when issue_type_uses_work_item_types_table feature flag is disabled' do + before do + stub_feature_flags(issue_type_uses_work_item_types_table: false) + end + + it 'uses the issue_type column' do + expect(issue).to receive(:issue_type).and_call_original + expect(issue).not_to receive(:work_item_type).and_call_original + + issue.issue_type_supports?(:assignee) + end + end + + context 'when issue_type_uses_work_item_types_table feature flag is enabled' do + it 'uses the work_item_types table' do + expect(issue).not_to receive(:issue_type).and_call_original + expect(issue).to receive(:work_item_type).and_call_original + + issue.issue_type_supports?(:assignee) + end + + context 'when the issue is not persisted' do + it 'uses the default work item type' do + non_persisted_issue = build(:issue) + + expect(non_persisted_issue.issue_type_supports?(:assignee)).to be_truthy + end + end + end end describe '#supports_time_tracking?' 
do @@ -1785,7 +1827,7 @@ RSpec.describe Issue, feature_category: :team_planning do with_them do before do - issue.update!(issue_type: issue_type) + issue.update!(issue_type: issue_type, work_item_type: WorkItems::Type.default_by_type(issue_type)) end it do @@ -1805,7 +1847,7 @@ RSpec.describe Issue, feature_category: :team_planning do with_them do before do - issue.update!(issue_type: issue_type) + issue.update!(issue_type: issue_type, work_item_type: WorkItems::Type.default_by_type(issue_type)) end it do @@ -1919,4 +1961,10 @@ RSpec.describe Issue, feature_category: :team_planning do end end end + + describe '#work_item_type_with_default' do + subject { Issue.new.work_item_type_with_default } + + it { is_expected.to eq(WorkItems::Type.default_by_type(::Issue::DEFAULT_ISSUE_TYPE)) } + end end diff --git a/spec/models/member_spec.rb b/spec/models/member_spec.rb index 30607116c61..eea96e5e4ae 100644 --- a/spec/models/member_spec.rb +++ b/spec/models/member_spec.rb @@ -898,6 +898,14 @@ RSpec.describe Member, feature_category: :subgroups do end end + describe '.pluck_user_ids' do + let(:member) { create(:group_member) } + + it 'plucks the user ids' do + expect(described_class.where(id: member).pluck_user_ids).to match([member.user_id]) + end + end + describe '#send_invitation_reminder' do subject { member.send_invitation_reminder(0) } diff --git a/spec/models/members/project_member_spec.rb b/spec/models/members/project_member_spec.rb index f2bc9b42b77..20dae056646 100644 --- a/spec/models/members/project_member_spec.rb +++ b/spec/models/members/project_member_spec.rb @@ -136,24 +136,6 @@ RSpec.describe ProjectMember do end end - describe '.add_members_to_projects' do - it 'adds the given users to the given projects' do - projects = create_list(:project, 2) - users = create_list(:user, 2) - - described_class.add_members_to_projects( - [projects.first.id, projects.second.id], - [users.first.id, users.second], - described_class::MAINTAINER) - - 
expect(projects.first.users).to include(users.first) - expect(projects.first.users).to include(users.second) - - expect(projects.second.users).to include(users.first) - expect(projects.second.users).to include(users.second) - end - end - describe '.truncate_teams' do before do @project_1 = create(:project) diff --git a/spec/models/merge_request_spec.rb b/spec/models/merge_request_spec.rb index b82b16fdec3..f3aa174a964 100644 --- a/spec/models/merge_request_spec.rb +++ b/spec/models/merge_request_spec.rb @@ -30,7 +30,6 @@ RSpec.describe MergeRequest, factory_default: :keep, feature_category: :code_rev it { is_expected.to have_many(:merge_request_diffs) } it { is_expected.to have_many(:user_mentions).class_name("MergeRequestUserMention") } it { is_expected.to belong_to(:milestone) } - it { is_expected.to belong_to(:iteration) } it { is_expected.to have_many(:resource_milestone_events) } it { is_expected.to have_many(:resource_state_events) } it { is_expected.to have_many(:draft_notes) } @@ -38,6 +37,7 @@ RSpec.describe MergeRequest, factory_default: :keep, feature_category: :code_rev it { is_expected.to have_many(:reviewed_by_users).through(:reviews).source(:author) } it { is_expected.to have_one(:cleanup_schedule).inverse_of(:merge_request) } it { is_expected.to have_many(:created_environments).class_name('Environment').inverse_of(:merge_request) } + it { is_expected.to have_many(:assignment_events).class_name('ResourceEvents::MergeRequestAssignmentEvent').inverse_of(:merge_request) } context 'for forks' do let!(:project) { create(:project) } @@ -4487,7 +4487,7 @@ RSpec.describe MergeRequest, factory_default: :keep, feature_category: :code_rev let(:expected_merge_status) { 'checking' } include_examples 'for a valid state transition' - it_behaves_like 'transition triggering mergeRequestMergeStatusUpdated GraphQL subscription' + it_behaves_like 'transition not triggering mergeRequestMergeStatusUpdated GraphQL subscription' end context 'when the status is checking' do 
@@ -4507,7 +4507,7 @@ RSpec.describe MergeRequest, factory_default: :keep, feature_category: :code_rev let(:expected_merge_status) { 'cannot_be_merged_rechecking' } include_examples 'for a valid state transition' - it_behaves_like 'transition triggering mergeRequestMergeStatusUpdated GraphQL subscription' + it_behaves_like 'transition not triggering mergeRequestMergeStatusUpdated GraphQL subscription' end context 'when the status is cannot_be_merged' do @@ -4725,9 +4725,9 @@ RSpec.describe MergeRequest, factory_default: :keep, feature_category: :code_rev end [:closed, :merged].each do |state| - let(:state) { state } - context state do + let(:state) { state } + it 'does not notify' do expect(notification_service).not_to receive(:merge_request_unmergeable) expect(todo_service).not_to receive(:merge_request_became_unmergeable) diff --git a/spec/models/ml/candidate_spec.rb b/spec/models/ml/candidate_spec.rb index 374e49aea01..063b57788ce 100644 --- a/spec/models/ml/candidate_spec.rb +++ b/spec/models/ml/candidate_spec.rb @@ -3,48 +3,73 @@ require 'spec_helper' RSpec.describe Ml::Candidate, factory_default: :keep, feature_category: :mlops do - let_it_be(:candidate) { create(:ml_candidates, :with_metrics_and_params, name: 'candidate0') } + let_it_be(:candidate) { create(:ml_candidates, :with_metrics_and_params, :with_artifact, name: 'candidate0') } let_it_be(:candidate2) do create(:ml_candidates, experiment: candidate.experiment, user: create(:user), name: 'candidate2') end - let_it_be(:candidate_artifact) do - FactoryBot.create(:generic_package, - name: candidate.package_name, - version: candidate.package_version, - project: candidate.project) - end - - let(:project) { candidate.experiment.project } + let(:project) { candidate.project } describe 'associations' do it { is_expected.to belong_to(:experiment) } + it { is_expected.to belong_to(:project) } it { is_expected.to belong_to(:user) } + it { is_expected.to belong_to(:package) } it { is_expected.to have_many(:params) 
} it { is_expected.to have_many(:metrics) } it { is_expected.to have_many(:metadata) } end + describe 'modules' do + it_behaves_like 'AtomicInternalId' do + let(:internal_id_attribute) { :internal_id } + let(:instance) { build(:ml_candidates, experiment: candidate.experiment) } + let(:scope) { :project } + let(:scope_attrs) { { project: instance.project } } + let(:usage) { :ml_candidates } + end + end + describe 'default values' do - it { expect(described_class.new.iid).to be_present } + it { expect(described_class.new.eid).to be_present } end - describe '.artifact_root' do - subject { candidate.artifact_root } + describe '.destroy' do + let_it_be(:candidate_to_destroy) do + create(:ml_candidates, :with_metrics_and_params, :with_metadata, :with_artifact) + end - it { is_expected.to eq("/ml_candidate_#{candidate.id}/-/") } + it 'destroys metrics, params and metadata, but not the artifact', :aggregate_failures do + expect { candidate_to_destroy.destroy! } + .to change { Ml::CandidateMetadata.count }.by(-2) + .and change { Ml::CandidateParam.count }.by(-2) + .and change { Ml::CandidateMetric.count }.by(-2) + .and not_change { Packages::Package.count } + end end - describe '.package_name' do - subject { candidate.package_name } + describe '.artifact_root' do + subject { candidate.artifact_root } - it { is_expected.to eq("ml_candidate_#{candidate.id}") } + it { is_expected.to eq("/#{candidate.package_name}/#{candidate.iid}/") } end describe '.package_version' do subject { candidate.package_version } - it { is_expected.to eq('-') } + it { is_expected.to eq(candidate.iid) } + end + + describe '.eid' do + let_it_be(:eid) { SecureRandom.uuid } + + let_it_be(:candidate3) do + build(:ml_candidates, :with_metrics_and_params, name: 'candidate0', eid: eid) + end + + subject { candidate3.eid } + + it { is_expected.to eq(eid) } end describe '.artifact' do @@ -52,10 +77,6 @@ RSpec.describe Ml::Candidate, factory_default: :keep, feature_category: :mlops d subject { 
tested_candidate.artifact } - before do - candidate_artifact - end - context 'when has logged artifacts' do it 'returns the package' do expect(subject.name).to eq(tested_candidate.package_name) @@ -69,21 +90,26 @@ RSpec.describe Ml::Candidate, factory_default: :keep, feature_category: :mlops d end end - describe '.artifact_lazy' do - context 'when candidates have same the same iid' do - before do - BatchLoader::Executor.clear_current - end + describe '#by_project_id_and_eid' do + let(:project_id) { candidate.experiment.project_id } + let(:eid) { candidate.eid } - it 'loads the correct artifacts', :aggregate_failures do - candidate.artifact_lazy - candidate2.artifact_lazy + subject { described_class.with_project_id_and_eid(project_id, eid) } - expect(Packages::Package).to receive(:joins).once.and_call_original # Only one database call + context 'when eid exists', 'and belongs to project' do + it { is_expected.to eq(candidate) } + end - expect(candidate.artifact.name).to eq(candidate.package_name) - expect(candidate2.artifact).to be_nil - end + context 'when eid exists', 'and does not belong to project' do + let(:project_id) { non_existing_record_id } + + it { is_expected.to be_nil } + end + + context 'when eid does not exist' do + let(:eid) { 'a' } + + it { is_expected.to be_nil } end end @@ -93,18 +119,18 @@ RSpec.describe Ml::Candidate, factory_default: :keep, feature_category: :mlops d subject { described_class.with_project_id_and_iid(project_id, iid) } - context 'when iid exists', 'and belongs to project' do + context 'when internal_id exists', 'and belongs to project' do it { is_expected.to eq(candidate) } end - context 'when iid exists', 'and does not belong to project' do + context 'when internal_id exists', 'and does not belong to project' do let(:project_id) { non_existing_record_id } it { is_expected.to be_nil } end - context 'when iid does not exist' do - let(:iid) { 'a' } + context 'when internal_id does not exist' do + let(:iid) { non_existing_record_id 
} it { is_expected.to be_nil } end diff --git a/spec/models/ml/experiment_spec.rb b/spec/models/ml/experiment_spec.rb index c75331a2ab5..9738a88b5b8 100644 --- a/spec/models/ml/experiment_spec.rb +++ b/spec/models/ml/experiment_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe Ml::Experiment do +RSpec.describe Ml::Experiment, feature_category: :mlops do let_it_be(:exp) { create(:ml_experiments) } let_it_be(:exp2) { create(:ml_experiments, project: exp.project) } @@ -16,6 +16,12 @@ RSpec.describe Ml::Experiment do it { is_expected.to have_many(:metadata) } end + describe '.package_name' do + describe '.package_name' do + it { expect(exp.package_name).to eq("ml_experiment_#{exp.iid}") } + end + end + describe '#by_project_id_and_iid' do subject { described_class.by_project_id_and_iid(exp.project_id, iid) } @@ -74,4 +80,22 @@ RSpec.describe Ml::Experiment do expect(subject[exp3.id]).to eq(3) end end + + describe '#package_for_experiment?' do + using RSpec::Parameterized::TableSyntax + + subject { described_class.package_for_experiment?(package_name) } + + where(:package_name, :id) do + 'ml_experiment_1234' | true + 'ml_experiment_1234abc' | false + 'ml_experiment_abc' | false + 'ml_experiment_' | false + 'blah' | false + end + + with_them do + it { is_expected.to be(id) } + end + end end diff --git a/spec/models/namespace_setting_spec.rb b/spec/models/namespace_setting_spec.rb index b7cc59b5af3..e3d389a2a6e 100644 --- a/spec/models/namespace_setting_spec.rb +++ b/spec/models/namespace_setting_spec.rb @@ -15,6 +15,8 @@ RSpec.describe NamespaceSetting, feature_category: :subgroups, type: :model do it { is_expected.to define_enum_for(:enabled_git_access_protocol).with_values([:all, :ssh, :http]).with_suffix } describe "validations" do + it { is_expected.to validate_inclusion_of(:code_suggestions).in_array([true, false]) } + describe "#default_branch_name_content" do let_it_be(:group) { create(:group) } diff --git a/spec/models/namespace_spec.rb 
b/spec/models/namespace_spec.rb index 89df24d75c9..1cb5ebeae98 100644 --- a/spec/models/namespace_spec.rb +++ b/spec/models/namespace_spec.rb @@ -1587,30 +1587,6 @@ RSpec.describe Namespace, feature_category: :subgroups do end end - describe '#use_traversal_ids_for_root_ancestor?' do - let_it_be(:namespace, reload: true) { create(:namespace) } - - subject { namespace.use_traversal_ids_for_root_ancestor? } - - context 'when use_traversal_ids_for_root_ancestor feature flag is true' do - before do - stub_feature_flags(use_traversal_ids_for_root_ancestor: true) - end - - it { is_expected.to eq true } - - it_behaves_like 'disabled feature flag when traversal_ids is blank' - end - - context 'when use_traversal_ids_for_root_ancestor feature flag is false' do - before do - stub_feature_flags(use_traversal_ids_for_root_ancestor: false) - end - - it { is_expected.to eq false } - end - end - describe '#use_traversal_ids_for_ancestors?' do let_it_be(:namespace, reload: true) { create(:namespace) } diff --git a/spec/models/note_spec.rb b/spec/models/note_spec.rb index c1de8125c0d..bcfcfa05ddf 100644 --- a/spec/models/note_spec.rb +++ b/spec/models/note_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe Note do +RSpec.describe Note, feature_category: :team_planning do include RepoHelpers describe 'associations' do @@ -799,20 +799,22 @@ RSpec.describe Note do describe '#system_note_with_references?' 
do it 'falsey for user-generated notes' do - note = create(:note, system: false) + note = build_stubbed(:note, system: false) expect(note.system_note_with_references?).to be_falsy end context 'when the note might contain cross references' do SystemNoteMetadata.new.cross_reference_types.each do |type| - let(:note) { create(:note, :system) } - let!(:metadata) { create(:system_note_metadata, note: note, action: type) } + context "with #{type}" do + let(:note) { build_stubbed(:note, :system) } + let!(:metadata) { build_stubbed(:system_note_metadata, note: note, action: type) } - it 'delegates to the cross-reference regex' do - expect(note).to receive(:matches_cross_reference_regex?).and_return(false) + it 'delegates to the cross-reference regex' do + expect(note).to receive(:matches_cross_reference_regex?).and_return(false) - note.system_note_with_references? + note.system_note_with_references? + end end end end @@ -1666,6 +1668,32 @@ RSpec.describe Note do end end end + + describe '.without_hidden' do + subject { described_class.without_hidden } + + context 'when a note with a banned author exists' do + let_it_be(:banned_user) { create(:banned_user).user } + let_it_be(:banned_note) { create(:note, author: banned_user) } + + context 'when the :hidden_notes feature is disabled' do + before do + stub_feature_flags(hidden_notes: false) + end + + it { is_expected.to include(banned_note, note1) } + end + + context 'when the :hidden_notes feature is enabled' do + before do + stub_feature_flags(hidden_notes: true) + end + + it { is_expected.not_to include(banned_note) } + it { is_expected.to include(note1) } + end + end + end end describe 'banzai_render_context' do diff --git a/spec/models/onboarding/completion_spec.rb b/spec/models/onboarding/completion_spec.rb index 0639762b76c..dd7648f7799 100644 --- a/spec/models/onboarding/completion_spec.rb +++ b/spec/models/onboarding/completion_spec.rb @@ -37,26 +37,6 @@ RSpec.describe Onboarding::Completion, feature_category: 
:onboarding do it { is_expected.to eq(100) } end - - context 'with security_actions_continuous_onboarding experiment' do - let(:completed_actions) { Hash[tracked_action_columns.first, Time.current] } - - context 'when control' do - before do - stub_experiments(security_actions_continuous_onboarding: :control) - end - - it { is_expected.to eq(10) } - end - - context 'when candidate' do - before do - stub_experiments(security_actions_continuous_onboarding: :candidate) - end - - it { is_expected.to eq(8) } - end - end end describe '#completed?' do @@ -84,18 +64,18 @@ RSpec.describe Onboarding::Completion, feature_category: :onboarding do end end - context 'when security_scan_enabled' do - let(:column) { :security_scan_enabled_at } - let(:completed_actions) { { security_scan_enabled_at: security_scan_enabled_at } } + context 'when secure_dast_run' do + let(:column) { :secure_dast_run_at } + let(:completed_actions) { { secure_dast_run_at: secure_dast_run_at } } context 'when is completed' do - let(:security_scan_enabled_at) { Time.current } + let(:secure_dast_run_at) { Time.current } it { is_expected.to eq(true) } end context 'when is not completed' do - let(:security_scan_enabled_at) { nil } + let(:secure_dast_run_at) { nil } it { is_expected.to eq(false) } end diff --git a/spec/models/onboarding/progress_spec.rb b/spec/models/onboarding/progress_spec.rb index 9d91af2487a..7d169464462 100644 --- a/spec/models/onboarding/progress_spec.rb +++ b/spec/models/onboarding/progress_spec.rb @@ -187,7 +187,7 @@ RSpec.describe Onboarding::Progress do end context 'for multiple actions' do - let(:action1) { :security_scan_enabled } + let(:action1) { :secure_dast_run } let(:action2) { :secure_dependency_scanning_run } let(:actions) { [action1, action2] } @@ -206,11 +206,11 @@ RSpec.describe Onboarding::Progress do it 'does not override timestamp', :aggregate_failures do described_class.register(namespace, [action1]) - 
expect(described_class.find_by_namespace_id(namespace.id).security_scan_enabled_at).not_to be_nil + expect(described_class.find_by_namespace_id(namespace.id).secure_dast_run_at).not_to be_nil expect(described_class.find_by_namespace_id(namespace.id).secure_dependency_scanning_run_at).to be_nil expect { described_class.register(namespace, [action1, action2]) }.not_to change { - described_class.find_by_namespace_id(namespace.id).security_scan_enabled_at + described_class.find_by_namespace_id(namespace.id).secure_dast_run_at } expect(described_class.find_by_namespace_id(namespace.id).secure_dependency_scanning_run_at).not_to be_nil end diff --git a/spec/models/packages/debian/file_metadatum_spec.rb b/spec/models/packages/debian/file_metadatum_spec.rb index 8cbd83c3e2d..e86c0a71c9a 100644 --- a/spec/models/packages/debian/file_metadatum_spec.rb +++ b/spec/models/packages/debian/file_metadatum_spec.rb @@ -2,15 +2,15 @@ require 'spec_helper' -RSpec.describe Packages::Debian::FileMetadatum, type: :model do - RSpec.shared_context 'Debian file metadatum' do |factory, trait| - let_it_be_with_reload(:debian_package_file) { create(factory, trait) } +RSpec.describe Packages::Debian::FileMetadatum, type: :model, feature_category: :package_registry do + RSpec.shared_context 'with Debian file metadatum' do |package_file_trait| + let_it_be_with_reload(:debian_package_file) { create(:debian_package_file, package_file_trait) } let(:debian_file_metadatum) { debian_package_file.debian_file_metadatum } subject { debian_file_metadatum } end - RSpec.shared_examples 'Test Debian file metadatum' do |has_component, has_architecture, has_fields, has_outdated| + RSpec.shared_examples 'Test Debian file metadatum' do |has_component, has_architecture, has_fields| describe 'relationships' do it { is_expected.to belong_to(:package_file) } end @@ -51,8 +51,8 @@ RSpec.describe Packages::Debian::FileMetadatum, type: :model do describe '#fields' do if has_fields it { is_expected.to 
validate_presence_of(:fields) } - it { is_expected.to allow_value({ 'a': 'b' }).for(:fields) } - it { is_expected.not_to allow_value({ 'a': { 'b': 'c' } }).for(:fields) } + it { is_expected.to allow_value({ a: 'b' }).for(:fields) } + it { is_expected.not_to allow_value({ a: { b: 'c' } }).for(:fields) } else it { is_expected.to validate_absence_of(:fields) } end @@ -69,24 +69,35 @@ RSpec.describe Packages::Debian::FileMetadatum, type: :model do end end end + + describe 'scopes' do + describe '.with_file_type' do + subject { described_class.with_file_type(package_file_trait) } + + it 'returns the matching file metadatum' do + expect(subject).to match_array([debian_file_metadatum]) + end + end + end end using RSpec::Parameterized::TableSyntax - where(:factory, :trait, :has_component, :has_architecture, :has_fields) do - :debian_package_file | :unknown | false | false | false - :debian_package_file | :source | true | false | false - :debian_package_file | :dsc | true | false | true - :debian_package_file | :deb | true | true | true - :debian_package_file | :udeb | true | true | true - :debian_package_file | :ddeb | true | true | true - :debian_package_file | :buildinfo | true | false | true - :debian_package_file | :changes | false | false | true + where(:package_file_trait, :has_component, :has_architecture, :has_fields) do + :unknown | false | false | false + :source | true | false | false + :dsc | true | false | true + :deb | true | true | true + :udeb | true | true | true + :ddeb | true | true | true + :buildinfo | true | false | true + :changes | false | false | true end with_them do - include_context 'Debian file metadatum', params[:factory], params[:trait] do - it_behaves_like 'Test Debian file metadatum', params[:has_component], params[:has_architecture], params[:has_fields], params[:has_outdated] + include_context 'with Debian file metadatum', params[:package_file_trait] do + it_behaves_like 'Test Debian file metadatum', + params[:has_component], 
params[:has_architecture], params[:has_fields] end end end diff --git a/spec/models/packages/event_spec.rb b/spec/models/packages/event_spec.rb new file mode 100644 index 00000000000..58c1c1e6e92 --- /dev/null +++ b/spec/models/packages/event_spec.rb @@ -0,0 +1,51 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe Packages::Event, feature_category: :package_registry do + let(:event_type) { :push_package } + let(:event_scope) { :npm } + let(:originator_type) { :deploy_token } + + shared_examples 'handle forbidden event type' do |result: []| + let(:event_type) { :search } + + it { is_expected.to eq(result) } + end + + describe '.event_allowed?' do + subject { described_class.event_allowed?(event_type) } + + it { is_expected.to eq(true) } + + it_behaves_like 'handle forbidden event type', result: false + end + + describe '.unique_counters_for' do + subject { described_class.unique_counters_for(event_scope, event_type, originator_type) } + + it { is_expected.to contain_exactly('i_package_npm_deploy_token') } + + it_behaves_like 'handle forbidden event type' + + context 'when an originator type is quest' do + let(:originator_type) { :guest } + + it { is_expected.to eq([]) } + end + end + + describe '.counters_for' do + subject { described_class.counters_for(event_scope, event_type, originator_type) } + + it do + is_expected.to contain_exactly( + 'i_package_push_package', + 'i_package_push_package_by_deploy_token', + 'i_package_npm_push_package' + ) + end + + it_behaves_like 'handle forbidden event type' + end +end diff --git a/spec/models/packages/npm/metadata_cache_spec.rb b/spec/models/packages/npm/metadata_cache_spec.rb new file mode 100644 index 00000000000..fdee0bedc5b --- /dev/null +++ b/spec/models/packages/npm/metadata_cache_spec.rb @@ -0,0 +1,31 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe Packages::Npm::MetadataCache, type: :model, feature_category: :package_registry do + let_it_be(:npm_metadata_cache) 
{ create(:npm_metadata_cache) } + + describe 'relationships' do + it { is_expected.to belong_to(:project).inverse_of(:npm_metadata_caches) } + end + + describe 'validations' do + it { is_expected.to validate_presence_of(:file) } + it { is_expected.to validate_presence_of(:project) } + it { is_expected.to validate_presence_of(:size) } + + describe '#package_name' do + it { is_expected.to validate_presence_of(:package_name) } + it { is_expected.to validate_uniqueness_of(:package_name).scoped_to(:project_id) } + it { is_expected.to allow_value('my.app-11.07.2018').for(:package_name) } + it { is_expected.to allow_value('@group-1/package').for(:package_name) } + it { is_expected.to allow_value('@any-scope/package').for(:package_name) } + it { is_expected.to allow_value('unscoped-package').for(:package_name) } + it { is_expected.not_to allow_value('my(dom$$$ain)com.my-app').for(:package_name) } + it { is_expected.not_to allow_value('@inv@lid-scope/package').for(:package_name) } + it { is_expected.not_to allow_value('@scope/../../package').for(:package_name) } + it { is_expected.not_to allow_value('@scope%2e%2e%fpackage').for(:package_name) } + it { is_expected.not_to allow_value('@scope/sub/package').for(:package_name) } + end + end +end diff --git a/spec/models/packages/npm/metadatum_spec.rb b/spec/models/packages/npm/metadatum_spec.rb index ff8cce5310e..92daddded7e 100644 --- a/spec/models/packages/npm/metadatum_spec.rb +++ b/spec/models/packages/npm/metadatum_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe Packages::Npm::Metadatum, type: :model do +RSpec.describe Packages::Npm::Metadatum, type: :model, feature_category: :package_registry do describe 'relationships' do it { is_expected.to belong_to(:package).inverse_of(:npm_metadatum) } end @@ -47,4 +47,16 @@ RSpec.describe Packages::Npm::Metadatum, type: :model do end end end + + describe 'scopes' do + describe '.package_id_in' do + let_it_be(:package) { create(:npm_package) } + let_it_be(:metadatum_1) { 
create(:npm_metadatum, package: package) } + let_it_be(:metadatum_2) { create(:npm_metadatum) } + + it 'returns metadatums with the given package ids' do + expect(described_class.package_id_in([package.id])).to contain_exactly(metadatum_1) + end + end + end end diff --git a/spec/models/packages/package_file_spec.rb b/spec/models/packages/package_file_spec.rb index d80f8247261..c9db1efc64a 100644 --- a/spec/models/packages/package_file_spec.rb +++ b/spec/models/packages/package_file_spec.rb @@ -178,6 +178,17 @@ RSpec.describe Packages::PackageFile, type: :model do describe '#with_debian_architecture_name' do it { expect(described_class.with_debian_architecture_name('mipsel')).to contain_exactly(debian_mipsel) } end + + describe '#with_debian_unknown_since' do + let_it_be(:incoming) { create(:debian_incoming, project: project) } + + before do + incoming.package_files.first.debian_file_metadatum.update! updated_at: 1.day.ago + incoming.package_files.second.update! updated_at: 1.day.ago, status: :error + end + + it { expect(described_class.with_debian_unknown_since(1.hour.ago)).to contain_exactly(incoming.package_files.first) } + end end describe '.for_helm_with_channel' do diff --git a/spec/models/packages/package_spec.rb b/spec/models/packages/package_spec.rb index 992cc5c4354..fcf60f0559a 100644 --- a/spec/models/packages/package_spec.rb +++ b/spec/models/packages/package_spec.rb @@ -682,24 +682,20 @@ RSpec.describe Packages::Package, type: :model, feature_category: :package_regis end end - describe "#unique_debian_package_name" do + describe "uniqueness for package type debian" do let!(:package) { create(:debian_package) } - it "will allow a Debian package with same project, name and version, but different distribution" do - new_package = build(:debian_package, project: package.project, name: package.name, version: package.version) - expect(new_package).to be_valid - end - it "will not allow a Debian package with same project, name, version and distribution" do 
new_package = build(:debian_package, project: package.project, name: package.name, version: package.version) new_package.debian_publication.distribution = package.debian_publication.distribution expect(new_package).not_to be_valid - expect(new_package.errors.to_a).to include('Debian package already exists in Distribution') + expect(new_package.errors.to_a).to include('Name has already been taken') end - it "will allow a Debian package with same project, name, version, but no distribution" do + it "will not allow a Debian package with same project, name, version, but no distribution" do new_package = build(:debian_package, project: package.project, name: package.name, version: package.version, published_in: nil) - expect(new_package).to be_valid + expect(new_package).not_to be_valid + expect(new_package.errors.to_a).to include('Name has already been taken') end context 'with pending_destruction package' do @@ -713,7 +709,7 @@ RSpec.describe Packages::Package, type: :model, feature_category: :package_regis end end - Packages::Package.package_types.keys.without('conan', 'debian').each do |pt| + Packages::Package.package_types.keys.without('conan').each do |pt| context "project id, name, version and package type uniqueness for package type #{pt}" do let(:package) { create("#{pt}_package") } @@ -722,6 +718,15 @@ RSpec.describe Packages::Package, type: :model, feature_category: :package_regis expect(new_package).not_to be_valid expect(new_package.errors.to_a).to include("Name has already been taken") end + + context 'with pending_destruction package' do + let!(:package) { create("#{pt}_package", :pending_destruction) } + + it "will allow a #{pt} package with same project, name, version and package_type" do + new_package = build("#{pt}_package", project: package.project, name: package.name, version: package.version) + expect(new_package).to be_valid + end + end end end end @@ -1402,4 +1407,36 @@ RSpec.describe Packages::Package, type: :model, feature_category: 
:package_regis .to change(package, :last_downloaded_at).from(nil).to(instance_of(ActiveSupport::TimeWithZone)) end end + + describe "#publish_creation_event" do + let_it_be(:project) { create(:project) } + + let(:version) { '-' } + let(:package_type) { :generic } + + subject { described_class.create!(project: project, name: 'incoming', version: version, package_type: package_type) } + + context 'when package is generic' do + it 'publishes an event' do + expect { subject } + .to publish_event(::Packages::PackageCreatedEvent) + .with({ + project_id: project.id, + id: kind_of(Numeric), + name: "incoming", + version: "-", + package_type: 'generic' + }) + end + end + + context 'when package is not generic' do + let(:package_type) { :debian } + let(:version) { 1 } + + it 'does not create event' do + expect { subject }.not_to publish_event(::Packages::PackageCreatedEvent) + end + end + end end diff --git a/spec/models/pages/lookup_path_spec.rb b/spec/models/pages/lookup_path_spec.rb index 38ff1bb090e..88fd1bd9e56 100644 --- a/spec/models/pages/lookup_path_spec.rb +++ b/spec/models/pages/lookup_path_spec.rb @@ -138,14 +138,14 @@ RSpec.describe Pages::LookupPath, feature_category: :pages do end end - describe '#unique_domain' do + describe '#unique_host' do let(:project) { build(:project) } context 'when unique domain is disabled' do it 'returns nil' do project.project_setting.pages_unique_domain_enabled = false - expect(lookup_path.unique_domain).to be_nil + expect(lookup_path.unique_host).to be_nil end end @@ -154,7 +154,30 @@ RSpec.describe Pages::LookupPath, feature_category: :pages do project.project_setting.pages_unique_domain_enabled = true project.project_setting.pages_unique_domain = 'unique-domain' - expect(lookup_path.unique_domain).to eq('unique-domain') + expect(lookup_path.unique_host).to eq('unique-domain.example.com') + end + end + end + + describe '#root_directory' do + subject(:lookup_path) { described_class.new(project) } + + context 'when there is no 
deployment' do + it 'returns nil' do + expect(lookup_path.root_directory).to be_nil + end + end + + context 'when there is a deployment' do + let(:deployment) { create(:pages_deployment, project: project, root_directory: 'foo') } + + before do + project.mark_pages_as_deployed + project.pages_metadatum.update!(pages_deployment: deployment) + end + + it 'returns the deployment\'s root_directory' do + expect(lookup_path.root_directory).to eq('foo') end end end diff --git a/spec/models/pages_deployment_spec.rb b/spec/models/pages_deployment_spec.rb index 268c5006a88..767db511d85 100644 --- a/spec/models/pages_deployment_spec.rb +++ b/spec/models/pages_deployment_spec.rb @@ -59,6 +59,66 @@ RSpec.describe PagesDeployment, feature_category: :pages do end end + context 'when uploading the file' do + before do + stub_pages_object_storage(::Pages::DeploymentUploader) + end + + describe '#store_after_commit?' do + context 'when feature flag pages_deploy_upload_file_outside_transaction is disabled' do + it 'returns false' do + Feature.disable(:pages_deploy_upload_file_outside_transaction) + + deployment = create(:pages_deployment, project: project) + expect(deployment.store_after_commit?).to eq(false) + end + end + + context 'when feature flag pages_deploy_upload_file_outside_transaction is enabled' do + it 'returns true' do + deployment = create(:pages_deployment, project: project) + expect(deployment.store_after_commit?).to eq(true) + end + end + end + + context 'when feature flag pages_deploy_upload_file_outside_transaction is disabled' do + before do + Feature.disable(:pages_deploy_upload_file_outside_transaction) + end + + it 'stores the file within the transaction' do + expect_next_instance_of(PagesDeployment) do |deployment| + expect(deployment).not_to receive(:store_file_now!) 
+ end + + create(:pages_deployment, project: project) + end + end + + context 'when feature flag pages_deploy_upload_file_outside_transaction is enabled' do + before do + Feature.enable(:pages_deploy_upload_file_outside_transaction) + end + + it 'stores the file outsize of the transaction' do + expect_next_instance_of(PagesDeployment) do |deployment| + expect(deployment).to receive(:store_file_now!) + end + + create(:pages_deployment, project: project) + end + + it 'does nothing when the file did not change' do + deployment = create(:pages_deployment, project: project) + + expect(deployment).not_to receive(:store_file_now!) + + deployment.touch + end + end + end + describe '#migrated?' do it 'returns false for normal deployment' do deployment = create(:pages_deployment) diff --git a/spec/models/plan_limits_spec.rb b/spec/models/plan_limits_spec.rb index 3705cab7ef5..eb17a66a103 100644 --- a/spec/models/plan_limits_spec.rb +++ b/spec/models/plan_limits_spec.rb @@ -221,6 +221,7 @@ RSpec.describe PlanLimits do security_policy_scan_execution_schedules enforcement_limit notification_limit + project_access_token_limit ] + disabled_max_artifact_size_columns end diff --git a/spec/models/preloaders/labels_preloader_spec.rb b/spec/models/preloaders/labels_preloader_spec.rb index 07f148a0a6c..3d2a5edc8f0 100644 --- a/spec/models/preloaders/labels_preloader_spec.rb +++ b/spec/models/preloaders/labels_preloader_spec.rb @@ -18,14 +18,24 @@ RSpec.describe Preloaders::LabelsPreloader do context 'project labels' do let_it_be(:projects) { create_list(:project, 3, :public, :repository) } - let_it_be(:labels) { projects.each { |p| create(:label, project: p) } } + let_it_be(:labels) { projects.map { |p| create(:label, project: p) } } it_behaves_like 'an efficient database query' + + it 'preloads the max access level', :request_store do + labels_with_preloaded_data + + query_count = ActiveRecord::QueryRecorder.new do + projects.first.team.max_member_access_for_user_ids([user.id]) + 
end.count + + expect(query_count).to eq(0) + end end context 'group labels' do let_it_be(:groups) { create_list(:group, 3) } - let_it_be(:labels) { groups.each { |g| create(:group_label, group: g) } } + let_it_be(:labels) { groups.map { |g| create(:group_label, group: g) } } it_behaves_like 'an efficient database query' end diff --git a/spec/models/preloaders/runner_machine_policy_preloader_spec.rb b/spec/models/preloaders/runner_machine_policy_preloader_spec.rb deleted file mode 100644 index 26fc101d8dc..00000000000 --- a/spec/models/preloaders/runner_machine_policy_preloader_spec.rb +++ /dev/null @@ -1,38 +0,0 @@ -# frozen_string_literal: true - -require 'spec_helper' - -RSpec.describe Preloaders::RunnerMachinePolicyPreloader, feature_category: :runner_fleet do - let_it_be(:user) { create(:user) } - let_it_be(:runner1) { create(:ci_runner) } - let_it_be(:runner2) { create(:ci_runner) } - let_it_be(:runner_machine1) { create(:ci_runner_machine, runner: runner1) } - let_it_be(:runner_machine2) { create(:ci_runner_machine, runner: runner2) } - - let(:base_runner_machines) do - Project.where(id: [runner_machine1, runner_machine2]) - end - - it 'avoids N+1 queries when authorizing a list of runner machines', :request_store do - preload_runner_machines_for_policy(user) - control = ActiveRecord::QueryRecorder.new { authorize_all_runner_machines(user) } - - new_runner1 = create(:ci_runner) - new_runner2 = create(:ci_runner) - new_runner_machine1 = create(:ci_runner_machine, runner: new_runner1) - new_runner_machine2 = create(:ci_runner_machine, runner: new_runner2) - - pristine_runner_machines = Project.where(id: base_runner_machines + [new_runner_machine1, new_runner_machine2]) - - preload_runner_machines_for_policy(user, pristine_runner_machines) - expect { authorize_all_runner_machines(user, pristine_runner_machines) }.not_to exceed_query_limit(control) - end - - def authorize_all_runner_machines(current_user, runner_machine_list = base_runner_machines) - 
runner_machine_list.each { |runner_machine| current_user.can?(:read_runner_machine, runner_machine) } - end - - def preload_runner_machines_for_policy(current_user, runner_machine_list = base_runner_machines) - described_class.new(runner_machine_list, current_user).execute - end -end diff --git a/spec/models/preloaders/runner_manager_policy_preloader_spec.rb b/spec/models/preloaders/runner_manager_policy_preloader_spec.rb new file mode 100644 index 00000000000..1977e2c5787 --- /dev/null +++ b/spec/models/preloaders/runner_manager_policy_preloader_spec.rb @@ -0,0 +1,38 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe Preloaders::RunnerManagerPolicyPreloader, feature_category: :runner_fleet do + let_it_be(:user) { create(:user) } + let_it_be(:runner1) { create(:ci_runner) } + let_it_be(:runner2) { create(:ci_runner) } + let_it_be(:runner_manager1) { create(:ci_runner_machine, runner: runner1) } + let_it_be(:runner_manager2) { create(:ci_runner_machine, runner: runner2) } + + let(:base_runner_managers) do + Project.where(id: [runner_manager1, runner_manager2]) + end + + it 'avoids N+1 queries when authorizing a list of runner managers', :request_store do + preload_runner_managers_for_policy(user) + control = ActiveRecord::QueryRecorder.new { authorize_all_runner_managers(user) } + + new_runner1 = create(:ci_runner) + new_runner2 = create(:ci_runner) + new_runner_manager1 = create(:ci_runner_machine, runner: new_runner1) + new_runner_manager2 = create(:ci_runner_machine, runner: new_runner2) + + pristine_runner_managers = Project.where(id: base_runner_managers + [new_runner_manager1, new_runner_manager2]) + + preload_runner_managers_for_policy(user, pristine_runner_managers) + expect { authorize_all_runner_managers(user, pristine_runner_managers) }.not_to exceed_query_limit(control) + end + + def authorize_all_runner_managers(current_user, runner_manager_list = base_runner_managers) + runner_manager_list.each { |runner_manager| 
current_user.can?(:read_runner_manager, runner_manager) } + end + + def preload_runner_managers_for_policy(current_user, runner_manager_list = base_runner_managers) + described_class.new(runner_manager_list, current_user).execute + end +end diff --git a/spec/models/preloaders/users_max_access_level_by_project_preloader_spec.rb b/spec/models/preloaders/users_max_access_level_by_project_preloader_spec.rb new file mode 100644 index 00000000000..f5bc0c8c2f8 --- /dev/null +++ b/spec/models/preloaders/users_max_access_level_by_project_preloader_spec.rb @@ -0,0 +1,61 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe Preloaders::UsersMaxAccessLevelByProjectPreloader, feature_category: :projects do + let_it_be(:user_1) { create(:user) } + let_it_be(:user_2) { create(:user) } + let_it_be(:user_with_no_access) { create(:user) } # ensures we correctly cache NO_ACCESS + + let_it_be(:project_1) { create(:project) } + let_it_be(:project_2) { create(:project) } + let_it_be(:project_3) { create(:project) } + + before do + project_1.add_developer(user_1) + project_1.add_developer(user_2) + + project_2.add_developer(user_1) + project_2.add_developer(user_2) + + project_3.add_developer(user_1) + project_3.add_developer(user_2) + end + + describe '#execute', :request_store do + let(:project_users) do + { + project_1 => [user_1, user_with_no_access], + project_2 => user_2 + } + end + + it 'avoids N+1 queries' do + control_input = project_users + control = ActiveRecord::QueryRecorder.new do + described_class.new(project_users: control_input).execute + end + + sample_input = control_input.merge(project_3 => user_2) + sample = ActiveRecord::QueryRecorder.new do + described_class.new(project_users: sample_input).execute + end + + expect(sample).not_to exceed_query_limit(control) + end + + it 'preloads the max access level used by project policies' do + described_class.new(project_users: project_users).execute + + policy_queries = ActiveRecord::QueryRecorder.new do 
+ project_users.each do |project, users| + Array.wrap(users).each do |user| + user.can?(:read_project, project) + end + end + end + + expect(policy_queries).not_to exceed_query_limit(0) + end + end +end diff --git a/spec/models/preloaders/users_max_access_level_in_projects_preloader_spec.rb b/spec/models/preloaders/users_max_access_level_in_projects_preloader_spec.rb deleted file mode 100644 index 7ecb6bb9861..00000000000 --- a/spec/models/preloaders/users_max_access_level_in_projects_preloader_spec.rb +++ /dev/null @@ -1,51 +0,0 @@ -# frozen_string_literal: true - -require 'spec_helper' -RSpec.describe Preloaders::UsersMaxAccessLevelInProjectsPreloader do - let_it_be(:user1) { create(:user) } - let_it_be(:user2) { create(:user) } - - let_it_be(:project_1) { create(:project) } - let_it_be(:project_2) { create(:project) } - let_it_be(:project_3) { create(:project) } - - let(:projects) { [project_1, project_2, project_3] } - let(:users) { [user1, user2] } - - before do - project_1.add_developer(user1) - project_1.add_developer(user2) - - project_2.add_developer(user1) - project_2.add_developer(user2) - - project_3.add_developer(user1) - project_3.add_developer(user2) - end - - context 'preload maximum access level to avoid querying project_authorizations', :request_store do - it 'avoids N+1 queries', :request_store do - Preloaders::UsersMaxAccessLevelInProjectsPreloader.new(projects: projects, users: users).execute - - expect(count_queries).to eq(0) - end - - it 'runs N queries without preloading' do - query_count_without_preload = count_queries - - Preloaders::UsersMaxAccessLevelInProjectsPreloader.new(projects: projects, users: users).execute - count_queries_with_preload = count_queries - - expect(count_queries_with_preload).to be < query_count_without_preload - end - end - - def count_queries - ActiveRecord::QueryRecorder.new do - projects.each do |project| - user1.can?(:read_project, project) - user2.can?(:read_project, project) - end - end.count - end -end diff 
--git a/spec/models/project_label_spec.rb b/spec/models/project_label_spec.rb index f451c2905e6..ba9ea759c6a 100644 --- a/spec/models/project_label_spec.rb +++ b/spec/models/project_label_spec.rb @@ -119,4 +119,39 @@ RSpec.describe ProjectLabel do end end end + + describe '#preloaded_parent_container' do + let_it_be(:label) { create(:label) } + + before do + label.reload # ensure associations are not loaded + end + + context 'when project is loaded' do + it 'does not invoke a DB query' do + label.project + + count = ActiveRecord::QueryRecorder.new { label.preloaded_parent_container }.count + expect(count).to eq(0) + expect(label.preloaded_parent_container).to eq(label.project) + end + end + + context 'when parent_container is loaded' do + it 'does not invoke a DB query' do + label.parent_container + + count = ActiveRecord::QueryRecorder.new { label.preloaded_parent_container }.count + expect(count).to eq(0) + expect(label.preloaded_parent_container).to eq(label.parent_container) + end + end + + context 'when none of them are loaded' do + it 'invokes a DB query' do + count = ActiveRecord::QueryRecorder.new { label.preloaded_parent_container }.count + expect(count).to eq(1) + end + end + end end diff --git a/spec/models/project_setting_spec.rb b/spec/models/project_setting_spec.rb index 42433a2a84a..f3d6d0ff006 100644 --- a/spec/models/project_setting_spec.rb +++ b/spec/models/project_setting_spec.rb @@ -207,4 +207,34 @@ RSpec.describe ProjectSetting, type: :model do end end end + + describe '#runner_registration_enabled' do + let_it_be(:settings) { create(:project_setting) } + let_it_be(:group) { create(:group) } + let_it_be(:project) { create(:project, project_setting: settings, group: group) } + + it 'returns true' do + expect(project.runner_registration_enabled).to eq true + end + + context 'when project has runner registration disabled' do + before do + project.update!(runner_registration_enabled: false) + end + + it 'returns false' do + 
expect(project.runner_registration_enabled).to eq false + end + end + + context 'when all projects have runner registration disabled' do + before do + stub_application_setting(valid_runner_registrars: ['group']) + end + + it 'returns false' do + expect(project.runner_registration_enabled).to eq false + end + end + end end diff --git a/spec/models/project_spec.rb b/spec/models/project_spec.rb index 15e5db5af60..00a94c80198 100644 --- a/spec/models/project_spec.rb +++ b/spec/models/project_spec.rb @@ -42,7 +42,9 @@ RSpec.describe Project, factory_default: :keep, feature_category: :projects do it { is_expected.to have_many(:protected_branches) } it { is_expected.to have_many(:exported_protected_branches) } it { is_expected.to have_one(:wiki_repository).class_name('Projects::WikiRepository').inverse_of(:project) } + it { is_expected.to have_one(:design_management_repository).class_name('DesignManagement::Repository').inverse_of(:project) } it { is_expected.to have_one(:slack_integration) } + it { is_expected.to have_one(:catalog_resource) } it { is_expected.to have_one(:microsoft_teams_integration) } it { is_expected.to have_one(:mattermost_integration) } it { is_expected.to have_one(:hangouts_chat_integration) } @@ -141,6 +143,7 @@ RSpec.describe Project, factory_default: :keep, feature_category: :projects do it { is_expected.to have_many(:package_files).class_name('Packages::PackageFile') } it { is_expected.to have_many(:rpm_repository_files).class_name('Packages::Rpm::RepositoryFile').inverse_of(:project).dependent(:destroy) } it { is_expected.to have_many(:debian_distributions).class_name('Packages::Debian::ProjectDistribution').dependent(:destroy) } + it { is_expected.to have_many(:npm_metadata_caches).class_name('Packages::Npm::MetadataCache') } it { is_expected.to have_one(:packages_cleanup_policy).class_name('Packages::Cleanup::Policy').inverse_of(:project) } it { is_expected.to have_many(:pipeline_artifacts).dependent(:restrict_with_error) } it { 
is_expected.to have_many(:terraform_states).class_name('Terraform::State').inverse_of(:project) } @@ -2283,8 +2286,8 @@ RSpec.describe Project, factory_default: :keep, feature_category: :projects do subject(:project) { build(:project, :private, namespace: namespace, service_desk_enabled: true) } before do - allow(Gitlab::IncomingEmail).to receive(:enabled?).and_return(true) - allow(Gitlab::IncomingEmail).to receive(:supports_wildcard?).and_return(true) + allow(Gitlab::Email::IncomingEmail).to receive(:enabled?).and_return(true) + allow(Gitlab::Email::IncomingEmail).to receive(:supports_wildcard?).and_return(true) end it 'is enabled' do @@ -2324,7 +2327,7 @@ RSpec.describe Project, factory_default: :keep, feature_category: :projects do context 'when service_desk_email is disabled' do before do - allow(::Gitlab::ServiceDeskEmail).to receive(:enabled?).and_return(false) + allow(::Gitlab::Email::ServiceDeskEmail).to receive(:enabled?).and_return(false) end it_behaves_like 'with incoming email address' @@ -2333,7 +2336,7 @@ RSpec.describe Project, factory_default: :keep, feature_category: :projects do context 'when service_desk_email is enabled' do before do config = double(enabled: true, address: 'foo+%{key}@bar.com') - allow(::Gitlab::ServiceDeskEmail).to receive(:config).and_return(config) + allow(::Gitlab::Email::ServiceDeskEmail).to receive(:config).and_return(config) end context 'when project_key is set' do @@ -2871,6 +2874,21 @@ RSpec.describe Project, factory_default: :keep, feature_category: :projects do end end + describe '#pages_unique_host', feature_category: :pages do + let(:project_settings) { create(:project_setting, pages_unique_domain: 'unique-domain') } + let(:project) { build(:project, project_setting: project_settings) } + let(:domain) { 'example.com' } + + before do + allow(Settings.pages).to receive(:host).and_return(domain) + allow(Gitlab.config.pages).to receive(:url).and_return("http://#{domain}") + end + + it 'returns the pages unique url' do + 
expect(project.pages_unique_host).to eq('unique-domain.example.com') + end + end + describe '#pages_namespace_url', feature_category: :pages do let(:group) { create(:group, name: group_name) } let(:project) { create(:project, namespace: group, name: project_name) } @@ -5805,8 +5823,19 @@ RSpec.describe Project, factory_default: :keep, feature_category: :projects do let_it_be(:project) { create(:project) } it 'exposes API v4 URL' do - expect(project.api_variables.first[:key]).to eq 'CI_API_V4_URL' - expect(project.api_variables.first[:value]).to include '/api/v4' + v4_variable = project.api_variables.find { |variable| variable[:key] == "CI_API_V4_URL" } + + expect(v4_variable).not_to be_nil + expect(v4_variable[:key]).to eq 'CI_API_V4_URL' + expect(v4_variable[:value]).to end_with '/api/v4' + end + + it 'exposes API GraphQL URL' do + graphql_variable = project.api_variables.find { |variable| variable[:key] == "CI_API_GRAPHQL_URL" } + + expect(graphql_variable).not_to be_nil + expect(graphql_variable[:key]).to eq 'CI_API_GRAPHQL_URL' + expect(graphql_variable[:value]).to end_with '/api/graphql' end it 'contains a URL variable for every supported API version' do @@ -5821,7 +5850,7 @@ RSpec.describe Project, factory_default: :keep, feature_category: :projects do end expect(project.api_variables.map { |variable| variable[:key] }) - .to contain_exactly(*required_variables) + .to include(*required_variables) end end @@ -5919,7 +5948,7 @@ RSpec.describe Project, factory_default: :keep, feature_category: :projects do expect(project).to receive(:after_create_default_branch) expect(project).to receive(:refresh_markdown_cache!) 
expect(InternalId).to receive(:flush_records!).with(project: project) - expect(ProjectCacheWorker).to receive(:perform_async).with(project.id, [], [:repository_size]) + expect(ProjectCacheWorker).to receive(:perform_async).with(project.id, [], [:repository_size, :wiki_size]) expect(DetectRepositoryLanguagesWorker).to receive(:perform_async).with(project.id) expect(AuthorizedProjectUpdate::ProjectRecalculateWorker).to receive(:perform_async).with(project.id) expect(project).to receive(:set_full_path) @@ -7431,6 +7460,7 @@ RSpec.describe Project, factory_default: :keep, feature_category: :projects do context 'when feature flag `group_protected_branches` enabled' do before do stub_feature_flags(group_protected_branches: true) + stub_feature_flags(allow_protected_branches_for_group: true) end it 'return all protected branches' do @@ -7441,6 +7471,7 @@ RSpec.describe Project, factory_default: :keep, feature_category: :projects do context 'when feature flag `group_protected_branches` disabled' do before do stub_feature_flags(group_protected_branches: false) + stub_feature_flags(allow_protected_branches_for_group: false) end it 'return only project-level protected branches' do @@ -8570,6 +8601,16 @@ RSpec.describe Project, factory_default: :keep, feature_category: :projects do end end + describe '#content_editor_on_issues_feature_flag_enabled?' do + let_it_be(:group_project) { create(:project, :in_subgroup) } + + it_behaves_like 'checks parent group feature flag' do + let(:feature_flag_method) { :content_editor_on_issues_feature_flag_enabled? } + let(:feature_flag) { :content_editor_on_issues } + let(:subject_project) { group_project } + end + end + describe '#work_items_mvc_feature_flag_enabled?' 
do let_it_be(:group_project) { create(:project, :in_subgroup) } diff --git a/spec/models/project_wiki_spec.rb b/spec/models/project_wiki_spec.rb index c04fc70deca..92f256ea71a 100644 --- a/spec/models/project_wiki_spec.rb +++ b/spec/models/project_wiki_spec.rb @@ -18,6 +18,33 @@ RSpec.describe ProjectWiki do end end + describe '#create_wiki_repository' do + context 'when a project_wiki_repositories record does not exist' do + let_it_be(:wiki_container) { create(:project) } + + it 'creates a new record' do + expect { subject.create_wiki_repository }.to change { wiki_container.wiki_repository } + .from(nil).to(kind_of(Projects::WikiRepository)) + end + + context 'on a read-only instance' do + before do + allow(Gitlab::Database).to receive(:read_only?).and_return(true) + end + + it 'does not attempt to create a new record' do + expect { subject.create_wiki_repository }.not_to change { wiki_container.wiki_repository } + end + end + end + + context 'when a project_wiki_repositories record exists' do + it 'does not create a new record in the database' do + expect { subject.create_wiki_repository }.not_to change { wiki_container.wiki_repository } + end + end + end + describe '#after_wiki_activity' do it 'updates project activity' do wiki_container.update!( diff --git a/spec/models/projects/data_transfer_spec.rb b/spec/models/projects/data_transfer_spec.rb index ab798185bbb..49be35662c8 100644 --- a/spec/models/projects/data_transfer_spec.rb +++ b/spec/models/projects/data_transfer_spec.rb @@ -19,6 +19,12 @@ RSpec.describe Projects::DataTransfer, feature_category: :source_code_management end describe 'scopes' do + let(:dates) { %w[2023-01-01 2023-02-01 2023-03-01] } + + before do + dates.each { |date| create(:project_data_transfer, project: project, date: date) } + end + describe '.current_month' do subject { described_class.current_month } @@ -31,6 +37,26 @@ RSpec.describe Projects::DataTransfer, feature_category: :source_code_management end end end + + describe 
'.with_project_between_dates' do + subject do + described_class.with_project_between_dates(project, Date.new(2023, 2, 1), Date.new(2023, 3, 1)) + end + + it 'returns the correct number of results' do + expect(subject.size).to eq(2) + end + end + + describe '.with_namespace_between_dates' do + subject do + described_class.with_namespace_between_dates(project.namespace, Date.new(2023, 2, 1), Date.new(2023, 3, 1)) + end + + it 'returns the correct number of results' do + expect(subject.select(:namespace_id).to_a.size).to eq(2) + end + end end describe '.beginning_of_month' do diff --git a/spec/models/protected_branch_spec.rb b/spec/models/protected_branch_spec.rb index c99c92e6c19..0a75250b68c 100644 --- a/spec/models/protected_branch_spec.rb +++ b/spec/models/protected_branch_spec.rb @@ -13,6 +13,30 @@ RSpec.describe ProtectedBranch, feature_category: :source_code_management do describe 'Validation' do it { is_expected.to validate_presence_of(:name) } + context 'uniqueness' do + let(:protected_branch) { build(:protected_branch) } + + subject { protected_branch } + + it { is_expected.to validate_uniqueness_of(:name).scoped_to([:project_id, :namespace_id]) } + + context 'when the protected_branch was saved previously' do + before do + protected_branch.save! 
+ end + + it { is_expected.not_to validate_uniqueness_of(:name) } + + context 'and name is changed' do + before do + protected_branch.name = "#{protected_branch.name} + something else" + end + + it { is_expected.to validate_uniqueness_of(:name).scoped_to([:project_id, :namespace_id]) } + end + end + end + describe '#validate_either_project_or_top_group' do context 'when protected branch does not have project or group association' do it 'validate failed' do @@ -311,6 +335,7 @@ RSpec.describe ProtectedBranch, feature_category: :source_code_management do context "when feature flag disabled" do before do stub_feature_flags(group_protected_branches: false) + stub_feature_flags(allow_protected_branches_for_group: false) end let(:subject_branch) { create(:protected_branch, allow_force_push: allow_force_push, name: "foo") } @@ -350,6 +375,7 @@ RSpec.describe ProtectedBranch, feature_category: :source_code_management do with_them do before do stub_feature_flags(group_protected_branches: true) + stub_feature_flags(allow_protected_branches_for_group: true) unless group_level_value.nil? 
create(:protected_branch, allow_force_push: group_level_value, name: "foo", project: nil, group: group) @@ -403,6 +429,7 @@ RSpec.describe ProtectedBranch, feature_category: :source_code_management do context 'when feature flag enabled' do before do stub_feature_flags(group_protected_branches: true) + stub_feature_flags(allow_protected_branches_for_group: true) end it 'call `all_protected_branches`' do @@ -415,6 +442,7 @@ RSpec.describe ProtectedBranch, feature_category: :source_code_management do context 'when feature flag disabled' do before do stub_feature_flags(group_protected_branches: false) + stub_feature_flags(allow_protected_branches_for_group: false) end it 'call `protected_branches`' do diff --git a/spec/models/repository_spec.rb b/spec/models/repository_spec.rb index f970e818db9..72011693e20 100644 --- a/spec/models/repository_spec.rb +++ b/spec/models/repository_spec.rb @@ -597,6 +597,15 @@ RSpec.describe Repository, feature_category: :source_code_management do end describe '#list_commits_by' do + it 'returns commits when no filter is applied' do + commit_ids = repository.list_commits_by(nil, 'master', limit: 2).map(&:id) + + expect(commit_ids).to include( + 'b83d6e391c22777fca1ed3012fce84f633d7fed0', + '498214de67004b1da3d820901307bed2a68a8ef6' + ) + end + it 'returns commits with messages containing a given string' do commit_ids = repository.list_commits_by('test text', 'master').map(&:id) diff --git a/spec/models/resource_events/issue_assignment_event_spec.rb b/spec/models/resource_events/issue_assignment_event_spec.rb new file mode 100644 index 00000000000..bc217da2812 --- /dev/null +++ b/spec/models/resource_events/issue_assignment_event_spec.rb @@ -0,0 +1,17 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe ResourceEvents::IssueAssignmentEvent, feature_category: :value_stream_management, type: :model do + subject(:event) { build(:issue_assignment_event) } + + describe 'associations' do + it { is_expected.to 
belong_to(:user) } + it { is_expected.to belong_to(:issue) } + end + + describe 'validations' do + it { is_expected.to be_valid } + it { is_expected.to validate_presence_of(:issue) } + end +end diff --git a/spec/models/resource_events/merge_request_assignment_event_spec.rb b/spec/models/resource_events/merge_request_assignment_event_spec.rb new file mode 100644 index 00000000000..15f4c088333 --- /dev/null +++ b/spec/models/resource_events/merge_request_assignment_event_spec.rb @@ -0,0 +1,17 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe ResourceEvents::MergeRequestAssignmentEvent, feature_category: :value_stream_management, type: :model do + subject(:event) { build(:merge_request_assignment_event) } + + describe 'associations' do + it { is_expected.to belong_to(:user) } + it { is_expected.to belong_to(:merge_request) } + end + + describe 'validations' do + it { is_expected.to be_valid } + it { is_expected.to validate_presence_of(:merge_request) } + end +end diff --git a/spec/models/resource_milestone_event_spec.rb b/spec/models/resource_milestone_event_spec.rb index d237a16da8f..80351862fc1 100644 --- a/spec/models/resource_milestone_event_spec.rb +++ b/spec/models/resource_milestone_event_spec.rb @@ -18,6 +18,24 @@ RSpec.describe ResourceMilestoneEvent, feature_category: :team_planning, type: : it { is_expected.to belong_to(:milestone) } end + describe 'scopes' do + describe '.aliased_for_timebox_report', :freeze_time do + let!(:event) { create(:resource_milestone_event, milestone: milestone) } + + let(:milestone) { create(:milestone) } + let(:scope) { described_class.aliased_for_timebox_report.first } + + it 'returns correct values with aliased names', :aggregate_failures do + expect(scope.event_type).to eq('timebox') + expect(scope.id).to eq(event.id) + expect(scope.issue_id).to eq(event.issue_id) + expect(scope.value).to eq(milestone.id) + expect(scope.action).to eq(event.action) + expect(scope.created_at).to eq(event.created_at) + 
end + end + end + describe '#milestone_title' do let(:milestone) { create(:milestone, title: 'v2.3') } let(:event) { create(:resource_milestone_event, milestone: milestone) } diff --git a/spec/models/resource_state_event_spec.rb b/spec/models/resource_state_event_spec.rb index a6d6b507b69..699720b564a 100644 --- a/spec/models/resource_state_event_spec.rb +++ b/spec/models/resource_state_event_spec.rb @@ -41,6 +41,23 @@ RSpec.describe ResourceStateEvent, feature_category: :team_planning, type: :mode end end + describe 'scopes' do + describe '.aliased_for_timebox_report', :freeze_time do + let!(:event) { create(:resource_state_event, issue: issue) } + + let(:scope) { described_class.aliased_for_timebox_report.first } + + it 'returns correct values with aliased names', :aggregate_failures do + expect(scope.event_type).to eq('state') + expect(scope.id).to eq(event.id) + expect(scope.issue_id).to eq(event.issue_id) + expect(scope.value).to eq(issue.state_id) + expect(scope.action).to eq(nil) + expect(scope.created_at).to eq(event.created_at) + end + end + end + context 'callbacks' do describe '#issue_usage_metrics' do describe 'when an issue is closed' do diff --git a/spec/models/service_desk/custom_email_credential_spec.rb b/spec/models/service_desk/custom_email_credential_spec.rb new file mode 100644 index 00000000000..a990b77128e --- /dev/null +++ b/spec/models/service_desk/custom_email_credential_spec.rb @@ -0,0 +1,67 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe ServiceDesk::CustomEmailCredential, feature_category: :service_desk do + let(:project) { build_stubbed(:project) } + let(:credential) { build_stubbed(:service_desk_custom_email_credential, project: project) } + let(:smtp_username) { "user@example.com" } + let(:smtp_password) { "supersecret" } + + describe 'validations' do + it { is_expected.to validate_presence_of(:project) } + + it { is_expected.to validate_presence_of(:smtp_address) } + it { is_expected.to 
validate_length_of(:smtp_address).is_at_most(255) } + it { is_expected.to allow_value('smtp.gmail.com').for(:smtp_address) } + it { is_expected.to allow_value('1.1.1.1').for(:smtp_address) } + it { is_expected.to allow_value('199.1.1.1').for(:smtp_address) } + it { is_expected.not_to allow_value('https://example.com').for(:smtp_address) } + it { is_expected.not_to allow_value('file://example').for(:smtp_address) } + it { is_expected.not_to allow_value('/example').for(:smtp_address) } + it { is_expected.not_to allow_value('localhost').for(:smtp_address) } + it { is_expected.not_to allow_value('127.0.0.1').for(:smtp_address) } + it { is_expected.not_to allow_value('192.168.12.12').for(:smtp_address) } # disallow local network + + it { is_expected.to validate_presence_of(:smtp_port) } + it { is_expected.to validate_numericality_of(:smtp_port).only_integer.is_greater_than(0) } + + it { is_expected.to validate_presence_of(:smtp_username) } + it { is_expected.to validate_length_of(:smtp_username).is_at_most(255) } + + it { is_expected.to validate_presence_of(:smtp_password) } + it { is_expected.to validate_length_of(:smtp_password).is_at_least(8).is_at_most(128) } + end + + describe 'encrypted #smtp_username' do + subject { build_stubbed(:service_desk_custom_email_credential, smtp_username: smtp_username) } + + it 'saves and retrieves the encrypted smtp username and iv correctly' do + expect(subject.encrypted_smtp_username).not_to be_nil + expect(subject.encrypted_smtp_username_iv).not_to be_nil + + expect(subject.smtp_username).to eq(smtp_username) + end + end + + describe 'encrypted #smtp_password' do + subject { build_stubbed(:service_desk_custom_email_credential, smtp_password: smtp_password) } + + it 'saves and retrieves the encrypted smtp password and iv correctly' do + expect(subject.encrypted_smtp_password).not_to be_nil + expect(subject.encrypted_smtp_password_iv).not_to be_nil + + expect(subject.smtp_password).to eq(smtp_password) + end + end + + describe 
'associations' do + it { is_expected.to belong_to(:project) } + + it 'can access service desk setting from project' do + setting = build_stubbed(:service_desk_setting, project: project) + + expect(credential.service_desk_setting).to eq(setting) + end + end +end diff --git a/spec/models/service_desk_setting_spec.rb b/spec/models/service_desk_setting_spec.rb index b99494e6736..dba33e829c6 100644 --- a/spec/models/service_desk_setting_spec.rb +++ b/spec/models/service_desk_setting_spec.rb @@ -16,8 +16,6 @@ RSpec.describe ServiceDeskSetting, feature_category: :service_desk do it { is_expected.not_to allow_value('abc 12').for(:project_key).with_message("can contain only lowercase letters, digits, and '_'.") } it { is_expected.not_to allow_value('Big val').for(:project_key) } it { is_expected.to validate_length_of(:custom_email).is_at_most(255) } - it { is_expected.to validate_length_of(:custom_email_smtp_address).is_at_most(255) } - it { is_expected.to validate_length_of(:custom_email_smtp_username).is_at_most(255) } describe '#custom_email_enabled' do it { expect(subject.custom_email_enabled).to be_falsey } @@ -27,7 +25,6 @@ RSpec.describe ServiceDeskSetting, feature_category: :service_desk do context 'when custom_email_enabled is true' do before do # Test without ServiceDesk::CustomEmailVerification for simplicity - # See dedicated simplified tests below subject.custom_email_enabled = true end @@ -47,28 +44,6 @@ RSpec.describe ServiceDeskSetting, feature_category: :service_desk do it { is_expected.not_to allow_value('">"@example.org').for(:custom_email) } it { is_expected.not_to allow_value('file://example').for(:custom_email) } it { is_expected.not_to allow_value('no email at all').for(:custom_email) } - - it { is_expected.to validate_presence_of(:custom_email_smtp_username) } - - it { is_expected.to validate_presence_of(:custom_email_smtp_port) } - it { is_expected.to validate_numericality_of(:custom_email_smtp_port).only_integer.is_greater_than(0) } - - it { 
is_expected.to validate_presence_of(:custom_email_smtp_address) } - it { is_expected.to allow_value('smtp.gmail.com').for(:custom_email_smtp_address) } - it { is_expected.not_to allow_value('https://example.com').for(:custom_email_smtp_address) } - it { is_expected.not_to allow_value('file://example').for(:custom_email_smtp_address) } - it { is_expected.not_to allow_value('/example').for(:custom_email_smtp_address) } - end - - context 'when custom email verification is present/was triggered' do - before do - subject.project.service_desk_custom_email_verification = verification - end - - it { is_expected.to validate_presence_of(:custom_email) } - it { is_expected.to validate_presence_of(:custom_email_smtp_username) } - it { is_expected.to validate_presence_of(:custom_email_smtp_port) } - it { is_expected.to validate_presence_of(:custom_email_smtp_address) } end describe '#valid_issue_template' do @@ -138,36 +113,11 @@ RSpec.describe ServiceDeskSetting, feature_category: :service_desk do end end - describe 'encrypted #custom_email_smtp_password' do - let_it_be(:settings) do - create( - :service_desk_setting, - custom_email_enabled: true, - custom_email: 'support@example.com', - custom_email_smtp_address: 'smtp.example.com', - custom_email_smtp_port: 587, - custom_email_smtp_username: 'support@example.com', - custom_email_smtp_password: 'supersecret' - ) - end - - it 'saves and retrieves the encrypted custom email smtp password and iv correctly' do - expect(settings.encrypted_custom_email_smtp_password).not_to be_nil - expect(settings.encrypted_custom_email_smtp_password_iv).not_to be_nil - - expect(settings.custom_email_smtp_password).to eq('supersecret') - end - end - describe 'associations' do let(:custom_email_settings) do build_stubbed( :service_desk_setting, - custom_email: 'support@example.com', - custom_email_smtp_address: 'smtp.example.com', - custom_email_smtp_port: 587, - custom_email_smtp_username: 'support@example.com', - custom_email_smtp_password: 
'supersecret' + custom_email: 'support@example.com' ) end diff --git a/spec/models/terraform/state_spec.rb b/spec/models/terraform/state_spec.rb index 533e6e4bd7b..fc0a6432149 100644 --- a/spec/models/terraform/state_spec.rb +++ b/spec/models/terraform/state_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe Terraform::State do +RSpec.describe Terraform::State, feature_category: :infrastructure_as_code do subject { create(:terraform_state, :with_version) } it { is_expected.to belong_to(:project) } diff --git a/spec/models/terraform/state_version_spec.rb b/spec/models/terraform/state_version_spec.rb index 477041117cb..a476b9e79ae 100644 --- a/spec/models/terraform/state_version_spec.rb +++ b/spec/models/terraform/state_version_spec.rb @@ -2,11 +2,11 @@ require 'spec_helper' -RSpec.describe Terraform::StateVersion do +RSpec.describe Terraform::StateVersion, feature_category: :infrastructure_as_code do it { is_expected.to be_a FileStoreMounter } it { is_expected.to be_a EachBatch } - it { is_expected.to belong_to(:terraform_state).required } + it { is_expected.to belong_to(:terraform_state).required.touch } it { is_expected.to belong_to(:created_by_user).class_name('User').optional } it { is_expected.to belong_to(:build).class_name('Ci::Build').optional } diff --git a/spec/models/u2f_registration_spec.rb b/spec/models/u2f_registration_spec.rb index 1fab3882c2a..9c8d786ecb1 100644 --- a/spec/models/u2f_registration_spec.rb +++ b/spec/models/u2f_registration_spec.rb @@ -62,72 +62,6 @@ RSpec.describe U2fRegistration do end end - describe 'callbacks' do - describe 'after create' do - shared_examples_for 'creates webauthn registration' do - it 'creates webauthn registration' do - u2f_registration = create_u2f_registration - webauthn_registration = WebauthnRegistration.where(u2f_registration_id: u2f_registration.id) - expect(webauthn_registration).to exist - end - end - - it_behaves_like 'creates webauthn registration' - - context 'when the u2f_registration has a 
blank name' do - let(:u2f_registration_name) { '' } - - it_behaves_like 'creates webauthn registration' - end - - context 'when the u2f_registration has the name as `nil`' do - let(:u2f_registration_name) { nil } - - it_behaves_like 'creates webauthn registration' - end - - it 'logs error' do - allow(Gitlab::Auth::U2fWebauthnConverter).to receive(:new).and_raise('boom!') - - allow_next_instance_of(U2fRegistration) do |u2f_registration| - allow(u2f_registration).to receive(:id).and_return(123) - end - - expect(Gitlab::ErrorTracking).to( - receive(:track_exception).with(kind_of(StandardError), - u2f_registration_id: 123)) - - create_u2f_registration - end - end - - describe 'after update' do - context 'when counter is updated' do - it 'updates the webauthn registration counter to be the same value' do - u2f_registration = create_u2f_registration - new_counter = u2f_registration.counter + 1 - webauthn_registration = WebauthnRegistration.find_by(u2f_registration_id: u2f_registration.id) - - u2f_registration.update!(counter: new_counter) - - expect(u2f_registration.reload.counter).to eq(new_counter) - expect(webauthn_registration.reload.counter).to eq(new_counter) - end - end - - context 'when sign count of registration is not updated' do - it 'does not update the counter' do - u2f_registration = create_u2f_registration - webauthn_registration = WebauthnRegistration.find_by(u2f_registration_id: u2f_registration.id) - - expect do - u2f_registration.update!(name: 'a new name') - end.not_to change { webauthn_registration.counter } - end - end - end - end - def create_u2f_registration create( :u2f_registration, diff --git a/spec/models/user_preference_spec.rb b/spec/models/user_preference_spec.rb index a6f64c90657..5c368b1632b 100644 --- a/spec/models/user_preference_spec.rb +++ b/spec/models/user_preference_spec.rb @@ -54,6 +54,13 @@ RSpec.describe UserPreference do it { is_expected.not_to allow_value(nil).for(:use_legacy_web_ide) } it { is_expected.not_to 
allow_value("").for(:use_legacy_web_ide) } end + + describe 'pass_user_identities_to_ci_jwt' do + it { is_expected.to allow_value(true).for(:pass_user_identities_to_ci_jwt) } + it { is_expected.to allow_value(false).for(:pass_user_identities_to_ci_jwt) } + it { is_expected.not_to allow_value(nil).for(:pass_user_identities_to_ci_jwt) } + it { is_expected.not_to allow_value("").for(:pass_user_identities_to_ci_jwt) } + end end describe 'notes filters global keys' do diff --git a/spec/models/user_spec.rb b/spec/models/user_spec.rb index 04f1bffce0a..bc677aca0f4 100644 --- a/spec/models/user_spec.rb +++ b/spec/models/user_spec.rb @@ -84,6 +84,12 @@ RSpec.describe User, feature_category: :user_profile do it { is_expected.to delegate_method(:use_new_navigation).to(:user_preference) } it { is_expected.to delegate_method(:use_new_navigation=).to(:user_preference).with_arguments(:args) } + it { is_expected.to delegate_method(:pinned_nav_items).to(:user_preference) } + it { is_expected.to delegate_method(:pinned_nav_items=).to(:user_preference).with_arguments(:args) } + + it { is_expected.to delegate_method(:achievements_enabled).to(:user_preference) } + it { is_expected.to delegate_method(:achievements_enabled=).to(:user_preference).with_arguments(:args) } + it { is_expected.to delegate_method(:job_title).to(:user_detail).allow_nil } it { is_expected.to delegate_method(:job_title=).to(:user_detail).with_arguments(:args).allow_nil } @@ -175,6 +181,9 @@ RSpec.describe User, feature_category: :user_profile do it { is_expected.to have_many(:achievements).through(:user_achievements).class_name('Achievements::Achievement').inverse_of(:users) } it { is_expected.to have_many(:namespace_commit_emails).class_name('Users::NamespaceCommitEmail') } it { is_expected.to have_many(:audit_events).with_foreign_key(:author_id).inverse_of(:user) } + it { is_expected.to have_many(:abuse_trust_scores).class_name('Abuse::TrustScore') } + it { is_expected.to 
have_many(:issue_assignment_events).class_name('ResourceEvents::IssueAssignmentEvent') } + it { is_expected.to have_many(:merge_request_assignment_events).class_name('ResourceEvents::MergeRequestAssignmentEvent') } it do is_expected.to have_many(:alert_assignees).class_name('::AlertManagement::AlertAssignee').inverse_of(:assignee) @@ -2089,7 +2098,7 @@ RSpec.describe User, feature_category: :user_profile do let_it_be(:incoming_email_token) { 'ilqx6jm1u945macft4eff0nw' } it 'returns incoming email token when supported' do - allow(Gitlab::IncomingEmail).to receive(:supports_issue_creation?).and_return(true) + allow(Gitlab::Email::IncomingEmail).to receive(:supports_issue_creation?).and_return(true) user = create(:user, incoming_email_token: incoming_email_token) @@ -2097,7 +2106,7 @@ RSpec.describe User, feature_category: :user_profile do end it 'returns `nil` when not supported' do - allow(Gitlab::IncomingEmail).to receive(:supports_issue_creation?).and_return(false) + allow(Gitlab::Email::IncomingEmail).to receive(:supports_issue_creation?).and_return(false) user = create(:user, incoming_email_token: incoming_email_token) @@ -2363,6 +2372,18 @@ RSpec.describe User, feature_category: :user_profile do expect(user.forkable_namespaces).to contain_exactly(user.namespace, group, subgroup, developer_group) end + + it 'includes groups where the user has access via group shares to create projects' do + shared_group = create(:group) + create(:group_group_link, :maintainer, + shared_with_group: group, + shared_group: shared_group + ) + + expect(user.forkable_namespaces).to contain_exactly( + user.namespace, group, subgroup, shared_group + ) + end end describe '#manageable_groups' do @@ -6815,7 +6836,8 @@ RSpec.describe User, feature_category: :user_profile do { user_type: :support_bot }, { user_type: :security_bot }, { user_type: :automation_bot }, - { user_type: :admin_bot } + { user_type: :admin_bot }, + { user_type: :llm_bot } ] end diff --git 
a/spec/models/work_item_spec.rb b/spec/models/work_item_spec.rb index 13f17e11276..4cd5d5901e2 100644 --- a/spec/models/work_item_spec.rb +++ b/spec/models/work_item_spec.rb @@ -99,6 +99,20 @@ RSpec.describe WorkItem, feature_category: :portfolio_management do end end + describe '#get_widget' do + let(:work_item) { build(:work_item, description: 'foo') } + + it 'returns widget object' do + expect(work_item.get_widget(:description)).to be_an_instance_of(WorkItems::Widgets::Description) + end + + context 'when widget does not exist' do + it 'returns nil' do + expect(work_item.get_widget(:nop)).to be_nil + end + end + end + describe '#supports_assignee?' do let(:work_item) { build(:work_item, :task) } @@ -368,4 +382,179 @@ RSpec.describe WorkItem, feature_category: :portfolio_management do end end end + + describe '#allowed_work_item_type_change' do + let_it_be(:all_types) { WorkItems::Type::BASE_TYPES.keys } + + it 'is possible to change between all types', :aggregate_failures do + all_types.each do |type| + work_item = build(:work_item, type, project: reusable_project) + + (all_types - [type]).each do |new_type| + work_item.work_item_type_id = WorkItems::Type.default_by_type(new_type).id + + expect(work_item).to be_valid, "#{type} to #{new_type}" + end + end + end + + context 'with ParentLink relation' do + let_it_be(:old_type) { create(:work_item_type) } + let_it_be(:new_type) { create(:work_item_type) } + + context 'with hierarchy restrictions' do + let_it_be(:child_type) { create(:work_item_type) } + + let_it_be_with_reload(:parent) { create(:work_item, work_item_type: old_type, project: reusable_project) } + let_it_be_with_reload(:child) { create(:work_item, work_item_type: child_type, project: reusable_project) } + + let_it_be(:hierarchy_restriction) do + create(:hierarchy_restriction, parent_type: old_type, child_type: child_type) + end + + let_it_be(:link) { create(:parent_link, work_item_parent: parent, work_item: child) } + + context 'when child items 
restrict the type change' do + before do + parent.work_item_type = new_type + end + + context 'when child items are compatible with the new type' do + let_it_be(:hierarchy_restriction_new_type) do + create(:hierarchy_restriction, parent_type: new_type, child_type: child_type) + end + + it 'allows to change types' do + expect(parent).to be_valid + expect(parent.errors).to be_empty + end + end + + context 'when child items are not compatible with the new type' do + it 'does not allow to change types' do + expect(parent).not_to be_valid + expect(parent.errors[:work_item_type_id]) + .to include("cannot be changed to #{new_type.name} with these child item types.") + end + end + end + + context 'when the parent restricts the type change' do + before do + child.work_item_type = new_type + end + + it 'does not allow to change types' do + expect(child.valid?).to eq(false) + expect(child.errors[:work_item_type_id]) + .to include("cannot be changed to #{new_type.name} with #{parent.work_item_type.name} as parent type.") + end + end + end + + context 'with hierarchy depth restriction' do + let_it_be_with_reload(:item1) { create(:work_item, work_item_type: new_type, project: reusable_project) } + let_it_be_with_reload(:item2) { create(:work_item, work_item_type: new_type, project: reusable_project) } + let_it_be_with_reload(:item3) { create(:work_item, work_item_type: new_type, project: reusable_project) } + let_it_be_with_reload(:item4) { create(:work_item, work_item_type: new_type, project: reusable_project) } + + let_it_be(:hierarchy_restriction1) do + create(:hierarchy_restriction, parent_type: old_type, child_type: new_type) + end + + let_it_be(:hierarchy_restriction2) do + create(:hierarchy_restriction, parent_type: new_type, child_type: old_type) + end + + let_it_be_with_reload(:hierarchy_restriction3) do + create(:hierarchy_restriction, parent_type: new_type, child_type: new_type, maximum_depth: 4) + end + + let_it_be(:link1) { create(:parent_link, work_item_parent: 
item1, work_item: item2) } + let_it_be(:link2) { create(:parent_link, work_item_parent: item2, work_item: item3) } + let_it_be(:link3) { create(:parent_link, work_item_parent: item3, work_item: item4) } + + before do + hierarchy_restriction3.update!(maximum_depth: maximum_depth) + end + + shared_examples 'validates the depth correctly' do + before do + work_item.update!(work_item_type: old_type) + end + + context 'when it is valid' do + let(:maximum_depth) { 4 } + + it 'allows to change types' do + work_item.work_item_type = new_type + + expect(work_item).to be_valid + end + end + + context 'when it is not valid' do + let(:maximum_depth) { 3 } + + it 'does not allow to change types' do + work_item.work_item_type = new_type + + expect(work_item).not_to be_valid + expect(work_item.errors[:work_item_type_id]).to include("reached maximum depth") + end + end + end + + context 'with the highest ancestor' do + let_it_be_with_reload(:work_item) { item1 } + + it_behaves_like 'validates the depth correctly' + end + + context 'with a child item' do + let_it_be_with_reload(:work_item) { item2 } + + it_behaves_like 'validates the depth correctly' + end + + context 'with the last child item' do + let_it_be_with_reload(:work_item) { item4 } + + it_behaves_like 'validates the depth correctly' + end + + context 'when ancestor is still the old type' do + let_it_be(:hierarchy_restriction4) do + create(:hierarchy_restriction, parent_type: old_type, child_type: old_type) + end + + before do + item1.update!(work_item_type: old_type) + item2.update!(work_item_type: old_type) + end + + context 'when it exceeds maximum depth' do + let(:maximum_depth) { 2 } + + it 'does not allow to change types' do + item2.work_item_type = new_type + + expect(item2).not_to be_valid + expect(item2.errors[:work_item_type_id]).to include("reached maximum depth") + end + end + + context 'when it does not exceed maximum depth' do + let(:maximum_depth) { 3 } + + it 'does allow to change types' do + 
item2.work_item_type = new_type + + expect(item2).to be_valid + end + end + end + end + end + end end diff --git a/spec/models/work_items/resource_link_event_spec.rb b/spec/models/work_items/resource_link_event_spec.rb new file mode 100644 index 00000000000..67ca9e72bbc --- /dev/null +++ b/spec/models/work_items/resource_link_event_spec.rb @@ -0,0 +1,16 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe WorkItems::ResourceLinkEvent, type: :model, feature_category: :team_planning do + it_behaves_like 'a resource event' + + describe 'associations' do + it { is_expected.to belong_to(:work_item) } + it { is_expected.to belong_to(:child_work_item) } + end + + describe 'validation' do + it { is_expected.to validate_presence_of(:child_work_item) } + end +end diff --git a/spec/models/work_items/widget_definition_spec.rb b/spec/models/work_items/widget_definition_spec.rb index 3a4670c996f..a33e08a1bf2 100644 --- a/spec/models/work_items/widget_definition_spec.rb +++ b/spec/models/work_items/widget_definition_spec.rb @@ -12,7 +12,9 @@ RSpec.describe WorkItems::WidgetDefinition, feature_category: :team_planning do ::WorkItems::Widgets::StartAndDueDate, ::WorkItems::Widgets::Milestone, ::WorkItems::Widgets::Notes, - ::WorkItems::Widgets::Notifications + ::WorkItems::Widgets::Notifications, + ::WorkItems::Widgets::CurrentUserTodos, + ::WorkItems::Widgets::AwardEmoji ] if Gitlab.ee? 
diff --git a/spec/models/work_items/widgets/award_emoji_spec.rb b/spec/models/work_items/widgets/award_emoji_spec.rb new file mode 100644 index 00000000000..bb61aa41669 --- /dev/null +++ b/spec/models/work_items/widgets/award_emoji_spec.rb @@ -0,0 +1,30 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe WorkItems::Widgets::AwardEmoji, feature_category: :team_planning do + let_it_be(:work_item) { create(:work_item) } + let_it_be(:emoji1) { create(:award_emoji, name: 'star', awardable: work_item) } + let_it_be(:emoji2) { create(:award_emoji, :upvote, awardable: work_item) } + let_it_be(:emoji3) { create(:award_emoji, :downvote, awardable: work_item) } + + describe '.type' do + it { expect(described_class.type).to eq(:award_emoji) } + end + + describe '#type' do + it { expect(described_class.new(work_item).type).to eq(:award_emoji) } + end + + describe '#downvotes' do + it { expect(described_class.new(work_item).downvotes).to eq(1) } + end + + describe '#upvotes' do + it { expect(described_class.new(work_item).upvotes).to eq(1) } + end + + describe '#award_emoji' do + it { expect(described_class.new(work_item).award_emoji).to match_array([emoji1, emoji2, emoji3]) } + end +end diff --git a/spec/policies/achievements/user_achievement_policy_spec.rb b/spec/policies/achievements/user_achievement_policy_spec.rb new file mode 100644 index 00000000000..47f6188e178 --- /dev/null +++ b/spec/policies/achievements/user_achievement_policy_spec.rb @@ -0,0 +1,78 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe Achievements::UserAchievementPolicy, feature_category: :user_profile do + let(:maintainer) { create(:user) } + + let(:group) { create(:group, :public) } + + let(:current_user) { create(:user) } + let(:achievement) { create(:achievement, namespace: group) } + let(:achievement_owner) { create(:user) } + let(:user_achievement) { create(:user_achievement, achievement: achievement, user: achievement_owner) } + + before do + 
group.add_maintainer(maintainer) + end + + subject { described_class.new(current_user, user_achievement) } + + it 'is readable to everyone when user has public profile' do + is_expected.to be_allowed(:read_user_achievement) + end + + context 'when user has private profile' do + before do + achievement_owner.update!(private_profile: true) + end + + context 'for achievement owner' do + let(:current_user) { achievement_owner } + + it 'is visible' do + is_expected.to be_allowed(:read_user_achievement) + end + end + + context 'for group maintainer' do + let(:current_user) { maintainer } + + it 'is visible' do + is_expected.to be_allowed(:read_user_achievement) + end + end + + context 'for others' do + it 'is hidden' do + is_expected.not_to be_allowed(:read_user_achievement) + end + end + end + + context 'when group is private' do + let(:group) { create(:group, :private) } + + context 'for achievement owner' do + let(:current_user) { achievement_owner } + + it 'is hidden' do + is_expected.not_to be_allowed(:read_user_achievement) + end + end + + context 'for group maintainer' do + let(:current_user) { maintainer } + + it 'is visible' do + is_expected.to be_allowed(:read_user_achievement) + end + end + + context 'for others' do + it 'is hidden' do + is_expected.not_to be_allowed(:read_user_achievement) + end + end + end +end diff --git a/spec/policies/ci/build_policy_spec.rb b/spec/policies/ci/build_policy_spec.rb index fee4d76ca8f..77cfcab5c3e 100644 --- a/spec/policies/ci/build_policy_spec.rb +++ b/spec/policies/ci/build_policy_spec.rb @@ -121,8 +121,7 @@ RSpec.describe Ci::BuildPolicy do context 'when no one can push or merge to the branch' do before do - create(:protected_branch, :no_one_can_push, - name: build.ref, project: project) + create(:protected_branch, :no_one_can_push, name: build.ref, project: project) end it 'does not include ability to update build' do @@ -132,8 +131,7 @@ RSpec.describe Ci::BuildPolicy do context 'when developers can push to the branch' 
do before do - create(:protected_branch, :developers_can_merge, - name: build.ref, project: project) + create(:protected_branch, :developers_can_merge, name: build.ref, project: project) end it 'includes ability to update build' do @@ -143,8 +141,7 @@ RSpec.describe Ci::BuildPolicy do context 'when no one can create the tag' do before do - create(:protected_tag, :no_one_can_create, - name: build.ref, project: project) + create(:protected_tag, :no_one_can_create, name: build.ref, project: project) build.update!(tag: true) end @@ -156,8 +153,7 @@ RSpec.describe Ci::BuildPolicy do context 'when no one can create the tag but it is not a tag' do before do - create(:protected_tag, :no_one_can_create, - name: build.ref, project: project) + create(:protected_tag, :no_one_can_create, name: build.ref, project: project) end it 'includes ability to update build' do @@ -181,8 +177,7 @@ RSpec.describe Ci::BuildPolicy do context 'when the build was created for a protected ref' do before do - create(:protected_branch, :developers_can_push, - name: build.ref, project: project) + create(:protected_branch, :developers_can_push, name: build.ref, project: project) end it { expect(policy).to be_disallowed :erase_build } @@ -204,8 +199,7 @@ RSpec.describe Ci::BuildPolicy do let(:owner) { user } before do - create(:protected_branch, :no_one_can_push, :no_one_can_merge, - name: build.ref, project: project) + create(:protected_branch, :no_one_can_push, :no_one_can_merge, name: build.ref, project: project) end it { expect(policy).to be_disallowed :erase_build } @@ -219,8 +213,7 @@ RSpec.describe Ci::BuildPolicy do context 'when maintainers can push to the branch' do before do - create(:protected_branch, :maintainers_can_push, - name: build.ref, project: project) + create(:protected_branch, :maintainers_can_push, name: build.ref, project: project) end context 'when the build was created by the maintainer' do @@ -240,8 +233,7 @@ RSpec.describe Ci::BuildPolicy do let(:owner) { user } before do 
- create(:protected_branch, :no_one_can_push, :no_one_can_merge, - name: build.ref, project: project) + create(:protected_branch, :no_one_can_push, :no_one_can_merge, name: build.ref, project: project) end it { expect(policy).to be_disallowed :erase_build } @@ -257,8 +249,7 @@ RSpec.describe Ci::BuildPolicy do context 'when the build was created for a protected branch' do before do - create(:protected_branch, :developers_can_push, - name: build.ref, project: project) + create(:protected_branch, :developers_can_push, name: build.ref, project: project) end it { expect(policy).to be_allowed :erase_build } @@ -266,8 +257,7 @@ RSpec.describe Ci::BuildPolicy do context 'when the build was created for a protected tag' do before do - create(:protected_tag, :developers_can_create, - name: build.ref, project: project) + create(:protected_tag, :developers_can_create, name: build.ref, project: project) end it { expect(policy).to be_allowed :erase_build } diff --git a/spec/policies/ci/pipeline_policy_spec.rb b/spec/policies/ci/pipeline_policy_spec.rb index b68bb966820..8a5b80e3051 100644 --- a/spec/policies/ci/pipeline_policy_spec.rb +++ b/spec/policies/ci/pipeline_policy_spec.rb @@ -20,8 +20,7 @@ RSpec.describe Ci::PipelinePolicy, :models do context 'when no one can push or merge to the branch' do before do - create(:protected_branch, :no_one_can_push, - name: pipeline.ref, project: project) + create(:protected_branch, :no_one_can_push, name: pipeline.ref, project: project) end it 'does not include ability to update pipeline' do @@ -31,8 +30,7 @@ RSpec.describe Ci::PipelinePolicy, :models do context 'when developers can push to the branch' do before do - create(:protected_branch, :developers_can_merge, - name: pipeline.ref, project: project) + create(:protected_branch, :developers_can_merge, name: pipeline.ref, project: project) end it 'includes ability to update pipeline' do @@ -42,8 +40,7 @@ RSpec.describe Ci::PipelinePolicy, :models do context 'when no one can create the 
tag' do before do - create(:protected_tag, :no_one_can_create, - name: pipeline.ref, project: project) + create(:protected_tag, :no_one_can_create, name: pipeline.ref, project: project) pipeline.update!(tag: true) end @@ -55,8 +52,7 @@ RSpec.describe Ci::PipelinePolicy, :models do context 'when no one can create the tag but it is not a tag' do before do - create(:protected_tag, :no_one_can_create, - name: pipeline.ref, project: project) + create(:protected_tag, :no_one_can_create, name: pipeline.ref, project: project) end it 'includes ability to update pipeline' do @@ -119,8 +115,7 @@ RSpec.describe Ci::PipelinePolicy, :models do before do project.add_developer(user) - create(:protected_branch, :developers_can_merge, - name: pipeline.ref, project: project) + create(:protected_branch, :developers_can_merge, name: pipeline.ref, project: project) end it 'is enabled' do @@ -133,8 +128,7 @@ RSpec.describe Ci::PipelinePolicy, :models do before do project.add_developer(user) - create(:protected_branch, :developers_can_merge, - name: pipeline.ref, project: project) + create(:protected_branch, :developers_can_merge, name: pipeline.ref, project: project) end it 'is disabled' do diff --git a/spec/policies/ci/pipeline_schedule_policy_spec.rb b/spec/policies/ci/pipeline_schedule_policy_spec.rb index 92ad37145c0..7025eda1ba1 100644 --- a/spec/policies/ci/pipeline_schedule_policy_spec.rb +++ b/spec/policies/ci/pipeline_schedule_policy_spec.rb @@ -19,8 +19,7 @@ RSpec.describe Ci::PipelineSchedulePolicy, :models, :clean_gitlab_redis_cache do context 'when no one can push or merge to the branch' do before do - create(:protected_branch, :no_one_can_push, - name: pipeline_schedule.ref, project: project) + create(:protected_branch, :no_one_can_push, name: pipeline_schedule.ref, project: project) end it 'does not include ability to play pipeline schedule' do @@ -30,8 +29,7 @@ RSpec.describe Ci::PipelineSchedulePolicy, :models, :clean_gitlab_redis_cache do context 'when developers can 
push to the branch' do before do - create(:protected_branch, :developers_can_merge, - name: pipeline_schedule.ref, project: project) + create(:protected_branch, :developers_can_merge, name: pipeline_schedule.ref, project: project) end it 'includes ability to update pipeline' do @@ -45,8 +43,7 @@ RSpec.describe Ci::PipelineSchedulePolicy, :models, :clean_gitlab_redis_cache do before do pipeline_schedule.update!(ref: tag) - create(:protected_tag, :no_one_can_create, - name: pipeline_schedule.ref, project: project) + create(:protected_tag, :no_one_can_create, name: pipeline_schedule.ref, project: project) end it 'does not include ability to play pipeline schedule' do @@ -56,8 +53,7 @@ RSpec.describe Ci::PipelineSchedulePolicy, :models, :clean_gitlab_redis_cache do context 'when no one can create the tag but it is not a tag' do before do - create(:protected_tag, :no_one_can_create, - name: pipeline_schedule.ref, project: project) + create(:protected_tag, :no_one_can_create, name: pipeline_schedule.ref, project: project) end it 'includes ability to play pipeline schedule' do diff --git a/spec/policies/ci/runner_machine_policy_spec.rb b/spec/policies/ci/runner_machine_policy_spec.rb deleted file mode 100644 index 8b95f2d7526..00000000000 --- a/spec/policies/ci/runner_machine_policy_spec.rb +++ /dev/null @@ -1,176 +0,0 @@ -# frozen_string_literal: true - -require 'spec_helper' - -RSpec.describe Ci::RunnerMachinePolicy, feature_category: :runner_fleet do - let_it_be(:owner) { create(:user) } - - describe 'ability :read_runner_machine' do - let_it_be(:guest) { create(:user) } - let_it_be(:developer) { create(:user) } - let_it_be(:maintainer) { create(:user) } - - let_it_be_with_reload(:group) { create(:group, name: 'top-level', path: 'top-level') } - let_it_be_with_reload(:subgroup) { create(:group, name: 'subgroup', path: 'subgroup', parent: group) } - let_it_be_with_reload(:project) { create(:project, group: subgroup) } - - let_it_be(:instance_runner) { create(:ci_runner, 
:instance, :with_runner_machine) } - let_it_be(:group_runner) { create(:ci_runner, :group, :with_runner_machine, groups: [group]) } - let_it_be(:project_runner) { create(:ci_runner, :project, :with_runner_machine, projects: [project]) } - - let(:runner_machine) { runner.runner_machines.first } - - subject(:policy) { described_class.new(user, runner_machine) } - - before_all do - group.add_guest(guest) - group.add_developer(developer) - group.add_maintainer(maintainer) - group.add_owner(owner) - end - - shared_examples 'a policy allowing reading instance runner machine depending on runner sharing' do - context 'with instance runner' do - let(:runner) { instance_runner } - - it { expect_allowed :read_runner_machine } - - context 'with shared runners disabled on projects' do - before do - project.update!(shared_runners_enabled: false) - end - - it { expect_allowed :read_runner_machine } - end - - context 'with shared runners disabled for groups and projects' do - before do - group.update!(shared_runners_enabled: false) - project.update!(shared_runners_enabled: false) - end - - it { expect_disallowed :read_runner_machine } - end - end - end - - shared_examples 'a policy allowing reading group runner machine depending on runner sharing' do - context 'with group runner' do - let(:runner) { group_runner } - - it { expect_allowed :read_runner_machine } - - context 'with sharing of group runners disabled' do - before do - project.update!(group_runners_enabled: false) - end - - it { expect_disallowed :read_runner_machine } - end - end - end - - shared_examples 'does not allow reading runners machines on any scope' do - context 'with instance runner' do - let(:runner) { instance_runner } - - it { expect_disallowed :read_runner_machine } - - context 'with shared runners disabled for groups and projects' do - before do - group.update!(shared_runners_enabled: false) - project.update!(shared_runners_enabled: false) - end - - it { expect_disallowed :read_runner_machine } - end - 
end - - context 'with group runner' do - let(:runner) { group_runner } - - it { expect_disallowed :read_runner_machine } - - context 'with sharing of group runners disabled' do - before do - project.update!(group_runners_enabled: false) - end - - it { expect_disallowed :read_runner_machine } - end - end - - context 'with project runner' do - let(:runner) { project_runner } - - it { expect_disallowed :read_runner_machine } - end - end - - context 'without access' do - let_it_be(:user) { create(:user) } - - it_behaves_like 'does not allow reading runners machines on any scope' - end - - context 'with guest access' do - let(:user) { guest } - - it_behaves_like 'does not allow reading runners machines on any scope' - end - - context 'with developer access' do - let(:user) { developer } - - it_behaves_like 'a policy allowing reading instance runner machine depending on runner sharing' - - it_behaves_like 'a policy allowing reading group runner machine depending on runner sharing' - - context 'with project runner' do - let(:runner) { project_runner } - - it { expect_disallowed :read_runner_machine } - end - end - - context 'with maintainer access' do - let(:user) { maintainer } - - it_behaves_like 'a policy allowing reading instance runner machine depending on runner sharing' - - it_behaves_like 'a policy allowing reading group runner machine depending on runner sharing' - - context 'with project runner' do - let(:runner) { project_runner } - - it { expect_allowed :read_runner_machine } - end - end - - context 'with owner access' do - let(:user) { owner } - - it_behaves_like 'a policy allowing reading instance runner machine depending on runner sharing' - - context 'with group runner' do - let(:runner) { group_runner } - - it { expect_allowed :read_runner_machine } - - context 'with sharing of group runners disabled' do - before do - project.update!(group_runners_enabled: false) - end - - it { expect_allowed :read_runner_machine } - end - end - - context 'with project 
runner' do - let(:runner) { project_runner } - - it { expect_allowed :read_runner_machine } - end - end - end -end diff --git a/spec/policies/ci/runner_manager_policy_spec.rb b/spec/policies/ci/runner_manager_policy_spec.rb new file mode 100644 index 00000000000..d7004033ceb --- /dev/null +++ b/spec/policies/ci/runner_manager_policy_spec.rb @@ -0,0 +1,176 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe Ci::RunnerManagerPolicy, feature_category: :runner_fleet do + let_it_be(:owner) { create(:user) } + + describe 'ability :read_runner_manager' do + let_it_be(:guest) { create(:user) } + let_it_be(:developer) { create(:user) } + let_it_be(:maintainer) { create(:user) } + + let_it_be_with_reload(:group) { create(:group, name: 'top-level', path: 'top-level') } + let_it_be_with_reload(:subgroup) { create(:group, name: 'subgroup', path: 'subgroup', parent: group) } + let_it_be_with_reload(:project) { create(:project, group: subgroup) } + + let_it_be(:instance_runner) { create(:ci_runner, :instance, :with_runner_manager) } + let_it_be(:group_runner) { create(:ci_runner, :group, :with_runner_manager, groups: [group]) } + let_it_be(:project_runner) { create(:ci_runner, :project, :with_runner_manager, projects: [project]) } + + let(:runner_manager) { runner.runner_managers.first } + + subject(:policy) { described_class.new(user, runner_manager) } + + before_all do + group.add_guest(guest) + group.add_developer(developer) + group.add_maintainer(maintainer) + group.add_owner(owner) + end + + shared_examples 'a policy allowing reading instance runner manager depending on runner sharing' do + context 'with instance runner' do + let(:runner) { instance_runner } + + it { expect_allowed :read_runner_manager } + + context 'with shared runners disabled on projects' do + before do + project.update!(shared_runners_enabled: false) + end + + it { expect_allowed :read_runner_manager } + end + + context 'with shared runners disabled for groups and projects' do + 
before do + group.update!(shared_runners_enabled: false) + project.update!(shared_runners_enabled: false) + end + + it { expect_disallowed :read_runner_manager } + end + end + end + + shared_examples 'a policy allowing reading group runner manager depending on runner sharing' do + context 'with group runner' do + let(:runner) { group_runner } + + it { expect_allowed :read_runner_manager } + + context 'with sharing of group runners disabled' do + before do + project.update!(group_runners_enabled: false) + end + + it { expect_disallowed :read_runner_manager } + end + end + end + + shared_examples 'does not allow reading runners managers on any scope' do + context 'with instance runner' do + let(:runner) { instance_runner } + + it { expect_disallowed :read_runner_manager } + + context 'with shared runners disabled for groups and projects' do + before do + group.update!(shared_runners_enabled: false) + project.update!(shared_runners_enabled: false) + end + + it { expect_disallowed :read_runner_manager } + end + end + + context 'with group runner' do + let(:runner) { group_runner } + + it { expect_disallowed :read_runner_manager } + + context 'with sharing of group runners disabled' do + before do + project.update!(group_runners_enabled: false) + end + + it { expect_disallowed :read_runner_manager } + end + end + + context 'with project runner' do + let(:runner) { project_runner } + + it { expect_disallowed :read_runner_manager } + end + end + + context 'without access' do + let_it_be(:user) { create(:user) } + + it_behaves_like 'does not allow reading runners managers on any scope' + end + + context 'with guest access' do + let(:user) { guest } + + it_behaves_like 'does not allow reading runners managers on any scope' + end + + context 'with developer access' do + let(:user) { developer } + + it_behaves_like 'a policy allowing reading instance runner manager depending on runner sharing' + + it_behaves_like 'a policy allowing reading group runner manager depending on 
runner sharing' + + context 'with project runner' do + let(:runner) { project_runner } + + it { expect_disallowed :read_runner_manager } + end + end + + context 'with maintainer access' do + let(:user) { maintainer } + + it_behaves_like 'a policy allowing reading instance runner manager depending on runner sharing' + + it_behaves_like 'a policy allowing reading group runner manager depending on runner sharing' + + context 'with project runner' do + let(:runner) { project_runner } + + it { expect_allowed :read_runner_manager } + end + end + + context 'with owner access' do + let(:user) { owner } + + it_behaves_like 'a policy allowing reading instance runner manager depending on runner sharing' + + context 'with group runner' do + let(:runner) { group_runner } + + it { expect_allowed :read_runner_manager } + + context 'with sharing of group runners disabled' do + before do + project.update!(group_runners_enabled: false) + end + + it { expect_allowed :read_runner_manager } + end + end + + context 'with project runner' do + let(:runner) { project_runner } + + it { expect_allowed :read_runner_manager } + end + end + end +end diff --git a/spec/policies/environment_policy_spec.rb b/spec/policies/environment_policy_spec.rb index 701fc7ac9ae..f0957ff5cc9 100644 --- a/spec/policies/environment_policy_spec.rb +++ b/spec/policies/environment_policy_spec.rb @@ -50,8 +50,7 @@ RSpec.describe EnvironmentPolicy do with_them do before do project.add_member(user, access_level) unless access_level.nil? 
- create(:protected_branch, :no_one_can_push, - name: 'master', project: project) + create(:protected_branch, :no_one_can_push, name: 'master', project: project) end it { expect(policy).to be_disallowed :stop_environment } diff --git a/spec/policies/global_policy_spec.rb b/spec/policies/global_policy_spec.rb index 3d6d95bb122..0d91c288bbc 100644 --- a/spec/policies/global_policy_spec.rb +++ b/spec/policies/global_policy_spec.rb @@ -10,6 +10,7 @@ RSpec.describe GlobalPolicy, feature_category: :shared do let_it_be(:service_account) { create(:user, :service_account) } let_it_be(:migration_bot) { create(:user, :migration_bot) } let_it_be(:security_bot) { create(:user, :security_bot) } + let_it_be(:llm_bot) { create(:user, :llm_bot) } let_it_be_with_reload(:current_user) { create(:user) } let_it_be(:user) { create(:user) } @@ -238,6 +239,12 @@ RSpec.describe GlobalPolicy, feature_category: :shared do it { is_expected.to be_disallowed(:access_api) } end + context 'llm bot' do + let(:current_user) { llm_bot } + + it { is_expected.to be_disallowed(:access_api) } + end + context 'user blocked pending approval' do before do current_user.block_pending_approval @@ -617,6 +624,12 @@ RSpec.describe GlobalPolicy, feature_category: :shared do it { is_expected.to be_disallowed(:log_in) } end + context 'llm bot' do + let(:current_user) { llm_bot } + + it { is_expected.to be_disallowed(:log_in) } + end + context 'user blocked pending approval' do before do current_user.block_pending_approval @@ -626,47 +639,53 @@ RSpec.describe GlobalPolicy, feature_category: :shared do end end - describe 'create_instance_runners' do + describe 'create_instance_runner' do context 'admin' do let(:current_user) { admin_user } context 'when admin mode is enabled', :enable_admin_mode do - it { is_expected.to be_allowed(:create_instance_runners) } + it { is_expected.to be_allowed(:create_instance_runner) } end context 'when admin mode is disabled' do - it { is_expected.to 
be_disallowed(:create_instance_runners) } + it { is_expected.to be_disallowed(:create_instance_runner) } end end context 'with project_bot' do let(:current_user) { project_bot } - it { is_expected.to be_disallowed(:create_instance_runners) } + it { is_expected.to be_disallowed(:create_instance_runner) } end context 'with migration_bot' do let(:current_user) { migration_bot } - it { is_expected.to be_disallowed(:create_instance_runners) } + it { is_expected.to be_disallowed(:create_instance_runner) } end context 'with security_bot' do let(:current_user) { security_bot } + it { is_expected.to be_disallowed(:create_instance_runner) } + end + + context 'with llm_bot' do + let(:current_user) { llm_bot } + it { is_expected.to be_disallowed(:create_instance_runners) } end context 'with regular user' do let(:current_user) { user } - it { is_expected.to be_disallowed(:create_instance_runners) } + it { is_expected.to be_disallowed(:create_instance_runner) } end context 'with anonymous' do let(:current_user) { nil } - it { is_expected.to be_disallowed(:create_instance_runners) } + it { is_expected.to be_disallowed(:create_instance_runner) } end context 'create_runner_workflow_for_admin flag disabled' do @@ -678,42 +697,48 @@ RSpec.describe GlobalPolicy, feature_category: :shared do let(:current_user) { admin_user } context 'when admin mode is enabled', :enable_admin_mode do - it { is_expected.to be_disallowed(:create_instance_runners) } + it { is_expected.to be_disallowed(:create_instance_runner) } end context 'when admin mode is disabled' do - it { is_expected.to be_disallowed(:create_instance_runners) } + it { is_expected.to be_disallowed(:create_instance_runner) } end end context 'with project_bot' do let(:current_user) { project_bot } - it { is_expected.to be_disallowed(:create_instance_runners) } + it { is_expected.to be_disallowed(:create_instance_runner) } end context 'with migration_bot' do let(:current_user) { migration_bot } - it { is_expected.to 
be_disallowed(:create_instance_runners) } + it { is_expected.to be_disallowed(:create_instance_runner) } end context 'with security_bot' do let(:current_user) { security_bot } + it { is_expected.to be_disallowed(:create_instance_runner) } + end + + context 'with llm_bot' do + let(:current_user) { llm_bot } + it { is_expected.to be_disallowed(:create_instance_runners) } end context 'with regular user' do let(:current_user) { user } - it { is_expected.to be_disallowed(:create_instance_runners) } + it { is_expected.to be_disallowed(:create_instance_runner) } end context 'with anonymous' do let(:current_user) { nil } - it { is_expected.to be_disallowed(:create_instance_runners) } + it { is_expected.to be_disallowed(:create_instance_runner) } end end end diff --git a/spec/policies/group_policy_spec.rb b/spec/policies/group_policy_spec.rb index 003ca2512dc..935b9124534 100644 --- a/spec/policies/group_policy_spec.rb +++ b/spec/policies/group_policy_spec.rb @@ -670,6 +670,124 @@ RSpec.describe GroupPolicy, feature_category: :system_access do end end + context 'import_projects' do + before do + group.update!(project_creation_level: project_creation_level) + end + + context 'when group has no project creation level set' do + let(:project_creation_level) { nil } + + context 'reporter' do + let(:current_user) { reporter } + + it { is_expected.to be_disallowed(:import_projects) } + end + + context 'developer' do + let(:current_user) { developer } + + it { is_expected.to be_disallowed(:import_projects) } + end + + context 'maintainer' do + let(:current_user) { maintainer } + + it { is_expected.to be_allowed(:import_projects) } + end + + context 'owner' do + let(:current_user) { owner } + + it { is_expected.to be_allowed(:import_projects) } + end + end + + context 'when group has project creation level set to no one' do + let(:project_creation_level) { ::Gitlab::Access::NO_ONE_PROJECT_ACCESS } + + context 'reporter' do + let(:current_user) { reporter } + + it { is_expected.to 
be_disallowed(:import_projects) } + end + + context 'developer' do + let(:current_user) { developer } + + it { is_expected.to be_disallowed(:import_projects) } + end + + context 'maintainer' do + let(:current_user) { maintainer } + + it { is_expected.to be_disallowed(:import_projects) } + end + + context 'owner' do + let(:current_user) { owner } + + it { is_expected.to be_disallowed(:import_projects) } + end + end + + context 'when group has project creation level set to maintainer only' do + let(:project_creation_level) { ::Gitlab::Access::MAINTAINER_PROJECT_ACCESS } + + context 'reporter' do + let(:current_user) { reporter } + + it { is_expected.to be_disallowed(:import_projects) } + end + + context 'developer' do + let(:current_user) { developer } + + it { is_expected.to be_disallowed(:import_projects) } + end + + context 'maintainer' do + let(:current_user) { maintainer } + + it { is_expected.to be_allowed(:import_projects) } + end + + context 'owner' do + let(:current_user) { owner } + + it { is_expected.to be_allowed(:import_projects) } + end + end + + context 'when group has project creation level set to developers + maintainer' do + let(:project_creation_level) { ::Gitlab::Access::DEVELOPER_MAINTAINER_PROJECT_ACCESS } + + context 'reporter' do + let(:current_user) { reporter } + + it { is_expected.to be_disallowed(:import_projects) } + end + + context 'developer' do + let(:current_user) { developer } + + it { is_expected.to be_disallowed(:import_projects) } + end + + context 'maintainer' do + let(:current_user) { maintainer } + + it { is_expected.to be_allowed(:import_projects) } + end + + context 'owner' do + let(:current_user) { owner } + + it { is_expected.to be_allowed(:import_projects) } + end + end + end + context 'create_subgroup' do context 'when group has subgroup creation level set to owner' do before do @@ -735,10 +853,7 @@ RSpec.describe GroupPolicy, feature_category: :system_access do it_behaves_like 'clusterable policies' do let(:clusterable) 
{ create(:group, :crm_enabled) } let(:cluster) do - create(:cluster, - :provided_by_gcp, - :group, - groups: [clusterable]) + create(:cluster, :provided_by_gcp, :group, groups: [clusterable]) end end @@ -1275,7 +1390,7 @@ RSpec.describe GroupPolicy, feature_category: :system_access do end end - describe 'create_group_runners' do + describe 'create_runner' do shared_examples 'disallowed when group runner registration disabled' do context 'with group runner registration disabled' do before do @@ -1286,13 +1401,13 @@ RSpec.describe GroupPolicy, feature_category: :system_access do context 'with specific group runner registration enabled' do let(:runner_registration_enabled) { true } - it { is_expected.to be_disallowed(:create_group_runners) } + it { is_expected.to be_disallowed(:create_runner) } end context 'with specific group runner registration disabled' do let(:runner_registration_enabled) { false } - it { is_expected.to be_disallowed(:create_group_runners) } + it { is_expected.to be_disallowed(:create_runner) } end end end @@ -1306,14 +1421,14 @@ RSpec.describe GroupPolicy, feature_category: :system_access do let(:current_user) { admin } context 'when admin mode is enabled', :enable_admin_mode do - it { is_expected.to be_allowed(:create_group_runners) } + it { is_expected.to be_allowed(:create_runner) } context 'with specific group runner registration disabled' do before do group.runner_registration_enabled = false end - it { is_expected.to be_allowed(:create_group_runners) } + it { is_expected.to be_allowed(:create_runner) } end context 'with group runner registration disabled' do @@ -1325,26 +1440,26 @@ RSpec.describe GroupPolicy, feature_category: :system_access do context 'with specific group runner registration enabled' do let(:runner_registration_enabled) { true } - it { is_expected.to be_allowed(:create_group_runners) } + it { is_expected.to be_allowed(:create_runner) } end context 'with specific group runner registration disabled' do 
let(:runner_registration_enabled) { false } - it { is_expected.to be_allowed(:create_group_runners) } + it { is_expected.to be_allowed(:create_runner) } end end end context 'when admin mode is disabled' do - it { is_expected.to be_disallowed(:create_group_runners) } + it { is_expected.to be_disallowed(:create_runner) } end end context 'with owner' do let(:current_user) { owner } - it { is_expected.to be_allowed(:create_group_runners) } + it { is_expected.to be_allowed(:create_runner) } it_behaves_like 'disallowed when group runner registration disabled' end @@ -1352,31 +1467,31 @@ RSpec.describe GroupPolicy, feature_category: :system_access do context 'with maintainer' do let(:current_user) { maintainer } - it { is_expected.to be_disallowed(:create_group_runners) } + it { is_expected.to be_disallowed(:create_runner) } end context 'with reporter' do let(:current_user) { reporter } - it { is_expected.to be_disallowed(:create_group_runners) } + it { is_expected.to be_disallowed(:create_runner) } end context 'with guest' do let(:current_user) { guest } - it { is_expected.to be_disallowed(:create_group_runners) } + it { is_expected.to be_disallowed(:create_runner) } end context 'with developer' do let(:current_user) { developer } - it { is_expected.to be_disallowed(:create_group_runners) } + it { is_expected.to be_disallowed(:create_runner) } end context 'with anonymous' do let(:current_user) { nil } - it { is_expected.to be_disallowed(:create_group_runners) } + it { is_expected.to be_disallowed(:create_runner) } end end @@ -1391,28 +1506,28 @@ RSpec.describe GroupPolicy, feature_category: :system_access do let(:current_user) { admin } context 'when admin mode is enabled', :enable_admin_mode do - it { is_expected.to be_disallowed(:create_group_runners) } + it { is_expected.to be_disallowed(:create_runner) } context 'with specific group runner registration disabled' do before do group.runner_registration_enabled = false end - it { is_expected.to 
be_disallowed(:create_group_runners) } + it { is_expected.to be_disallowed(:create_runner) } end it_behaves_like 'disallowed when group runner registration disabled' end context 'when admin mode is disabled' do - it { is_expected.to be_disallowed(:create_group_runners) } + it { is_expected.to be_disallowed(:create_runner) } end end context 'with owner' do let(:current_user) { owner } - it { is_expected.to be_disallowed(:create_group_runners) } + it { is_expected.to be_disallowed(:create_runner) } it_behaves_like 'disallowed when group runner registration disabled' end @@ -1420,31 +1535,31 @@ RSpec.describe GroupPolicy, feature_category: :system_access do context 'with maintainer' do let(:current_user) { maintainer } - it { is_expected.to be_disallowed(:create_group_runners) } + it { is_expected.to be_disallowed(:create_runner) } end context 'with reporter' do let(:current_user) { reporter } - it { is_expected.to be_disallowed(:create_group_runners) } + it { is_expected.to be_disallowed(:create_runner) } end context 'with guest' do let(:current_user) { guest } - it { is_expected.to be_disallowed(:create_group_runners) } + it { is_expected.to be_disallowed(:create_runner) } end context 'with developer' do let(:current_user) { developer } - it { is_expected.to be_disallowed(:create_group_runners) } + it { is_expected.to be_disallowed(:create_runner) } end context 'with anonymous' do let(:current_user) { nil } - it { is_expected.to be_disallowed(:create_group_runners) } + it { is_expected.to be_disallowed(:create_runner) } end end end diff --git a/spec/policies/issue_policy_spec.rb b/spec/policies/issue_policy_spec.rb index 17558787966..1142d6f80fd 100644 --- a/spec/policies/issue_policy_spec.rb +++ b/spec/policies/issue_policy_spec.rb @@ -27,8 +27,8 @@ RSpec.describe IssuePolicy, feature_category: :team_planning do shared_examples 'support bot with service desk enabled' do before do - allow(::Gitlab::IncomingEmail).to receive(:enabled?) 
{ true } - allow(::Gitlab::IncomingEmail).to receive(:supports_wildcard?) { true } + allow(::Gitlab::Email::IncomingEmail).to receive(:enabled?) { true } + allow(::Gitlab::Email::IncomingEmail).to receive(:supports_wildcard?) { true } project.update!(service_desk_enabled: true) end diff --git a/spec/policies/project_policy_spec.rb b/spec/policies/project_policy_spec.rb index 50f425f4efe..ae2a11bdbf0 100644 --- a/spec/policies/project_policy_spec.rb +++ b/spec/policies/project_policy_spec.rb @@ -2810,6 +2810,14 @@ RSpec.describe ProjectPolicy, feature_category: :system_access do it { is_expected.to be_allowed(:register_project_runners) } end + + context 'with specific project runner registration disabled' do + before do + project.update!(runner_registration_enabled: false) + end + + it { is_expected.to be_allowed(:register_project_runners) } + end end context 'when admin mode is disabled' do @@ -2829,6 +2837,14 @@ RSpec.describe ProjectPolicy, feature_category: :system_access do it { is_expected.to be_disallowed(:register_project_runners) } end + + context 'with specific project runner registration disabled' do + before do + project.update!(runner_registration_enabled: false) + end + + it { is_expected.to be_disallowed(:register_project_runners) } + end end context 'with maintainer' do @@ -2862,7 +2878,7 @@ RSpec.describe ProjectPolicy, feature_category: :system_access do end end - describe 'create_project_runners' do + describe 'create_runner' do context 'create_runner_workflow_for_namespace flag enabled' do before do stub_feature_flags(create_runner_workflow_for_namespace: [project.namespace]) @@ -2872,64 +2888,80 @@ RSpec.describe ProjectPolicy, feature_category: :system_access do let(:current_user) { admin } context 'when admin mode is enabled', :enable_admin_mode do - it { is_expected.to be_allowed(:create_project_runners) } + it { is_expected.to be_allowed(:create_runner) } context 'with project runner registration disabled' do before do 
stub_application_setting(valid_runner_registrars: ['group']) end - it { is_expected.to be_allowed(:create_project_runners) } + it { is_expected.to be_allowed(:create_runner) } + end + + context 'with specific project runner registration disabled' do + before do + project.update!(runner_registration_enabled: false) + end + + it { is_expected.to be_allowed(:create_runner) } end end context 'when admin mode is disabled' do - it { is_expected.to be_disallowed(:create_project_runners) } + it { is_expected.to be_disallowed(:create_runner) } end end context 'with owner' do let(:current_user) { owner } - it { is_expected.to be_allowed(:create_project_runners) } + it { is_expected.to be_allowed(:create_runner) } context 'with project runner registration disabled' do before do stub_application_setting(valid_runner_registrars: ['group']) end - it { is_expected.to be_disallowed(:create_project_runners) } + it { is_expected.to be_disallowed(:create_runner) } + end + + context 'with specific project runner registration disabled' do + before do + project.update!(runner_registration_enabled: false) + end + + it { is_expected.to be_disallowed(:create_runner) } end end context 'with maintainer' do let(:current_user) { maintainer } - it { is_expected.to be_allowed(:create_project_runners) } + it { is_expected.to be_allowed(:create_runner) } end context 'with reporter' do let(:current_user) { reporter } - it { is_expected.to be_disallowed(:create_project_runners) } + it { is_expected.to be_disallowed(:create_runner) } end context 'with guest' do let(:current_user) { guest } - it { is_expected.to be_disallowed(:create_project_runners) } + it { is_expected.to be_disallowed(:create_runner) } end context 'with developer' do let(:current_user) { developer } - it { is_expected.to be_disallowed(:create_project_runners) } + it { is_expected.to be_disallowed(:create_runner) } end context 'with anonymous' do let(:current_user) { nil } - it { is_expected.to be_disallowed(:create_project_runners) 
} + it { is_expected.to be_disallowed(:create_runner) } end end @@ -2942,68 +2974,162 @@ RSpec.describe ProjectPolicy, feature_category: :system_access do let(:current_user) { admin } context 'when admin mode is enabled', :enable_admin_mode do - it { is_expected.to be_disallowed(:create_project_runners) } + it { is_expected.to be_disallowed(:create_runner) } context 'with project runner registration disabled' do before do stub_application_setting(valid_runner_registrars: ['group']) end - it { is_expected.to be_disallowed(:create_project_runners) } + it { is_expected.to be_disallowed(:create_runner) } + end + + context 'with specific project runner registration disabled' do + before do + project.update!(runner_registration_enabled: false) + end + + it { is_expected.to be_disallowed(:create_runner) } end end context 'when admin mode is disabled' do - it { is_expected.to be_disallowed(:create_project_runners) } + it { is_expected.to be_disallowed(:create_runner) } end end context 'with owner' do let(:current_user) { owner } - it { is_expected.to be_disallowed(:create_project_runners) } + it { is_expected.to be_disallowed(:create_runner) } context 'with project runner registration disabled' do before do stub_application_setting(valid_runner_registrars: ['group']) end - it { is_expected.to be_disallowed(:create_project_runners) } + it { is_expected.to be_disallowed(:create_runner) } + end + + context 'with specific project runner registration disabled' do + before do + project.update!(runner_registration_enabled: false) + end + + it { is_expected.to be_disallowed(:create_runner) } end end context 'with maintainer' do let(:current_user) { maintainer } - it { is_expected.to be_disallowed(:create_project_runners) } + it { is_expected.to be_disallowed(:create_runner) } end context 'with reporter' do let(:current_user) { reporter } - it { is_expected.to be_disallowed(:create_project_runners) } + it { is_expected.to be_disallowed(:create_runner) } end context 'with guest' do 
let(:current_user) { guest } - it { is_expected.to be_disallowed(:create_project_runners) } + it { is_expected.to be_disallowed(:create_runner) } end context 'with developer' do let(:current_user) { developer } - it { is_expected.to be_disallowed(:create_project_runners) } + it { is_expected.to be_disallowed(:create_runner) } end context 'with anonymous' do let(:current_user) { nil } - it { is_expected.to be_disallowed(:create_project_runners) } + it { is_expected.to be_disallowed(:create_runner) } end end end + describe 'admin_project_runners' do + context 'admin' do + let(:current_user) { admin } + + context 'when admin mode is enabled', :enable_admin_mode do + it { is_expected.to be_allowed(:create_runner) } + end + + context 'when admin mode is disabled' do + it { is_expected.to be_disallowed(:create_runner) } + end + end + + context 'with owner' do + let(:current_user) { owner } + + it { is_expected.to be_allowed(:create_runner) } + end + + context 'with maintainer' do + let(:current_user) { maintainer } + + it { is_expected.to be_allowed(:create_runner) } + end + + context 'with reporter' do + let(:current_user) { reporter } + + it { is_expected.to be_disallowed(:create_runner) } + end + + context 'with guest' do + let(:current_user) { guest } + + it { is_expected.to be_disallowed(:create_runner) } + end + + context 'with developer' do + let(:current_user) { developer } + + it { is_expected.to be_disallowed(:create_runner) } + end + + context 'with anonymous' do + let(:current_user) { nil } + + it { is_expected.to be_disallowed(:create_runner) } + end + end + + describe 'read_project_runners' do + subject(:policy) { described_class.new(user, project) } + + context 'with maintainer' do + let(:user) { maintainer } + + it { is_expected.to be_allowed(:read_project_runners) } + end + + context 'with admin', :enable_admin_mode do + let(:user) { admin } + + it { is_expected.to be_allowed(:read_project_runners) } + end + + context 'with reporter' do + let(:user) { 
reporter } + + it { is_expected.to be_disallowed(:read_project_runners) } + end + + context 'when the user is not part of the project' do + let(:user) { non_member } + + it { is_expected.to be_disallowed(:read_project_runners) } + end + end + describe 'update_sentry_issue' do using RSpec::Parameterized::TableSyntax @@ -3104,26 +3230,6 @@ RSpec.describe ProjectPolicy, feature_category: :system_access do end end - describe 'add_catalog_resource' do - using RSpec::Parameterized::TableSyntax - - let(:current_user) { public_send(role) } - - where(:role, :allowed) do - :owner | true - :maintainer | false - :developer | false - :reporter | false - :guest | false - end - - with_them do - it do - expect(subject.can?(:add_catalog_resource)).to be(allowed) - end - end - end - describe 'read_code' do let(:current_user) { create(:user) } @@ -3145,6 +3251,18 @@ RSpec.describe ProjectPolicy, feature_category: :system_access do end end + describe 'read_namespace_catalog' do + let(:current_user) { owner } + + specify { is_expected.to be_disallowed(:read_namespace_catalog) } + end + + describe 'add_catalog_resource' do + let(:current_user) { owner } + + specify { is_expected.to be_disallowed(:read_namespace_catalog) } + end + private def project_subject(project_type) diff --git a/spec/presenters/issue_email_participant_presenter_spec.rb b/spec/presenters/issue_email_participant_presenter_spec.rb index c270fae3058..993cc9c235b 100644 --- a/spec/presenters/issue_email_participant_presenter_spec.rb +++ b/spec/presenters/issue_email_participant_presenter_spec.rb @@ -3,54 +3,49 @@ require 'spec_helper' RSpec.describe IssueEmailParticipantPresenter, feature_category: :service_desk do - # See https://gitlab.com/gitlab-org/gitlab/-/issues/389247 - # for details around build_stubbed for access level - let_it_be(:non_member) { create(:user) } # rubocop:todo RSpec/FactoryBot/AvoidCreate - let_it_be(:guest) { create(:user) } # rubocop:todo RSpec/FactoryBot/AvoidCreate - let_it_be(:reporter) { 
create(:user) } # rubocop:todo RSpec/FactoryBot/AvoidCreate - let_it_be(:developer) { create(:user) } # rubocop:todo RSpec/FactoryBot/AvoidCreate - let_it_be(:group) { create(:group) } # rubocop:todo RSpec/FactoryBot/AvoidCreate - let_it_be(:project) { create(:project, group: group) } # rubocop:todo RSpec/FactoryBot/AvoidCreate - let_it_be(:issue) { build_stubbed(:issue, project: project) } - let_it_be(:participant) { build_stubbed(:issue_email_participant, issue: issue, email: 'any@email.com') } - - let(:user) { nil } - let(:presenter) { described_class.new(participant, current_user: user) } + let(:user) { build_stubbed(:user) } + let(:project) { build_stubbed(:project) } + let(:issue) { build_stubbed(:issue, project: project) } + let(:participant) { build_stubbed(:issue_email_participant, issue: issue, email: 'any@example.com') } let(:obfuscated_email) { 'an*****@e*****.c**' } - let(:email) { 'any@email.com' } + let(:email) { 'any@example.com' } - before_all do - group.add_guest(guest) - group.add_reporter(reporter) - group.add_developer(developer) - end + subject(:presenter) { described_class.new(participant, current_user: user) } describe '#email' do subject { presenter.email } - it { is_expected.to eq(obfuscated_email) } + context 'when anonymous' do + let(:user) { nil } + + it { is_expected.to eq(obfuscated_email) } + end context 'with signed in user' do + before do + stub_member_access_level(project, access_level => user) if access_level + end + context 'when user has no role in project' do - let(:user) { non_member } + let(:access_level) { nil } it { is_expected.to eq(obfuscated_email) } end context 'when user has guest role in project' do - let(:user) { guest } + let(:access_level) { :guest } it { is_expected.to eq(obfuscated_email) } end context 'when user has reporter role in project' do - let(:user) { reporter } + let(:access_level) { :reporter } it { is_expected.to eq(email) } end context 'when user has developer role in project' do - let(:user) { 
developer } + let(:access_level) { :developer } it { is_expected.to eq(email) } end diff --git a/spec/presenters/ml/candidates_csv_presenter_spec.rb b/spec/presenters/ml/candidates_csv_presenter_spec.rb new file mode 100644 index 00000000000..fea00565859 --- /dev/null +++ b/spec/presenters/ml/candidates_csv_presenter_spec.rb @@ -0,0 +1,84 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe ::Ml::CandidatesCsvPresenter, feature_category: :mlops do + # rubocop:disable RSpec/FactoryBot/AvoidCreate + let_it_be(:project) { create(:project, :repository) } + let_it_be(:experiment) { create(:ml_experiments, user_id: project.creator, project: project) } + + let_it_be(:candidate0) do + create(:ml_candidates, experiment: experiment, user: project.creator, + project: project, start_time: 1234, end_time: 5678).tap do |c| + c.params.create!([{ name: 'param1', value: 'p1' }, { name: 'param2', value: 'p2' }]) + c.metrics.create!( + [{ name: 'metric1', value: 0.1 }, { name: 'metric2', value: 0.2 }, { name: 'metric3', value: 0.3 }] + ) + end + end + + let_it_be(:candidate1) do + create(:ml_candidates, experiment: experiment, user: project.creator, name: 'candidate1', + project: project, start_time: 1111, end_time: 2222).tap do |c| + c.params.create([{ name: 'param2', value: 'p3' }, { name: 'param3', value: 'p4' }]) + c.metrics.create!(name: 'metric3', value: 0.4) + end + end + # rubocop:enable RSpec/FactoryBot/AvoidCreate + + describe '.present' do + subject { described_class.new(::Ml::Candidate.where(id: [candidate0.id, candidate1.id])).present } + + it 'generates header row correctly' do + expected_header = %w[project_id experiment_iid candidate_iid name external_id start_time end_time param1 param2 + param3 metric1 metric2 metric3].join(',') + header = subject.split("\n")[0] + + expect(header).to eq(expected_header) + end + + it 'generates the first row correctly' do + expected_row = [ + candidate0.project_id, + 1, # experiment.iid + 1, # 
candidate0.internal_id + '', # candidate0 has no name, column is empty + candidate0.eid, + candidate0.start_time, + candidate0.end_time, + candidate0.params[0].value, + candidate0.params[1].value, + '', # candidate0 has no param3, column is empty + candidate0.metrics[0].value, + candidate0.metrics[1].value, + candidate0.metrics[2].value + ].map(&:to_s) + + row = subject.split("\n")[1].split(",") + + expect(row).to match_array(expected_row) + end + + it 'generates the second row correctly' do + expected_row = [ + candidate1.project_id, + 1, # experiment.iid + 2, # candidate1.internal_id + 'candidate1', + candidate1.eid, + candidate1.start_time, + candidate1.end_time, + '', # candidate1 has no param1, column is empty + candidate1.params[0].value, + candidate1.params[1].value, + '', # candidate1 has no metric1, column is empty + '', # candidate1 has no metric2, column is empty + candidate1.metrics[0].value + ].map(&:to_s) + + row = subject.split("\n")[2].split(",") + + expect(row).to match_array(expected_row) + end + end +end diff --git a/spec/presenters/packages/npm/package_presenter_spec.rb b/spec/presenters/packages/npm/package_presenter_spec.rb index 4fa469c7cd2..fe4773a9cad 100644 --- a/spec/presenters/packages/npm/package_presenter_spec.rb +++ b/spec/presenters/packages/npm/package_presenter_spec.rb @@ -2,157 +2,32 @@ require 'spec_helper' -RSpec.describe ::Packages::Npm::PackagePresenter do - using RSpec::Parameterized::TableSyntax - - let_it_be(:project) { create(:project) } - let_it_be(:package_name) { "@#{project.root_namespace.path}/test" } - let_it_be(:package1) { create(:npm_package, version: '2.0.4', project: project, name: package_name) } - let_it_be(:package2) { create(:npm_package, version: '2.0.6', project: project, name: package_name) } - let_it_be(:latest_package) { create(:npm_package, version: '2.0.11', project: project, name: package_name) } - - let(:packages) { project.packages.npm.with_name(package_name).last_of_each_version } - 
let(:presenter) { described_class.new(package_name, packages) } - - describe '#versions' do - let_it_be('package_json') do - { - 'name': package_name, - 'version': '2.0.4', - 'deprecated': 'warning!', - 'bin': './cli.js', - 'directories': ['lib'], - 'engines': { 'npm': '^7.5.6' }, - '_hasShrinkwrap': false, - 'dist': { - 'tarball': 'http://localhost/tarball.tgz', - 'shasum': '1234567890' - }, - 'custom_field': 'foo_bar' - } - end - - let(:presenter) { described_class.new(package_name, packages) } - - subject { presenter.versions } - - where(:has_dependencies, :has_metadatum) do - true | true - false | true - true | false - false | false - end - - with_them do - if params[:has_dependencies] - ::Packages::DependencyLink.dependency_types.keys.each do |dependency_type| - let_it_be("package_dependency_link_for_#{dependency_type}") { create(:packages_dependency_link, package: package1, dependency_type: dependency_type) } - end - end - - if params[:has_metadatum] - let_it_be('package_metadatadum') { create(:npm_metadatum, package: package1, package_json: package_json) } - end - - it { is_expected.to be_a(Hash) } - it { expect(subject[package1.version].with_indifferent_access).to match_schema('public_api/v4/packages/npm_package_version') } - it { expect(subject[package2.version].with_indifferent_access).to match_schema('public_api/v4/packages/npm_package_version') } - it { expect(subject[package1.version]['custom_field']).to be_blank } - - context 'dependencies' do - ::Packages::DependencyLink.dependency_types.keys.each do |dependency_type| - if params[:has_dependencies] - it { expect(subject.dig(package1.version, dependency_type.to_s)).to be_any } - else - it { expect(subject.dig(package1.version, dependency_type)).to be nil } - end - - it { expect(subject.dig(package2.version, dependency_type)).to be nil } - end - end - - context 'metadatum' do - ::Packages::Npm::PackagePresenter::PACKAGE_JSON_ALLOWED_FIELDS.each do |metadata_field| - if params[:has_metadatum] - it { 
expect(subject.dig(package1.version, metadata_field)).not_to be nil } - else - it { expect(subject.dig(package1.version, metadata_field)).to be nil } - end - - it { expect(subject.dig(package2.version, metadata_field)).to be nil } - end - end +RSpec.describe Packages::Npm::PackagePresenter, feature_category: :package_registry do + let_it_be(:metadata) do + { + name: 'foo', + versions: { '1.0.0' => { 'dist' => { 'tarball' => 'http://localhost/tarball.tgz' } } }, + dist_tags: { 'latest' => '1.0.0' } + } + end - it 'avoids N+1 database queries' do - check_n_plus_one(:versions) do - create_list(:npm_package, 5, project: project, name: package_name).each do |npm_package| - next unless has_dependencies + subject { described_class.new(metadata) } - ::Packages::DependencyLink.dependency_types.keys.each do |dependency_type| - create(:packages_dependency_link, package: npm_package, dependency_type: dependency_type) - end - end - end - end + describe '#name' do + it 'returns the name' do + expect(subject.name).to eq('foo') end + end - context 'with package files pending destruction' do - let_it_be(:package_file_pending_destruction) { create(:package_file, :pending_destruction, package: package2, file_sha1: 'pending_destruction_sha1') } - - let(:shasums) { subject.values.map { |v| v.dig(:dist, :shasum) } } - - it 'does not return them' do - expect(shasums).not_to include(package_file_pending_destruction.file_sha1) - end + describe '#versions' do + it 'returns the versions' do + expect(subject.versions).to eq({ '1.0.0' => { 'dist' => { 'tarball' => 'http://localhost/tarball.tgz' } } }) end end describe '#dist_tags' do - subject { presenter.dist_tags } - - context 'for packages without tags' do - it { is_expected.to be_a(Hash) } - it { expect(subject["latest"]).to eq(latest_package.version) } - - it 'avoids N+1 database queries' do - check_n_plus_one(:dist_tags) do - create_list(:npm_package, 5, project: project, name: package_name) - end - end - end - - context 'for packages 
with tags' do - let_it_be(:package_tag1) { create(:packages_tag, package: package1, name: 'release_a') } - let_it_be(:package_tag2) { create(:packages_tag, package: package1, name: 'test_release') } - let_it_be(:package_tag3) { create(:packages_tag, package: package2, name: 'release_b') } - let_it_be(:package_tag4) { create(:packages_tag, package: latest_package, name: 'release_c') } - let_it_be(:package_tag5) { create(:packages_tag, package: latest_package, name: 'latest') } - - it { is_expected.to be_a(Hash) } - it { expect(subject[package_tag1.name]).to eq(package1.version) } - it { expect(subject[package_tag2.name]).to eq(package1.version) } - it { expect(subject[package_tag3.name]).to eq(package2.version) } - it { expect(subject[package_tag4.name]).to eq(latest_package.version) } - it { expect(subject[package_tag5.name]).to eq(latest_package.version) } - - it 'avoids N+1 database queries' do - check_n_plus_one(:dist_tags) do - create_list(:npm_package, 5, project: project, name: package_name).each_with_index do |npm_package, index| - create(:packages_tag, package: npm_package, name: "tag_#{index}") - end - end - end + it 'returns the dist_tags' do + expect(subject.dist_tags).to eq({ 'latest' => '1.0.0' }) end end - - def check_n_plus_one(field) - pkgs = project.packages.npm.with_name(package_name).last_of_each_version.preload_files - control = ActiveRecord::QueryRecorder.new { described_class.new(package_name, pkgs).public_send(field) } - - yield - - pkgs = project.packages.npm.with_name(package_name).last_of_each_version.preload_files - - expect { described_class.new(package_name, pkgs).public_send(field) }.not_to exceed_query_limit(control) - end end diff --git a/spec/presenters/project_clusterable_presenter_spec.rb b/spec/presenters/project_clusterable_presenter_spec.rb index dfe4a191ae5..4727bce02a5 100644 --- a/spec/presenters/project_clusterable_presenter_spec.rb +++ b/spec/presenters/project_clusterable_presenter_spec.rb @@ -2,15 +2,15 @@ require 
'spec_helper' -RSpec.describe ProjectClusterablePresenter do +RSpec.describe ProjectClusterablePresenter, feature_category: :environment_management do include Gitlab::Routing.url_helpers let(:presenter) { described_class.new(project) } - let(:project) { create(:project) } - let(:cluster) { create(:cluster, :provided_by_gcp, projects: [project]) } + let(:project) { build_stubbed(:project) } + let(:cluster) { build_stubbed(:cluster, :provided_by_gcp, projects: [project]) } describe '#can_create_cluster?' do - let(:user) { create(:user) } + let(:user) { build_stubbed(:user) } subject { presenter.can_create_cluster? } @@ -20,7 +20,7 @@ RSpec.describe ProjectClusterablePresenter do context 'when user can create' do before do - project.add_maintainer(user) + stub_member_access_level(project, maintainer: user) end it { is_expected.to be_truthy } diff --git a/spec/requests/abuse_reports_controller_spec.rb b/spec/requests/abuse_reports_controller_spec.rb index 934f123e45b..4b81394aea3 100644 --- a/spec/requests/abuse_reports_controller_spec.rb +++ b/spec/requests/abuse_reports_controller_spec.rb @@ -11,6 +11,7 @@ RSpec.describe AbuseReportsController, feature_category: :insider_threat do attributes_for(:abuse_report) do |hash| hash[:user_id] = user.id hash[:category] = abuse_category + hash[:screenshot] = fixture_file_upload('spec/fixtures/dk.png') end end diff --git a/spec/requests/admin/background_migrations_controller_spec.rb b/spec/requests/admin/background_migrations_controller_spec.rb index 88d81766e67..2681ece7d8a 100644 --- a/spec/requests/admin/background_migrations_controller_spec.rb +++ b/spec/requests/admin/background_migrations_controller_spec.rb @@ -67,6 +67,17 @@ RSpec.describe Admin::BackgroundMigrationsController, :enable_admin_mode, featur expect(assigns(:migrations)).to match_array([main_database_migration]) end + + context 'for finalizing tab' do + let!(:finalizing_migration) { create(:batched_background_migration, :finalizing) } + + it 'returns only 
finalizing migration' do + get admin_background_migrations_path(tab: 'finalizing') + + expect(Gitlab::Database::BackgroundMigration::BatchedMigration.queued).not_to be_empty + expect(assigns(:migrations)).to match_array(Array.wrap(finalizing_migration)) + end + end end context 'when multiple database is enabled', :add_ci_connection do diff --git a/spec/requests/admin/projects_controller_spec.rb b/spec/requests/admin/projects_controller_spec.rb index 5ff49a30ed8..2462152b7c2 100644 --- a/spec/requests/admin/projects_controller_spec.rb +++ b/spec/requests/admin/projects_controller_spec.rb @@ -54,5 +54,33 @@ RSpec.describe Admin::ProjectsController, :enable_admin_mode, feature_category: expect { subject }.not_to change { project.reload.name } end end + + context 'when disabling runner registration' do + let(:project_params) { { runner_registration_enabled: false } } + + it 'changes runner registration' do + expect { subject }.to change { project.reload.runner_registration_enabled }.to(false) + end + + it 'resets the registration token' do + expect { subject }.to change { project.reload.runners_token } + end + end + + context 'when enabling runner registration' do + before do + project.update!(runner_registration_enabled: false) + end + + let(:project_params) { { runner_registration_enabled: true } } + + it 'changes runner registration' do + expect { subject }.to change { project.reload.runner_registration_enabled }.to(true) + end + + it 'does not reset the registration token' do + expect { subject }.not_to change { project.reload.runners_token } + end + end end end diff --git a/spec/requests/admin/users_controller_spec.rb b/spec/requests/admin/users_controller_spec.rb new file mode 100644 index 00000000000..5344a2c2bb7 --- /dev/null +++ b/spec/requests/admin/users_controller_spec.rb @@ -0,0 +1,42 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe Admin::UsersController, :enable_admin_mode, feature_category: :user_management do + 
let_it_be(:admin) { create(:admin) } + let_it_be(:user) { create(:user) } + + describe 'PUT #block' do + context 'when request format is :json' do + before do + sign_in(admin) + end + + subject(:request) { put block_admin_user_path(user, format: :json) } + + context 'when user was blocked' do + it 'returns 200 and json data with notice' do + request + + expect(response).to have_gitlab_http_status(:ok) + expect(json_response).to include('notice' => 'Successfully blocked') + end + end + + context 'when user was not blocked' do + before do + allow_next_instance_of(::Users::BlockService) do |service| + allow(service).to receive(:execute).and_return({ status: :failed }) + end + end + + it 'returns 200 and json data with error' do + request + + expect(response).to have_gitlab_http_status(:ok) + expect(json_response).to include('error' => 'Error occurred. User was not blocked') + end + end + end + end +end diff --git a/spec/requests/api/admin/ci/variables_spec.rb b/spec/requests/api/admin/ci/variables_spec.rb index dd4171b257a..cd57cde74ff 100644 --- a/spec/requests/api/admin/ci/variables_spec.rb +++ b/spec/requests/api/admin/ci/variables_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe ::API::Admin::Ci::Variables do +RSpec.describe ::API::Admin::Ci::Variables, :aggregate_failures, feature_category: :pipeline_composition do let_it_be(:admin) { create(:admin) } let_it_be(:user) { create(:user) } let_it_be(:variable) { create(:ci_instance_variable) } @@ -11,7 +11,7 @@ RSpec.describe ::API::Admin::Ci::Variables do describe 'GET /admin/ci/variables' do it_behaves_like 'GET request permissions for admin mode' - it 'returns instance-level variables for admins', :aggregate_failures do + it 'returns instance-level variables for admins' do get api(path, admin, admin_mode: true) expect(json_response).to be_a(Array) @@ -29,7 +29,7 @@ RSpec.describe ::API::Admin::Ci::Variables do it_behaves_like 'GET request permissions for admin mode' - it 'returns instance-level 
variable details for admins', :aggregate_failures do + it 'returns instance-level variable details for admins' do get api(path, admin, admin_mode: true) expect(json_response['value']).to eq(variable.value) @@ -56,7 +56,7 @@ RSpec.describe ::API::Admin::Ci::Variables do end context 'authorized user with proper permissions' do - it 'creates variable for admins', :aggregate_failures do + it 'creates variable for admins' do expect do post api(path, admin, admin_mode: true), params: { @@ -85,7 +85,7 @@ RSpec.describe ::API::Admin::Ci::Variables do params: { key: 'VAR_KEY', value: 'SENSITIVE', protected: true, masked: true } end - it 'creates variable with optional attributes', :aggregate_failures do + it 'creates variable with optional attributes' do expect do post api(path, admin, admin_mode: true), params: { @@ -112,7 +112,7 @@ RSpec.describe ::API::Admin::Ci::Variables do expect(response).to have_gitlab_http_status(:bad_request) end - it 'does not allow values above 10,000 characters', :aggregate_failures do + it 'does not allow values above 10,000 characters' do too_long_message = <<~MESSAGE.strip The value of the provided variable exceeds the 10000 character limit MESSAGE @@ -152,7 +152,7 @@ RSpec.describe ::API::Admin::Ci::Variables do it_behaves_like 'PUT request permissions for admin mode' context 'authorized user with proper permissions' do - it 'updates variable data', :aggregate_failures do + it 'updates variable data' do put api(path, admin, admin_mode: true), params: params expect(variable.reload.value).to eq('VALUE_1_UP') diff --git a/spec/requests/api/admin/instance_clusters_spec.rb b/spec/requests/api/admin/instance_clusters_spec.rb index 0a72f404e89..f2e62533b78 100644 --- a/spec/requests/api/admin/instance_clusters_spec.rb +++ b/spec/requests/api/admin/instance_clusters_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe ::API::Admin::InstanceClusters, feature_category: :kubernetes_management do +RSpec.describe ::API::Admin::InstanceClusters, 
feature_category: :deployment_management do include KubernetesHelpers let_it_be(:admin_user) { create(:admin) } diff --git a/spec/requests/api/admin/sidekiq_spec.rb b/spec/requests/api/admin/sidekiq_spec.rb index 8bcd7884fd2..eca12c8e433 100644 --- a/spec/requests/api/admin/sidekiq_spec.rb +++ b/spec/requests/api/admin/sidekiq_spec.rb @@ -31,7 +31,9 @@ RSpec.describe API::Admin::Sidekiq, :clean_gitlab_redis_queues, feature_category let_it_be(:path) { "/admin/sidekiq/queues/authorized_projects?user=#{admin.username}&worker_class=AuthorizedProjectsWorker" } - it_behaves_like 'DELETE request permissions for admin mode', success_status_code: :ok + it_behaves_like 'DELETE request permissions for admin mode' do + let(:success_status_code) { :ok } + end it 'returns info about the deleted jobs' do delete api(path, admin, admin_mode: true) diff --git a/spec/requests/api/appearance_spec.rb b/spec/requests/api/appearance_spec.rb index 3550e51d585..2ea4dcce7d8 100644 --- a/spec/requests/api/appearance_spec.rb +++ b/spec/requests/api/appearance_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe API::Appearance, 'Appearance', feature_category: :navigation do +RSpec.describe API::Appearance, 'Appearance', :aggregate_failures, feature_category: :navigation do let_it_be(:user) { create(:user) } let_it_be(:admin) { create(:admin) } let_it_be(:path) { "/application/appearance" } @@ -12,7 +12,7 @@ RSpec.describe API::Appearance, 'Appearance', feature_category: :navigation do context 'as an admin user' do it "returns appearance" do - get api("/application/appearance", admin, admin_mode: true) + get api(path, admin, admin_mode: true) expect(json_response).to be_an Hash expect(json_response['description']).to eq('') @@ -43,7 +43,7 @@ RSpec.describe API::Appearance, 'Appearance', feature_category: :navigation do context 'as an admin user' do context "instance basics" do it "allows updating the settings" do - put api("/application/appearance", admin, admin_mode: true), params: { 
+ put api(path, admin, admin_mode: true), params: { title: "GitLab Test Instance", description: "gitlab-test.example.com", pwa_name: "GitLab PWA Test", @@ -83,7 +83,7 @@ RSpec.describe API::Appearance, 'Appearance', feature_category: :navigation do email_header_and_footer_enabled: true } - put api("/application/appearance", admin, admin_mode: true), params: settings + put api(path, admin, admin_mode: true), params: settings expect(response).to have_gitlab_http_status(:ok) settings.each do |attribute, value| @@ -93,14 +93,14 @@ RSpec.describe API::Appearance, 'Appearance', feature_category: :navigation do context "fails on invalid color values" do it "with message_font_color" do - put api("/application/appearance", admin, admin_mode: true), params: { message_font_color: "No Color" } + put api(path, admin, admin_mode: true), params: { message_font_color: "No Color" } expect(response).to have_gitlab_http_status(:bad_request) expect(json_response['message']['message_font_color']).to contain_exactly('must be a valid color code') end it "with message_background_color" do - put api("/application/appearance", admin, admin_mode: true), params: { message_background_color: "#1" } + put api(path, admin, admin_mode: true), params: { message_background_color: "#1" } expect(response).to have_gitlab_http_status(:bad_request) expect(json_response['message']['message_background_color']).to contain_exactly('must be a valid color code') @@ -112,7 +112,7 @@ RSpec.describe API::Appearance, 'Appearance', feature_category: :navigation do let_it_be(:appearance) { create(:appearance) } it "allows updating the image files" do - put api("/application/appearance", admin, admin_mode: true), params: { + put api(path, admin, admin_mode: true), params: { logo: fixture_file_upload("spec/fixtures/dk.png", "image/png"), header_logo: fixture_file_upload("spec/fixtures/dk.png", "image/png"), pwa_icon: fixture_file_upload("spec/fixtures/dk.png", "image/png"), @@ -128,14 +128,14 @@ RSpec.describe 
API::Appearance, 'Appearance', feature_category: :navigation do context "fails on invalid color images" do it "with string instead of file" do - put api("/application/appearance", admin, admin_mode: true), params: { logo: 'not-a-file.png' } + put api(path, admin, admin_mode: true), params: { logo: 'not-a-file.png' } expect(response).to have_gitlab_http_status(:bad_request) expect(json_response['error']).to eq("logo is invalid") end it "with .svg file instead of .png" do - put api("/application/appearance", admin, admin_mode: true), params: { favicon: fixture_file_upload("spec/fixtures/logo_sample.svg", "image/svg") } + put api(path, admin, admin_mode: true), params: { favicon: fixture_file_upload("spec/fixtures/logo_sample.svg", "image/svg") } expect(response).to have_gitlab_http_status(:bad_request) expect(json_response['message']['favicon']).to contain_exactly("You are not allowed to upload \"svg\" files, allowed types: png, ico") diff --git a/spec/requests/api/applications_spec.rb b/spec/requests/api/applications_spec.rb index 5b07bded82c..16e24807e67 100644 --- a/spec/requests/api/applications_spec.rb +++ b/spec/requests/api/applications_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe API::Applications, :api, feature_category: :system_access do +RSpec.describe API::Applications, :aggregate_failures, :api, feature_category: :system_access do let_it_be(:admin) { create(:admin) } let_it_be(:user) { create(:user) } let_it_be(:scopes) { 'api' } @@ -135,7 +135,7 @@ RSpec.describe API::Applications, :api, feature_category: :system_access do context 'authorized user without authorization' do it 'does not create application' do expect do - post api('/applications', user), params: { name: 'application_name', redirect_uri: 'http://application.url', scopes: scopes } + post api(path, user), params: { name: 'application_name', redirect_uri: 'http://application.url', scopes: scopes } end.not_to change { Doorkeeper::Application.count } end end diff --git 
a/spec/requests/api/broadcast_messages_spec.rb b/spec/requests/api/broadcast_messages_spec.rb index 5cbb7dbfa12..530c81364a8 100644 --- a/spec/requests/api/broadcast_messages_spec.rb +++ b/spec/requests/api/broadcast_messages_spec.rb @@ -2,16 +2,16 @@ require 'spec_helper' -RSpec.describe API::BroadcastMessages, feature_category: :onboarding do - let_it_be(:user) { create(:user) } +RSpec.describe API::BroadcastMessages, :aggregate_failures, feature_category: :onboarding do let_it_be(:admin) { create(:admin) } let_it_be(:message) { create(:broadcast_message) } + let_it_be(:path) { '/broadcast_messages' } describe 'GET /broadcast_messages' do it 'returns an Array of BroadcastMessages' do create(:broadcast_message) - get api('/broadcast_messages') + get api(path) expect(response).to have_gitlab_http_status(:ok) expect(response).to include_pagination_headers @@ -22,8 +22,10 @@ RSpec.describe API::BroadcastMessages, feature_category: :onboarding do end describe 'GET /broadcast_messages/:id' do + let_it_be(:path) { "#{path}/#{message.id}" } + it 'returns the specified message' do - get api("/broadcast_messages/#{message.id}") + get api(path) expect(response).to have_gitlab_http_status(:ok) expect(json_response['id']).to eq message.id @@ -33,16 +35,14 @@ RSpec.describe API::BroadcastMessages, feature_category: :onboarding do end describe 'POST /broadcast_messages' do - it 'returns a 401 for anonymous users' do - post api('/broadcast_messages'), params: attributes_for(:broadcast_message) - - expect(response).to have_gitlab_http_status(:unauthorized) + it_behaves_like 'POST request permissions for admin mode' do + let(:params) { { message: 'Test message' } } end - it 'returns a 403 for users' do - post api('/broadcast_messages', user), params: attributes_for(:broadcast_message) + it 'returns a 401 for anonymous users' do + post api(path), params: attributes_for(:broadcast_message) - expect(response).to have_gitlab_http_status(:forbidden) + expect(response).to 
have_gitlab_http_status(:unauthorized) end context 'as an admin' do @@ -50,7 +50,7 @@ RSpec.describe API::BroadcastMessages, feature_category: :onboarding do attrs = attributes_for(:broadcast_message) attrs.delete(:message) - post api('/broadcast_messages', admin), params: attrs + post api(path, admin, admin_mode: true), params: attrs expect(response).to have_gitlab_http_status(:bad_request) expect(json_response['error']).to eq 'message is missing' @@ -59,7 +59,7 @@ RSpec.describe API::BroadcastMessages, feature_category: :onboarding do it 'defines sane default start and end times' do time = Time.zone.parse('2016-07-02 10:11:12') travel_to(time) do - post api('/broadcast_messages', admin), params: { message: 'Test message' } + post api(path, admin, admin_mode: true), params: { message: 'Test message' } expect(response).to have_gitlab_http_status(:created) expect(json_response['starts_at']).to eq '2016-07-02T10:11:12.000Z' @@ -70,7 +70,7 @@ RSpec.describe API::BroadcastMessages, feature_category: :onboarding do it 'accepts a custom background and foreground color' do attrs = attributes_for(:broadcast_message, color: '#000000', font: '#cecece') - post api('/broadcast_messages', admin), params: attrs + post api(path, admin, admin_mode: true), params: attrs expect(response).to have_gitlab_http_status(:created) expect(json_response['color']).to eq attrs[:color] @@ -81,7 +81,7 @@ RSpec.describe API::BroadcastMessages, feature_category: :onboarding do target_access_levels = [Gitlab::Access::GUEST, Gitlab::Access::DEVELOPER] attrs = attributes_for(:broadcast_message, target_access_levels: target_access_levels) - post api('/broadcast_messages', admin), params: attrs + post api(path, admin, admin_mode: true), params: attrs expect(response).to have_gitlab_http_status(:created) expect(json_response['target_access_levels']).to eq attrs[:target_access_levels] @@ -90,7 +90,7 @@ RSpec.describe API::BroadcastMessages, feature_category: :onboarding do it 'accepts a target path' do 
attrs = attributes_for(:broadcast_message, target_path: "*/welcome") - post api('/broadcast_messages', admin), params: attrs + post api(path, admin, admin_mode: true), params: attrs expect(response).to have_gitlab_http_status(:created) expect(json_response['target_path']).to eq attrs[:target_path] @@ -99,7 +99,7 @@ RSpec.describe API::BroadcastMessages, feature_category: :onboarding do it 'accepts a broadcast type' do attrs = attributes_for(:broadcast_message, broadcast_type: 'notification') - post api('/broadcast_messages', admin), params: attrs + post api(path, admin, admin_mode: true), params: attrs expect(response).to have_gitlab_http_status(:created) expect(json_response['broadcast_type']).to eq attrs[:broadcast_type] @@ -108,7 +108,7 @@ RSpec.describe API::BroadcastMessages, feature_category: :onboarding do it 'uses default broadcast type' do attrs = attributes_for(:broadcast_message) - post api('/broadcast_messages', admin), params: attrs + post api(path, admin, admin_mode: true), params: attrs expect(response).to have_gitlab_http_status(:created) expect(json_response['broadcast_type']).to eq 'banner' @@ -117,7 +117,7 @@ RSpec.describe API::BroadcastMessages, feature_category: :onboarding do it 'errors for invalid broadcast type' do attrs = attributes_for(:broadcast_message, broadcast_type: 'invalid-type') - post api('/broadcast_messages', admin), params: attrs + post api(path, admin, admin_mode: true), params: attrs expect(response).to have_gitlab_http_status(:bad_request) end @@ -125,7 +125,7 @@ RSpec.describe API::BroadcastMessages, feature_category: :onboarding do it 'accepts an active dismissable value' do attrs = { message: 'new message', dismissable: true } - post api('/broadcast_messages', admin), params: attrs + post api(path, admin, admin_mode: true), params: attrs expect(response).to have_gitlab_http_status(:created) expect(json_response['dismissable']).to eq true @@ -134,27 +134,25 @@ RSpec.describe API::BroadcastMessages, feature_category: 
:onboarding do end describe 'PUT /broadcast_messages/:id' do - it 'returns a 401 for anonymous users' do - put api("/broadcast_messages/#{message.id}"), - params: attributes_for(:broadcast_message) + let_it_be(:path) { "#{path}/#{message.id}" } - expect(response).to have_gitlab_http_status(:unauthorized) + it_behaves_like 'PUT request permissions for admin mode' do + let(:params) { { message: 'Test message' } } end - it 'returns a 403 for users' do - put api("/broadcast_messages/#{message.id}", user), + it 'returns a 401 for anonymous users' do + put api(path), params: attributes_for(:broadcast_message) - expect(response).to have_gitlab_http_status(:forbidden) + expect(response).to have_gitlab_http_status(:unauthorized) end context 'as an admin' do it 'accepts new background and foreground colors' do attrs = { color: '#000000', font: '#cecece' } - put api("/broadcast_messages/#{message.id}", admin), params: attrs + put api(path, admin, admin_mode: true), params: attrs - expect(response).to have_gitlab_http_status(:ok) expect(json_response['color']).to eq attrs[:color] expect(json_response['font']).to eq attrs[:font] end @@ -164,7 +162,7 @@ RSpec.describe API::BroadcastMessages, feature_category: :onboarding do travel_to(time) do attrs = { starts_at: Time.zone.now, ends_at: 3.hours.from_now } - put api("/broadcast_messages/#{message.id}", admin), params: attrs + put api(path, admin, admin_mode: true), params: attrs expect(response).to have_gitlab_http_status(:ok) expect(json_response['starts_at']).to eq '2016-07-02T10:11:12.000Z' @@ -175,7 +173,7 @@ RSpec.describe API::BroadcastMessages, feature_category: :onboarding do it 'accepts a new message' do attrs = { message: 'new message' } - put api("/broadcast_messages/#{message.id}", admin), params: attrs + put api(path, admin, admin_mode: true), params: attrs expect(response).to have_gitlab_http_status(:ok) expect { message.reload }.to change { message.message }.to('new message') @@ -184,7 +182,7 @@ RSpec.describe 
API::BroadcastMessages, feature_category: :onboarding do it 'accepts a new target_access_levels' do attrs = { target_access_levels: [Gitlab::Access::MAINTAINER] } - put api("/broadcast_messages/#{message.id}", admin), params: attrs + put api(path, admin, admin_mode: true), params: attrs expect(response).to have_gitlab_http_status(:ok) expect(json_response['target_access_levels']).to eq attrs[:target_access_levels] @@ -193,7 +191,7 @@ RSpec.describe API::BroadcastMessages, feature_category: :onboarding do it 'accepts a new target_path' do attrs = { target_path: '*/welcome' } - put api("/broadcast_messages/#{message.id}", admin), params: attrs + put api(path, admin, admin_mode: true), params: attrs expect(response).to have_gitlab_http_status(:ok) expect(json_response['target_path']).to eq attrs[:target_path] @@ -202,7 +200,7 @@ RSpec.describe API::BroadcastMessages, feature_category: :onboarding do it 'accepts a new broadcast_type' do attrs = { broadcast_type: 'notification' } - put api("/broadcast_messages/#{message.id}", admin), params: attrs + put api(path, admin, admin_mode: true), params: attrs expect(response).to have_gitlab_http_status(:ok) expect(json_response['broadcast_type']).to eq attrs[:broadcast_type] @@ -211,7 +209,7 @@ RSpec.describe API::BroadcastMessages, feature_category: :onboarding do it 'errors for invalid broadcast type' do attrs = { broadcast_type: 'invalid-type' } - put api("/broadcast_messages/#{message.id}", admin), params: attrs + put api(path, admin, admin_mode: true), params: attrs expect(response).to have_gitlab_http_status(:bad_request) end @@ -219,7 +217,7 @@ RSpec.describe API::BroadcastMessages, feature_category: :onboarding do it 'accepts a new dismissable value' do attrs = { message: 'new message', dismissable: true } - put api("/broadcast_messages/#{message.id}", admin), params: attrs + put api(path, admin, admin_mode: true), params: attrs expect(response).to have_gitlab_http_status(:ok) expect(json_response['dismissable']).to eq 
true @@ -228,27 +226,24 @@ RSpec.describe API::BroadcastMessages, feature_category: :onboarding do end describe 'DELETE /broadcast_messages/:id' do - it 'returns a 401 for anonymous users' do - delete api("/broadcast_messages/#{message.id}"), - params: attributes_for(:broadcast_message) + let_it_be(:path) { "#{path}/#{message.id}" } - expect(response).to have_gitlab_http_status(:unauthorized) - end + it_behaves_like 'DELETE request permissions for admin mode' - it 'returns a 403 for users' do - delete api("/broadcast_messages/#{message.id}", user), + it 'returns a 401 for anonymous users' do + delete api(path), params: attributes_for(:broadcast_message) - expect(response).to have_gitlab_http_status(:forbidden) + expect(response).to have_gitlab_http_status(:unauthorized) end it_behaves_like '412 response' do - let(:request) { api("/broadcast_messages/#{message.id}", admin) } + let(:request) { api("/broadcast_messages/#{message.id}", admin, admin_mode: true) } end it 'deletes the broadcast message for admins' do expect do - delete api("/broadcast_messages/#{message.id}", admin) + delete api(path, admin, admin_mode: true) expect(response).to have_gitlab_http_status(:no_content) end.to change { BroadcastMessage.count }.by(-1) diff --git a/spec/requests/api/bulk_imports_spec.rb b/spec/requests/api/bulk_imports_spec.rb index 23dfe865ba3..b159d4ad445 100644 --- a/spec/requests/api/bulk_imports_spec.rb +++ b/spec/requests/api/bulk_imports_spec.rb @@ -75,6 +75,8 @@ RSpec.describe API::BulkImports, feature_category: :importers do end describe 'POST /bulk_imports' do + let_it_be(:destination_namespace) { create(:group) } + let(:request) { post api('/bulk_imports', user), params: params } let(:destination_param) { { destination_slug: 'destination_slug' } } let(:params) do @@ -87,12 +89,15 @@ RSpec.describe API::BulkImports, feature_category: :importers do { source_type: 'group_entity', source_full_path: 'full_path', - destination_namespace: 'destination_namespace' + 
destination_namespace: destination_namespace.path }.merge(destination_param) ] } end + let(:source_entity_type) { BulkImports::CreateService::ENTITY_TYPES_MAPPING.fetch(params[:entities][0][:source_type]) } + let(:source_entity_identifier) { ERB::Util.url_encode(params[:entities][0][:source_full_path]) } + before do allow_next_instance_of(BulkImports::Clients::HTTP) do |instance| allow(instance) @@ -103,6 +108,10 @@ RSpec.describe API::BulkImports, feature_category: :importers do .to receive(:instance_enterprise) .and_return(false) end + stub_request(:get, "http://gitlab.example/api/v4/#{source_entity_type}/#{source_entity_identifier}/export_relations/status?page=1&per_page=30&private_token=access_token") + .to_return(status: 200, body: "", headers: {}) + + destination_namespace.add_owner(user) end shared_examples 'starting a new migration' do @@ -192,7 +201,7 @@ RSpec.describe API::BulkImports, feature_category: :importers do { source_type: 'group_entity', source_full_path: 'full_path', - destination_namespace: 'destination_namespace' + destination_namespace: destination_namespace.path } ] } @@ -214,20 +223,17 @@ RSpec.describe API::BulkImports, feature_category: :importers do request expect(response).to have_gitlab_http_status(:bad_request) expect(json_response['error']).to eq("entities[0][source_full_path] must be a relative path and not include protocol, sub-domain, " \ - "or domain information. E.g. 'source/full/path' not 'https://example.com/source/full/path'") + "or domain information. 
For example, 'source/full/path' not 'https://example.com/source/full/path'") end end - context 'when the destination_namespace is invalid' do + context 'when the destination_namespace does not exist' do it 'returns invalid error' do - params[:entities][0][:destination_namespace] = "?not a destination-namespace" + params[:entities][0][:destination_namespace] = "invalid-destination-namespace" request - expect(response).to have_gitlab_http_status(:bad_request) - expect(json_response['error']).to eq("entities[0][destination_namespace] cannot start with a dash or forward slash, " \ - "or end with a period or forward slash. It can only contain alphanumeric " \ - "characters, periods, underscores, forward slashes and dashes. " \ - "E.g. 'destination_namespace' or 'destination/namespace'") + expect(response).to have_gitlab_http_status(:unprocessable_entity) + expect(json_response['message']).to eq("Import failed. Destination 'invalid-destination-namespace' is invalid, or you don't have permission.") end end @@ -243,15 +249,35 @@ RSpec.describe API::BulkImports, feature_category: :importers do end context 'when the destination_slug is invalid' do - it 'returns invalid error' do + it 'returns invalid error when restricting special characters is disabled' do + Feature.disable(:restrict_special_characters_in_namespace_path) + + params[:entities][0][:destination_slug] = 'des?tin?atoi-slugg' + + request + expect(response).to have_gitlab_http_status(:bad_request) + expect(json_response['error']).to include("entities[0][destination_slug] cannot start with " \ + "a non-alphanumeric character except for periods or " \ + "underscores, can contain only alphanumeric characters, " \ + "periods, and underscores, cannot end with a period or " \ + "forward slash, and has no leading or trailing forward " \ + "slashes. It can only contain alphanumeric characters, " \ + "periods, underscores, and dashes. 
For example, " \ + "'destination_namespace' not 'destination/namespace'") + end + + it 'returns invalid error when restricting special characters is enabled' do + Feature.enable(:restrict_special_characters_in_namespace_path) + params[:entities][0][:destination_slug] = 'des?tin?atoi-slugg' request expect(response).to have_gitlab_http_status(:bad_request) - expect(json_response['error']).to include("entities[0][destination_slug] cannot start with a dash " \ - "or forward slash, or end with a period or forward slash. " \ - "It can only contain alphanumeric characters, periods, underscores, and dashes. " \ - "E.g. 'destination_namespace' not 'destination/namespace'") + expect(json_response['error']).to include("entities[0][destination_slug] must not start or " \ + "end with a special character and must not contain " \ + "consecutive special characters. It can only contain " \ + "alphanumeric characters, periods, underscores, and " \ + "dashes. For example, 'destination_namespace' not 'destination/namespace'") end end @@ -271,12 +297,41 @@ RSpec.describe API::BulkImports, feature_category: :importers do } end + it 'returns blocked url message in the error' do + request + + expect(response).to have_gitlab_http_status(:unprocessable_entity) + + expect(json_response['message']).to include("Url is blocked: Only allowed schemes are http, https") + end + end + + context 'when source instance setting is disabled' do + let(:params) do + { + configuration: { + url: 'http://gitlab.example', + access_token: 'access_token' + }, + entities: [ + source_type: 'group_entity', + source_full_path: 'full_path', + destination_slug: 'destination_slug', + destination_namespace: 'destination_namespace' + ] + } + end + it 'returns blocked url error' do + stub_request(:get, "http://gitlab.example/api/v4/#{source_entity_type}/#{source_entity_identifier}/export_relations/status?page=1&per_page=30&private_token=access_token") + .to_return(status: 404, body: "", headers: {}) + request 
expect(response).to have_gitlab_http_status(:unprocessable_entity) - expect(json_response['message']).to eq('Validation failed: Url is blocked: Only allowed schemes are http, https') + expect(json_response['message']).to include("Group import disabled on source or destination instance. " \ + "Ask an administrator to enable it on both instances and try again.") end end diff --git a/spec/requests/api/ci/jobs_spec.rb b/spec/requests/api/ci/jobs_spec.rb index 8b3ec59b785..25871beeb4f 100644 --- a/spec/requests/api/ci/jobs_spec.rb +++ b/spec/requests/api/ci/jobs_spec.rb @@ -198,22 +198,22 @@ RSpec.describe API::Ci::Jobs, feature_category: :continuous_integration do let_it_be(:agent_authorizations_without_env) do [ - create(:agent_group_authorization, agent: create(:cluster_agent, project: other_project), group: group), - create(:agent_project_authorization, agent: create(:cluster_agent, project: project), project: project), - Clusters::Agents::ImplicitAuthorization.new(agent: create(:cluster_agent, project: project)) + create(:agent_ci_access_group_authorization, agent: create(:cluster_agent, project: other_project), group: group), + create(:agent_ci_access_project_authorization, agent: create(:cluster_agent, project: project), project: project), + Clusters::Agents::Authorizations::CiAccess::ImplicitAuthorization.new(agent: create(:cluster_agent, project: project)) ] end let_it_be(:agent_authorizations_with_review_and_production_env) do [ create( - :agent_group_authorization, + :agent_ci_access_group_authorization, agent: create(:cluster_agent, project: other_project), group: group, environments: ['production', 'review/*'] ), create( - :agent_project_authorization, + :agent_ci_access_project_authorization, agent: create(:cluster_agent, project: project), project: project, environments: ['production', 'review/*'] @@ -224,13 +224,13 @@ RSpec.describe API::Ci::Jobs, feature_category: :continuous_integration do let_it_be(:agent_authorizations_with_staging_env) do [ create( 
- :agent_group_authorization, + :agent_ci_access_group_authorization, agent: create(:cluster_agent, project: other_project), group: group, environments: ['staging'] ), create( - :agent_project_authorization, + :agent_ci_access_project_authorization, agent: create(:cluster_agent, project: project), project: project, environments: ['staging'] diff --git a/spec/requests/api/ci/pipelines_spec.rb b/spec/requests/api/ci/pipelines_spec.rb index 4e81a052ecf..869b0ec9dca 100644 --- a/spec/requests/api/ci/pipelines_spec.rb +++ b/spec/requests/api/ci/pipelines_spec.rb @@ -14,7 +14,7 @@ RSpec.describe API::Ci::Pipelines, feature_category: :continuous_integration do let_it_be(:pipeline) do create(:ci_empty_pipeline, project: project, sha: project.commit.id, - ref: project.default_branch, user: user) + ref: project.default_branch, user: user, name: 'Build pipeline') end before do @@ -41,8 +41,44 @@ RSpec.describe API::Ci::Pipelines, feature_category: :continuous_integration do it 'includes pipeline source' do get api("/projects/#{project.id}/pipelines", user) - expect(json_response.first.keys).to contain_exactly(*%w[id iid project_id sha ref status web_url created_at updated_at source]) + expect(json_response.first.keys).to contain_exactly(*%w[id iid project_id sha ref status web_url created_at updated_at source name]) end + + context 'when pipeline_name_in_api feature flag is off' do + before do + stub_feature_flags(pipeline_name_in_api: false) + end + + it 'does not include pipeline name in response and ignores name parameter' do + get api("/projects/#{project.id}/pipelines", user), params: { name: 'Chatops pipeline' } + + expect(json_response.length).to eq(1) + expect(json_response.first.keys).not_to include('name') + end + end + end + + it 'avoids N+1 queries' do + # Call to trigger any one time queries + get api("/projects/#{project.id}/pipelines", user), params: {} + + control = ActiveRecord::QueryRecorder.new(skip_cached: false) do + get 
api("/projects/#{project.id}/pipelines", user), params: {} + end + + 3.times do + create( + :ci_empty_pipeline, + project: project, + sha: project.commit.id, + ref: project.default_branch, + user: user, + name: 'Build pipeline') + end + + expect do + get api("/projects/#{project.id}/pipelines", user), params: {} + end.not_to exceed_all_query_limit(control) end context 'when parameter is passed' do @@ -303,6 +339,19 @@ RSpec.describe API::Ci::Pipelines, feature_category: :continuous_integration do end end end + + context 'when name is provided' do + let_it_be(:pipeline2) { create(:ci_empty_pipeline, project: project, user: user, name: 'Chatops pipeline') } + + it 'filters by name' do + get api("/projects/#{project.id}/pipelines", user), params: { name: 'Build pipeline' } + + expect(response).to have_gitlab_http_status(:ok) + expect(response).to include_pagination_headers + expect(json_response.length).to eq(1) + expect(json_response.first['name']).to eq('Build pipeline') + end + end end end @@ -823,6 +872,7 @@ RSpec.describe API::Ci::Pipelines, feature_category: :continuous_integration do expect(response).to have_gitlab_http_status(:ok) expect(json_response['sha']).to match(/\A\h{40}\z/) + expect(json_response['name']).to eq('Build pipeline') end it 'returns 404 when it does not exist', :aggregate_failures do @@ -844,6 +894,19 @@ RSpec.describe API::Ci::Pipelines, feature_category: :continuous_integration do expect(json_response["coverage"]).to eq('30.00') end end + + context 'with pipeline_name_in_api disabled' do + before do + stub_feature_flags(pipeline_name_in_api: false) + end + + it 'does not return name', :aggregate_failures do + get api("/projects/#{project.id}/pipelines/#{pipeline.id}", user) + + expect(response).to have_gitlab_http_status(:ok) + expect(json_response.keys).not_to include('name') + end + end end context 'unauthorized user' do @@ -878,7 +941,7 @@ RSpec.describe API::Ci::Pipelines, feature_category: :continuous_integration do 
let!(:second_pipeline) do create(:ci_empty_pipeline, project: project, sha: second_branch.target, - ref: second_branch.name, user: user) + ref: second_branch.name, user: user, name: 'Build pipeline') end before do @@ -894,6 +957,7 @@ RSpec.describe API::Ci::Pipelines, feature_category: :continuous_integration do expect(response).to match_response_schema('public_api/v4/pipeline/detail') expect(json_response['ref']).to eq(project.default_branch) expect(json_response['sha']).to eq(project.commit.id) + expect(json_response['name']).to eq('Build pipeline') end end @@ -907,6 +971,19 @@ RSpec.describe API::Ci::Pipelines, feature_category: :continuous_integration do expect(json_response['sha']).to eq(second_branch.target) end end + + context 'with pipeline_name_in_api disabled' do + before do + stub_feature_flags(pipeline_name_in_api: false) + end + + it 'does not return name', :aggregate_failures do + get api("/projects/#{project.id}/pipelines/latest", user) + + expect(response).to have_gitlab_http_status(:ok) + expect(json_response.keys).not_to include('name') + end + end end context 'unauthorized user' do diff --git a/spec/requests/api/ci/runner/jobs_put_spec.rb b/spec/requests/api/ci/runner/jobs_put_spec.rb index bf28b25e0a6..ab7ab4e74f8 100644 --- a/spec/requests/api/ci/runner/jobs_put_spec.rb +++ b/spec/requests/api/ci/runner/jobs_put_spec.rb @@ -21,13 +21,13 @@ RSpec.describe API::Ci::Runner, :clean_gitlab_redis_shared_state, feature_catego let_it_be(:project) { create(:project, namespace: group, shared_runners_enabled: false) } let_it_be(:pipeline) { create(:ci_pipeline, project: project, ref: 'master') } let_it_be(:runner) { create(:ci_runner, :project, projects: [project]) } - let_it_be(:runner_machine) { create(:ci_runner_machine, runner: runner) } + let_it_be(:runner_manager) { create(:ci_runner_machine, runner: runner) } let_it_be(:user) { create(:user) } describe 'PUT /api/v4/jobs/:id' do let_it_be_with_reload(:job) do create(:ci_build, :pending, :trace_live, 
pipeline: pipeline, project: project, user: user, - runner_id: runner.id, runner_machine: runner_machine) + runner_id: runner.id, runner_manager: runner_manager) end before do @@ -40,18 +40,7 @@ RSpec.describe API::Ci::Runner, :clean_gitlab_redis_shared_state, feature_catego it 'updates runner info' do expect { update_job(state: 'success') }.to change { runner.reload.contacted_at } - .and change { runner_machine.reload.contacted_at } - end - - context 'when runner_machine_heartbeat is disabled' do - before do - stub_feature_flags(runner_machine_heartbeat: false) - end - - it 'does not load runner machine' do - queries = ActiveRecord::QueryRecorder.new { update_job(state: 'success') } - expect(queries.log).not_to include(/ci_runner_machines/) - end + .and change { runner_manager.reload.contacted_at } end context 'when status is given' do diff --git a/spec/requests/api/ci/runner/jobs_request_post_spec.rb b/spec/requests/api/ci/runner/jobs_request_post_spec.rb index 28dbc4fd168..f820e4a3504 100644 --- a/spec/requests/api/ci/runner/jobs_request_post_spec.rb +++ b/spec/requests/api/ci/runner/jobs_request_post_spec.rb @@ -122,56 +122,33 @@ RSpec.describe API::Ci::Runner, :clean_gitlab_redis_shared_state, feature_catego context 'when system_id parameter is specified' do subject(:request) { request_job(**args) } - context 'with create_runner_machine FF enabled' do - before do - stub_feature_flags(create_runner_machine: true) - end - - context 'when ci_runner_machines with same system_xid does not exist' do - let(:args) { { system_id: 's_some_system_id' } } - - it 'creates respective ci_runner_machines record', :freeze_time do - expect { request }.to change { runner.runner_machines.reload.count }.from(0).to(1) - - machine = runner.runner_machines.last - expect(machine.system_xid).to eq args[:system_id] - expect(machine.runner).to eq runner - expect(machine.contacted_at).to eq Time.current - end - end - - context 'when ci_runner_machines with same system_xid already exists', 
:freeze_time do - let(:args) { { system_id: 's_existing_system_id' } } - let!(:runner_machine) do - create(:ci_runner_machine, runner: runner, system_xid: args[:system_id], contacted_at: 1.hour.ago) - end - - it 'does not create new ci_runner_machines record' do - expect { request }.not_to change { Ci::RunnerMachine.count } - end + context 'when ci_runner_machines with same system_xid does not exist' do + let(:args) { { system_id: 's_some_system_id' } } - it 'updates the contacted_at field' do - request + it 'creates respective ci_runner_machines record', :freeze_time do + expect { request }.to change { runner.runner_managers.reload.count }.from(0).to(1) - expect(runner_machine.reload.contacted_at).to eq Time.current - end + runner_manager = runner.runner_managers.last + expect(runner_manager.system_xid).to eq args[:system_id] + expect(runner_manager.runner).to eq runner + expect(runner_manager.contacted_at).to eq Time.current end end - context 'with create_runner_machine FF disabled' do - before do - stub_feature_flags(create_runner_machine: false) + context 'when ci_runner_machines with same system_xid already exists', :freeze_time do + let(:args) { { system_id: 's_existing_system_id' } } + let!(:runner_manager) do + create(:ci_runner_machine, runner: runner, system_xid: args[:system_id], contacted_at: 1.hour.ago) end - context 'when ci_runner_machines with same system_xid does not exist' do - let(:args) { { system_id: 's_some_system_id' } } + it 'does not create new ci_runner_machines record' do + expect { request }.not_to change { Ci::RunnerManager.count } + end - it 'does not create respective ci_runner_machines record', :freeze_time, :aggregate_failures do - expect { request }.not_to change { runner.runner_machines.reload.count } + it 'updates the contacted_at field' do + request - expect(response).to have_gitlab_http_status(:created) - expect(runner.runner_machines).to be_empty - end + expect(runner_manager.reload.contacted_at).to eq Time.current end end end 
diff --git a/spec/requests/api/ci/runner/runners_verify_post_spec.rb b/spec/requests/api/ci/runner/runners_verify_post_spec.rb index 1b7dfe7706c..f1b33826f5e 100644 --- a/spec/requests/api/ci/runner/runners_verify_post_spec.rb +++ b/spec/requests/api/ci/runner/runners_verify_post_spec.rb @@ -45,33 +45,12 @@ RSpec.describe API::Ci::Runner, :clean_gitlab_redis_shared_state, feature_catego context 'when valid token is provided' do let(:params) { { token: runner.token } } - context 'with create_runner_machine FF enabled' do - before do - stub_feature_flags(create_runner_machine: true) - end - - context 'with glrt-prefixed token' do - let_it_be(:registration_token) { 'glrt-abcdefg123456' } - let_it_be(:registration_type) { :authenticated_user } - let_it_be(:runner) do - create(:ci_runner, registration_type: registration_type, - token: registration_token, token_expires_at: 3.days.from_now) - end - - it 'verifies Runner credentials' do - verify - - expect(response).to have_gitlab_http_status(:ok) - expect(json_response).to eq({ - 'id' => runner.id, - 'token' => runner.token, - 'token_expires_at' => runner.token_expires_at.iso8601(3) - }) - end - - it 'does not update contacted_at' do - expect { verify }.not_to change { runner.reload.contacted_at }.from(nil) - end + context 'with glrt-prefixed token' do + let_it_be(:registration_token) { 'glrt-abcdefg123456' } + let_it_be(:registration_type) { :authenticated_user } + let_it_be(:runner) do + create(:ci_runner, registration_type: registration_type, + token: registration_token, token_expires_at: 3.days.from_now) end it 'verifies Runner credentials' do @@ -85,43 +64,29 @@ RSpec.describe API::Ci::Runner, :clean_gitlab_redis_shared_state, feature_catego }) end - it 'updates contacted_at' do - expect { verify }.to change { runner.reload.contacted_at }.from(nil).to(Time.current) - end - - context 'with non-expiring runner token' do - before do - runner.update!(token_expires_at: nil) - end - - it 'verifies Runner credentials' do - 
verify - - expect(response).to have_gitlab_http_status(:ok) - expect(json_response).to eq({ - 'id' => runner.id, - 'token' => runner.token, - 'token_expires_at' => nil - }) - end + it 'does not update contacted_at' do + expect { verify }.not_to change { runner.reload.contacted_at }.from(nil) end + end - it_behaves_like 'storing arguments in the application context for the API' do - let(:expected_params) { { client_id: "runner/#{runner.id}" } } - end + it 'verifies Runner credentials' do + verify - context 'when system_id is provided' do - let(:params) { { token: runner.token, system_id: 's_some_system_id' } } + expect(response).to have_gitlab_http_status(:ok) + expect(json_response).to eq({ + 'id' => runner.id, + 'token' => runner.token, + 'token_expires_at' => runner.token_expires_at.iso8601(3) + }) + end - it 'creates a runner_machine' do - expect { verify }.to change { Ci::RunnerMachine.count }.by(1) - end - end + it 'updates contacted_at' do + expect { verify }.to change { runner.reload.contacted_at }.from(nil).to(Time.current) end - context 'with create_runner_machine FF disabled' do + context 'with non-expiring runner token' do before do - stub_feature_flags(create_runner_machine: false) + runner.update!(token_expires_at: nil) end it 'verifies Runner credentials' do @@ -131,18 +96,20 @@ RSpec.describe API::Ci::Runner, :clean_gitlab_redis_shared_state, feature_catego expect(json_response).to eq({ 'id' => runner.id, 'token' => runner.token, - 'token_expires_at' => runner.token_expires_at.iso8601(3) + 'token_expires_at' => nil }) end + end - context 'when system_id is provided' do - let(:params) { { token: runner.token, system_id: 's_some_system_id' } } + it_behaves_like 'storing arguments in the application context for the API' do + let(:expected_params) { { client_id: "runner/#{runner.id}" } } + end - it 'does not create a runner_machine', :aggregate_failures do - expect { verify }.not_to change { Ci::RunnerMachine.count } + context 'when system_id is 
provided' do + let(:params) { { token: runner.token, system_id: 's_some_system_id' } } - expect(response).to have_gitlab_http_status(:ok) - end + it 'creates a runner_manager' do + expect { verify }.to change { Ci::RunnerManager.count }.by(1) end end end diff --git a/spec/requests/api/ci/runners_spec.rb b/spec/requests/api/ci/runners_spec.rb index ec9b5621c37..2b2d2e0def8 100644 --- a/spec/requests/api/ci/runners_spec.rb +++ b/spec/requests/api/ci/runners_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe API::Ci::Runners, feature_category: :runner_fleet do +RSpec.describe API::Ci::Runners, :aggregate_failures, feature_category: :runner_fleet do let_it_be(:admin) { create(:user, :admin) } let_it_be(:user) { create(:user) } let_it_be(:user2) { create(:user) } @@ -35,7 +35,7 @@ RSpec.describe API::Ci::Runners, feature_category: :runner_fleet do describe 'GET /runners' do context 'authorized user' do - it 'returns response status and headers', :aggregate_failures do + it 'returns response status and headers' do get api('/runners', user) expect(response).to have_gitlab_http_status(:ok) @@ -53,7 +53,7 @@ RSpec.describe API::Ci::Runners, feature_category: :runner_fleet do ] end - it 'filters runners by scope', :aggregate_failures do + it 'filters runners by scope' do create(:ci_runner, :project, :inactive, description: 'Inactive project runner', projects: [project]) get api('/runners?scope=paused', user) @@ -112,7 +112,7 @@ RSpec.describe API::Ci::Runners, feature_category: :runner_fleet do expect(response).to have_gitlab_http_status(:bad_request) end - it 'filters runners by tag_list', :aggregate_failures do + it 'filters runners by tag_list' do create(:ci_runner, :project, description: 'Runner tagged with tag1 and tag2', projects: [project], tag_list: %w[tag1 tag2]) create(:ci_runner, :project, description: 'Runner tagged with tag2', projects: [project], tag_list: ['tag2']) @@ -134,17 +134,21 @@ RSpec.describe API::Ci::Runners, feature_category: 
:runner_fleet do end describe 'GET /runners/all' do + let(:path) { '/runners/all' } + + it_behaves_like 'GET request permissions for admin mode' + context 'authorized user' do context 'with admin privileges' do it 'returns response status and headers' do - get api('/runners/all', admin, admin_mode: true) + get api(path, admin, admin_mode: true) expect(response).to have_gitlab_http_status(:ok) expect(response).to include_pagination_headers end it 'returns all runners' do - get api('/runners/all', admin, admin_mode: true) + get api(path, admin, admin_mode: true) expect(json_response).to match_array [ a_hash_including('description' => 'Project runner', 'is_shared' => false, 'active' => true, 'paused' => false, 'runner_type' => 'project_type'), @@ -155,7 +159,7 @@ RSpec.describe API::Ci::Runners, feature_category: :runner_fleet do ] end - it 'filters runners by scope', :aggregate_failures do + it 'filters runners by scope' do get api('/runners/all?scope=shared', admin, admin_mode: true) shared = json_response.all? 
{ |r| r['is_shared'] } @@ -166,7 +170,7 @@ RSpec.describe API::Ci::Runners, feature_category: :runner_fleet do expect(shared).to be_truthy end - it 'filters runners by scope', :aggregate_failures do + it 'filters runners by scope' do get api('/runners/all?scope=specific', admin, admin_mode: true) expect(response).to have_gitlab_http_status(:ok) @@ -235,7 +239,7 @@ RSpec.describe API::Ci::Runners, feature_category: :runner_fleet do expect(response).to have_gitlab_http_status(:bad_request) end - it 'filters runners by tag_list', :aggregate_failures do + it 'filters runners by tag_list' do create(:ci_runner, :project, description: 'Runner tagged with tag1 and tag2', projects: [project], tag_list: %w[tag1 tag2]) create(:ci_runner, :project, description: 'Runner tagged with tag2', projects: [project], tag_list: ['tag2']) @@ -249,7 +253,7 @@ RSpec.describe API::Ci::Runners, feature_category: :runner_fleet do context 'without admin privileges' do it 'does not return runners list' do - get api('/runners/all', user) + get api(path, user) expect(response).to have_gitlab_http_status(:forbidden) end @@ -266,9 +270,13 @@ RSpec.describe API::Ci::Runners, feature_category: :runner_fleet do end describe 'GET /runners/:id' do + let(:path) { "/runners/#{project_runner.id}" } + + it_behaves_like 'GET request permissions for admin mode' + context 'admin user' do context 'when runner is shared' do - it "returns runner's details", :aggregate_failures do + it "returns runner's details" do get api("/runners/#{shared_runner.id}", admin) expect(response).to have_gitlab_http_status(:ok) @@ -284,7 +292,7 @@ RSpec.describe API::Ci::Runners, feature_category: :runner_fleet do context 'when unused runner is present' do let!(:unused_project_runner) { create(:ci_runner, :project, :without_projects) } - it 'deletes unused runner', :aggregate_failures do + it 'deletes unused runner' do expect do delete api("/runners/#{unused_project_runner.id}", admin, admin_mode: true) @@ -293,29 +301,29 @@ 
RSpec.describe API::Ci::Runners, feature_category: :runner_fleet do end end - it "returns runner's details", :aggregate_failures do - get api("/runners/#{project_runner.id}", admin, admin_mode: true) + it "returns runner's details" do + get api(path, admin, admin_mode: true) expect(response).to have_gitlab_http_status(:ok) expect(json_response['description']).to eq(project_runner.description) end it "returns the project's details for a project runner" do - get api("/runners/#{project_runner.id}", admin, admin_mode: true) + get api(path, admin, admin_mode: true) expect(json_response['projects'].first['id']).to eq(project.id) end end it 'returns 404 if runner does not exist' do - get api('/runners/0', admin, admin_mode: true) + get api("/runners/#{non_existing_record_id}", admin, admin_mode: true) expect(response).to have_gitlab_http_status(:not_found) end end context 'when the runner is a group runner' do - it "returns the runner's details", :aggregate_failures do + it "returns the runner's details" do get api("/runners/#{group_runner_a.id}", admin, admin_mode: true) expect(response).to have_gitlab_http_status(:ok) @@ -326,8 +334,8 @@ RSpec.describe API::Ci::Runners, feature_category: :runner_fleet do context "runner project's administrative user" do context 'when runner is not shared' do - it "returns runner's details", :aggregate_failures do - get api("/runners/#{project_runner.id}", user) + it "returns runner's details" do + get api(path, user) expect(response).to have_gitlab_http_status(:ok) expect(json_response['description']).to eq(project_runner.description) @@ -335,7 +343,7 @@ RSpec.describe API::Ci::Runners, feature_category: :runner_fleet do end context 'when runner is shared' do - it "returns runner's details", :aggregate_failures do + it "returns runner's details" do get api("/runners/#{shared_runner.id}", user) expect(response).to have_gitlab_http_status(:ok) @@ -346,7 +354,7 @@ RSpec.describe API::Ci::Runners, feature_category: :runner_fleet do context 
'other authorized user' do it "does not return project runner's details" do - get api("/runners/#{project_runner.id}", user2) + get api(path, user2) expect(response).to have_gitlab_http_status(:forbidden) end @@ -354,7 +362,7 @@ RSpec.describe API::Ci::Runners, feature_category: :runner_fleet do context 'unauthorized user' do it "does not return project runner's details" do - get api("/runners/#{project_runner.id}") + get api(path) expect(response).to have_gitlab_http_status(:unauthorized) end @@ -362,6 +370,12 @@ RSpec.describe API::Ci::Runners, feature_category: :runner_fleet do end describe 'PUT /runners/:id' do + let(:path) { "/runners/#{project_runner.id}" } + + it_behaves_like 'PUT request permissions for admin mode' do + let(:params) { { description: 'test' } } + end + context 'admin user' do # see https://gitlab.com/gitlab-org/gitlab-foss/issues/48625 context 'single parameter update' do @@ -373,7 +387,7 @@ RSpec.describe API::Ci::Runners, feature_category: :runner_fleet do expect(shared_runner.reload.description).to eq("#{description}_updated") end - it 'runner active state', :aggregate_failures do + it 'runner active state' do active = shared_runner.active update_runner(shared_runner.id, admin, active: !active) @@ -381,7 +395,7 @@ RSpec.describe API::Ci::Runners, feature_category: :runner_fleet do expect(shared_runner.reload.active).to eq(!active) end - it 'runner paused state', :aggregate_failures do + it 'runner paused state' do active = shared_runner.active update_runner(shared_runner.id, admin, paused: active) @@ -389,14 +403,14 @@ RSpec.describe API::Ci::Runners, feature_category: :runner_fleet do expect(shared_runner.reload.active).to eq(!active) end - it 'runner tag list', :aggregate_failures do + it 'runner tag list' do update_runner(shared_runner.id, admin, tag_list: ['ruby2.1', 'pgsql', 'mysql']) expect(response).to have_gitlab_http_status(:ok) expect(shared_runner.reload.tag_list).to include('ruby2.1', 'pgsql', 'mysql') end - it 'unrelated 
runner attribute on an existing runner with too many tags', :aggregate_failures do + it 'unrelated runner attribute on an existing runner with too many tags' do # This test ensures that it is possible to update any attribute on a runner that currently fails the # validation that ensures that there aren't too many tags associated with a runner existing_invalid_shared_runner = build(:ci_runner, :instance, tag_list: (1..::Ci::Runner::TAG_LIST_MAX_LENGTH + 1).map { |i| "tag#{i}" }) @@ -409,7 +423,7 @@ RSpec.describe API::Ci::Runners, feature_category: :runner_fleet do expect(existing_invalid_shared_runner.reload.active).to eq(!active) end - it 'runner untagged flag', :aggregate_failures do + it 'runner untagged flag' do # Ensure tag list is non-empty before setting untagged to false. update_runner(shared_runner.id, admin, tag_list: ['ruby2.1', 'pgsql', 'mysql']) update_runner(shared_runner.id, admin, run_untagged: 'false') @@ -418,28 +432,28 @@ RSpec.describe API::Ci::Runners, feature_category: :runner_fleet do expect(shared_runner.reload.run_untagged?).to be(false) end - it 'runner unlocked flag', :aggregate_failures do + it 'runner unlocked flag' do update_runner(shared_runner.id, admin, locked: 'true') expect(response).to have_gitlab_http_status(:ok) expect(shared_runner.reload.locked?).to be(true) end - it 'runner access level', :aggregate_failures do + it 'runner access level' do update_runner(shared_runner.id, admin, access_level: 'ref_protected') expect(response).to have_gitlab_http_status(:ok) expect(shared_runner.reload.ref_protected?).to be_truthy end - it 'runner maximum timeout', :aggregate_failures do + it 'runner maximum timeout' do update_runner(shared_runner.id, admin, maximum_timeout: 1234) expect(response).to have_gitlab_http_status(:ok) expect(shared_runner.reload.maximum_timeout).to eq(1234) end - it 'fails with no parameters', :aggregate_failures do + it 'fails with no parameters' do put api("/runners/#{shared_runner.id}", admin) 
shared_runner.reload @@ -448,7 +462,7 @@ RSpec.describe API::Ci::Runners, feature_category: :runner_fleet do end context 'when runner is shared' do - it 'updates runner', :aggregate_failures do + it 'updates runner' do description = shared_runner.description active = shared_runner.active runner_queue_value = shared_runner.ensure_runner_queue_value @@ -476,7 +490,7 @@ RSpec.describe API::Ci::Runners, feature_category: :runner_fleet do end context 'when runner is not shared' do - it 'updates runner', :aggregate_failures do + it 'updates runner' do description = project_runner.description runner_queue_value = project_runner.ensure_runner_queue_value @@ -492,7 +506,7 @@ RSpec.describe API::Ci::Runners, feature_category: :runner_fleet do end it 'returns 404 if runner does not exist' do - update_runner(0, admin, description: 'test') + update_runner(non_existing_record_id, admin, description: 'test') expect(response).to have_gitlab_http_status(:not_found) end @@ -515,14 +529,14 @@ RSpec.describe API::Ci::Runners, feature_category: :runner_fleet do context 'when runner is not shared' do it 'does not update project runner without access to it' do - put api("/runners/#{project_runner.id}", user2), params: { description: 'test' } + put api(path, user2), params: { description: 'test' } expect(response).to have_gitlab_http_status(:forbidden) end - it 'updates project runner with access to it', :aggregate_failures do + it 'updates project runner with access to it' do description = project_runner.description - put api("/runners/#{project_runner.id}", admin, admin_mode: true), params: params + put api(path, admin, admin_mode: true), params: params project_runner.reload expect(project_runner.description).to eq('test') @@ -533,7 +547,7 @@ RSpec.describe API::Ci::Runners, feature_category: :runner_fleet do context 'unauthorized user' do it 'does not delete project runner' do - put api("/runners/#{project_runner.id}") + put api(path) expect(response).to 
have_gitlab_http_status(:unauthorized) end @@ -541,27 +555,31 @@ RSpec.describe API::Ci::Runners, feature_category: :runner_fleet do end describe 'DELETE /runners/:id' do + let(:path) { "/runners/#{shared_runner.id}" } + + it_behaves_like 'DELETE request permissions for admin mode' + context 'admin user' do context 'when runner is shared' do - it 'deletes runner', :aggregate_failures do + it 'deletes runner' do expect_next_instance_of(Ci::Runners::UnregisterRunnerService, shared_runner, admin) do |service| expect(service).to receive(:execute).once.and_call_original end expect do - delete api("/runners/#{shared_runner.id}", admin, admin_mode: true) + delete api(path, admin, admin_mode: true) expect(response).to have_gitlab_http_status(:no_content) end.to change { ::Ci::Runner.instance_type.count }.by(-1) end it_behaves_like '412 response' do - let(:request) { api("/runners/#{shared_runner.id}", admin) } + let(:request) { api(path, admin, admin_mode: true) } end end context 'when runner is not shared' do - it 'deletes used project runner', :aggregate_failures do + it 'deletes used project runner' do expect_next_instance_of(Ci::Runners::UnregisterRunnerService, project_runner, admin) do |service| expect(service).to receive(:execute).once.and_call_original end @@ -574,12 +592,12 @@ RSpec.describe API::Ci::Runners, feature_category: :runner_fleet do end end - it 'returns 404 if runner does not exist', :aggregate_failures do + it 'returns 404 if runner does not exist' do allow_next_instance_of(Ci::Runners::UnregisterRunnerService) do |service| expect(service).not_to receive(:execute) end - delete api('/runners/0', admin, admin_mode: true) + delete api("/runners/#{non_existing_record_id}", admin, admin_mode: true) expect(response).to have_gitlab_http_status(:not_found) end @@ -588,7 +606,7 @@ RSpec.describe API::Ci::Runners, feature_category: :runner_fleet do context 'authorized user' do context 'when runner is shared' do it 'does not delete runner' do - delete 
api("/runners/#{shared_runner.id}", user) + delete api(path, user) expect(response).to have_gitlab_http_status(:forbidden) end end @@ -604,7 +622,7 @@ RSpec.describe API::Ci::Runners, feature_category: :runner_fleet do expect(response).to have_gitlab_http_status(:forbidden) end - it 'deletes project runner for one owned project', :aggregate_failures do + it 'deletes project runner for one owned project' do expect do delete api("/runners/#{project_runner.id}", user) @@ -659,7 +677,7 @@ RSpec.describe API::Ci::Runners, feature_category: :runner_fleet do end context 'unauthorized user' do - it 'does not delete project runner', :aggregate_failures do + it 'does not delete project runner' do allow_next_instance_of(Ci::Runners::UnregisterRunnerService) do |service| expect(service).not_to receive(:execute) end @@ -672,32 +690,38 @@ RSpec.describe API::Ci::Runners, feature_category: :runner_fleet do end describe 'POST /runners/:id/reset_authentication_token' do + let(:path) { "/runners/#{shared_runner.id}/reset_authentication_token" } + + it_behaves_like 'POST request permissions for admin mode' do + let(:params) { {} } + end + context 'admin user' do - it 'resets shared runner authentication token', :aggregate_failures do + it 'resets shared runner authentication token' do expect do - post api("/runners/#{shared_runner.id}/reset_authentication_token", admin, admin_mode: true) + post api(path, admin, admin_mode: true) expect(response).to have_gitlab_http_status(:success) expect(json_response).to eq({ 'token' => shared_runner.reload.token, 'token_expires_at' => nil }) end.to change { shared_runner.reload.token } end - it 'returns 404 if runner does not exist', :aggregate_failures do - post api('/runners/0/reset_authentication_token', admin, admin_mode: true) + it 'returns 404 if runner does not exist' do + post api("/runners/#{non_existing_record_id}/reset_authentication_token", admin, admin_mode: true) expect(response).to have_gitlab_http_status(:not_found) end end context 
'authorized user' do - it 'does not reset project runner authentication token without access to it', :aggregate_failures do + it 'does not reset project runner authentication token without access to it' do expect do post api("/runners/#{project_runner.id}/reset_authentication_token", user2) expect(response).to have_gitlab_http_status(:forbidden) end.not_to change { project_runner.reload.token } end - it 'resets project runner authentication token for owned project', :aggregate_failures do + it 'resets project runner authentication token for owned project' do expect do post api("/runners/#{project_runner.id}/reset_authentication_token", user) @@ -706,7 +730,7 @@ RSpec.describe API::Ci::Runners, feature_category: :runner_fleet do end.to change { project_runner.reload.token } end - it 'does not reset group runner authentication token with guest access', :aggregate_failures do + it 'does not reset group runner authentication token with guest access' do expect do post api("/runners/#{group_runner_a.id}/reset_authentication_token", group_guest) @@ -714,7 +738,7 @@ RSpec.describe API::Ci::Runners, feature_category: :runner_fleet do end.not_to change { group_runner_a.reload.token } end - it 'does not reset group runner authentication token with reporter access', :aggregate_failures do + it 'does not reset group runner authentication token with reporter access' do expect do post api("/runners/#{group_runner_a.id}/reset_authentication_token", group_reporter) @@ -722,7 +746,7 @@ RSpec.describe API::Ci::Runners, feature_category: :runner_fleet do end.not_to change { group_runner_a.reload.token } end - it 'does not reset group runner authentication token with developer access', :aggregate_failures do + it 'does not reset group runner authentication token with developer access' do expect do post api("/runners/#{group_runner_a.id}/reset_authentication_token", group_developer) @@ -730,7 +754,7 @@ RSpec.describe API::Ci::Runners, feature_category: :runner_fleet do end.not_to change 
{ group_runner_a.reload.token } end - it 'does not reset group runner authentication token with maintainer access', :aggregate_failures do + it 'does not reset group runner authentication token with maintainer access' do expect do post api("/runners/#{group_runner_a.id}/reset_authentication_token", group_maintainer) @@ -738,7 +762,7 @@ RSpec.describe API::Ci::Runners, feature_category: :runner_fleet do end.not_to change { group_runner_a.reload.token } end - it 'resets group runner authentication token with owner access', :aggregate_failures do + it 'resets group runner authentication token with owner access' do expect do post api("/runners/#{group_runner_a.id}/reset_authentication_token", user) @@ -747,7 +771,7 @@ RSpec.describe API::Ci::Runners, feature_category: :runner_fleet do end.to change { group_runner_a.reload.token } end - it 'resets group runner authentication token with owner access with expiration time', :aggregate_failures, :freeze_time do + it 'resets group runner authentication token with owner access with expiration time', :freeze_time do expect(group_runner_a.reload.token_expires_at).to be_nil group.update!(runner_token_expiration_interval: 5.days) @@ -764,9 +788,9 @@ RSpec.describe API::Ci::Runners, feature_category: :runner_fleet do end context 'unauthorized user' do - it 'does not reset authentication token', :aggregate_failures do + it 'does not reset authentication token' do expect do - post api("/runners/#{shared_runner.id}/reset_authentication_token") + post api(path) expect(response).to have_gitlab_http_status(:unauthorized) end.not_to change { shared_runner.reload.token } @@ -780,11 +804,14 @@ RSpec.describe API::Ci::Runners, feature_category: :runner_fleet do let_it_be(:job_3) { create(:ci_build, :failed, runner: shared_runner, project: project) } let_it_be(:job_4) { create(:ci_build, :running, runner: project_runner, project: project) } let_it_be(:job_5) { create(:ci_build, :failed, runner: project_runner, project: project) } + 
let(:path) { "/runners/#{project_runner.id}/jobs" } + + it_behaves_like 'GET request permissions for admin mode' context 'admin user' do context 'when runner exists' do context 'when runner is shared' do - it 'return jobs', :aggregate_failures do + it 'return jobs' do get api("/runners/#{shared_runner.id}/jobs", admin, admin_mode: true) expect(response).to have_gitlab_http_status(:ok) @@ -796,8 +823,8 @@ RSpec.describe API::Ci::Runners, feature_category: :runner_fleet do end context 'when runner is a project runner' do - it 'return jobs', :aggregate_failures do - get api("/runners/#{project_runner.id}/jobs", admin, admin_mode: true) + it 'return jobs' do + get api(path, admin, admin_mode: true) expect(response).to have_gitlab_http_status(:ok) expect(response).to include_pagination_headers @@ -807,7 +834,7 @@ RSpec.describe API::Ci::Runners, feature_category: :runner_fleet do end context 'when user does not have authorization to see all jobs' do - it 'shows only jobs it has permission to see', :aggregate_failures do + it 'shows only jobs it has permission to see' do create(:ci_build, :running, runner: two_projects_runner, project: project) create(:ci_build, :running, runner: two_projects_runner, project: project2) @@ -825,7 +852,7 @@ RSpec.describe API::Ci::Runners, feature_category: :runner_fleet do end context 'when valid status is provided' do - it 'return filtered jobs', :aggregate_failures do + it 'return filtered jobs' do get api("/runners/#{project_runner.id}/jobs?status=failed", admin, admin_mode: true) expect(response).to have_gitlab_http_status(:ok) @@ -839,7 +866,7 @@ RSpec.describe API::Ci::Runners, feature_category: :runner_fleet do context 'when valid order_by is provided' do context 'when sort order is not specified' do - it 'return jobs in descending order', :aggregate_failures do + it 'return jobs in descending order' do get api("/runners/#{project_runner.id}/jobs?order_by=id", admin, admin_mode: true) expect(response).to 
have_gitlab_http_status(:ok) @@ -852,7 +879,7 @@ RSpec.describe API::Ci::Runners, feature_category: :runner_fleet do end context 'when sort order is specified as asc' do - it 'return jobs sorted in ascending order', :aggregate_failures do + it 'return jobs sorted in ascending order' do get api("/runners/#{project_runner.id}/jobs?order_by=id&sort=asc", admin, admin_mode: true) expect(response).to have_gitlab_http_status(:ok) @@ -949,8 +976,8 @@ RSpec.describe API::Ci::Runners, feature_category: :runner_fleet do end context 'when runner is a project runner' do - it 'return jobs', :aggregate_failures do - get api("/runners/#{project_runner.id}/jobs", user) + it 'return jobs' do + get api(path, user) expect(response).to have_gitlab_http_status(:ok) expect(response).to include_pagination_headers @@ -961,7 +988,7 @@ RSpec.describe API::Ci::Runners, feature_category: :runner_fleet do end context 'when valid status is provided' do - it 'return filtered jobs', :aggregate_failures do + it 'return filtered jobs' do get api("/runners/#{project_runner.id}/jobs?status=failed", user) expect(response).to have_gitlab_http_status(:ok) @@ -993,7 +1020,7 @@ RSpec.describe API::Ci::Runners, feature_category: :runner_fleet do context 'other authorized user' do it 'does not return jobs' do - get api("/runners/#{project_runner.id}/jobs", user2) + get api(path, user2) expect(response).to have_gitlab_http_status(:forbidden) end @@ -1001,7 +1028,7 @@ RSpec.describe API::Ci::Runners, feature_category: :runner_fleet do context 'unauthorized user' do it 'does not return jobs' do - get api("/runners/#{project_runner.id}/jobs") + get api(path) expect(response).to have_gitlab_http_status(:unauthorized) end @@ -1028,7 +1055,7 @@ RSpec.describe API::Ci::Runners, feature_category: :runner_fleet do describe 'GET /projects/:id/runners' do context 'authorized user with maintainer privileges' do - it 'returns response status and headers', :aggregate_failures do + it 'returns response status and headers' 
do get api('/runners/all', admin, admin_mode: true) expect(response).to have_gitlab_http_status(:ok) @@ -1045,7 +1072,7 @@ RSpec.describe API::Ci::Runners, feature_category: :runner_fleet do ] end - it 'filters runners by scope', :aggregate_failures do + it 'filters runners by scope' do get api("/projects/#{project.id}/runners?scope=specific", user) expect(response).to have_gitlab_http_status(:ok) @@ -1103,7 +1130,7 @@ RSpec.describe API::Ci::Runners, feature_category: :runner_fleet do expect(response).to have_gitlab_http_status(:bad_request) end - it 'filters runners by tag_list', :aggregate_failures do + it 'filters runners by tag_list' do create(:ci_runner, :project, description: 'Runner tagged with tag1 and tag2', projects: [project], tag_list: %w[tag1 tag2]) create(:ci_runner, :project, description: 'Runner tagged with tag2', projects: [project], tag_list: ['tag2']) @@ -1184,7 +1211,7 @@ RSpec.describe API::Ci::Runners, feature_category: :runner_fleet do end end - it 'filters runners by tag_list', :aggregate_failures do + it 'filters runners by tag_list' do create(:ci_runner, :group, description: 'Runner tagged with tag1 and tag2', groups: [group], tag_list: %w[tag1 tag2]) create(:ci_runner, :group, description: 'Runner tagged with tag2', groups: [group], tag_list: %w[tag1]) @@ -1201,41 +1228,49 @@ RSpec.describe API::Ci::Runners, feature_category: :runner_fleet do end describe 'POST /projects/:id/runners' do + let(:path) { "/projects/#{project.id}/runners" } + + it_behaves_like 'POST request permissions for admin mode' do + let!(:new_project_runner) { create(:ci_runner, :project) } + let(:params) { { runner_id: new_project_runner.id } } + let(:failed_status_code) { :not_found } + end + context 'authorized user' do let_it_be(:project_runner2) { create(:ci_runner, :project, projects: [project2]) } - it 'enables project runner', :aggregate_failures do + it 'enables project runner' do expect do - post api("/projects/#{project.id}/runners", user), params: { 
runner_id: project_runner2.id } + post api(path, user), params: { runner_id: project_runner2.id } end.to change { project.runners.count }.by(+1) expect(response).to have_gitlab_http_status(:created) end - it 'avoids changes when enabling already enabled runner', :aggregate_failures do + it 'avoids changes when enabling already enabled runner' do expect do - post api("/projects/#{project.id}/runners", user), params: { runner_id: project_runner.id } + post api(path, user), params: { runner_id: project_runner.id } end.to change { project.runners.count }.by(0) expect(response).to have_gitlab_http_status(:bad_request) end - it 'does not enable locked runner', :aggregate_failures do + it 'does not enable locked runner' do project_runner2.update!(locked: true) expect do - post api("/projects/#{project.id}/runners", user), params: { runner_id: project_runner2.id } + post api(path, user), params: { runner_id: project_runner2.id } end.to change { project.runners.count }.by(0) expect(response).to have_gitlab_http_status(:forbidden) end it 'does not enable shared runner' do - post api("/projects/#{project.id}/runners", user), params: { runner_id: shared_runner.id } + post api(path, user), params: { runner_id: shared_runner.id } expect(response).to have_gitlab_http_status(:forbidden) end it 'does not enable group runner' do - post api("/projects/#{project.id}/runners", user), params: { runner_id: group_runner_a.id } + post api(path, user), params: { runner_id: group_runner_a.id } expect(response).to have_gitlab_http_status(:forbidden) end @@ -1244,9 +1279,9 @@ RSpec.describe API::Ci::Runners, feature_category: :runner_fleet do context 'when project runner is used' do let!(:new_project_runner) { create(:ci_runner, :project) } - it 'enables any project runner', :aggregate_failures do + it 'enables any project runner' do expect do - post api("/projects/#{project.id}/runners", admin, admin_mode: true), params: { runner_id: new_project_runner.id } + post api(path, admin, admin_mode: 
true), params: { runner_id: new_project_runner.id } end.to change { project.runners.count }.by(+1) expect(response).to have_gitlab_http_status(:created) end @@ -1256,9 +1291,9 @@ RSpec.describe API::Ci::Runners, feature_category: :runner_fleet do create(:plan_limits, :default_plan, ci_registered_project_runners: 1) end - it 'does not enable project runner', :aggregate_failures do + it 'does not enable project runner' do expect do - post api("/projects/#{project.id}/runners", admin, admin_mode: true), params: { runner_id: new_project_runner.id } + post api(path, admin, admin_mode: true), params: { runner_id: new_project_runner.id } end.not_to change { project.runners.count } expect(response).to have_gitlab_http_status(:bad_request) end @@ -1267,7 +1302,7 @@ RSpec.describe API::Ci::Runners, feature_category: :runner_fleet do end it 'raises an error when no runner_id param is provided' do - post api("/projects/#{project.id}/runners", admin, admin_mode: true) + post api(path, admin, admin_mode: true) expect(response).to have_gitlab_http_status(:bad_request) end @@ -1277,7 +1312,7 @@ RSpec.describe API::Ci::Runners, feature_category: :runner_fleet do let!(:new_project_runner) { create(:ci_runner, :project) } it 'does not enable runner without access to' do - post api("/projects/#{project.id}/runners", user), params: { runner_id: new_project_runner.id } + post api(path, user), params: { runner_id: new_project_runner.id } expect(response).to have_gitlab_http_status(:forbidden) end @@ -1285,7 +1320,7 @@ RSpec.describe API::Ci::Runners, feature_category: :runner_fleet do context 'authorized user without permissions' do it 'does not enable runner' do - post api("/projects/#{project.id}/runners", user2) + post api(path, user2) expect(response).to have_gitlab_http_status(:forbidden) end @@ -1293,7 +1328,7 @@ RSpec.describe API::Ci::Runners, feature_category: :runner_fleet do context 'unauthorized user' do it 'does not enable runner' do - post 
api("/projects/#{project.id}/runners") + post api(path) expect(response).to have_gitlab_http_status(:unauthorized) end @@ -1317,7 +1352,7 @@ RSpec.describe API::Ci::Runners, feature_category: :runner_fleet do end context 'when runner have one associated projects' do - it "does not disable project's runner", :aggregate_failures do + it "does not disable project's runner" do expect do delete api("/projects/#{project.id}/runners/#{project_runner.id}", user) end.to change { project.runners.count }.by(0) diff --git a/spec/requests/api/ci/variables_spec.rb b/spec/requests/api/ci/variables_spec.rb index 5ea9104cb15..e937c4c2b8f 100644 --- a/spec/requests/api/ci/variables_spec.rb +++ b/spec/requests/api/ci/variables_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe API::Ci::Variables, feature_category: :pipeline_composition do +RSpec.describe API::Ci::Variables, feature_category: :secrets_management do let(:user) { create(:user) } let(:user2) { create(:user) } let!(:project) { create(:project, creator_id: user.id) } diff --git a/spec/requests/api/clusters/agent_tokens_spec.rb b/spec/requests/api/clusters/agent_tokens_spec.rb index b2d996e8002..a1dd6e84492 100644 --- a/spec/requests/api/clusters/agent_tokens_spec.rb +++ b/spec/requests/api/clusters/agent_tokens_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe API::Clusters::AgentTokens, feature_category: :kubernetes_management do +RSpec.describe API::Clusters::AgentTokens, feature_category: :deployment_management do let_it_be(:agent) { create(:cluster_agent) } let_it_be(:agent_token_one) { create(:cluster_agent_token, agent: agent) } let_it_be(:revoked_agent_token) { create(:cluster_agent_token, :revoked, agent: agent) } diff --git a/spec/requests/api/clusters/agents_spec.rb b/spec/requests/api/clusters/agents_spec.rb index a09713bd6e7..12056567e9d 100644 --- a/spec/requests/api/clusters/agents_spec.rb +++ b/spec/requests/api/clusters/agents_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' 
-RSpec.describe API::Clusters::Agents, feature_category: :kubernetes_management do +RSpec.describe API::Clusters::Agents, feature_category: :deployment_management do let_it_be(:agent) { create(:cluster_agent) } let(:user) { agent.created_by_user } diff --git a/spec/requests/api/commit_statuses_spec.rb b/spec/requests/api/commit_statuses_spec.rb index 025d065df7b..7540e19e278 100644 --- a/spec/requests/api/commit_statuses_spec.rb +++ b/spec/requests/api/commit_statuses_spec.rb @@ -533,8 +533,8 @@ RSpec.describe API::CommitStatuses, feature_category: :continuous_integration do end end - context 'with partitions' do - let(:current_partition_id) { 123 } + context 'with partitions', :ci_partitionable do + let(:current_partition_id) { ci_testing_partition_id } before do allow(Ci::Pipeline) diff --git a/spec/requests/api/debian_group_packages_spec.rb b/spec/requests/api/debian_group_packages_spec.rb index 25b99862100..18e5bfd711e 100644 --- a/spec/requests/api/debian_group_packages_spec.rb +++ b/spec/requests/api/debian_group_packages_spec.rb @@ -6,28 +6,48 @@ RSpec.describe API::DebianGroupPackages, feature_category: :package_registry do include WorkhorseHelpers include_context 'Debian repository shared context', :group, false do + shared_examples 'a Debian package tracking event' do |action| + include_context 'Debian repository access', :public, :developer, :basic do + let(:snowplow_gitlab_standard_context) do + { project: nil, namespace: container, user: user, property: 'i_package_debian_user' } + end + + it_behaves_like 'a package tracking event', described_class.name, action + end + end + + shared_examples 'not a Debian package tracking event' do + include_context 'Debian repository access', :public, :developer, :basic do + it_behaves_like 'not a package tracking event' + end + end + context 'with invalid parameter' do let(:url) { "/groups/1/-/packages/debian/dists/with+space/InRelease" } it_behaves_like 'Debian packages GET request', :bad_request, /^distribution is 
invalid$/ + it_behaves_like 'not a Debian package tracking event' end describe 'GET groups/:id/-/packages/debian/dists/*distribution/Release.gpg' do let(:url) { "/groups/#{container.id}/-/packages/debian/dists/#{distribution.codename}/Release.gpg" } it_behaves_like 'Debian packages read endpoint', 'GET', :success, /^-----BEGIN PGP SIGNATURE-----/ + it_behaves_like 'not a Debian package tracking event' end describe 'GET groups/:id/-/packages/debian/dists/*distribution/Release' do let(:url) { "/groups/#{container.id}/-/packages/debian/dists/#{distribution.codename}/Release" } it_behaves_like 'Debian packages read endpoint', 'GET', :success, /^Codename: fixture-distribution\n$/ + it_behaves_like 'a Debian package tracking event', 'list_package' end describe 'GET groups/:id/-/packages/debian/dists/*distribution/InRelease' do let(:url) { "/groups/#{container.id}/-/packages/debian/dists/#{distribution.codename}/InRelease" } it_behaves_like 'Debian packages read endpoint', 'GET', :success, /^-----BEGIN PGP SIGNED MESSAGE-----/ + it_behaves_like 'a Debian package tracking event', 'list_package' end describe 'GET groups/:id/-/packages/debian/dists/*distribution/:component/binary-:architecture/Packages' do @@ -36,12 +56,14 @@ RSpec.describe API::DebianGroupPackages, feature_category: :package_registry do let(:url) { "/groups/#{container.id}/-/packages/debian/dists/#{distribution.codename}/#{target_component_name}/binary-#{architecture.name}/Packages" } it_behaves_like 'Debian packages index endpoint', /Description: This is an incomplete Packages file/ + it_behaves_like 'a Debian package tracking event', 'list_package' end describe 'GET groups/:id/-/packages/debian/dists/*distribution/:component/binary-:architecture/Packages.gz' do let(:url) { "/groups/#{container.id}/-/packages/debian/dists/#{distribution.codename}/#{component.name}/binary-#{architecture.name}/Packages.gz" } it_behaves_like 'Debian packages read endpoint', 'GET', :not_found, /Format gz is not supported/ + 
it_behaves_like 'not a Debian package tracking event' end describe 'GET groups/:id/-/packages/debian/dists/*distribution/:component/binary-:architecture/by-hash/SHA256/:file_sha256' do @@ -51,6 +73,7 @@ RSpec.describe API::DebianGroupPackages, feature_category: :package_registry do let(:url) { "/groups/#{container.id}/-/packages/debian/dists/#{distribution.codename}/#{target_component_name}/binary-#{architecture.name}/by-hash/SHA256/#{target_sha256}" } it_behaves_like 'Debian packages index sha256 endpoint', /^Other SHA256$/ + it_behaves_like 'a Debian package tracking event', 'list_package' end describe 'GET groups/:id/-/packages/debian/dists/*distribution/:component/source/Sources' do @@ -59,6 +82,7 @@ RSpec.describe API::DebianGroupPackages, feature_category: :package_registry do let(:url) { "/groups/#{container.id}/-/packages/debian/dists/#{distribution.codename}/#{target_component_name}/source/Sources" } it_behaves_like 'Debian packages index endpoint', /^Description: This is an incomplete Sources file$/ + it_behaves_like 'a Debian package tracking event', 'list_package' end describe 'GET groups/:id/-/packages/debian/dists/*distribution/:component/source/by-hash/SHA256/:file_sha256' do @@ -68,6 +92,7 @@ RSpec.describe API::DebianGroupPackages, feature_category: :package_registry do let(:url) { "/groups/#{container.id}/-/packages/debian/dists/#{distribution.codename}/#{target_component_name}/source/by-hash/SHA256/#{target_sha256}" } it_behaves_like 'Debian packages index sha256 endpoint', /^Other SHA256$/ + it_behaves_like 'a Debian package tracking event', 'list_package' end describe 'GET groups/:id/-/packages/debian/dists/*distribution/:component/debian-installer/binary-:architecture/Packages' do @@ -76,12 +101,14 @@ RSpec.describe API::DebianGroupPackages, feature_category: :package_registry do let(:url) { "/groups/#{container.id}/-/packages/debian/dists/#{distribution.codename}/#{target_component_name}/debian-installer/binary-#{architecture.name}/Packages" 
} it_behaves_like 'Debian packages index endpoint', /Description: This is an incomplete D-I Packages file/ + it_behaves_like 'a Debian package tracking event', 'list_package' end describe 'GET groups/:id/-/packages/debian/dists/*distribution/:component/debian-installer/binary-:architecture/Packages.gz' do let(:url) { "/groups/#{container.id}/-/packages/debian/dists/#{distribution.codename}/#{component.name}/debian-installer/binary-#{architecture.name}/Packages.gz" } it_behaves_like 'Debian packages read endpoint', 'GET', :not_found, /Format gz is not supported/ + it_behaves_like 'not a Debian package tracking event' end describe 'GET groups/:id/-/packages/debian/dists/*distribution/:component/debian-installer/binary-:architecture/by-hash/SHA256/:file_sha256' do @@ -91,6 +118,7 @@ RSpec.describe API::DebianGroupPackages, feature_category: :package_registry do let(:url) { "/groups/#{container.id}/-/packages/debian/dists/#{distribution.codename}/#{target_component_name}/debian-installer/binary-#{architecture.name}/by-hash/SHA256/#{target_sha256}" } it_behaves_like 'Debian packages index sha256 endpoint', /^Other SHA256$/ + it_behaves_like 'a Debian package tracking event', 'list_package' end describe 'GET groups/:id/-/packages/debian/pool/:codename/:project_id/:letter/:package_name/:package_version/:file_name' do @@ -111,6 +139,7 @@ RSpec.describe API::DebianGroupPackages, feature_category: :package_registry do with_them do it_behaves_like 'Debian packages read endpoint', 'GET', :success, params[:success_body] + it_behaves_like 'a Debian package tracking event', 'pull_package' context 'for bumping last downloaded at' do include_context 'Debian repository access', :public, :developer, :basic do diff --git a/spec/requests/api/debian_project_packages_spec.rb b/spec/requests/api/debian_project_packages_spec.rb index e9ad39a08ab..17a413ed059 100644 --- a/spec/requests/api/debian_project_packages_spec.rb +++ b/spec/requests/api/debian_project_packages_spec.rb @@ -6,6 +6,22 
@@ RSpec.describe API::DebianProjectPackages, feature_category: :package_registry d include WorkhorseHelpers include_context 'Debian repository shared context', :project, false do + shared_examples 'a Debian package tracking event' do |action| + include_context 'Debian repository access', :public, :developer, :basic do + let(:snowplow_gitlab_standard_context) do + { project: container, namespace: container.namespace, user: user, property: 'i_package_debian_user' } + end + + it_behaves_like 'a package tracking event', described_class.name, action + end + end + + shared_examples 'not a Debian package tracking event' do + include_context 'Debian repository access', :public, :developer, :basic do + it_behaves_like 'not a package tracking event' + end + end + shared_examples 'accept GET request on private project with access to package registry for everyone' do include_context 'Debian repository access', :private, :anonymous, :basic do before do @@ -20,12 +36,14 @@ RSpec.describe API::DebianProjectPackages, feature_category: :package_registry d let(:url) { "/projects/1/packages/debian/dists/with+space/InRelease" } it_behaves_like 'Debian packages GET request', :bad_request, /^distribution is invalid$/ + it_behaves_like 'not a Debian package tracking event' end describe 'GET projects/:id/packages/debian/dists/*distribution/Release.gpg' do let(:url) { "/projects/#{container.id}/packages/debian/dists/#{distribution.codename}/Release.gpg" } it_behaves_like 'Debian packages read endpoint', 'GET', :success, /^-----BEGIN PGP SIGNATURE-----/ + it_behaves_like 'not a Debian package tracking event' it_behaves_like 'accept GET request on private project with access to package registry for everyone' end @@ -33,6 +51,7 @@ RSpec.describe API::DebianProjectPackages, feature_category: :package_registry d let(:url) { "/projects/#{container.id}/packages/debian/dists/#{distribution.codename}/Release" } it_behaves_like 'Debian packages read endpoint', 'GET', :success, /^Codename: 
fixture-distribution\n$/ + it_behaves_like 'a Debian package tracking event', 'list_package' it_behaves_like 'accept GET request on private project with access to package registry for everyone' end @@ -40,6 +59,7 @@ RSpec.describe API::DebianProjectPackages, feature_category: :package_registry d let(:url) { "/projects/#{container.id}/packages/debian/dists/#{distribution.codename}/InRelease" } it_behaves_like 'Debian packages read endpoint', 'GET', :success, /^-----BEGIN PGP SIGNED MESSAGE-----/ + it_behaves_like 'a Debian package tracking event', 'list_package' it_behaves_like 'accept GET request on private project with access to package registry for everyone' end @@ -49,6 +69,7 @@ RSpec.describe API::DebianProjectPackages, feature_category: :package_registry d let(:url) { "/projects/#{container.id}/packages/debian/dists/#{distribution.codename}/#{target_component_name}/binary-#{architecture.name}/Packages" } it_behaves_like 'Debian packages index endpoint', /Description: This is an incomplete Packages file/ + it_behaves_like 'a Debian package tracking event', 'list_package' it_behaves_like 'accept GET request on private project with access to package registry for everyone' end @@ -56,6 +77,7 @@ RSpec.describe API::DebianProjectPackages, feature_category: :package_registry d let(:url) { "/projects/#{container.id}/packages/debian/dists/#{distribution.codename}/#{component.name}/binary-#{architecture.name}/Packages.gz" } it_behaves_like 'Debian packages read endpoint', 'GET', :not_found, /Format gz is not supported/ + it_behaves_like 'not a Debian package tracking event' end describe 'GET projects/:id/packages/debian/dists/*distribution/:component/binary-:architecture/by-hash/SHA256/:file_sha256' do @@ -65,6 +87,7 @@ RSpec.describe API::DebianProjectPackages, feature_category: :package_registry d let(:url) { "/projects/#{container.id}/packages/debian/dists/#{distribution.codename}/#{target_component_name}/binary-#{architecture.name}/by-hash/SHA256/#{target_sha256}" } 
it_behaves_like 'Debian packages index sha256 endpoint', /^Other SHA256$/ + it_behaves_like 'a Debian package tracking event', 'list_package' it_behaves_like 'accept GET request on private project with access to package registry for everyone' end @@ -74,6 +97,7 @@ RSpec.describe API::DebianProjectPackages, feature_category: :package_registry d let(:url) { "/projects/#{container.id}/packages/debian/dists/#{distribution.codename}/#{target_component_name}/source/Sources" } it_behaves_like 'Debian packages index endpoint', /^Description: This is an incomplete Sources file$/ + it_behaves_like 'a Debian package tracking event', 'list_package' it_behaves_like 'accept GET request on private project with access to package registry for everyone' end @@ -84,6 +108,7 @@ RSpec.describe API::DebianProjectPackages, feature_category: :package_registry d let(:url) { "/projects/#{container.id}/packages/debian/dists/#{distribution.codename}/#{target_component_name}/source/by-hash/SHA256/#{target_sha256}" } it_behaves_like 'Debian packages index sha256 endpoint', /^Other SHA256$/ + it_behaves_like 'a Debian package tracking event', 'list_package' it_behaves_like 'accept GET request on private project with access to package registry for everyone' end @@ -93,6 +118,7 @@ RSpec.describe API::DebianProjectPackages, feature_category: :package_registry d let(:url) { "/projects/#{container.id}/packages/debian/dists/#{distribution.codename}/#{target_component_name}/debian-installer/binary-#{architecture.name}/Packages" } it_behaves_like 'Debian packages index endpoint', /Description: This is an incomplete D-I Packages file/ + it_behaves_like 'a Debian package tracking event', 'list_package' it_behaves_like 'accept GET request on private project with access to package registry for everyone' end @@ -100,6 +126,7 @@ RSpec.describe API::DebianProjectPackages, feature_category: :package_registry d let(:url) { 
"/projects/#{container.id}/packages/debian/dists/#{distribution.codename}/#{component.name}/debian-installer/binary-#{architecture.name}/Packages.gz" } it_behaves_like 'Debian packages read endpoint', 'GET', :not_found, /Format gz is not supported/ + it_behaves_like 'not a Debian package tracking event' end describe 'GET projects/:id/packages/debian/dists/*distribution/:component/debian-installer/binary-:architecture/by-hash/SHA256/:file_sha256' do @@ -109,6 +136,7 @@ RSpec.describe API::DebianProjectPackages, feature_category: :package_registry d let(:url) { "/projects/#{container.id}/packages/debian/dists/#{distribution.codename}/#{target_component_name}/debian-installer/binary-#{architecture.name}/by-hash/SHA256/#{target_sha256}" } it_behaves_like 'Debian packages index sha256 endpoint', /^Other SHA256$/ + it_behaves_like 'a Debian package tracking event', 'list_package' it_behaves_like 'accept GET request on private project with access to package registry for everyone' end @@ -130,6 +158,7 @@ RSpec.describe API::DebianProjectPackages, feature_category: :package_registry d with_them do it_behaves_like 'Debian packages read endpoint', 'GET', :success, params[:success_body] + it_behaves_like 'a Debian package tracking event', 'pull_package' context 'for bumping last downloaded at' do include_context 'Debian repository access', :public, :developer, :basic do @@ -146,17 +175,18 @@ RSpec.describe API::DebianProjectPackages, feature_category: :package_registry d describe 'PUT projects/:id/packages/debian/:file_name' do let(:method) { :put } let(:url) { "/projects/#{container.id}/packages/debian/#{file_name}" } - let(:snowplow_gitlab_standard_context) { { project: container, user: user, namespace: container.namespace } } context 'with a deb' do let(:file_name) { 'libsample0_1.2.3~alpha2_amd64.deb' } it_behaves_like 'Debian packages write endpoint', 'upload', :created, nil + it_behaves_like 'a Debian package tracking event', 'push_package' context 'with codename and 
component' do let(:extra_params) { { distribution: distribution.codename, component: 'main' } } it_behaves_like 'Debian packages write endpoint', 'upload', :created, nil + it_behaves_like 'a Debian package tracking event', 'push_package' end context 'with codename and without component' do @@ -165,6 +195,8 @@ RSpec.describe API::DebianProjectPackages, feature_category: :package_registry d include_context 'Debian repository access', :public, :developer, :basic do it_behaves_like 'Debian packages GET request', :bad_request, /component is missing/ end + + it_behaves_like 'not a Debian package tracking event' end end @@ -173,13 +205,19 @@ RSpec.describe API::DebianProjectPackages, feature_category: :package_registry d include_context 'Debian repository access', :public, :developer, :basic do it_behaves_like "Debian packages upload request", :created, nil + end - context 'with codename and component' do - let(:extra_params) { { distribution: distribution.codename, component: 'main' } } + it_behaves_like 'a Debian package tracking event', 'push_package' + context 'with codename and component' do + let(:extra_params) { { distribution: distribution.codename, component: 'main' } } + + include_context 'Debian repository access', :public, :developer, :basic do it_behaves_like "Debian packages upload request", :bad_request, /^file_name Only debs, udebs and ddebs can be directly added to a distribution$/ end + + it_behaves_like 'not a Debian package tracking event' end end @@ -187,6 +225,7 @@ RSpec.describe API::DebianProjectPackages, feature_category: :package_registry d let(:file_name) { 'sample_1.2.3~alpha2_amd64.changes' } it_behaves_like 'Debian packages write endpoint', 'upload', :created, nil + it_behaves_like 'a Debian package tracking event', 'push_package' end end @@ -196,6 +235,7 @@ RSpec.describe API::DebianProjectPackages, feature_category: :package_registry d let(:url) { "/projects/#{container.id}/packages/debian/#{file_name}/authorize" } it_behaves_like 'Debian 
packages write endpoint', 'upload authorize', :created, nil + it_behaves_like 'not a Debian package tracking event' end end end diff --git a/spec/requests/api/deploy_keys_spec.rb b/spec/requests/api/deploy_keys_spec.rb index 15880d920c5..18a9211df3e 100644 --- a/spec/requests/api/deploy_keys_spec.rb +++ b/spec/requests/api/deploy_keys_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe API::DeployKeys, feature_category: :continuous_delivery do +RSpec.describe API::DeployKeys, :aggregate_failures, feature_category: :continuous_delivery do let_it_be(:user) { create(:user) } let_it_be(:maintainer) { create(:user) } let_it_be(:admin) { create(:admin) } @@ -11,33 +11,29 @@ RSpec.describe API::DeployKeys, feature_category: :continuous_delivery do let_it_be(:project3) { create(:project, creator_id: user.id) } let_it_be(:deploy_key) { create(:deploy_key, public: true) } let_it_be(:deploy_key_private) { create(:deploy_key, public: false) } + let_it_be(:path) { '/deploy_keys' } + let_it_be(:project_path) { "/projects/#{project.id}#{path}" } let!(:deploy_keys_project) do create(:deploy_keys_project, project: project, deploy_key: deploy_key) end describe 'GET /deploy_keys' do + it_behaves_like 'GET request permissions for admin mode' + context 'when unauthenticated' do it 'returns authentication error' do - get api('/deploy_keys') + get api(path) expect(response).to have_gitlab_http_status(:unauthorized) end end - context 'when authenticated as non-admin user' do - it 'returns a 403 error' do - get api('/deploy_keys', user) - - expect(response).to have_gitlab_http_status(:forbidden) - end - end - context 'when authenticated as admin' do - let_it_be(:pat) { create(:personal_access_token, user: admin) } + let_it_be(:pat) { create(:personal_access_token, :admin_mode, user: admin) } def make_api_request(params = {}) - get api('/deploy_keys', personal_access_token: pat), params: params + get api(path, personal_access_token: pat), params: params end it 'returns all deploy 
keys' do @@ -91,14 +87,18 @@ RSpec.describe API::DeployKeys, feature_category: :continuous_delivery do describe 'GET /projects/:id/deploy_keys' do let(:deploy_key) { create(:deploy_key, public: true, user: admin) } + it_behaves_like 'GET request permissions for admin mode' do + let(:path) { project_path } + let(:failed_status_code) { :not_found } + end + def perform_request - get api("/projects/#{project.id}/deploy_keys", admin) + get api(project_path, admin, admin_mode: true) end it 'returns array of ssh keys' do perform_request - expect(response).to have_gitlab_http_status(:ok) expect(response).to include_pagination_headers expect(json_response).to be_an Array expect(json_response.first['title']).to eq(deploy_key.title) @@ -117,31 +117,59 @@ RSpec.describe API::DeployKeys, feature_category: :continuous_delivery do end describe 'GET /projects/:id/deploy_keys/:key_id' do + let_it_be(:path) { "#{project_path}/#{deploy_key.id}" } + let_it_be(:unfindable_path) { "#{project_path}/404" } + + it_behaves_like 'GET request permissions for admin mode' do + let(:failed_status_code) { :not_found } + end + it 'returns a single key' do - get api("/projects/#{project.id}/deploy_keys/#{deploy_key.id}", admin) + get api(path, admin, admin_mode: true) - expect(response).to have_gitlab_http_status(:ok) expect(json_response['title']).to eq(deploy_key.title) expect(json_response).not_to have_key(:projects_with_write_access) end it 'returns 404 Not Found with invalid ID' do - get api("/projects/#{project.id}/deploy_keys/404", admin) + get api(unfindable_path, admin, admin_mode: true) expect(response).to have_gitlab_http_status(:not_found) end + + context 'when deploy key has expiry date' do + let(:deploy_key) { create(:deploy_key, :expired, public: true) } + let(:deploy_keys_project) { create(:deploy_keys_project, project: project, deploy_key: deploy_key) } + + it 'returns expiry date' do + get api("#{project_path}/#{deploy_key.id}", admin, admin_mode: true) + + expect(response).to 
have_gitlab_http_status(:ok) + expect(Time.parse(json_response['expires_at'])).to be_like_time(deploy_key.expires_at) + end + end end describe 'POST /projects/:id/deploy_keys' do + around do |example| + freeze_time { example.run } + end + + it_behaves_like 'POST request permissions for admin mode', :not_found do + let(:params) { attributes_for :another_key } + let(:path) { project_path } + let(:failed_status_code) { :not_found } + end + it 'does not create an invalid ssh key' do - post api("/projects/#{project.id}/deploy_keys", admin), params: { title: 'invalid key' } + post api(project_path, admin, admin_mode: true), params: { title: 'invalid key' } expect(response).to have_gitlab_http_status(:bad_request) expect(json_response['error']).to eq('key is missing') end it 'does not create a key without title' do - post api("/projects/#{project.id}/deploy_keys", admin), params: { key: 'some key' } + post api(project_path, admin, admin_mode: true), params: { key: 'some key' } expect(response).to have_gitlab_http_status(:bad_request) expect(json_response['error']).to eq('title is missing') @@ -151,7 +179,7 @@ RSpec.describe API::DeployKeys, feature_category: :continuous_delivery do key_attrs = attributes_for :another_key expect do - post api("/projects/#{project.id}/deploy_keys", admin), params: key_attrs + post api(project_path, admin, admin_mode: true), params: key_attrs end.to change { project.deploy_keys.count }.by(1) new_key = project.deploy_keys.last @@ -161,7 +189,7 @@ RSpec.describe API::DeployKeys, feature_category: :continuous_delivery do it 'returns an existing ssh key when attempting to add a duplicate' do expect do - post api("/projects/#{project.id}/deploy_keys", admin), params: { key: deploy_key.key, title: deploy_key.title } + post api(project_path, admin, admin_mode: true), params: { key: deploy_key.key, title: deploy_key.title } end.not_to change { project.deploy_keys.count } expect(response).to have_gitlab_http_status(:created) @@ -169,7 +197,7 @@ 
RSpec.describe API::DeployKeys, feature_category: :continuous_delivery do it 'joins an existing ssh key to a new project' do expect do - post api("/projects/#{project2.id}/deploy_keys", admin), params: { key: deploy_key.key, title: deploy_key.title } + post api("/projects/#{project2.id}/deploy_keys", admin, admin_mode: true), params: { key: deploy_key.key, title: deploy_key.title } end.to change { project2.deploy_keys.count }.by(1) expect(response).to have_gitlab_http_status(:created) @@ -178,18 +206,34 @@ RSpec.describe API::DeployKeys, feature_category: :continuous_delivery do it 'accepts can_push parameter' do key_attrs = attributes_for(:another_key).merge(can_push: true) - post api("/projects/#{project.id}/deploy_keys", admin), params: key_attrs + post api(project_path, admin, admin_mode: true), params: key_attrs expect(response).to have_gitlab_http_status(:created) expect(json_response['can_push']).to eq(true) end + + it 'accepts expires_at parameter' do + key_attrs = attributes_for(:another_key).merge(expires_at: 2.days.since.iso8601) + + post api(project_path, admin, admin_mode: true), params: key_attrs + + expect(response).to have_gitlab_http_status(:created) + expect(Time.parse(json_response['expires_at'])).to be_like_time(2.days.since) + end end describe 'PUT /projects/:id/deploy_keys/:key_id' do + let(:path) { "#{project_path}/#{deploy_key.id}" } let(:extra_params) { {} } + let(:admin_mode) { false } + + it_behaves_like 'PUT request permissions for admin mode' do + let(:params) { { title: 'new title', can_push: true } } + let(:failed_status_code) { :not_found } + end subject do - put api("/projects/#{project.id}/deploy_keys/#{deploy_key.id}", api_user), params: extra_params + put api(path, api_user, admin_mode: admin_mode), params: extra_params end context 'with non-admin' do @@ -204,6 +248,7 @@ RSpec.describe API::DeployKeys, feature_category: :continuous_delivery do context 'with admin' do let(:api_user) { admin } + let(:admin_mode) { true } context 
'public deploy key attached to project' do let(:extra_params) { { title: 'new title', can_push: true } } @@ -258,9 +303,13 @@ RSpec.describe API::DeployKeys, feature_category: :continuous_delivery do context 'public deploy key attached to project' do let(:extra_params) { { title: 'new title', can_push: true } } - it 'updates the title of the deploy key' do - expect { subject }.to change { deploy_key.reload.title }.to 'new title' - expect(response).to have_gitlab_http_status(:ok) + context 'with admin mode on' do + let(:admin_mode) { true } + + it 'updates the title of the deploy key' do + expect { subject }.to change { deploy_key.reload.title }.to 'new title' + expect(response).to have_gitlab_http_status(:ok) + end end it 'updates can_push of deploy_keys_project' do @@ -298,18 +347,22 @@ RSpec.describe API::DeployKeys, feature_category: :continuous_delivery do deploy_key end + let(:path) { "#{project_path}/#{deploy_key.id}" } + + it_behaves_like 'DELETE request permissions for admin mode' do + let(:failed_status_code) { :not_found } + end + it 'removes existing key from project' do expect do - delete api("/projects/#{project.id}/deploy_keys/#{deploy_key.id}", admin) - - expect(response).to have_gitlab_http_status(:no_content) + delete api(path, admin, admin_mode: true) end.to change { project.deploy_keys.count }.by(-1) end context 'when the deploy key is public' do it 'does not delete the deploy key' do expect do - delete api("/projects/#{project.id}/deploy_keys/#{deploy_key.id}", admin) + delete api(path, admin, admin_mode: true) expect(response).to have_gitlab_http_status(:no_content) end.not_to change { DeployKey.count } @@ -322,7 +375,7 @@ RSpec.describe API::DeployKeys, feature_category: :continuous_delivery do context 'when the deploy key is only used by this project' do it 'deletes the deploy key' do expect do - delete api("/projects/#{project.id}/deploy_keys/#{deploy_key.id}", admin) + delete api(path, admin, admin_mode: true) expect(response).to 
have_gitlab_http_status(:no_content) end.to change { DeployKey.count }.by(-1) @@ -336,7 +389,7 @@ RSpec.describe API::DeployKeys, feature_category: :continuous_delivery do it 'does not delete the deploy key' do expect do - delete api("/projects/#{project.id}/deploy_keys/#{deploy_key.id}", admin) + delete api(path, admin, admin_mode: true) expect(response).to have_gitlab_http_status(:no_content) end.not_to change { DeployKey.count } @@ -345,26 +398,31 @@ RSpec.describe API::DeployKeys, feature_category: :continuous_delivery do end it 'returns 404 Not Found with invalid ID' do - delete api("/projects/#{project.id}/deploy_keys/404", admin) + delete api("#{project_path}/404", admin, admin_mode: true) expect(response).to have_gitlab_http_status(:not_found) end it_behaves_like '412 response' do - let(:request) { api("/projects/#{project.id}/deploy_keys/#{deploy_key.id}", admin) } + let(:request) { api("#{project_path}/#{deploy_key.id}", admin, admin_mode: true) } end end describe 'POST /projects/:id/deploy_keys/:key_id/enable' do - let(:project2) { create(:project) } + let_it_be(:project2) { create(:project) } + let_it_be(:path) { "/projects/#{project2.id}/deploy_keys/#{deploy_key.id}/enable" } + let_it_be(:params) { {} } + + it_behaves_like 'POST request permissions for admin mode' do + let(:failed_status_code) { :not_found } + end context 'when the user can admin the project' do it 'enables the key' do expect do - post api("/projects/#{project2.id}/deploy_keys/#{deploy_key.id}/enable", admin) + post api(path, admin, admin_mode: true) end.to change { project2.deploy_keys.count }.from(0).to(1) - expect(response).to have_gitlab_http_status(:created) expect(json_response['id']).to eq(deploy_key.id) end end diff --git a/spec/requests/api/deploy_tokens_spec.rb b/spec/requests/api/deploy_tokens_spec.rb index 4efe49e843f..c0e36bf03bf 100644 --- a/spec/requests/api/deploy_tokens_spec.rb +++ b/spec/requests/api/deploy_tokens_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' 
-RSpec.describe API::DeployTokens, feature_category: :continuous_delivery do +RSpec.describe API::DeployTokens, :aggregate_failures, feature_category: :continuous_delivery do let_it_be(:user) { create(:user) } let_it_be(:creator) { create(:user) } let_it_be(:project) { create(:project, creator_id: creator.id) } @@ -17,26 +17,25 @@ RSpec.describe API::DeployTokens, feature_category: :continuous_delivery do describe 'GET /deploy_tokens' do subject do - get api('/deploy_tokens', user) + get api('/deploy_tokens', user, admin_mode: admin_mode) response end - context 'when unauthenticated' do - let(:user) { nil } + let_it_be(:admin_mode) { false } - it { is_expected.to have_gitlab_http_status(:unauthorized) } + it_behaves_like 'GET request permissions for admin mode' do + let(:path) { '/deploy_tokens' } end - context 'when authenticated as non-admin user' do - let(:user) { creator } + context 'when unauthenticated' do + let(:user) { nil } - it { is_expected.to have_gitlab_http_status(:forbidden) } + it { is_expected.to have_gitlab_http_status(:unauthorized) } end context 'when authenticated as admin' do let(:user) { create(:admin) } - - it { is_expected.to have_gitlab_http_status(:ok) } + let_it_be(:admin_mode) { true } it 'returns all deploy tokens' do subject @@ -57,7 +56,7 @@ RSpec.describe API::DeployTokens, feature_category: :continuous_delivery do context 'and active=true' do it 'only returns active deploy tokens' do - get api('/deploy_tokens?active=true', user) + get api('/deploy_tokens?active=true', user, admin_mode: true) token_ids = json_response.map { |token| token['id'] } expect(response).to have_gitlab_http_status(:ok) @@ -73,8 +72,10 @@ RSpec.describe API::DeployTokens, feature_category: :continuous_delivery do end describe 'GET /projects/:id/deploy_tokens' do + let(:path) { "/projects/#{project.id}/deploy_tokens" } + subject do - get api("/projects/#{project.id}/deploy_tokens", user) + get api(path, user) response end @@ -134,8 +135,10 @@ RSpec.describe 
API::DeployTokens, feature_category: :continuous_delivery do end describe 'GET /projects/:id/deploy_tokens/:token_id' do + let(:path) { "/projects/#{project.id}/deploy_tokens/#{deploy_token.id}" } + subject do - get api("/projects/#{project.id}/deploy_tokens/#{deploy_token.id}", user) + get api(path, user) response end @@ -183,8 +186,10 @@ RSpec.describe API::DeployTokens, feature_category: :continuous_delivery do end describe 'GET /groups/:id/deploy_tokens' do + let(:path) { "/groups/#{group.id}/deploy_tokens" } + subject do - get api("/groups/#{group.id}/deploy_tokens", user) + get api(path, user) response end @@ -241,8 +246,10 @@ RSpec.describe API::DeployTokens, feature_category: :continuous_delivery do end describe 'GET /groups/:id/deploy_tokens/:token_id' do + let(:path) { "/groups/#{group.id}/deploy_tokens/#{group_deploy_token.id}" } + subject do - get api("/groups/#{group.id}/deploy_tokens/#{group_deploy_token.id}", user) + get api(path, user) response end @@ -290,8 +297,10 @@ RSpec.describe API::DeployTokens, feature_category: :continuous_delivery do end describe 'DELETE /projects/:id/deploy_tokens/:token_id' do + let(:path) { "/projects/#{project.id}/deploy_tokens/#{deploy_token.id}" } + subject do - delete api("/projects/#{project.id}/deploy_tokens/#{deploy_token.id}", user) + delete api(path, user) response end @@ -455,8 +464,10 @@ RSpec.describe API::DeployTokens, feature_category: :continuous_delivery do end describe 'DELETE /groups/:id/deploy_tokens/:token_id' do + let(:path) { "/groups/#{group.id}/deploy_tokens/#{group_deploy_token.id}" } + subject do - delete api("/groups/#{group.id}/deploy_tokens/#{group_deploy_token.id}", user) + delete api(path, user) response end diff --git a/spec/requests/api/draft_notes_spec.rb b/spec/requests/api/draft_notes_spec.rb index d239853ac1d..3911bb8bc00 100644 --- a/spec/requests/api/draft_notes_spec.rb +++ b/spec/requests/api/draft_notes_spec.rb @@ -322,4 +322,47 @@ RSpec.describe API::DraftNotes, 
feature_category: :code_review_workflow do end end end + + describe "Bulk publishing draft notes" do + let(:bulk_publish_draft_notes) do + post api( + "#{base_url}/bulk_publish", + user + ) + end + + let!(:draft_note_by_current_user_2) { create(:draft_note, merge_request: merge_request, author: user) } + + context "when publishing an existing draft note by the user" do + it "returns 204 No Content status" do + bulk_publish_draft_notes + + expect(response).to have_gitlab_http_status(:no_content) + end + + it "publishes the specified draft notes" do + expect { bulk_publish_draft_notes }.to change { Note.count }.by(2) + expect(DraftNote.exists?(draft_note_by_current_user.id)).to eq(false) + expect(DraftNote.exists?(draft_note_by_current_user_2.id)).to eq(false) + end + + it "only publishes the user's draft notes" do + bulk_publish_draft_notes + + expect(DraftNote.exists?(draft_note_by_random_user.id)).to eq(true) + end + end + + context "when DraftNotes::PublishService returns a non-success" do + it "returns an :internal_server_error and a message" do + expect_next_instance_of(DraftNotes::PublishService) do |instance| + expect(instance).to receive(:execute).and_return({ status: :failure, message: "Error message" }) + end + + bulk_publish_draft_notes + + expect(response).to have_gitlab_http_status(:internal_server_error) + end + end + end end diff --git a/spec/requests/api/error_tracking/project_settings_spec.rb b/spec/requests/api/error_tracking/project_settings_spec.rb index 3b01dec6f9c..bde90627983 100644 --- a/spec/requests/api/error_tracking/project_settings_spec.rb +++ b/spec/requests/api/error_tracking/project_settings_spec.rb @@ -4,9 +4,9 @@ require 'spec_helper' RSpec.describe API::ErrorTracking::ProjectSettings, feature_category: :error_tracking do let_it_be(:user) { create(:user) } - - let(:setting) { create(:project_error_tracking_setting) } - let(:project) { setting.project } + let_it_be(:project) { create(:project) } + let_it_be(:setting) { 
create(:project_error_tracking_setting, project: project) } + let_it_be(:project_without_setting) { create(:project) } shared_examples 'returns project settings' do it 'returns correct project settings' do @@ -100,7 +100,8 @@ RSpec.describe API::ErrorTracking::ProjectSettings, feature_category: :error_tra end describe "PATCH /projects/:id/error_tracking/settings" do - let(:params) { { active: false } } + let(:params) { { active: false, integrated: integrated } } + let(:integrated) { false } def make_request patch api("/projects/#{project.id}/error_tracking/settings", user), params: params @@ -111,82 +112,78 @@ RSpec.describe API::ErrorTracking::ProjectSettings, feature_category: :error_tra project.add_maintainer(user) end - context 'patch settings' do - context 'integrated_error_tracking feature enabled' do - it_behaves_like 'returns project settings' - end - - context 'integrated_error_tracking feature disabled' do - before do - stub_feature_flags(integrated_error_tracking: false) - end + context 'with integrated_error_tracking feature enabled' do + it_behaves_like 'returns project settings' + end - it_behaves_like 'returns project settings with false for integrated' + context 'with integrated_error_tracking feature disabled' do + before do + stub_feature_flags(integrated_error_tracking: false) end - it 'updates enabled flag' do - expect(setting).to be_enabled + it_behaves_like 'returns project settings with false for integrated' + end - make_request + it 'updates enabled flag' do + expect(setting).to be_enabled - expect(json_response).to include('active' => false) - expect(setting.reload).not_to be_enabled - end + make_request - context 'active is invalid' do - let(:params) { { active: "randomstring" } } + expect(json_response).to include('active' => false) + expect(setting.reload).not_to be_enabled + end - it 'returns active is invalid if non boolean' do - make_request + context 'when active is invalid' do + let(:params) { { active: "randomstring" } } - 
expect(response).to have_gitlab_http_status(:bad_request) - expect(json_response['error']) - .to eq('active is invalid') - end + it 'returns active is invalid if non boolean' do + make_request + + expect(response).to have_gitlab_http_status(:bad_request) + expect(json_response['error']) + .to eq('active is invalid') end + end - context 'active is empty' do - let(:params) { { active: '' } } + context 'when active is empty' do + let(:params) { { active: '' } } - it 'returns 400' do - make_request + it 'returns 400' do + make_request - expect(response).to have_gitlab_http_status(:bad_request) - expect(json_response['error']) - .to eq('active is empty') - end + expect(response).to have_gitlab_http_status(:bad_request) + expect(json_response['error']) + .to eq('active is empty') end + end - context 'with integrated param' do - let(:params) { { active: true, integrated: true } } + context 'with integrated param' do + let(:params) { { active: true, integrated: true } } - context 'integrated_error_tracking feature enabled' do - before do - stub_feature_flags(integrated_error_tracking: true) - end + context 'when integrated_error_tracking feature enabled' do + before do + stub_feature_flags(integrated_error_tracking: true) + end - it 'updates the integrated flag' do - expect(setting.integrated).to be_falsey + it 'updates the integrated flag' do + expect(setting.integrated).to be_falsey - make_request + make_request - expect(json_response).to include('integrated' => true) - expect(setting.reload.integrated).to be_truthy - end + expect(json_response).to include('integrated' => true) + expect(setting.reload.integrated).to be_truthy end end end context 'without a project setting' do - let_it_be(:project) { create(:project) } + let(:project) { project_without_setting } before do project.add_maintainer(user) end - context 'patch settings' do - it_behaves_like 'returns no project settings' - end + it_behaves_like 'returns no project settings' end context "when 
::Projects::Operations::UpdateService responds with an error" do @@ -210,38 +207,22 @@ RSpec.describe API::ErrorTracking::ProjectSettings, feature_category: :error_tra end end - context 'when authenticated as reporter' do - before do - project.add_reporter(user) - end - - context 'patch request' do - it_behaves_like 'returns 403' - end - end - context 'when authenticated as developer' do before do project.add_developer(user) end - context 'patch request' do - it_behaves_like 'returns 403' - end + it_behaves_like 'returns 403' end context 'when authenticated as non-member' do - context 'patch request' do - it_behaves_like 'returns 404' - end + it_behaves_like 'returns 404' end context 'when unauthenticated' do let(:user) { nil } - context 'patch request' do - it_behaves_like 'returns 401' - end + it_behaves_like 'returns 401' end end @@ -255,43 +236,25 @@ RSpec.describe API::ErrorTracking::ProjectSettings, feature_category: :error_tra project.add_maintainer(user) end - context 'get settings' do - context 'integrated_error_tracking feature enabled' do - before do - stub_feature_flags(integrated_error_tracking: true) - end + it_behaves_like 'returns project settings' - it_behaves_like 'returns project settings' + context 'when integrated_error_tracking feature disabled' do + before do + stub_feature_flags(integrated_error_tracking: false) end - context 'integrated_error_tracking feature disabled' do - before do - stub_feature_flags(integrated_error_tracking: false) - end - - it_behaves_like 'returns project settings with false for integrated' - end + it_behaves_like 'returns project settings with false for integrated' end end context 'without a project setting' do - let(:project) { create(:project) } + let(:project) { project_without_setting } before do project.add_maintainer(user) end - context 'get settings' do - it_behaves_like 'returns no project settings' - end - end - - context 'when authenticated as reporter' do - before do - project.add_reporter(user) - end - 
- it_behaves_like 'returns 403' + it_behaves_like 'returns no project settings' end context 'when authenticated as developer' do @@ -329,9 +292,8 @@ RSpec.describe API::ErrorTracking::ProjectSettings, feature_category: :error_tra end context "when integrated" do - let(:integrated) { true } - context "with existing setting" do + let(:project) { setting.project } let(:setting) { create(:project_error_tracking_setting, :integrated) } let(:active) { false } @@ -351,8 +313,8 @@ RSpec.describe API::ErrorTracking::ProjectSettings, feature_category: :error_tra end context "without setting" do + let(:project) { project_without_setting } let(:active) { true } - let_it_be(:project) { create(:project) } it "creates a setting" do expect { make_request }.to change { ErrorTracking::ProjectErrorTrackingSetting.count } @@ -362,7 +324,7 @@ RSpec.describe API::ErrorTracking::ProjectSettings, feature_category: :error_tra expect(json_response).to eq( "active" => true, "api_url" => nil, - "integrated" => integrated, + "integrated" => true, "project_name" => nil, "sentry_external_url" => nil ) @@ -382,9 +344,7 @@ RSpec.describe API::ErrorTracking::ProjectSettings, feature_category: :error_tra end end - context "integrated_error_tracking feature disabled" do - let(:integrated) { true } - + context "when integrated_error_tracking feature disabled" do before do stub_feature_flags(integrated_error_tracking: false) end @@ -405,14 +365,6 @@ RSpec.describe API::ErrorTracking::ProjectSettings, feature_category: :error_tra end end - context 'as reporter' do - before do - project.add_reporter(user) - end - - it_behaves_like 'returns 403' - end - context "as developer" do before do project.add_developer(user) @@ -428,7 +380,6 @@ RSpec.describe API::ErrorTracking::ProjectSettings, feature_category: :error_tra context "when unauthorized" do let(:user) { nil } - let(:integrated) { true } it_behaves_like 'returns 401' end diff --git a/spec/requests/api/freeze_periods_spec.rb 
b/spec/requests/api/freeze_periods_spec.rb index a53db516940..b582c2e0f4e 100644 --- a/spec/requests/api/freeze_periods_spec.rb +++ b/spec/requests/api/freeze_periods_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe API::FreezePeriods, feature_category: :continuous_delivery do +RSpec.describe API::FreezePeriods, :aggregate_failures, feature_category: :continuous_delivery do let_it_be(:project) { create(:project, :repository, :private) } let_it_be(:user) { create(:user) } let_it_be(:admin) { create(:admin) } @@ -12,12 +12,21 @@ RSpec.describe API::FreezePeriods, feature_category: :continuous_delivery do let(:last_freeze_period) { project.freeze_periods.last } describe 'GET /projects/:id/freeze_periods' do - let_it_be(:path) { "/projects/#{project.id}/freeze_periods" } + let(:path) { "/projects/#{project.id}/freeze_periods" } + + it_behaves_like 'GET request permissions for admin mode' do + let!(:freeze_period) { create(:ci_freeze_period, project: project, created_at: 2.days.ago) } + let(:failed_status_code) { :not_found } + end context 'when the user is the admin' do let!(:freeze_period) { create(:ci_freeze_period, project: project, created_at: 2.days.ago) } - it_behaves_like 'GET request permissions for admin mode when admin', :not_found + it 'returns 200 HTTP status' do + get api(path, admin, admin_mode: true) + + expect(response).to have_gitlab_http_status(:ok) + end end context 'when the user is the maintainer' do @@ -29,10 +38,15 @@ RSpec.describe API::FreezePeriods, feature_category: :continuous_delivery do let!(:freeze_period_1) { create(:ci_freeze_period, project: project, created_at: 2.days.ago) } let!(:freeze_period_2) { create(:ci_freeze_period, project: project, created_at: 1.day.ago) } - it 'returns freeze_periods ordered by created_at ascending', :aggregate_failures do + it 'returns 200 HTTP status' do get api(path, user) expect(response).to have_gitlab_http_status(:ok) + end + + it 'returns freeze_periods ordered by created_at ascending' 
do + get api(path, user) + expect(json_response.count).to eq(2) expect(freeze_period_ids).to eq([freeze_period_1.id, freeze_period_2.id]) end @@ -45,10 +59,15 @@ RSpec.describe API::FreezePeriods, feature_category: :continuous_delivery do end context 'when there are no freeze_periods' do - it 'returns 200 HTTP status with empty response', :aggregate_failures do + it 'returns 200 HTTP status' do get api(path, user) expect(response).to have_gitlab_http_status(:ok) + end + + it 'returns an empty response' do + get api(path, user) + expect(json_response).to be_empty end end @@ -63,37 +82,56 @@ RSpec.describe API::FreezePeriods, feature_category: :continuous_delivery do create(:ci_freeze_period, project: project) end - context 'and responds 403 Forbidden' do - it_behaves_like 'GET request permissions for admin mode when user', :forbidden do - let(:current_user) { user } - end + it 'responds 403 Forbidden' do + get api(path, user) + + expect(response).to have_gitlab_http_status(:forbidden) end end context 'when user is not a project member' do - it_behaves_like 'GET request permissions for admin mode when user', :not_found + it 'responds 404 Not Found' do + get api(path, user) + + expect(response).to have_gitlab_http_status(:not_found) + end context 'when project is public' do let(:project) { create(:project, :public) } - let(:path) { "/projects/#{project.id}/freeze_periods" } - it_behaves_like 'GET request permissions for admin mode when user', :forbidden + it 'responds 403 Forbidden' do + get api(path, user) + + expect(response).to have_gitlab_http_status(:forbidden) + end end end end describe 'GET /projects/:id/freeze_periods/:freeze_period_id' do - context 'when there is a freeze period' do + let(:path) { "/projects/#{project.id}/freeze_periods/#{freeze_period.id}" } + + it_behaves_like 'GET request permissions for admin mode' do let!(:freeze_period) do create(:ci_freeze_period, project: project) end - let(:path) { 
"/projects/#{project.id}/freeze_periods/#{freeze_period.id}" } + let(:failed_status_code) { :not_found } + end + + context 'when there is a freeze period' do + let!(:freeze_period) do + create(:ci_freeze_period, project: project) + end context 'when the user is the admin' do let!(:freeze_period) { create(:ci_freeze_period, project: project, created_at: 2.days.ago) } - it_behaves_like 'GET request permissions for admin mode when admin', :not_found + it 'responds 200 OK' do + get api(path, admin, admin_mode: true) + + expect(response).to have_gitlab_http_status(:ok) + end end context 'when the user is the maintainer' do @@ -101,10 +139,15 @@ RSpec.describe API::FreezePeriods, feature_category: :continuous_delivery do project.add_maintainer(user) end - it 'returns a freeze period', :aggregate_failures do + it 'responds 200 OK' do get api(path, user) expect(response).to have_gitlab_http_status(:ok) + end + + it 'returns a freeze period' do + get api(path, user) + expect(json_response).to include( 'id' => freeze_period.id, 'freeze_start' => freeze_period.freeze_start, @@ -124,26 +167,28 @@ RSpec.describe API::FreezePeriods, feature_category: :continuous_delivery do project.add_guest(user) end - context 'and responds 403 Forbidden' do - it_behaves_like 'GET request permissions for admin mode when user' do - let(:current_user) { user } - end + it 'responds 403 Forbidden' do + get api(path, user) + + expect(response).to have_gitlab_http_status(:forbidden) end context 'when project is public' do let(:project) { create(:project, :public) } - context 'and responds 403 Forbidden when freeze_period exists' do - it_behaves_like 'GET request permissions for admin mode when user' do - let(:current_user) { user } + context 'when freeze_period exists' do + it 'responds 403 Forbidden' do + get api(path, user) + + expect(response).to have_gitlab_http_status(:forbidden) end end - context 'and responds 403 Forbidden when freeze_period does not exist' do - let(:path) { 
"/projects/#{project.id}/freeze_periods/0" } + context 'when freeze_period does not exist' do + it 'responds 403 Forbidden' do + get api("/projects/#{project.id}/freeze_periods/0", user) - it_behaves_like 'GET request permissions for admin mode when user' do - let(:current_user) { user } + expect(response).to have_gitlab_http_status(:forbidden) end end end @@ -162,11 +207,21 @@ RSpec.describe API::FreezePeriods, feature_category: :continuous_delivery do let(:path) { "/projects/#{project.id}/freeze_periods" } - subject { post api(path, api_user), params: params } + it_behaves_like 'POST request permissions for admin mode' do + let(:failed_status_code) { :not_found } + end + + subject do + post api(path, api_user, admin_mode: api_user.admin?), params: params + end context 'when the user is the admin' do - it_behaves_like 'POST request permissions for admin mode when admin', :not_found do - let(:current_user) { admin } + let(:api_user) { admin } + + it 'accepts the request' do + subject + + expect(response).to have_gitlab_http_status(:created) end end @@ -182,7 +237,7 @@ RSpec.describe API::FreezePeriods, feature_category: :continuous_delivery do expect(response).to have_gitlab_http_status(:created) end - it 'creates a new freeze period', :aggregate_failures do + it 'creates a new freeze period' do expect do subject end.to change { Ci::FreezePeriod.count }.by(1) @@ -238,10 +293,10 @@ RSpec.describe API::FreezePeriods, feature_category: :continuous_delivery do project.add_developer(user) end - context 'and responds 403 Forbidden' do - it_behaves_like 'POST request permissions for admin mode when user' do - let(:current_user) { user } - end + it 'responds 403 Forbidden' do + subject + + expect(response).to have_gitlab_http_status(:forbidden) end end @@ -250,22 +305,28 @@ RSpec.describe API::FreezePeriods, feature_category: :continuous_delivery do project.add_reporter(user) end - context 'and responds 403 Forbidden' do - it_behaves_like 'POST request permissions for 
admin mode when user' do - let(:current_user) { user } - end + it 'responds 403 Forbidden' do + subject + + expect(response).to have_gitlab_http_status(:forbidden) end end context 'when user is not a project member' do - context 'and responds 403 Forbidden' do - it_behaves_like 'POST request permissions for admin mode when user', :not_found + it 'responds 403 Forbidden' do + subject + + expect(response).to have_gitlab_http_status(:not_found) end context 'when project is public' do let(:project) { create(:project, :public) } - it_behaves_like 'POST request permissions for admin mode when user', :forbidden + it 'responds 403 Forbidden' do + subject + + expect(response).to have_gitlab_http_status(:forbidden) + end end end end @@ -273,12 +334,20 @@ RSpec.describe API::FreezePeriods, feature_category: :continuous_delivery do describe 'PUT /projects/:id/freeze_periods/:freeze_period_id' do let(:params) { { freeze_start: '0 22 * * 5', freeze_end: '5 4 * * sun' } } let!(:freeze_period) { create :ci_freeze_period, project: project } - let(:path) { "/projects/#{project.id}/freeze_periods/#{freeze_period.id}" } - subject { put api(path, api_user), params: params } + subject do + put api("/projects/#{project.id}/freeze_periods/#{freeze_period.id}", api_user, admin_mode: api_user.admin?), + params: params + end context 'when user is the admin' do - it_behaves_like 'PUT request permissions for admin mode when admin', :not_found + let(:api_user) { admin } + + it 'accepts the request' do + subject + + expect(response).to have_gitlab_http_status(:ok) + end end context 'when user is the maintainer' do @@ -326,23 +395,27 @@ RSpec.describe API::FreezePeriods, feature_category: :continuous_delivery do project.add_reporter(user) end - context 'and responds 403 Forbidden' do - it_behaves_like 'PUT request permissions for admin mode when user' do - let(:current_user) { user } - end + it 'responds 403 Forbidden' do + subject + + expect(response).to have_gitlab_http_status(:forbidden) end 
end context 'when user is not a project member' do - context 'and responds 404 Not Found' do - it_behaves_like 'PUT request permissions for admin mode when user', :not_found + it 'responds 404 Not Found' do + subject + + expect(response).to have_gitlab_http_status(:not_found) end context 'when project is public' do let(:project) { create(:project, :public) } - context 'and responds 403 Forbidden' do - it_behaves_like 'PUT request permissions for admin mode when user' + it 'responds 403 Forbidden' do + subject + + expect(response).to have_gitlab_http_status(:forbidden) end end end @@ -351,12 +424,19 @@ RSpec.describe API::FreezePeriods, feature_category: :continuous_delivery do describe 'DELETE /projects/:id/freeze_periods/:freeze_period_id' do let!(:freeze_period) { create :ci_freeze_period, project: project } let(:freeze_period_id) { freeze_period.id } - let(:path) { "/projects/#{project.id}/freeze_periods/#{freeze_period_id}" } - subject { delete api(path, api_user) } + subject do + delete api("/projects/#{project.id}/freeze_periods/#{freeze_period_id}", api_user, admin_mode: api_user.admin?) 
+ end context 'when user is the admin' do - it_behaves_like 'DELETE request permissions for admin mode when admin', failed_status_code: :not_found + let(:api_user) { admin } + + it 'accepts the request' do + subject + + expect(response).to have_gitlab_http_status(:no_content) + end end context 'when user is the maintainer' do @@ -392,23 +472,27 @@ RSpec.describe API::FreezePeriods, feature_category: :continuous_delivery do project.add_reporter(user) end - context 'and responds 403 Forbidden' do - it_behaves_like 'DELETE request permissions for admin mode when user' do - let(:current_user) { user } - end + it 'responds 403 Forbidden' do + subject + + expect(response).to have_gitlab_http_status(:forbidden) end end context 'when user is not a project member' do - context 'and responds 404 Not Found' do - it_behaves_like 'DELETE request permissions for admin mode when user', :not_found + it 'responds 404 Not Found' do + subject + + expect(response).to have_gitlab_http_status(:not_found) end context 'when project is public' do let(:project) { create(:project, :public) } - context 'and responds 403 Forbidden' do - it_behaves_like 'DELETE request permissions for admin mode when user' + it 'responds 403 Forbidden' do + subject + + expect(response).to have_gitlab_http_status(:forbidden) end end end diff --git a/spec/requests/api/graphql/ci/config_variables_spec.rb b/spec/requests/api/graphql/ci/config_variables_spec.rb index d77e66d2239..4bad5dec684 100644 --- a/spec/requests/api/graphql/ci/config_variables_spec.rb +++ b/spec/requests/api/graphql/ci/config_variables_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe 'Query.project(fullPath).ciConfigVariables(sha)', feature_category: :pipeline_composition do +RSpec.describe 'Query.project(fullPath).ciConfigVariables(ref)', feature_category: :secrets_management do include GraphqlHelpers include ReactiveCachingHelpers @@ -20,7 +20,7 @@ RSpec.describe 'Query.project(fullPath).ciConfigVariables(sha)', feature_categor 
%( query { project(fullPath: "#{project.full_path}") { - ciConfigVariables(sha: "#{ref}") { + ciConfigVariables(ref: "#{ref}") { key value valueOptions diff --git a/spec/requests/api/graphql/ci/group_variables_spec.rb b/spec/requests/api/graphql/ci/group_variables_spec.rb index 042f93e9779..3b8eeefb707 100644 --- a/spec/requests/api/graphql/ci/group_variables_spec.rb +++ b/spec/requests/api/graphql/ci/group_variables_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe 'Query.group(fullPath).ciVariables', feature_category: :pipeline_composition do +RSpec.describe 'Query.group(fullPath).ciVariables', feature_category: :secrets_management do include GraphqlHelpers let_it_be(:group) { create(:group) } diff --git a/spec/requests/api/graphql/ci/instance_variables_spec.rb b/spec/requests/api/graphql/ci/instance_variables_spec.rb index 286a7af3c01..a612b4c91b6 100644 --- a/spec/requests/api/graphql/ci/instance_variables_spec.rb +++ b/spec/requests/api/graphql/ci/instance_variables_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe 'Query.ciVariables', feature_category: :pipeline_composition do +RSpec.describe 'Query.ciVariables', feature_category: :secrets_management do include GraphqlHelpers let(:query) do diff --git a/spec/requests/api/graphql/ci/jobs_spec.rb b/spec/requests/api/graphql/ci/jobs_spec.rb index 3f556a75869..ea44be2ec7f 100644 --- a/spec/requests/api/graphql/ci/jobs_spec.rb +++ b/spec/requests/api/graphql/ci/jobs_spec.rb @@ -1,6 +1,84 @@ # frozen_string_literal: true require 'spec_helper' +RSpec.describe 'Query.jobs', feature_category: :continuous_integration do + include GraphqlHelpers + + let_it_be(:admin) { create(:admin) } + let_it_be(:project) { create(:project, :repository, :public) } + let_it_be(:pipeline) { create(:ci_pipeline, project: project) } + let_it_be(:build) do + create(:ci_build, pipeline: pipeline, name: 'my test job', ref: 'HEAD', tag_list: %w[tag1 tag2]) + end + + let(:query) do + %( + query { + jobs { + nodes { + id 
+ #{fields.join(' ')} + } + } + } + ) + end + + let(:jobs_graphql_data) { graphql_data_at(:jobs, :nodes) } + + let(:fields) do + %w[commitPath refPath webPath browseArtifactsPath playPath tags] + end + + it 'returns the paths in each job of a pipeline' do + post_graphql(query, current_user: admin) + + expect(jobs_graphql_data).to contain_exactly( + a_graphql_entity_for( + build, + commit_path: "/#{project.full_path}/-/commit/#{build.sha}", + ref_path: "/#{project.full_path}/-/commits/HEAD", + web_path: "/#{project.full_path}/-/jobs/#{build.id}", + browse_artifacts_path: "/#{project.full_path}/-/jobs/#{build.id}/artifacts/browse", + play_path: "/#{project.full_path}/-/jobs/#{build.id}/play", + tags: build.tag_list + ) + ) + end + + context 'when requesting individual fields' do + using RSpec::Parameterized::TableSyntax + + let_it_be(:admin2) { create(:admin) } + let_it_be(:project2) { create(:project) } + let_it_be(:pipeline2) { create(:ci_pipeline, project: project2) } + + where(:field) { fields } + + with_them do + let(:fields) do + [field] + end + + it 'does not generate N+1 queries', :request_store, :use_sql_query_cache do + # warm-up cache and so on: + args = { current_user: admin } + args2 = { current_user: admin2 } + post_graphql(query, **args2) + + control = ActiveRecord::QueryRecorder.new(skip_cached: false) do + post_graphql(query, **args) + end + + create(:ci_build, pipeline: pipeline2, name: 'my test job2', ref: 'HEAD', tag_list: %w[tag3]) + post_graphql(query, **args) + + expect { post_graphql(query, **args) }.not_to exceed_all_query_limit(control) + end + end + end +end + RSpec.describe 'Query.project.pipeline', feature_category: :continuous_integration do include GraphqlHelpers @@ -260,12 +338,12 @@ RSpec.describe 'Query.project.pipeline', feature_category: :continuous_integrati end end - describe '.jobs.runnerMachine' do + describe '.jobs.runnerManager' do let_it_be(:admin) { create(:admin) } - let_it_be(:runner_machine) { create(:ci_runner_machine, 
created_at: Time.current, contacted_at: Time.current) } + let_it_be(:runner_manager) { create(:ci_runner_machine, created_at: Time.current, contacted_at: Time.current) } let_it_be(:pipeline) { create(:ci_pipeline, project: project) } let_it_be(:build) do - create(:ci_build, pipeline: pipeline, name: 'my test job', runner_machine: runner_machine) + create(:ci_build, pipeline: pipeline, name: 'my test job', runner_manager: runner_manager) end let(:query) do @@ -277,8 +355,8 @@ RSpec.describe 'Query.project.pipeline', feature_category: :continuous_integrati nodes { id name - runnerMachine { - #{all_graphql_fields_for('CiRunnerMachine', excluded: [:runner], max_depth: 1)} + runnerManager { + #{all_graphql_fields_for('CiRunnerManager', excluded: [:runner], max_depth: 1)} } } } @@ -290,19 +368,19 @@ RSpec.describe 'Query.project.pipeline', feature_category: :continuous_integrati let(:jobs_graphql_data) { graphql_data_at(:project, :pipeline, :jobs, :nodes) } - it 'returns the runner machine in each job of a pipeline' do + it 'returns the runner manager in each job of a pipeline' do post_graphql(query, current_user: admin) expect(jobs_graphql_data).to contain_exactly( a_graphql_entity_for( build, name: build.name, - runner_machine: a_graphql_entity_for( - runner_machine, - system_id: runner_machine.system_xid, - created_at: runner_machine.created_at.iso8601, - contacted_at: runner_machine.contacted_at.iso8601, - status: runner_machine.status.to_s.upcase + runner_manager: a_graphql_entity_for( + runner_manager, + system_id: runner_manager.system_xid, + created_at: runner_manager.created_at.iso8601, + contacted_at: runner_manager.contacted_at.iso8601, + status: runner_manager.status.to_s.upcase ) ) ) @@ -315,8 +393,8 @@ RSpec.describe 'Query.project.pipeline', feature_category: :continuous_integrati post_graphql(query, current_user: admin) end - runner_machine2 = create(:ci_runner_machine) - create(:ci_build, pipeline: pipeline, name: 'my test job2', runner_machine: 
runner_machine2) + runner_manager2 = create(:ci_runner_machine) + create(:ci_build, pipeline: pipeline, name: 'my test job2', runner_manager: runner_manager2) expect { post_graphql(query, current_user: admin2) }.not_to exceed_all_query_limit(control) end diff --git a/spec/requests/api/graphql/ci/manual_variables_spec.rb b/spec/requests/api/graphql/ci/manual_variables_spec.rb index 98d91e9ded0..47dccc0deb6 100644 --- a/spec/requests/api/graphql/ci/manual_variables_spec.rb +++ b/spec/requests/api/graphql/ci/manual_variables_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe 'Query.project(fullPath).pipelines.jobs.manualVariables', feature_category: :pipeline_composition do +RSpec.describe 'Query.project(fullPath).pipelines.jobs.manualVariables', feature_category: :secrets_management do include GraphqlHelpers let_it_be(:project) { create(:project) } diff --git a/spec/requests/api/graphql/ci/project_variables_spec.rb b/spec/requests/api/graphql/ci/project_variables_spec.rb index 947991a2e62..62fc2623a0f 100644 --- a/spec/requests/api/graphql/ci/project_variables_spec.rb +++ b/spec/requests/api/graphql/ci/project_variables_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe 'Query.project(fullPath).ciVariables', feature_category: :pipeline_composition do +RSpec.describe 'Query.project(fullPath).ciVariables', feature_category: :secrets_management do include GraphqlHelpers let_it_be(:project) { create(:project) } diff --git a/spec/requests/api/graphql/ci/runner_spec.rb b/spec/requests/api/graphql/ci/runner_spec.rb index da71ee675b7..ed180522c98 100644 --- a/spec/requests/api/graphql/ci/runner_spec.rb +++ b/spec/requests/api/graphql/ci/runner_spec.rb @@ -10,7 +10,7 @@ RSpec.describe 'Query.runner(id)', feature_category: :runner_fleet do let_it_be(:group) { create(:group) } let_it_be(:active_instance_runner) do - create(:ci_runner, :instance, :with_runner_machine, + create(:ci_runner, :instance, :with_runner_manager, description: 'Runner 1', creator: 
user, contacted_at: 2.hours.ago, @@ -59,7 +59,7 @@ RSpec.describe 'Query.runner(id)', feature_category: :runner_fleet do let_it_be(:project1) { create(:project) } let_it_be(:active_project_runner) do - create(:ci_runner, :project, :with_runner_machine, projects: [project1]) + create(:ci_runner, :project, :with_runner_manager, projects: [project1]) end shared_examples 'runner details fetch' do @@ -91,7 +91,7 @@ RSpec.describe 'Query.runner(id)', feature_category: :runner_fleet do locked: false, active: runner.active, paused: !runner.active, - status: runner.status('14.5').to_s.upcase, + status: runner.status.to_s.upcase, job_execution_status: runner.builds.running.any? ? 'RUNNING' : 'IDLE', maximum_timeout: runner.maximum_timeout, access_level: runner.access_level.to_s.upcase, @@ -121,8 +121,8 @@ RSpec.describe 'Query.runner(id)', feature_category: :runner_fleet do 'deleteRunner' => true, 'assignRunner' => true }, - machines: a_hash_including( - "count" => runner.runner_machines.count, + managers: a_hash_including( + "count" => runner.runner_managers.count, "nodes" => an_instance_of(Array), "pageInfo" => anything ) @@ -130,7 +130,7 @@ RSpec.describe 'Query.runner(id)', feature_category: :runner_fleet do expect(runner_data['tagList']).to match_array runner.tag_list end - it 'does not execute more queries per runner', :aggregate_failures do + it 'does not execute more queries per runner', :use_sql_query_cache, :aggregate_failures do # warm-up license cache and so on: personal_access_token = create(:personal_access_token, user: user) args = { current_user: user, token: { personal_access_token: personal_access_token } } @@ -139,12 +139,12 @@ RSpec.describe 'Query.runner(id)', feature_category: :runner_fleet do personal_access_token = create(:personal_access_token, user: another_admin) args = { current_user: another_admin, token: { personal_access_token: personal_access_token } } - control = ActiveRecord::QueryRecorder.new { post_graphql(query, **args) } + control = 
ActiveRecord::QueryRecorder.new(skip_cached: false) { post_graphql(query, **args) } create(:ci_runner, :instance, version: '14.0.0', tag_list: %w[tag5 tag6], creator: another_admin) create(:ci_runner, :project, version: '14.0.1', projects: [project1], tag_list: %w[tag3 tag8], creator: another_admin) - expect { post_graphql(query, **args) }.not_to exceed_query_limit(control) + expect { post_graphql(query, **args) }.not_to exceed_all_query_limit(control) end end @@ -378,6 +378,86 @@ RSpec.describe 'Query.runner(id)', feature_category: :runner_fleet do end end + describe 'ephemeralRegisterUrl' do + let(:query) do + %( + query { + runner(id: "#{runner.to_global_id}") { + ephemeralRegisterUrl + } + } + ) + end + + shared_examples 'has register url' do + it 'retrieves register url' do + post_graphql(query, current_user: user) + expect(graphql_data_at(:runner, :ephemeral_register_url)).to eq(expected_url) + end + end + + shared_examples 'has no register url' do + it 'retrieves no register url' do + post_graphql(query, current_user: user) + expect(graphql_data_at(:runner, :ephemeral_register_url)).to eq(nil) + end + end + + context 'with an instance runner' do + context 'with registration available' do + let_it_be(:runner) { create(:ci_runner, registration_type: :authenticated_user) } + + it_behaves_like 'has register url' do + let(:expected_url) { "http://localhost/admin/runners/#{runner.id}/register" } + end + end + + context 'with no registration available' do + let_it_be(:runner) { create(:ci_runner) } + + it_behaves_like 'has no register url' + end + end + + context 'with a group runner' do + context 'with registration available' do + let_it_be(:runner) { create(:ci_runner, :group, groups: [group], registration_type: :authenticated_user) } + + it_behaves_like 'has register url' do + let(:expected_url) { "http://localhost/groups/#{group.path}/-/runners/#{runner.id}/register" } + end + end + + context 'with no group' do + let(:destroyed_group) { create(:group) } + 
let(:runner) { create(:ci_runner, :group, groups: [destroyed_group], registration_type: :authenticated_user) } + + before do + destroyed_group.destroy! + end + + it_behaves_like 'has no register url' + end + + context 'with no registration available' do + let_it_be(:runner) { create(:ci_runner, :group, groups: [group]) } + + it_behaves_like 'has no register url' + end + + context 'with no access' do + let_it_be(:user) { create(:user) } + let_it_be(:runner) { create(:ci_runner, :group, groups: [group], registration_type: :authenticated_user) } + + before do + group.add_maintainer(user) + end + + it_behaves_like 'has no register url' + end + end + end + describe 'for runner with status' do let_it_be(:stale_runner) { create(:ci_runner, description: 'Stale runner 1', created_at: 3.months.ago) } let_it_be(:never_contacted_instance_runner) { create(:ci_runner, description: 'Missing runner 1', created_at: 1.month.ago, contacted_at: nil) } @@ -412,8 +492,8 @@ RSpec.describe 'Query.runner(id)', feature_category: :runner_fleet do paused_runner_data = graphql_data_at(:paused_runner) expect(paused_runner_data).to match a_hash_including( - 'status' => 'PAUSED', - 'legacyStatusWithExplicitVersion' => 'PAUSED', + 'status' => 'OFFLINE', + 'legacyStatusWithExplicitVersion' => 'OFFLINE', 'newStatus' => 'OFFLINE' ) @@ -424,6 +504,37 @@ RSpec.describe 'Query.runner(id)', feature_category: :runner_fleet do 'newStatus' => 'NEVER_CONTACTED' ) end + + context 'when disable_runner_graphql_legacy_mode is enabled' do + before do + stub_feature_flags(disable_runner_graphql_legacy_mode: false) + end + + it 'retrieves status fields with expected values' do + post_graphql(query, current_user: user) + + stale_runner_data = graphql_data_at(:stale_runner) + expect(stale_runner_data).to match a_hash_including( + 'status' => 'STALE', + 'legacyStatusWithExplicitVersion' => 'STALE', + 'newStatus' => 'STALE' + ) + + paused_runner_data = graphql_data_at(:paused_runner) + expect(paused_runner_data).to 
match a_hash_including( + 'status' => 'PAUSED', + 'legacyStatusWithExplicitVersion' => 'PAUSED', + 'newStatus' => 'OFFLINE' + ) + + never_contacted_instance_runner_data = graphql_data_at(:never_contacted_instance_runner) + expect(never_contacted_instance_runner_data).to match a_hash_including( + 'status' => 'NEVER_CONTACTED', + 'legacyStatusWithExplicitVersion' => 'NEVER_CONTACTED', + 'newStatus' => 'NEVER_CONTACTED' + ) + end + end end describe 'for multiple runners' do @@ -625,12 +736,12 @@ RSpec.describe 'Query.runner(id)', feature_category: :runner_fleet do context 'with runner created in last hour' do let(:created_at) { (Ci::Runner::REGISTRATION_AVAILABILITY_TIME - 1.second).ago } - context 'with no runner machine registed yet' do + context 'with no runner manager registered yet' do it_behaves_like 'an ephemeral_authentication_token' end - context 'with first runner machine already registed' do - let!(:runner_machine) { create(:ci_runner_machine, runner: runner) } + context 'with first runner manager already registered' do + let!(:runner_manager) { create(:ci_runner_machine, runner: runner) } it_behaves_like 'a protected ephemeral_authentication_token' end @@ -777,20 +888,20 @@ RSpec.describe 'Query.runner(id)', feature_category: :runner_fleet do end describe 'Query limits with jobs' do - let!(:group1) { create(:group) } - let!(:group2) { create(:group) } - let!(:project1) { create(:project, :repository, group: group1) } - let!(:project2) { create(:project, :repository, group: group1) } - let!(:project3) { create(:project, :repository, group: group2) } + let_it_be(:group1) { create(:group) } + let_it_be(:group2) { create(:group) } + let_it_be(:project1) { create(:project, :repository, group: group1) } + let_it_be(:project2) { create(:project, :repository, group: group1) } + let_it_be(:project3) { create(:project, :repository, group: group2) } - let!(:merge_request1) { create(:merge_request, source_project: project1) } - let!(:merge_request2) { 
create(:merge_request, source_project: project3) } + let_it_be(:merge_request1) { create(:merge_request, source_project: project1) } + let_it_be(:merge_request2) { create(:merge_request, source_project: project3) } let(:project_runner2) { create(:ci_runner, :project, projects: [project1, project2]) } let!(:build1) { create(:ci_build, :success, name: 'Build One', runner: project_runner2, pipeline: pipeline1) } - let!(:pipeline1) do + let_it_be(:pipeline1) do create(:ci_pipeline, project: project1, source: :merge_request_event, merge_request: merge_request1, ref: 'main', - target_sha: 'xxx') + target_sha: 'xxx') end let(:query) do @@ -801,24 +912,7 @@ RSpec.describe 'Query.runner(id)', feature_category: :runner_fleet do jobs { nodes { id - detailedStatus { - id - detailsPath - group - icon - text - } - project { - id - name - webUrl - } - shortSha - commitPath - finishedAt - duration - queuedDuration - tags + #{field} } } } @@ -826,31 +920,58 @@ RSpec.describe 'Query.runner(id)', feature_category: :runner_fleet do QUERY end - it 'does not execute more queries per job', :aggregate_failures do - # warm-up license cache and so on: - personal_access_token = create(:personal_access_token, user: user) - args = { current_user: user, token: { personal_access_token: personal_access_token } } - post_graphql(query, **args) - - control = ActiveRecord::QueryRecorder.new(query_recorder_debug: true) { post_graphql(query, **args) } - - # Add a new build to project_runner2 - project_runner2.runner_projects << build(:ci_runner_project, runner: project_runner2, project: project3) - pipeline2 = create(:ci_pipeline, project: project3, source: :merge_request_event, merge_request: merge_request2, - ref: 'main', target_sha: 'xxx') - build2 = create(:ci_build, :success, name: 'Build Two', runner: project_runner2, pipeline: pipeline2) + context 'when requesting individual fields' do + using RSpec::Parameterized::TableSyntax - args[:current_user] = create(:user, :admin) # do not reuse same 
user - expect { post_graphql(query, **args) }.not_to exceed_all_query_limit(control) + where(:field) do + [ + 'detailedStatus { id detailsPath group icon text }', + 'project { id name webUrl }' + ] + %w[ + shortSha + browseArtifactsPath + commitPath + playPath + refPath + webPath + finishedAt + duration + queuedDuration + tags + ] + end - expect(graphql_data.count).to eq 1 - expect(graphql_data).to match( - a_hash_including( - 'runner' => a_graphql_entity_for( - project_runner2, - jobs: { 'nodes' => containing_exactly(a_graphql_entity_for(build1), a_graphql_entity_for(build2)) } - ) - )) + with_them do + it 'does not execute more queries per job', :use_sql_query_cache, :aggregate_failures do + admin2 = create(:user, :admin) # do not reuse same user + + # warm-up license cache and so on: + personal_access_token = create(:personal_access_token, user: user) + personal_access_token2 = create(:personal_access_token, user: admin2) + args = { current_user: user, token: { personal_access_token: personal_access_token } } + args2 = { current_user: admin2, token: { personal_access_token: personal_access_token2 } } + post_graphql(query, **args2) + + control = ActiveRecord::QueryRecorder.new(skip_cached: false) { post_graphql(query, **args) } + + # Add a new build to project_runner2 + project_runner2.runner_projects << build(:ci_runner_project, runner: project_runner2, project: project3) + pipeline2 = create(:ci_pipeline, project: project3, source: :merge_request_event, merge_request: merge_request2, + ref: 'main', target_sha: 'xxx') + build2 = create(:ci_build, :success, name: 'Build Two', runner: project_runner2, pipeline: pipeline2) + + expect { post_graphql(query, **args2) }.not_to exceed_all_query_limit(control) + + expect(graphql_data.count).to eq 1 + expect(graphql_data).to match( + a_hash_including( + 'runner' => a_graphql_entity_for( + project_runner2, + jobs: { 'nodes' => containing_exactly(a_graphql_entity_for(build1), a_graphql_entity_for(build2)) } + ) + )) + end + 
end end end diff --git a/spec/requests/api/graphql/group/data_transfer_spec.rb b/spec/requests/api/graphql/group/data_transfer_spec.rb new file mode 100644 index 00000000000..b7c038afa54 --- /dev/null +++ b/spec/requests/api/graphql/group/data_transfer_spec.rb @@ -0,0 +1,115 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe 'group data transfers', feature_category: :source_code_management do + include GraphqlHelpers + + let_it_be(:current_user) { create(:user) } + let_it_be(:group) { create(:group) } + let_it_be(:project_1) { create(:project, group: group) } + let_it_be(:project_2) { create(:project, group: group) } + + let(:fields) do + <<~QUERY + #{all_graphql_fields_for('GroupDataTransfer'.classify)} + QUERY + end + + let(:query) do + graphql_query_for( + 'group', + { fullPath: group.full_path }, + query_graphql_field('DataTransfer', params, fields) + ) + end + + let(:from) { Date.new(2022, 1, 1) } + let(:to) { Date.new(2023, 1, 1) } + let(:params) { { from: from, to: to } } + let(:egress_data) do + graphql_data.dig('group', 'dataTransfer', 'egressNodes', 'nodes') + end + + before do + create(:project_data_transfer, project: project_1, date: '2022-01-01', repository_egress: 1) + create(:project_data_transfer, project: project_1, date: '2022-02-01', repository_egress: 2) + create(:project_data_transfer, project: project_2, date: '2022-02-01', repository_egress: 4) + end + + subject { post_graphql(query, current_user: current_user) } + + context 'with anonymous access' do + let_it_be(:current_user) { nil } + + before do + subject + end + + it_behaves_like 'a working graphql query' + + it 'returns no data' do + expect(graphql_data_at(:group, :data_transfer)).to be_nil + expect(graphql_errors).to be_nil + end + end + + context 'with authorized user but without enough permissions' do + before do + group.add_developer(current_user) + subject + end + + it_behaves_like 'a working graphql query' + + it 'returns empty results' do + 
expect(graphql_data_at(:group, :data_transfer)).to be_nil + expect(graphql_errors).to be_nil + end + end + + context 'when user has enough permissions' do + before do + group.add_owner(current_user) + end + + context 'when data_transfer_monitoring_mock_data is NOT enabled' do + before do + stub_feature_flags(data_transfer_monitoring_mock_data: false) + subject + end + + it 'returns real results' do + expect(response).to have_gitlab_http_status(:ok) + + expect(egress_data.count).to eq(2) + + expect(egress_data.first.keys).to match_array( + %w[date totalEgress repositoryEgress artifactsEgress packagesEgress registryEgress] + ) + + expect(egress_data.pluck('repositoryEgress')).to match_array(%w[1 6]) + end + + it_behaves_like 'a working graphql query' + end + + context 'when data_transfer_monitoring_mock_data is enabled' do + before do + stub_feature_flags(data_transfer_monitoring_mock_data: true) + subject + end + + it 'returns mock results' do + expect(response).to have_gitlab_http_status(:ok) + + expect(egress_data.count).to eq(12) + expect(egress_data.first.keys).to match_array( + %w[date totalEgress repositoryEgress artifactsEgress packagesEgress registryEgress] + ) + end + + it_behaves_like 'a working graphql query' + end + end +end diff --git a/spec/requests/api/graphql/group/labels_query_spec.rb b/spec/requests/api/graphql/group/labels_query_spec.rb deleted file mode 100644 index 28886f8d80b..00000000000 --- a/spec/requests/api/graphql/group/labels_query_spec.rb +++ /dev/null @@ -1,19 +0,0 @@ -# frozen_string_literal: true - -require 'spec_helper' - -RSpec.describe 'getting group label information', feature_category: :team_planning do - include GraphqlHelpers - - let_it_be(:group) { create(:group, :public) } - let_it_be(:label_factory) { :group_label } - let_it_be(:label_attrs) { { group: group } } - - it_behaves_like 'querying a GraphQL type with labels' do - let(:path_prefix) { ['group'] } - - def make_query(fields) - graphql_query_for('group', { full_path: 
group.full_path }, fields) - end - end -end diff --git a/spec/requests/api/graphql/jobs_query_spec.rb b/spec/requests/api/graphql/jobs_query_spec.rb index 0aea8e4c253..179c90fc564 100644 --- a/spec/requests/api/graphql/jobs_query_spec.rb +++ b/spec/requests/api/graphql/jobs_query_spec.rb @@ -14,7 +14,7 @@ RSpec.describe 'getting job information', feature_category: :continuous_integrat context 'when user is admin' do let_it_be(:current_user) { create(:admin) } - it 'has full access to all jobs', :aggregate_failure do + it 'has full access to all jobs', :aggregate_failures do post_graphql(query, current_user: current_user) expect(graphql_data_at(:jobs, :count)).to eq(1) @@ -25,14 +25,14 @@ RSpec.describe 'getting job information', feature_category: :continuous_integrat let_it_be(:pending_job) { create(:ci_build, :pending) } let_it_be(:failed_job) { create(:ci_build, :failed) } - it 'gets pending jobs', :aggregate_failure do + it 'gets pending jobs', :aggregate_failures do post_graphql(graphql_query_for(:jobs, { statuses: :PENDING }), current_user: current_user) expect(graphql_data_at(:jobs, :count)).to eq(1) expect(graphql_data_at(:jobs, :nodes)).to contain_exactly(a_graphql_entity_for(pending_job)) end - it 'gets pending and failed jobs', :aggregate_failure do + it 'gets pending and failed jobs', :aggregate_failures do post_graphql(graphql_query_for(:jobs, { statuses: [:PENDING, :FAILED] }), current_user: current_user) expect(graphql_data_at(:jobs, :count)).to eq(2) @@ -45,7 +45,7 @@ RSpec.describe 'getting job information', feature_category: :continuous_integrat context 'if the user is not an admin' do let_it_be(:current_user) { create(:user) } - it 'has no access to the jobs', :aggregate_failure do + it 'has no access to the jobs', :aggregate_failures do post_graphql(query, current_user: current_user) expect(graphql_data_at(:jobs, :count)).to eq(0) diff --git a/spec/requests/api/graphql/mutations/achievements/delete_spec.rb 
b/spec/requests/api/graphql/mutations/achievements/delete_spec.rb new file mode 100644 index 00000000000..276da4f46a8 --- /dev/null +++ b/spec/requests/api/graphql/mutations/achievements/delete_spec.rb @@ -0,0 +1,79 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe Mutations::Achievements::Delete, feature_category: :user_profile do + include GraphqlHelpers + + let_it_be(:developer) { create(:user) } + let_it_be(:maintainer) { create(:user) } + let_it_be(:group) { create(:group) } + + let!(:achievement) { create(:achievement, namespace: group) } + let(:mutation) { graphql_mutation(:achievements_delete, params) } + let(:achievement_id) { achievement&.to_global_id } + let(:params) { { achievement_id: achievement_id } } + + subject { post_graphql_mutation(mutation, current_user: current_user) } + + def mutation_response + graphql_mutation_response(:achievements_delete) + end + + before_all do + group.add_developer(developer) + group.add_maintainer(maintainer) + end + + context 'when the user does not have permission' do + let(:current_user) { developer } + + it_behaves_like 'a mutation that returns a top-level access error' + + it 'does not revoke any achievements' do + expect { subject }.not_to change { Achievements::Achievement.count } + end + end + + context 'when the user has permission' do + let(:current_user) { maintainer } + + context 'when the params are invalid' do + let(:achievement) { nil } + + it 'returns the validation error' do + subject + + expect(graphql_errors.to_s).to include('invalid value for achievementId (Expected value to not be null)') + end + end + + context 'when the achievement_id is invalid' do + let(:achievement_id) { "gid://gitlab/Achievements::Achievement/#{non_existing_record_id}" } + + it 'returns the validation error' do + subject + + expect(graphql_errors.to_s) + .to include("The resource that you are attempting to access does not exist or you don't have permission") + end + end + + context 'when the feature 
flag is disabled' do + before do + stub_feature_flags(achievements: false) + end + + it 'returns the relevant error' do + subject + + expect(graphql_errors.to_s) + .to include("The resource that you are attempting to access does not exist or you don't have permission") + end + end + + it 'deletes the achievement' do + expect { subject }.to change { Achievements::Achievement.count }.by(-1) + end + end +end diff --git a/spec/requests/api/graphql/mutations/achievements/update_spec.rb b/spec/requests/api/graphql/mutations/achievements/update_spec.rb new file mode 100644 index 00000000000..b2bb01b564c --- /dev/null +++ b/spec/requests/api/graphql/mutations/achievements/update_spec.rb @@ -0,0 +1,90 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe Mutations::Achievements::Update, feature_category: :user_profile do + include GraphqlHelpers + include WorkhorseHelpers + + let_it_be(:developer) { create(:user) } + let_it_be(:maintainer) { create(:user) } + let_it_be(:group) { create(:group) } + + let!(:achievement) { create(:achievement, namespace: group) } + let(:mutation) { graphql_mutation(:achievements_update, params) } + let(:achievement_id) { achievement&.to_global_id } + let(:params) { { achievement_id: achievement_id, name: 'GitLab', avatar: avatar } } + let(:avatar) { nil } + + subject { post_graphql_mutation_with_uploads(mutation, current_user: current_user) } + + def mutation_response + graphql_mutation_response(:achievements_update) + end + + before_all do + group.add_developer(developer) + group.add_maintainer(maintainer) + end + + context 'when the user does not have permission' do + let(:current_user) { developer } + + it_behaves_like 'a mutation that returns a top-level access error' + + it 'does not update the achievement' do + expect { subject }.not_to change { achievement.reload.name } + end + end + + context 'when the user has permission' do + let(:current_user) { maintainer } + + context 'when the params are invalid' do + 
let(:achievement) { nil } + + it 'returns the validation error' do + subject + + expect(graphql_errors.to_s).to include('invalid value for achievementId (Expected value to not be null)') + end + end + + context 'when the achievement_id is invalid' do + let(:achievement_id) { "gid://gitlab/Achievements::Achievement/#{non_existing_record_id}" } + + it 'returns the validation error' do + subject + + expect(graphql_errors.to_s) + .to include("The resource that you are attempting to access does not exist or you don't have permission") + end + end + + context 'when the feature flag is disabled' do + before do + stub_feature_flags(achievements: false) + end + + it 'returns the relevant permission error' do + subject + + expect(graphql_errors.to_s) + .to include("The resource that you are attempting to access does not exist or you don't have permission") + end + end + + context 'with a new avatar' do + let(:avatar) { fixture_file_upload("spec/fixtures/dk.png") } + + it 'updates the achievement' do + subject + + achievement.reload + + expect(achievement.name).to eq('GitLab') + expect(achievement.avatar.file).not_to be_nil + end + end + end +end diff --git a/spec/requests/api/graphql/mutations/ci/job/play_spec.rb b/spec/requests/api/graphql/mutations/ci/job/play_spec.rb index 8100274ed97..0c700248f85 100644 --- a/spec/requests/api/graphql/mutations/ci/job/play_spec.rb +++ b/spec/requests/api/graphql/mutations/ci/job/play_spec.rb @@ -63,7 +63,7 @@ RSpec.describe 'JobPlay', feature_category: :continuous_integration do } end - it 'provides those variables to the job', :aggregated_errors do + it 'provides those variables to the job', :aggregate_failures do expect_next_instance_of(Ci::PlayBuildService) do |instance| expect(instance).to receive(:execute).with(an_instance_of(Ci::Build), variables[:variables]).and_call_original end diff --git a/spec/requests/api/graphql/mutations/ci/runner/create_spec.rb b/spec/requests/api/graphql/mutations/ci/runner/create_spec.rb index 
f39f6f84c99..f592a2a3fe3 100644 --- a/spec/requests/api/graphql/mutations/ci/runner/create_spec.rb +++ b/spec/requests/api/graphql/mutations/ci/runner/create_spec.rb @@ -6,8 +6,12 @@ RSpec.describe 'RunnerCreate', feature_category: :runner_fleet do include GraphqlHelpers let_it_be(:user) { create(:user) } + let_it_be(:group_owner) { create(:user) } let_it_be(:admin) { create(:admin) } + let_it_be(:group) { create(:group) } + let_it_be(:other_group) { create(:group) } + let(:mutation_params) do { description: 'create description', @@ -17,7 +21,7 @@ RSpec.describe 'RunnerCreate', feature_category: :runner_fleet do paused: true, run_untagged: false, tag_list: %w[tag1 tag2] - } + }.deep_merge(mutation_scope_params) end let(:mutation) do @@ -49,72 +53,263 @@ RSpec.describe 'RunnerCreate', feature_category: :runner_fleet do let(:mutation_response) { graphql_mutation_response(:runner_create) } - context 'when user does not have permissions' do + before do + group.add_owner(group_owner) + end + + shared_context 'when model is invalid returns error' do + let(:mutation_params) do + { + description: '', + maintenanceNote: '', + paused: true, + accessLevel: 'NOT_PROTECTED', + runUntagged: false, + tagList: [], + maximumTimeout: 1 + }.deep_merge(mutation_scope_params) + end + + it do + post_graphql_mutation(mutation, current_user: current_user) + + expect(response).to have_gitlab_http_status(:success) + + expect(mutation_response['errors']).to contain_exactly( + 'Tags list can not be empty when runner is not allowed to pick untagged jobs', + 'Maximum timeout needs to be at least 10 minutes' + ) + end + end + + shared_context 'when user does not have permissions' do let(:current_user) { user } it 'returns an error' do post_graphql_mutation(mutation, current_user: current_user) - expect(mutation_response['errors']).to contain_exactly "Insufficient permissions" + expect_graphql_errors_to_include( + 'The resource that you are attempting to access does not exist ' \ + "or you don't 
have permission to perform this action" + ) end end - context 'when user has permissions', :enable_admin_mode do - let(:current_user) { admin } + shared_context 'when :create_runner_workflow_for_namespace feature flag is disabled' do + before do + stub_feature_flags(create_runner_workflow_for_namespace: [other_group]) + end - context 'when :create_runner_workflow_for_admin feature flag is disabled' do - before do - stub_feature_flags(create_runner_workflow_for_admin: false) + it 'returns an error' do + post_graphql_mutation(mutation, current_user: current_user) + + expect_graphql_errors_to_include('`create_runner_workflow_for_namespace` feature flag is disabled.') + end + end + + shared_context 'when runner is created successfully' do + before do + stub_feature_flags(create_runner_workflow_for_namespace: [group]) + end + + it do + expected_args = { user: current_user, params: anything } + expect_next_instance_of(::Ci::Runners::CreateRunnerService, expected_args) do |service| + expect(service).to receive(:execute).and_call_original end - it 'returns an error' do - post_graphql_mutation(mutation, current_user: current_user) + post_graphql_mutation(mutation, current_user: current_user) + + expect(response).to have_gitlab_http_status(:success) - expect(graphql_errors).not_to be_empty - expect(graphql_errors[0]['message']) - .to eq("`create_runner_workflow_for_admin` feature flag is disabled.") + expect(mutation_response['errors']).to eq([]) + expect(mutation_response['runner']).not_to be_nil + mutation_params.except(:group_id, :project_id).each_key do |key| + expect(mutation_response['runner'][key.to_s.camelize(:lower)]).to eq mutation_params[key] end + + expect(mutation_response['runner']['ephemeralAuthenticationToken']) + .to start_with Ci::Runner::CREATED_RUNNER_TOKEN_PREFIX + end + end + + context 'when runnerType is INSTANCE_TYPE' do + let(:mutation_scope_params) do + { runner_type: 'INSTANCE_TYPE' } end - context 'when success' do - it do - 
post_graphql_mutation(mutation, current_user: current_user) + it_behaves_like 'when user does not have permissions' - expect(response).to have_gitlab_http_status(:success) + context 'when user has permissions', :enable_admin_mode do + let(:current_user) { admin } - mutation_params.each_key do |key| - expect(mutation_response['runner'][key.to_s.camelize(:lower)]).to eq mutation_params[key] + context 'when :create_runner_workflow_for_admin feature flag is disabled' do + before do + stub_feature_flags(create_runner_workflow_for_admin: false) end - expect(mutation_response['runner']['ephemeralAuthenticationToken']).to start_with 'glrt' + it 'returns an error' do + post_graphql_mutation(mutation, current_user: current_user) - expect(mutation_response['errors']).to eq([]) + expect_graphql_errors_to_include('`create_runner_workflow_for_admin` feature flag is disabled.') + end end + + it_behaves_like 'when runner is created successfully' + it_behaves_like 'when model is invalid returns error' end + end - context 'when failure' do - let(:mutation_params) do - { - description: "", - maintenanceNote: "", - paused: true, - accessLevel: "NOT_PROTECTED", - runUntagged: false, - tagList: - [], - maximumTimeout: 1 - } + context 'when runnerType is GROUP_TYPE' do + let(:mutation_scope_params) do + { + runner_type: 'GROUP_TYPE', + group_id: group.to_global_id + } + end + + it_behaves_like 'when user does not have permissions' + + context 'when user has permissions' do + context 'when user is group owner' do + let(:current_user) { group_owner } + + it_behaves_like 'when :create_runner_workflow_for_namespace feature flag is disabled' + it_behaves_like 'when runner is created successfully' + it_behaves_like 'when model is invalid returns error' + + context 'when group_id is missing' do + let(:mutation_scope_params) do + { runner_type: 'GROUP_TYPE' } + end + + it 'returns an error' do + post_graphql_mutation(mutation, current_user: current_user) + + 
expect_graphql_errors_to_include('`group_id` is missing') + end + end + + context 'when group_id is malformed' do + let(:mutation_scope_params) do + { + runner_type: 'GROUP_TYPE', + group_id: '' + } + end + + it 'returns an error' do + post_graphql_mutation(mutation, current_user: current_user) + + expect_graphql_errors_to_include( + "RunnerCreateInput! was provided invalid value for groupId" + ) + end + end + + context 'when group_id does not exist' do + let(:mutation_scope_params) do + { + runner_type: 'GROUP_TYPE', + group_id: "gid://gitlab/Group/#{non_existing_record_id}" + } + end + + it 'returns an error' do + post_graphql_mutation(mutation, current_user: current_user) + + expect_graphql_errors_to_include( + 'The resource that you are attempting to access does not exist ' \ + "or you don't have permission to perform this action" + ) + end + end end - it do - post_graphql_mutation(mutation, current_user: current_user) + context 'when user is admin in admin mode', :enable_admin_mode do + let(:current_user) { admin } + + it_behaves_like 'when :create_runner_workflow_for_namespace feature flag is disabled' + it_behaves_like 'when runner is created successfully' + it_behaves_like 'when model is invalid returns error' + end + end + end + + context 'when runnerType is PROJECT_TYPE' do + let_it_be(:project) { create(:project, namespace: group) } + + let(:mutation_scope_params) do + { + runner_type: 'PROJECT_TYPE', + project_id: project.to_global_id + } + end + + it_behaves_like 'when user does not have permissions' + + context 'when user has permissions' do + context 'when user is group owner' do + let(:current_user) { group_owner } + + it_behaves_like 'when :create_runner_workflow_for_namespace feature flag is disabled' + it_behaves_like 'when runner is created successfully' + it_behaves_like 'when model is invalid returns error' + + context 'when project_id is missing' do + let(:mutation_scope_params) do + { runner_type: 'PROJECT_TYPE' } + end + + it 'returns an 
error' do + post_graphql_mutation(mutation, current_user: current_user) + + expect_graphql_errors_to_include('`project_id` is missing') + end + end + + context 'when project_id is malformed' do + let(:mutation_scope_params) do + { + runner_type: 'PROJECT_TYPE', + project_id: '' + } + end + + it 'returns an error' do + post_graphql_mutation(mutation, current_user: current_user) + + expect_graphql_errors_to_include( + "RunnerCreateInput! was provided invalid value for projectId" + ) + end + end + + context 'when project_id does not exist' do + let(:mutation_scope_params) do + { + runner_type: 'PROJECT_TYPE', + project_id: "gid://gitlab/Project/#{non_existing_record_id}" + } + end + + it 'returns an error' do + post_graphql_mutation(mutation, current_user: current_user) + + expect_graphql_errors_to_include( + 'The resource that you are attempting to access does not exist ' \ + "or you don't have permission to perform this action" + ) + end + end + end - expect(response).to have_gitlab_http_status(:success) + context 'when user is admin in admin mode', :enable_admin_mode do + let(:current_user) { admin } - expect(mutation_response['errors']).to contain_exactly( - "Tags list can not be empty when runner is not allowed to pick untagged jobs", - "Maximum timeout needs to be at least 10 minutes" - ) + it_behaves_like 'when :create_runner_workflow_for_namespace feature flag is disabled' + it_behaves_like 'when runner is created successfully' + it_behaves_like 'when model is invalid returns error' end end end diff --git a/spec/requests/api/graphql/mutations/clusters/agent_tokens/agent_tokens/create_spec.rb b/spec/requests/api/graphql/mutations/clusters/agent_tokens/agent_tokens/create_spec.rb index f544cef8864..ef0d44395bf 100644 --- a/spec/requests/api/graphql/mutations/clusters/agent_tokens/agent_tokens/create_spec.rb +++ b/spec/requests/api/graphql/mutations/clusters/agent_tokens/agent_tokens/create_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe 'Create a 
new cluster agent token', feature_category: :kubernetes_management do +RSpec.describe 'Create a new cluster agent token', feature_category: :deployment_management do include GraphqlHelpers let_it_be(:cluster_agent) { create(:cluster_agent) } diff --git a/spec/requests/api/graphql/mutations/clusters/agents/create_spec.rb b/spec/requests/api/graphql/mutations/clusters/agents/create_spec.rb index 66e6c5cc629..1d1e72dcff9 100644 --- a/spec/requests/api/graphql/mutations/clusters/agents/create_spec.rb +++ b/spec/requests/api/graphql/mutations/clusters/agents/create_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe 'Create a new cluster agent', feature_category: :kubernetes_management do +RSpec.describe 'Create a new cluster agent', feature_category: :deployment_management do include GraphqlHelpers let(:project) { create(:project, :public, :repository) } diff --git a/spec/requests/api/graphql/mutations/clusters/agents/delete_spec.rb b/spec/requests/api/graphql/mutations/clusters/agents/delete_spec.rb index 27a566dfb8c..b70a6282a7a 100644 --- a/spec/requests/api/graphql/mutations/clusters/agents/delete_spec.rb +++ b/spec/requests/api/graphql/mutations/clusters/agents/delete_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe 'Delete a cluster agent', feature_category: :kubernetes_management do +RSpec.describe 'Delete a cluster agent', feature_category: :deployment_management do include GraphqlHelpers let(:cluster_agent) { create(:cluster_agent) } diff --git a/spec/requests/api/graphql/mutations/container_repository/destroy_spec.rb b/spec/requests/api/graphql/mutations/container_repository/destroy_spec.rb index 8b76c19cda6..ef159e41d3d 100644 --- a/spec/requests/api/graphql/mutations/container_repository/destroy_spec.rb +++ b/spec/requests/api/graphql/mutations/container_repository/destroy_spec.rb @@ -39,7 +39,7 @@ RSpec.describe 'Destroying a container repository', feature_category: :container expect(DeleteContainerRepositoryWorker) .not_to 
receive(:perform_async) - expect { subject }.to change { ::Packages::Event.count }.by(1) + subject expect(container_repository_mutation_response).to match_schema('graphql/container_repository') expect(container_repository_mutation_response['status']).to eq('DELETE_SCHEDULED') @@ -53,7 +53,7 @@ RSpec.describe 'Destroying a container repository', feature_category: :container expect(DeleteContainerRepositoryWorker) .not_to receive(:perform_async).with(user.id, container_repository.id) - expect { subject }.not_to change { ::Packages::Event.count } + subject expect(mutation_response).to be_nil end diff --git a/spec/requests/api/graphql/mutations/container_repository/destroy_tags_spec.rb b/spec/requests/api/graphql/mutations/container_repository/destroy_tags_spec.rb index 9e07a831076..0cb607e13ec 100644 --- a/spec/requests/api/graphql/mutations/container_repository/destroy_tags_spec.rb +++ b/spec/requests/api/graphql/mutations/container_repository/destroy_tags_spec.rb @@ -36,7 +36,7 @@ RSpec.describe 'Destroying a container repository tags', feature_category: :cont it 'destroys the container repository tags' do expect(Projects::ContainerRepository::DeleteTagsService) .to receive(:new).and_call_original - expect { subject }.to change { ::Packages::Event.count }.by(1) + subject expect(tag_names_response).to eq(tags) expect(errors_response).to eq([]) @@ -50,7 +50,7 @@ RSpec.describe 'Destroying a container repository tags', feature_category: :cont expect(Projects::ContainerRepository::DeleteTagsService) .not_to receive(:new) - expect { subject }.not_to change { ::Packages::Event.count } + subject expect(mutation_response).to be_nil end @@ -89,7 +89,7 @@ RSpec.describe 'Destroying a container repository tags', feature_category: :cont let(:tags) { Array.new(Mutations::ContainerRepositories::DestroyTags::LIMIT + 1, 'x') } it 'returns too many tags error' do - expect { subject }.not_to change { ::Packages::Event.count } + subject explanation = graphql_errors.dig(0, 'message') 
expect(explanation).to eq(Mutations::ContainerRepositories::DestroyTags::TOO_MANY_TAGS_ERROR_MESSAGE) @@ -113,7 +113,7 @@ RSpec.describe 'Destroying a container repository tags', feature_category: :cont it 'does not create a package event' do expect(::Packages::CreateEventService).not_to receive(:new) - expect { subject }.not_to change { ::Packages::Event.count } + subject end end end diff --git a/spec/requests/api/graphql/mutations/merge_requests/set_assignees_spec.rb b/spec/requests/api/graphql/mutations/merge_requests/set_assignees_spec.rb index b5f2042c42a..d41628704a1 100644 --- a/spec/requests/api/graphql/mutations/merge_requests/set_assignees_spec.rb +++ b/spec/requests/api/graphql/mutations/merge_requests/set_assignees_spec.rb @@ -106,7 +106,7 @@ RSpec.describe 'Setting assignees of a merge request', :assume_throttled, featur end context 'when passing an empty list of assignees' do - let(:db_query_limit) { 31 } + let(:db_query_limit) { 35 } let(:input) { { assignee_usernames: [] } } before do diff --git a/spec/requests/api/graphql/mutations/projects/sync_fork_spec.rb b/spec/requests/api/graphql/mutations/projects/sync_fork_spec.rb index a77c026dd06..f3af662c2a0 100644 --- a/spec/requests/api/graphql/mutations/projects/sync_fork_spec.rb +++ b/spec/requests/api/graphql/mutations/projects/sync_fork_spec.rb @@ -50,6 +50,20 @@ RSpec.describe "Sync project fork", feature_category: :source_code_management do end end + context 'when the branch is protected', :use_clean_rails_redis_caching do + let_it_be(:protected_branch) do + create(:protected_branch, :no_one_can_push, :no_one_can_merge, project: project, name: target_branch) + end + + it_behaves_like 'a mutation that returns a top-level access error' + + it 'does not call the sync service' do + expect(::Projects::Forks::SyncWorker).not_to receive(:perform_async) + + post_graphql_mutation(mutation, current_user: current_user) + end + end + context 'when the user does not have permission' do 
let_it_be(:current_user) { create(:user) } @@ -96,6 +110,14 @@ RSpec.describe "Sync project fork", feature_category: :source_code_management do end end + context 'when the specified branch does not exist' do + let(:target_branch) { 'non-existent-branch' } + + it 'returns an error' do + expect_error_response('Target branch does not exist') + end + end + context 'when the previous execution resulted in a conflict' do it 'returns an error' do expect_next_instance_of(::Projects::Forks::Details) do |instance| diff --git a/spec/requests/api/graphql/mutations/work_items/convert_spec.rb b/spec/requests/api/graphql/mutations/work_items/convert_spec.rb new file mode 100644 index 00000000000..8017a85d75d --- /dev/null +++ b/spec/requests/api/graphql/mutations/work_items/convert_spec.rb @@ -0,0 +1,79 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe "Converts a work item to a new type", feature_category: :team_planning do + include GraphqlHelpers + + let_it_be(:project) { create(:project) } + let_it_be(:developer) { create(:user).tap { |user| project.add_developer(user) } } + let_it_be(:new_type) { create(:work_item_type, :incident, :default) } + let_it_be(:work_item, refind: true) do + create(:work_item, :task, project: project, milestone: create(:milestone, project: project)) + end + + let(:work_item_type_id) { new_type.to_global_id.to_s } + let(:mutation) { graphql_mutation(:workItemConvert, input) } + let(:mutation_response) { graphql_mutation_response(:work_item_convert) } + let(:input) do + { + 'id' => work_item.to_global_id.to_s, + 'work_item_type_id' => work_item_type_id + } + end + + context 'when user is not allowed to update a work item' do + let(:current_user) { create(:user) } + + it_behaves_like 'a mutation that returns a top-level access error' + end + + context 'when user has permissions to convert the work item type' do + let(:current_user) { developer } + + context 'when work item type does not exist' do + let(:work_item_type_id) { 
"gid://gitlab/WorkItems::Type/#{non_existing_record_id}" } + + it 'returns an error' do + post_graphql_mutation(mutation, current_user: current_user) + + expect(graphql_errors).to include( + a_hash_including('message' => "Work Item type with id #{non_existing_record_id} was not found") + ) + end + end + + context 'when feature flag is enabled' do + it 'converts the work item', :aggregate_failures do + expect do + post_graphql_mutation(mutation, current_user: current_user) + end.to change { work_item.reload.work_item_type }.to(new_type) + + expect(response).to have_gitlab_http_status(:success) + expect(work_item.reload.issue_type).to eq('incident') + expect(work_item.reload.work_item_type.base_type).to eq('incident') + expect(mutation_response['workItem']).to include('id' => work_item.to_global_id.to_s) + expect(work_item.reload.milestone).to be_nil + end + end + + context 'when feature flag is disabled' do + before do + stub_feature_flags(work_item_conversion: false) + end + + it 'does not convert the work item', :aggregate_failures do + expect do + post_graphql_mutation(mutation, current_user: current_user) + end.not_to change { work_item.reload.work_item_type } + + expect(response).to have_gitlab_http_status(:success) + expect(mutation_response['errors']).to match_array(['Feature flag disabled']) + end + end + + it_behaves_like 'has spam protection' do + let(:mutation_class) { ::Mutations::WorkItems::Convert } + end + end +end diff --git a/spec/requests/api/graphql/mutations/work_items/create_spec.rb b/spec/requests/api/graphql/mutations/work_items/create_spec.rb index 7519389ab49..c576d4d286a 100644 --- a/spec/requests/api/graphql/mutations/work_items/create_spec.rb +++ b/spec/requests/api/graphql/mutations/work_items/create_spec.rb @@ -5,8 +5,9 @@ require 'spec_helper' RSpec.describe 'Create a work item', feature_category: :team_planning do include GraphqlHelpers - let_it_be(:project) { create(:project) } - let_it_be(:developer) { create(:user).tap { |user| 
project.add_developer(user) } } + let_it_be(:group) { create(:group) } + let_it_be(:project) { create(:project, group: group) } + let_it_be(:developer) { create(:user).tap { |user| group.add_developer(user) } } let(:input) do { @@ -17,26 +18,17 @@ RSpec.describe 'Create a work item', feature_category: :team_planning do } end - let(:mutation) { graphql_mutation(:workItemCreate, input.merge('projectPath' => project.full_path)) } - + let(:fields) { nil } let(:mutation_response) { graphql_mutation_response(:work_item_create) } + let(:current_user) { developer } - context 'the user is not allowed to create a work item' do - let(:current_user) { create(:user) } - - it_behaves_like 'a mutation that returns a top-level access error' - end - - context 'when user has permissions to create a work item' do - let(:current_user) { developer } - + RSpec.shared_examples 'creates work item' do it 'creates the work item' do expect do post_graphql_mutation(mutation, current_user: current_user) end.to change(WorkItem, :count).by(1) created_work_item = WorkItem.last - expect(response).to have_gitlab_http_status(:success) expect(created_work_item.issue_type).to eq('task') expect(created_work_item).to be_confidential @@ -90,10 +82,8 @@ RSpec.describe 'Create a work item', feature_category: :team_planning do FIELDS end - let(:mutation) { graphql_mutation(:workItemCreate, input.merge('projectPath' => project.full_path), fields) } - context 'when setting parent' do - let_it_be(:parent) { create(:work_item, project: project) } + let_it_be(:parent) { create(:work_item, **container_params) } let(:input) do { @@ -117,7 +107,7 @@ RSpec.describe 'Create a work item', feature_category: :team_planning do end context 'when parent work item type is invalid' do - let_it_be(:parent) { create(:work_item, :task, project: project) } + let_it_be(:parent) { create(:work_item, :task, **container_params) } it 'returns error' do post_graphql_mutation(mutation, current_user: current_user) @@ -139,7 +129,7 @@ 
RSpec.describe 'Create a work item', feature_category: :team_planning do end context 'when adjacent is already in place' do - let_it_be(:adjacent) { create(:work_item, :task, project: project) } + let_it_be(:adjacent) { create(:work_item, :task, **container_params) } let(:work_item) { WorkItem.last } @@ -206,11 +196,9 @@ RSpec.describe 'Create a work item', feature_category: :team_planning do FIELDS end - let(:mutation) { graphql_mutation(:workItemCreate, input.merge('projectPath' => project.full_path), fields) } - context 'when setting milestone on work item creation' do let_it_be(:project_milestone) { create(:milestone, project: project) } - let_it_be(:group_milestone) { create(:milestone, project: project) } + let_it_be(:group_milestone) { create(:milestone, group: group) } let(:input) do { @@ -237,6 +225,11 @@ RSpec.describe 'Create a work item', feature_category: :team_planning do end context 'when assigning a project milestone' do + before do + group_work_item = container_params[:namespace].present? 
+ skip('cannot set a project level milestone to a group level work item') if group_work_item + end + it_behaves_like "work item's milestone is set" do let(:milestone) { project_milestone } end @@ -250,4 +243,66 @@ RSpec.describe 'Create a work item', feature_category: :team_planning do end end end + + context 'the user is not allowed to create a work item' do + let(:current_user) { create(:user) } + let(:mutation) { graphql_mutation(:workItemCreate, input.merge('projectPath' => project.full_path), fields) } + + it_behaves_like 'a mutation that returns a top-level access error' + end + + context 'when user has permissions to create a work item' do + context 'when creating work items in a project' do + context 'with projectPath' do + let_it_be(:container_params) { { project: project } } + let(:mutation) { graphql_mutation(:workItemCreate, input.merge('projectPath' => project.full_path), fields) } + + it_behaves_like 'creates work item' + end + + context 'with namespacePath' do + let_it_be(:container_params) { { project: project } } + let(:mutation) { graphql_mutation(:workItemCreate, input.merge('namespacePath' => project.full_path), fields) } + + it_behaves_like 'creates work item' + end + end + + context 'when creating work items in a group' do + let_it_be(:container_params) { { namespace: group } } + let(:mutation) { graphql_mutation(:workItemCreate, input.merge(namespacePath: group.full_path), fields) } + + it_behaves_like 'creates work item' + end + + context 'when both projectPath and namespacePath are passed' do + let_it_be(:container_params) { { project: project } } + let(:mutation) do + graphql_mutation( + :workItemCreate, + input.merge('projectPath' => project.full_path, 'namespacePath' => project.full_path), + fields + ) + end + + it_behaves_like 'a mutation that returns top-level errors', errors: [ + Mutations::WorkItems::Create::MUTUALLY_EXCLUSIVE_ARGUMENTS_ERROR + ] + end + + context 'when neither of projectPath nor namespacePath are passed' do + 
let_it_be(:container_params) { { project: project } } + let(:mutation) do + graphql_mutation( + :workItemCreate, + input, + fields + ) + end + + it_behaves_like 'a mutation that returns top-level errors', errors: [ + Mutations::WorkItems::Create::MUTUALLY_EXCLUSIVE_ARGUMENTS_ERROR + ] + end + end end diff --git a/spec/requests/api/graphql/mutations/work_items/export_spec.rb b/spec/requests/api/graphql/mutations/work_items/export_spec.rb index 3cadaab5201..d87fd5f84eb 100644 --- a/spec/requests/api/graphql/mutations/work_items/export_spec.rb +++ b/spec/requests/api/graphql/mutations/work_items/export_spec.rb @@ -35,7 +35,7 @@ RSpec.describe 'Export work items', feature_category: :team_planning do let(:current_user) { reporter } let(:input) do super().merge( - 'selectedFields' => %w[TITLE AUTHOR TYPE AUTHOR_USERNAME CREATED_AT], + 'selectedFields' => %w[TITLE DESCRIPTION AUTHOR TYPE AUTHOR_USERNAME CREATED_AT], 'authorUsername' => 'admin', 'iids' => [work_item.iid.to_s], 'state' => 'opened', @@ -47,7 +47,7 @@ RSpec.describe 'Export work items', feature_category: :team_planning do it 'schedules export job with given arguments', :aggregate_failures do expected_arguments = { - selected_fields: ['title', 'author', 'type', 'author username', 'created_at'], + selected_fields: ['title', 'description', 'author', 'type', 'author username', 'created_at'], author_username: 'admin', iids: [work_item.iid.to_s], state: 'opened', diff --git a/spec/requests/api/graphql/mutations/work_items/update_spec.rb b/spec/requests/api/graphql/mutations/work_items/update_spec.rb index 76dc60be799..bea9ba25f76 100644 --- a/spec/requests/api/graphql/mutations/work_items/update_spec.rb +++ b/spec/requests/api/graphql/mutations/work_items/update_spec.rb @@ -468,9 +468,62 @@ RSpec.describe 'Update a work item', feature_category: :team_planning do FIELDS end + let_it_be(:valid_parent) { create(:work_item, project: project) } + let_it_be(:valid_child1) { create(:work_item, :task, project: project, 
created_at: 5.minutes.ago) } + let_it_be(:valid_child2) { create(:work_item, :task, project: project, created_at: 5.minutes.from_now) } + let(:input_base) { { parentId: valid_parent.to_gid.to_s } } + let(:child1_ref) { { adjacentWorkItemId: valid_child1.to_global_id.to_s } } + let(:child2_ref) { { adjacentWorkItemId: valid_child2.to_global_id.to_s } } + let(:relative_range) { [valid_child1, valid_child2].map(&:parent_link).map(&:relative_position) } + + let(:invalid_relative_position_error) do + WorkItems::Widgets::HierarchyService::UpdateService::INVALID_RELATIVE_POSITION_ERROR + end + + shared_examples 'updates work item parent and sets the relative position' do + it do + expect do + post_graphql_mutation(mutation, current_user: current_user) + work_item.reload + end.to change(work_item, :work_item_parent).from(nil).to(valid_parent) + + expect(response).to have_gitlab_http_status(:success) + expect(widgets_response).to include({ 'type' => 'HIERARCHY', 'children' => { 'edges' => [] }, + 'parent' => { 'id' => valid_parent.to_global_id.to_s } }) + + expect(work_item.parent_link.relative_position).to be_between(*relative_range) + end + end + + shared_examples 'sets the relative position and does not update work item parent' do + it do + expect do + post_graphql_mutation(mutation, current_user: current_user) + work_item.reload + end.to not_change(work_item, :work_item_parent) + + expect(response).to have_gitlab_http_status(:success) + expect(widgets_response).to include({ 'type' => 'HIERARCHY', 'children' => { 'edges' => [] }, + 'parent' => { 'id' => valid_parent.to_global_id.to_s } }) + + expect(work_item.parent_link.relative_position).to be_between(*relative_range) + end + end + + shared_examples 'returns "relative position is not valid" error message' do + it do + expect do + post_graphql_mutation(mutation, current_user: current_user) + work_item.reload + end.to not_change(work_item, :work_item_parent) + + expect(mutation_response['workItem']).to be_nil + 
expect(mutation_response['errors']).to match_array([invalid_relative_position_error]) + end + end + context 'when updating parent' do let_it_be(:work_item, reload: true) { create(:work_item, :task, project: project) } - let_it_be(:valid_parent) { create(:work_item, project: project) } let_it_be(:invalid_parent) { create(:work_item, :task, project: project) } context 'when parent work item type is invalid' do @@ -493,20 +546,15 @@ RSpec.describe 'Update a work item', feature_category: :team_planning do context 'when parent work item has a valid type' do let(:input) { { 'hierarchyWidget' => { 'parentId' => valid_parent.to_global_id.to_s } } } - it 'sets the parent for the work item' do + it 'updates work item parent' do expect do post_graphql_mutation(mutation, current_user: current_user) work_item.reload end.to change(work_item, :work_item_parent).from(nil).to(valid_parent) expect(response).to have_gitlab_http_status(:success) - expect(widgets_response).to include( - { - 'children' => { 'edges' => [] }, - 'parent' => { 'id' => valid_parent.to_global_id.to_s }, - 'type' => 'HIERARCHY' - } - ) + expect(widgets_response).to include({ 'type' => 'HIERARCHY', 'children' => { 'edges' => [] }, + 'parent' => { 'id' => valid_parent.to_global_id.to_s } }) end context 'when a parent is already present' do @@ -523,6 +571,31 @@ RSpec.describe 'Update a work item', feature_category: :team_planning do end.to change(work_item, :work_item_parent).from(existing_parent).to(valid_parent) end end + + context 'when updating relative position' do + before(:all) do + create(:parent_link, work_item_parent: valid_parent, work_item: valid_child1) + create(:parent_link, work_item_parent: valid_parent, work_item: valid_child2) + end + + context "when incomplete positioning arguments are given" do + let(:input) { { hierarchyWidget: input_base.merge(child1_ref) } } + + it_behaves_like 'returns "relative position is not valid" error message' + end + + context 'when moving after adjacent' do + 
let(:input) { { hierarchyWidget: input_base.merge(child1_ref).merge(relativePosition: 'AFTER') } } + + it_behaves_like 'updates work item parent and sets the relative position' + end + + context 'when moving before adjacent' do + let(:input) { { hierarchyWidget: input_base.merge(child2_ref).merge(relativePosition: 'BEFORE') } } + + it_behaves_like 'updates work item parent and sets the relative position' + end + end end context 'when parentId is null' do @@ -578,9 +651,37 @@ RSpec.describe 'Update a work item', feature_category: :team_planning do end end + context 'when reordering existing child' do + let_it_be(:work_item, reload: true) { create(:work_item, :task, project: project) } + + context "when parent is already assigned" do + before(:all) do + create(:parent_link, work_item_parent: valid_parent, work_item: work_item) + create(:parent_link, work_item_parent: valid_parent, work_item: valid_child1) + create(:parent_link, work_item_parent: valid_parent, work_item: valid_child2) + end + + context "when incomplete positioning arguments are given" do + let(:input) { { hierarchyWidget: child1_ref } } + + it_behaves_like 'returns "relative position is not valid" error message' + end + + context 'when moving after adjacent' do + let(:input) { { hierarchyWidget: child1_ref.merge(relativePosition: 'AFTER') } } + + it_behaves_like 'sets the relative position and does not update work item parent' + end + + context 'when moving before adjacent' do + let(:input) { { hierarchyWidget: child2_ref.merge(relativePosition: 'BEFORE') } } + + it_behaves_like 'sets the relative position and does not update work item parent' + end + end + end + context 'when updating children' do - let_it_be(:valid_child1) { create(:work_item, :task, project: project) } - let_it_be(:valid_child2) { create(:work_item, :task, project: project) } let_it_be(:invalid_child) { create(:work_item, project: project) } let(:input) { { 'hierarchyWidget' => { 'childrenIds' => children_ids } } } @@ -978,18 
+1079,293 @@ RSpec.describe 'Update a work item', feature_category: :team_planning do end end + context 'when updating currentUserTodos' do + let_it_be(:current_user) { reporter } + + let(:fields) do + <<~FIELDS + workItem { + widgets { + type + ... on WorkItemWidgetCurrentUserTodos { + currentUserTodos { + nodes { + id + state + } + } + } + } + } + errors + FIELDS + end + + subject(:update_work_item) { post_graphql_mutation(mutation, current_user: current_user) } + + context 'when adding a new todo' do + let(:input) { { 'currentUserTodosWidget' => { 'action' => 'ADD' } } } + + context 'when user has access to the work item' do + it 'adds a new todo for the user on the work item' do + expect { update_work_item }.to change { current_user.todos.count }.by(1) + + created_todo = current_user.todos.last + + expect(response).to have_gitlab_http_status(:success) + expect(mutation_response['workItem']['widgets']).to include( + { + 'type' => 'CURRENT_USER_TODOS', + 'currentUserTodos' => { + 'nodes' => [ + { 'id' => created_todo.to_global_id.to_s, 'state' => 'pending' } + ] + } + } + ) + end + end + + context 'when user has no access' do + let_it_be(:current_user) { create(:user) } + + it 'does not create a new todo' do + expect { update_work_item }.to change { Todo.count }.by(0) + + expect(response).to have_gitlab_http_status(:success) + end + end + end + + context 'when marking all todos of the work item as done' do + let_it_be(:pending_todo1) do + create(:todo, target: work_item, target_type: 'WorkItem', user: current_user, state: :pending) + end + + let_it_be(:pending_todo2) do + create(:todo, target: work_item, target_type: 'WorkItem', user: current_user, state: :pending) + end + + let(:input) { { 'currentUserTodosWidget' => { 'action' => 'MARK_AS_DONE' } } } + + context 'when user has access' do + it 'marks all todos of the user on the work item as done' do + expect { update_work_item }.to change { current_user.todos.done.count }.by(2) + + expect(response).to 
have_gitlab_http_status(:success) + expect(mutation_response['workItem']['widgets']).to include( + { + 'type' => 'CURRENT_USER_TODOS', + 'currentUserTodos' => { + 'nodes' => match_array([ + { 'id' => pending_todo1.to_global_id.to_s, 'state' => 'done' }, + { 'id' => pending_todo2.to_global_id.to_s, 'state' => 'done' } + ]) + } + } + ) + end + end + + context 'when user has no access' do + let_it_be(:current_user) { create(:user) } + + it 'does not mark todos as done' do + expect { update_work_item }.to change { Todo.done.count }.by(0) + + expect(response).to have_gitlab_http_status(:success) + end + end + end + + context 'when marking one todo of the work item as done' do + let_it_be(:pending_todo1) do + create(:todo, target: work_item, target_type: 'WorkItem', user: current_user, state: :pending) + end + + let_it_be(:pending_todo2) do + create(:todo, target: work_item, target_type: 'WorkItem', user: current_user, state: :pending) + end + + let(:input) do + { 'currentUserTodosWidget' => { 'action' => 'MARK_AS_DONE', todo_id: global_id_of(pending_todo1) } } + end + + context 'when user has access' do + it 'marks the todo of the work item as done' do + expect { update_work_item }.to change { current_user.todos.done.count }.by(1) + + expect(response).to have_gitlab_http_status(:success) + expect(mutation_response['workItem']['widgets']).to include( + { + 'type' => 'CURRENT_USER_TODOS', + 'currentUserTodos' => { + 'nodes' => match_array([ + { 'id' => pending_todo1.to_global_id.to_s, 'state' => 'done' }, + { 'id' => pending_todo2.to_global_id.to_s, 'state' => 'pending' } + ]) + } + } + ) + end + end + + context 'when user has no access' do + let_it_be(:current_user) { create(:user) } + + it 'does not mark the todo as done' do + expect { update_work_item }.to change { Todo.done.count }.by(0) + + expect(response).to have_gitlab_http_status(:success) + end + end + end + end + + context 'when updating awardEmoji' do + let_it_be(:current_user) { work_item.author } + 
let_it_be(:upvote) { create(:award_emoji, :upvote, awardable: work_item, user: current_user) } + let(:award_action) { 'ADD' } + let(:award_name) { 'star' } + let(:input) { { 'awardEmojiWidget' => { 'action' => award_action, 'name' => award_name } } } + + let(:fields) do + <<~FIELDS + workItem { + widgets { + type + ... on WorkItemWidgetAwardEmoji { + upvotes + downvotes + awardEmoji { + nodes { + name + user { id } + } + } + } + } + } + errors + FIELDS + end + + subject(:update_work_item) { post_graphql_mutation(mutation, current_user: current_user) } + + context 'when user cannot award work item' do + before do + allow(Ability).to receive(:allowed?).and_call_original + allow(Ability).to receive(:allowed?) + .with(current_user, :award_emoji, work_item).and_return(false) + end + + it 'ignores the update request' do + expect do + update_work_item + end.to not_change(AwardEmoji, :count) + + expect(response).to have_gitlab_http_status(:success) + expect(mutation_response['errors']).to be_empty + expect(graphql_errors).to be_blank + end + end + + context 'when user can award work item' do + shared_examples 'request with error' do |message| + it 'ignores update and returns an error' do + expect do + update_work_item + end.not_to change(AwardEmoji, :count) + + expect(response).to have_gitlab_http_status(:success) + expect(mutation_response['workItem']).to be_nil + expect(mutation_response['errors'].first).to include(message) + end + end + + shared_examples 'request that removes emoji' do + it "updates work item's award emoji" do + expect do + update_work_item + end.to change(AwardEmoji, :count).by(-1) + + expect(response).to have_gitlab_http_status(:success) + expect(mutation_response['workItem']['widgets']).to include( + { + 'upvotes' => 0, + 'downvotes' => 0, + 'awardEmoji' => { 'nodes' => [] }, + 'type' => 'AWARD_EMOJI' + } + ) + end + end + + shared_examples 'request that adds emoji' do + it "updates work item's award emoji" do + expect do + update_work_item + end.to 
change(AwardEmoji, :count).by(1) + + expect(response).to have_gitlab_http_status(:success) + expect(mutation_response['workItem']['widgets']).to include( + { + 'upvotes' => 1, + 'downvotes' => 0, + 'awardEmoji' => { 'nodes' => [ + { 'name' => 'thumbsup', 'user' => { 'id' => current_user.to_gid.to_s } }, + { 'name' => award_name, 'user' => { 'id' => current_user.to_gid.to_s } } + ] }, + 'type' => 'AWARD_EMOJI' + } + ) + end + end + + context 'when adding award emoji' do + it_behaves_like 'request that adds emoji' + + context 'when the emoji name is not valid' do + let(:award_name) { 'xxqq' } + + it_behaves_like 'request with error', 'Name is not a valid emoji name' + end + end + + context 'when removing award emoji' do + let(:award_action) { 'REMOVE' } + + context 'when emoji was awarded by current user' do + let(:award_name) { 'thumbsup' } + + it_behaves_like 'request that removes emoji' + end + + context 'when emoji was awarded by a different user' do + let(:award_name) { 'thumbsdown' } + + before do + create(:award_emoji, :downvote, awardable: work_item) + end + + it_behaves_like 'request with error', + 'User has not awarded emoji of type thumbsdown on the awardable' + end + end + end + end + context 'when unsupported widget input is sent' do - let_it_be(:test_case) { create(:work_item_type, :default, :test_case) } - let_it_be(:work_item) { create(:work_item, work_item_type: test_case, project: project) } + let_it_be(:work_item) { create(:work_item, :incident, project: project) } let(:input) do { - 'hierarchyWidget' => {} + 'assigneesWidget' => { 'assigneeIds' => [developer.to_gid.to_s] } } end it_behaves_like 'a mutation that returns top-level errors', - errors: ["Following widget keys are not supported by Test Case type: [:hierarchy_widget]"] + errors: ["Following widget keys are not supported by Incident type: [:assignees_widget]"] end end end diff --git a/spec/requests/api/graphql/project/alert_management/alert/notes_spec.rb 
b/spec/requests/api/graphql/project/alert_management/alert/notes_spec.rb index 16dd0dfcfcb..c1ac0367853 100644 --- a/spec/requests/api/graphql/project/alert_management/alert/notes_spec.rb +++ b/spec/requests/api/graphql/project/alert_management/alert/notes_spec.rb @@ -51,7 +51,7 @@ RSpec.describe 'getting Alert Management Alert Notes', feature_category: :team_p expect(first_notes_result.first).to include( 'id' => first_system_note.to_global_id.to_s, - 'systemNoteIconName' => 'git-merge', + 'systemNoteIconName' => 'merge', 'body' => first_system_note.note ) end diff --git a/spec/requests/api/graphql/project/branches_tipping_at_commit_spec.rb b/spec/requests/api/graphql/project/branches_tipping_at_commit_spec.rb new file mode 100644 index 00000000000..bba8977078d --- /dev/null +++ b/spec/requests/api/graphql/project/branches_tipping_at_commit_spec.rb @@ -0,0 +1,67 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe 'Query.project(fullPath).tagsTippingAtCommit(commitSha)', feature_category: :source_code_management do + include GraphqlHelpers + include Presentable + + let_it_be(:project) { create(:project, :repository) } + let_it_be(:repository) { project.repository.raw } + let_it_be(:current_user) { project.first_owner } + let_it_be(:branches_names) { %w[master not-merged-branch v1.1.0] } + + let(:post_query) { post_graphql(query, current_user: current_user) } + let(:path) { %w[project branchesTippingAtCommit names] } + let(:data) { graphql_data.dig(*path) } + + let(:query) do + graphql_query_for( + :project, + { fullPath: project.full_path }, + query_graphql_field(:branchesTippingAtCommit, { commitSha: commit_sha }, :names) + ) + end + + context 'when commit exists and is tipping branches' do + let_it_be(:commit_sha) { repository.commit.id } + + context 'with authorized user' do + it 'returns branches names tipping the commit' do + post_query + + expect(data).to eq(branches_names) + end + end + + context 'when user is not authorized' do + 
let(:current_user) { create(:user) } + + it 'returns branches names tipping the commit' do + post_query + + expect(data).to eq(nil) + end + end + end + + context 'when commit does not exist' do + let(:commit_sha) { '6f6d7e7ed97bb5f0054f2b1df789b39ca89b6ff4' } + + it 'returns tags names tipping the commit' do + post_query + + expect(data).to eq([]) + end + end + + context 'when commit exists but does not tip any branches' do + let(:commit_sha) { project.repository.commits(nil, { limit: 4 }).commits[2].id } + + it 'returns tags names tipping the commit' do + post_query + + expect(data).to eq([]) + end + end +end diff --git a/spec/requests/api/graphql/project/cluster_agents_spec.rb b/spec/requests/api/graphql/project/cluster_agents_spec.rb index 0881eb9cdc3..a50b176cdac 100644 --- a/spec/requests/api/graphql/project/cluster_agents_spec.rb +++ b/spec/requests/api/graphql/project/cluster_agents_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe 'Project.cluster_agents', feature_category: :kubernetes_management do +RSpec.describe 'Project.cluster_agents', feature_category: :deployment_management do include GraphqlHelpers let_it_be(:project) { create(:project, :public) } diff --git a/spec/requests/api/graphql/project/data_transfer_spec.rb b/spec/requests/api/graphql/project/data_transfer_spec.rb new file mode 100644 index 00000000000..aafa8d65eb9 --- /dev/null +++ b/spec/requests/api/graphql/project/data_transfer_spec.rb @@ -0,0 +1,112 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe 'project data transfers', feature_category: :source_code_management do + include GraphqlHelpers + + let_it_be(:current_user) { create(:user) } + let_it_be(:project) { create(:project) } + + let(:fields) do + <<~QUERY + #{all_graphql_fields_for('ProjectDataTransfer'.classify)} + QUERY + end + + let(:query) do + graphql_query_for( + 'project', + { fullPath: project.full_path }, + query_graphql_field('DataTransfer', params, fields) + ) + end + + let(:from) 
{ Date.new(2022, 1, 1) } + let(:to) { Date.new(2023, 1, 1) } + let(:params) { { from: from, to: to } } + let(:egress_data) do + graphql_data.dig('project', 'dataTransfer', 'egressNodes', 'nodes') + end + + before do + create(:project_data_transfer, project: project, date: '2022-01-01', repository_egress: 1) + create(:project_data_transfer, project: project, date: '2022-02-01', repository_egress: 2) + end + + subject { post_graphql(query, current_user: current_user) } + + context 'with anonymous access' do + let_it_be(:current_user) { nil } + + before do + subject + end + + it_behaves_like 'a working graphql query' + + it 'returns no data' do + expect(graphql_data_at(:project, :data_transfer)).to be_nil + expect(graphql_errors).to be_nil + end + end + + context 'with authorized user but without enough permissions' do + before do + project.add_developer(current_user) + subject + end + + it_behaves_like 'a working graphql query' + + it 'returns empty results' do + expect(graphql_data_at(:project, :data_transfer)).to be_nil + expect(graphql_errors).to be_nil + end + end + + context 'when user has enough permissions' do + before do + project.add_owner(current_user) + end + + context 'when data_transfer_monitoring_mock_data is NOT enabled' do + before do + stub_feature_flags(data_transfer_monitoring_mock_data: false) + subject + end + + it 'returns real results' do + expect(response).to have_gitlab_http_status(:ok) + + expect(egress_data.count).to eq(2) + + expect(egress_data.first.keys).to match_array( + %w[date totalEgress repositoryEgress artifactsEgress packagesEgress registryEgress] + ) + + expect(egress_data.pluck('repositoryEgress')).to match_array(%w[1 2]) + end + + it_behaves_like 'a working graphql query' + end + + context 'when data_transfer_monitoring_mock_data is enabled' do + before do + stub_feature_flags(data_transfer_monitoring_mock_data: true) + subject + end + + it 'returns mock results' do + expect(response).to have_gitlab_http_status(:ok) + + 
expect(egress_data.count).to eq(12) + expect(egress_data.first.keys).to match_array( + %w[date totalEgress repositoryEgress artifactsEgress packagesEgress registryEgress] + ) + end + + it_behaves_like 'a working graphql query' + end + end +end diff --git a/spec/requests/api/graphql/project/fork_details_spec.rb b/spec/requests/api/graphql/project/fork_details_spec.rb index 0baf29b970e..91a04dc7c50 100644 --- a/spec/requests/api/graphql/project/fork_details_spec.rb +++ b/spec/requests/api/graphql/project/fork_details_spec.rb @@ -24,12 +24,23 @@ RSpec.describe 'getting project fork details', feature_category: :source_code_ma ) end - it 'returns fork details' do - post_graphql(query, current_user: current_user) + context 'when a ref is specified' do + using RSpec::Parameterized::TableSyntax + + where(:ref, :counts) do + 'feature' | { 'ahead' => 1, 'behind' => 29 } + 'v1.1.1' | { 'ahead' => 5, 'behind' => 0 } + '7b5160f9bb23a3d58a0accdbe89da13b96b1ece9' | { 'ahead' => 9, 'behind' => 0 } + 'non-existent-branch' | { 'ahead' => nil, 'behind' => nil } + end - expect(graphql_data['project']['forkDetails']).to eq( - { 'ahead' => 1, 'behind' => 29 } - ) + with_them do + it 'returns fork details' do + post_graphql(query, current_user: current_user) + + expect(graphql_data['project']['forkDetails']).to eq(counts) + end + end end context 'when a project is not a fork' do @@ -52,28 +63,6 @@ RSpec.describe 'getting project fork details', feature_category: :source_code_ma end end - context 'when the specified ref does not exist' do - let(:ref) { 'non-existent-branch' } - - it 'does not return fork details' do - post_graphql(query, current_user: current_user) - - expect(graphql_data['project']['forkDetails']).to be_nil - end - end - - context 'when fork_divergence_counts feature flag is disabled' do - before do - stub_feature_flags(fork_divergence_counts: false) - end - - it 'does not return fork details' do - post_graphql(query, current_user: current_user) - - 
expect(graphql_data['project']['forkDetails']).to be_nil - end - end - context 'when a user cannot read the code' do let_it_be(:current_user) { create(:user) } diff --git a/spec/requests/api/graphql/project/merge_request_spec.rb b/spec/requests/api/graphql/project/merge_request_spec.rb index 76e5d687fd1..80c7258c05d 100644 --- a/spec/requests/api/graphql/project/merge_request_spec.rb +++ b/spec/requests/api/graphql/project/merge_request_spec.rb @@ -480,4 +480,31 @@ RSpec.describe 'getting merge request information nested in a project', feature_ merge_request.assignees << user end end + + context 'when selecting `awardEmoji`' do + let_it_be(:award_emoji) { create(:award_emoji, awardable: merge_request, user: current_user) } + + let(:mr_fields) do + <<~QUERY + awardEmoji { + nodes { + user { + username + } + name + } + } + QUERY + end + + it 'includes award emojis' do + post_graphql(query, current_user: current_user) + + response = merge_request_graphql_data['awardEmoji']['nodes'] + + expect(response.length).to eq(1) + expect(response.first['user']['username']).to eq(current_user.username) + expect(response.first['name']).to eq(award_emoji.name) + end + end end diff --git a/spec/requests/api/graphql/project/merge_requests_spec.rb b/spec/requests/api/graphql/project/merge_requests_spec.rb index 156886ca211..e3c4396e7d8 100644 --- a/spec/requests/api/graphql/project/merge_requests_spec.rb +++ b/spec/requests/api/graphql/project/merge_requests_spec.rb @@ -226,6 +226,28 @@ RSpec.describe 'getting merge request listings nested in a project', feature_cat it_behaves_like 'when searching with parameters' end + context 'when searching by approved' do + let(:approved_mr) { create(:merge_request, target_project: project, source_project: project) } + + before do + create(:approval, merge_request: approved_mr) + end + + context 'when true' do + let(:search_params) { { approved: true } } + let(:mrs) { [approved_mr] } + + it_behaves_like 'when searching with parameters' + end + + 
context 'when false' do + let(:search_params) { { approved: false } } + let(:mrs) { all_merge_requests } + + it_behaves_like 'when searching with parameters' + end + end + context 'when requesting `approved_by`' do let(:search_params) { { iids: [merge_request_a.iid.to_s, merge_request_b.iid.to_s] } } let(:extra_iid_for_second_query) { merge_request_c.iid.to_s } @@ -331,7 +353,7 @@ RSpec.describe 'getting merge request listings nested in a project', feature_cat end context 'when award emoji votes' do - let(:requested_fields) { [:upvotes, :downvotes] } + let(:requested_fields) { 'upvotes downvotes awardEmoji { nodes { name } }' } before do create_list(:award_emoji, 2, name: 'thumbsup', awardable: merge_request_a) diff --git a/spec/requests/api/graphql/project/tags_tipping_at_commit_spec.rb b/spec/requests/api/graphql/project/tags_tipping_at_commit_spec.rb new file mode 100644 index 00000000000..a5e26482a9e --- /dev/null +++ b/spec/requests/api/graphql/project/tags_tipping_at_commit_spec.rb @@ -0,0 +1,67 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe 'Query.project(fullPath).tagsTippingAtCommit(commitSha)', feature_category: :source_code_management do + include GraphqlHelpers + include Presentable + + let_it_be(:project) { create(:project, :repository) } + let_it_be(:repository) { project.repository.raw } + let_it_be(:current_user) { project.first_owner } + let_it_be(:tag_name) { 'v1.0.0' } + + let(:post_query) { post_graphql(query, current_user: current_user) } + let(:path) { %w[project tagsTippingAtCommit names] } + let(:data) { graphql_data.dig(*path) } + + let(:query) do + graphql_query_for( + :project, + { fullPath: project.full_path }, + query_graphql_field(:tagsTippingAtCommit, { commitSha: commit_sha }, :names) + ) + end + + context 'when commit exists and is tipping tags' do + let(:commit_sha) { repository.find_tag(tag_name).dereferenced_target.sha } + + context 'with authorized user' do + it 'returns tags names tipping the 
commit' do + post_query + + expect(data).to eq([tag_name]) + end + end + + context 'when user is not authorized' do + let(:current_user) { create(:user) } + + it 'returns tags names tipping the commit' do + post_query + + expect(data).to eq(nil) + end + end + end + + context 'when commit does not exist' do + let(:commit_sha) { '6f6d7e7ed97bb5f0054f2b1df789b39ca89b6ff4' } + + it 'returns tags names tipping the commit' do + post_query + + expect(data).to eq([]) + end + end + + context 'when commit exists but does not tip any tags' do + let(:commit_sha) { project.repository.commits(nil, { limit: 4 }).commits[2].id } + + it 'returns tags names tipping the commit' do + post_query + + expect(data).to eq([]) + end + end +end diff --git a/spec/requests/api/graphql/project/work_items_spec.rb b/spec/requests/api/graphql/project/work_items_spec.rb index d5dd12de63e..b792505374e 100644 --- a/spec/requests/api/graphql/project/work_items_spec.rb +++ b/spec/requests/api/graphql/project/work_items_spec.rb @@ -341,6 +341,51 @@ RSpec.describe 'getting a work item list for a project', feature_category: :team end end + context 'when fetching work item award emoji widget' do + let(:fields) do + <<~GRAPHQL + nodes { + widgets { + type + ... 
on WorkItemWidgetAwardEmoji { + awardEmoji { + nodes { + name + emoji + user { id } + } + } + upvotes + downvotes + } + } + } + GRAPHQL + end + + before do + create(:award_emoji, name: 'star', user: current_user, awardable: item1) + create(:award_emoji, :upvote, awardable: item1) + create(:award_emoji, :downvote, awardable: item1) + end + + it 'executes limited number of N+1 queries', :use_sql_query_cache do + control = ActiveRecord::QueryRecorder.new(skip_cached: false) do + post_graphql(query, current_user: current_user) + end + + create_list(:work_item, 2, project: project) do |item| + create(:award_emoji, name: 'rocket', awardable: item) + create_list(:award_emoji, 2, :upvote, awardable: item) + create_list(:award_emoji, 2, :downvote, awardable: item) + end + + expect { post_graphql(query, current_user: current_user) } + .not_to exceed_all_query_limit(control) + expect_graphql_errors_to_be_empty + end + end + def item_ids graphql_dig_at(items_data, :node, :id) end diff --git a/spec/requests/api/graphql/project_query_spec.rb b/spec/requests/api/graphql/project_query_spec.rb index 281a08e6548..9f51258c163 100644 --- a/spec/requests/api/graphql/project_query_spec.rb +++ b/spec/requests/api/graphql/project_query_spec.rb @@ -120,6 +120,67 @@ RSpec.describe 'getting project information', feature_category: :projects do end end + describe 'is_catalog_resource' do + before do + project.add_owner(current_user) + end + + let(:catalog_resource_query) do + <<~GRAPHQL + { + project(fullPath: "#{project.full_path}") { + isCatalogResource + } + } + GRAPHQL + end + + context 'when the project is not a catalog resource' do + it 'is false' do + post_graphql(catalog_resource_query, current_user: current_user) + + expect(graphql_data.dig('project', 'isCatalogResource')).to be(false) + end + end + + context 'when the project is a catalog resource' do + before do + create(:catalog_resource, project: project) + end + + it 'is true' do + post_graphql(catalog_resource_query, 
current_user: current_user) + + expect(graphql_data.dig('project', 'isCatalogResource')).to be(true) + end + end + + context 'for N+1 queries with isCatalogResource' do + let_it_be(:project1) { create(:project, group: group) } + let_it_be(:project2) { create(:project, group: group) } + + it 'avoids N+1 database queries' do + pending('See: https://gitlab.com/gitlab-org/gitlab/-/issues/403634') + ctx = { current_user: current_user } + + baseline_query = graphql_query_for(:project, { full_path: project1.full_path }, 'isCatalogResource') + + query = <<~GQL + query { + a: #{query_graphql_field(:project, { full_path: project1.full_path }, 'isCatalogResource')} + b: #{query_graphql_field(:project, { full_path: project2.full_path }, 'isCatalogResource')} + } + GQL + + control = ActiveRecord::QueryRecorder.new do + run_with_clean_state(baseline_query, context: ctx) + end + + expect { run_with_clean_state(query, context: ctx) }.not_to exceed_query_limit(control) + end + end + end + context 'when the user has reporter access to the project' do let(:statistics_query) do <<~GRAPHQL diff --git a/spec/requests/api/graphql/work_item_spec.rb b/spec/requests/api/graphql/work_item_spec.rb index 24c72a8bb00..4ef1ab03574 100644 --- a/spec/requests/api/graphql/work_item_spec.rb +++ b/spec/requests/api/graphql/work_item_spec.rb @@ -59,7 +59,8 @@ RSpec.describe 'Query.work_item(id)', feature_category: :team_planning do 'readWorkItem' => true, 'updateWorkItem' => true, 'deleteWorkItem' => false, - 'adminWorkItem' => true + 'adminWorkItem' => true, + 'adminParentLink' => true }, 'project' => hash_including('id' => project.to_gid.to_s, 'fullPath' => project.full_path) ) @@ -399,6 +400,135 @@ RSpec.describe 'Query.work_item(id)', feature_category: :team_planning do ) end end + + describe 'currentUserTodos widget' do + let_it_be(:current_user) { developer } + let_it_be(:other_todo) { create(:todo, state: :pending, user: current_user) } + + let_it_be(:done_todo) do + create(:todo, state: :done, 
target: work_item, target_type: work_item.class.name, user: current_user) + end + + let_it_be(:pending_todo) do + create(:todo, state: :pending, target: work_item, target_type: work_item.class.name, user: current_user) + end + + let_it_be(:other_user_todo) do + create(:todo, state: :pending, target: work_item, target_type: work_item.class.name, user: create(:user)) + end + + let(:work_item_fields) do + <<~GRAPHQL + id + widgets { + type + ... on WorkItemWidgetCurrentUserTodos { + currentUserTodos { + nodes { + id + state + } + } + } + } + GRAPHQL + end + + context 'with access' do + it 'returns widget information' do + expect(work_item_data).to include( + 'id' => work_item.to_gid.to_s, + 'widgets' => include( + hash_including( + 'type' => 'CURRENT_USER_TODOS', + 'currentUserTodos' => { + 'nodes' => match_array( + [done_todo, pending_todo].map { |t| { 'id' => t.to_gid.to_s, 'state' => t.state } } + ) + } + ) + ) + ) + end + end + + context 'with filter' do + let(:work_item_fields) do + <<~GRAPHQL + id + widgets { + type + ... on WorkItemWidgetCurrentUserTodos { + currentUserTodos(state: done) { + nodes { + id + state + } + } + } + } + GRAPHQL + end + + it 'returns widget information' do + expect(work_item_data).to include( + 'id' => work_item.to_gid.to_s, + 'widgets' => include( + hash_including( + 'type' => 'CURRENT_USER_TODOS', + 'currentUserTodos' => { + 'nodes' => match_array( + [done_todo].map { |t| { 'id' => t.to_gid.to_s, 'state' => t.state } } + ) + } + ) + ) + ) + end + end + end + + describe 'award emoji widget' do + let_it_be(:emoji) { create(:award_emoji, name: 'star', awardable: work_item) } + let_it_be(:upvote) { create(:award_emoji, :upvote, awardable: work_item) } + let_it_be(:downvote) { create(:award_emoji, :downvote, awardable: work_item) } + + let(:work_item_fields) do + <<~GRAPHQL + id + widgets { + type + ... 
on WorkItemWidgetAwardEmoji { + upvotes + downvotes + awardEmoji { + nodes { + name + } + } + } + } + GRAPHQL + end + + it 'returns widget information' do + expect(work_item_data).to include( + 'id' => work_item.to_gid.to_s, + 'widgets' => include( + hash_including( + 'type' => 'AWARD_EMOJI', + 'upvotes' => work_item.upvotes, + 'downvotes' => work_item.downvotes, + 'awardEmoji' => { + 'nodes' => match_array( + [emoji, upvote, downvote].map { |e| { 'name' => e.name } } + ) + } + ) + ) + ) + end + end end context 'when an Issue Global ID is provided' do diff --git a/spec/requests/api/group_clusters_spec.rb b/spec/requests/api/group_clusters_spec.rb index 68c3af01e56..58d0e6a1eb5 100644 --- a/spec/requests/api/group_clusters_spec.rb +++ b/spec/requests/api/group_clusters_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe API::GroupClusters, feature_category: :kubernetes_management do +RSpec.describe API::GroupClusters, feature_category: :deployment_management do include KubernetesHelpers let(:current_user) { create(:user) } diff --git a/spec/requests/api/group_variables_spec.rb b/spec/requests/api/group_variables_spec.rb index ff20e7ea9dd..6849b087211 100644 --- a/spec/requests/api/group_variables_spec.rb +++ b/spec/requests/api/group_variables_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe API::GroupVariables, feature_category: :pipeline_composition do +RSpec.describe API::GroupVariables, feature_category: :secrets_management do let_it_be(:group) { create(:group) } let_it_be(:user) { create(:user) } let_it_be(:variable) { create(:ci_group_variable, group: group) } diff --git a/spec/requests/api/groups_spec.rb b/spec/requests/api/groups_spec.rb index 12a6553f51a..84d48b4edb4 100644 --- a/spec/requests/api/groups_spec.rb +++ b/spec/requests/api/groups_spec.rb @@ -6,6 +6,7 @@ RSpec.describe API::Groups, feature_category: :subgroups do include GroupAPIHelpers include UploadHelpers include WorkhorseHelpers + include KeysetPaginationHelpers 
let_it_be(:user1) { create(:user, can_create_group: false) } let_it_be(:user2) { create(:user) } @@ -39,7 +40,7 @@ RSpec.describe API::Groups, feature_category: :subgroups do context 'when invalid' do shared_examples 'invalid file upload request' do - it 'returns 400' do + it 'returns 400', :aggregate_failures do make_upload_request expect(response).to have_gitlab_http_status(:bad_request) @@ -65,7 +66,7 @@ RSpec.describe API::Groups, feature_category: :subgroups do end shared_examples 'skips searching in full path' do - it 'does not find groups by full path' do + it 'does not find groups by full path', :aggregate_failures do subgroup = create(:group, parent: parent, path: "#{parent.path}-subgroup") create(:group, parent: parent, path: 'not_matching_path') @@ -79,7 +80,7 @@ RSpec.describe API::Groups, feature_category: :subgroups do describe "GET /groups" do context "when unauthenticated" do - it "returns public groups" do + it "returns public groups", :aggregate_failures do get api("/groups") expect(response).to have_gitlab_http_status(:ok) @@ -93,18 +94,18 @@ RSpec.describe API::Groups, feature_category: :subgroups do it 'avoids N+1 queries', :use_sql_query_cache do control = ActiveRecord::QueryRecorder.new(skip_cached: false) do - get api("/groups", admin) + get api("/groups") end create(:group) expect do - get api("/groups", admin) + get api("/groups") end.not_to exceed_all_query_limit(control) end context 'when statistics are requested' do - it 'does not include statistics' do + it 'does not include statistics', :aggregate_failures do get api("/groups"), params: { statistics: true } expect(response).to have_gitlab_http_status(:ok) @@ -116,7 +117,7 @@ RSpec.describe API::Groups, feature_category: :subgroups do end context "when authenticated as user" do - it "normal user: returns an array of groups of user1" do + it "normal user: returns an array of groups of user1", :aggregate_failures do get api("/groups", user1) expect(response).to 
have_gitlab_http_status(:ok) @@ -127,7 +128,7 @@ RSpec.describe API::Groups, feature_category: :subgroups do .to satisfy_one { |group| group['name'] == group1.name } end - it "does not include runners_token information" do + it "does not include runners_token information", :aggregate_failures do get api("/groups", user1) expect(response).to have_gitlab_http_status(:ok) @@ -137,7 +138,7 @@ RSpec.describe API::Groups, feature_category: :subgroups do expect(json_response.first).not_to include('runners_token') end - it "does not include statistics" do + it "does not include statistics", :aggregate_failures do get api("/groups", user1), params: { statistics: true } expect(response).to have_gitlab_http_status(:ok) @@ -146,7 +147,7 @@ RSpec.describe API::Groups, feature_category: :subgroups do expect(json_response.first).not_to include 'statistics' end - it "includes a created_at timestamp" do + it "includes a created_at timestamp", :aggregate_failures do get api("/groups", user1) expect(response).to have_gitlab_http_status(:ok) @@ -175,7 +176,7 @@ RSpec.describe API::Groups, feature_category: :subgroups do end context 'on making requests below the allowed offset pagination threshold' do - it 'paginates the records' do + it 'paginates the records', :aggregate_failures do get api('/groups'), params: { page: 1, per_page: 1 } expect(response).to have_gitlab_http_status(:ok) @@ -196,25 +197,8 @@ RSpec.describe API::Groups, feature_category: :subgroups do end context 'keyset pagination' do - def pagination_links(response) - link = response.headers['LINK'] - return unless link - - link.split(',').map do |link| - match = link.match(/<(?.*)>; rel="(?\w+)"/) - break nil unless match - - { url: match[:url], rel: match[:rel] } - end.compact - end - - def params_for_next_page(response) - next_url = pagination_links(response).find { |link| link[:rel] == 'next' }[:url] - Rack::Utils.parse_query(URI.parse(next_url).query) - end - context 'on making requests with supported ordering 
structure' do - it 'paginates the records correctly' do + it 'paginates the records correctly', :aggregate_failures do # first page get api('/groups'), params: { pagination: 'keyset', per_page: 1 } @@ -223,7 +207,7 @@ RSpec.describe API::Groups, feature_category: :subgroups do expect(records.size).to eq(1) expect(records.first['id']).to eq(group_1.id) - params_for_next_page = params_for_next_page(response) + params_for_next_page = pagination_params_from_next_url(response) expect(params_for_next_page).to include('cursor') get api('/groups'), params: params_for_next_page @@ -236,7 +220,7 @@ RSpec.describe API::Groups, feature_category: :subgroups do end context 'on making requests with unsupported ordering structure' do - it 'returns error' do + it 'returns error', :aggregate_failures do get api('/groups'), params: { pagination: 'keyset', per_page: 1, order_by: 'path', sort: 'desc' } expect(response).to have_gitlab_http_status(:method_not_allowed) @@ -248,8 +232,8 @@ RSpec.describe API::Groups, feature_category: :subgroups do end context "when authenticated as admin" do - it "admin: returns an array of all groups" do - get api("/groups", admin) + it "admin: returns an array of all groups", :aggregate_failures do + get api("/groups", admin, admin_mode: true) expect(response).to have_gitlab_http_status(:ok) expect(response).to include_pagination_headers @@ -257,8 +241,8 @@ RSpec.describe API::Groups, feature_category: :subgroups do expect(json_response.length).to eq(2) end - it "does not include runners_token information" do - get api("/groups", admin) + it "does not include runners_token information", :aggregate_failures do + get api("/groups", admin, admin_mode: true) expect(response).to have_gitlab_http_status(:ok) expect(response).to include_pagination_headers @@ -267,8 +251,8 @@ RSpec.describe API::Groups, feature_category: :subgroups do expect(json_response.first).not_to include('runners_token') end - it "does not include statistics by default" do - get 
api("/groups", admin) + it "does not include statistics by default", :aggregate_failures do + get api("/groups", admin, admin_mode: true) expect(response).to have_gitlab_http_status(:ok) expect(response).to include_pagination_headers @@ -276,8 +260,8 @@ RSpec.describe API::Groups, feature_category: :subgroups do expect(json_response.first).not_to include('statistics') end - it "includes a created_at timestamp" do - get api("/groups", admin) + it "includes a created_at timestamp", :aggregate_failures do + get api("/groups", admin, admin_mode: true) expect(response).to have_gitlab_http_status(:ok) expect(response).to include_pagination_headers @@ -285,7 +269,7 @@ RSpec.describe API::Groups, feature_category: :subgroups do expect(json_response.first['created_at']).to be_present end - it "includes statistics if requested" do + it "includes statistics if requested", :aggregate_failures do attributes = { storage_size: 4093, repository_size: 123, @@ -302,7 +286,7 @@ RSpec.describe API::Groups, feature_category: :subgroups do project1.statistics.update!(attributes) - get api("/groups", admin), params: { statistics: true } + get api("/groups", admin, admin_mode: true), params: { statistics: true } expect(response).to have_gitlab_http_status(:ok) expect(response).to include_pagination_headers @@ -313,8 +297,8 @@ RSpec.describe API::Groups, feature_category: :subgroups do end context "when using skip_groups in request" do - it "returns all groups excluding skipped groups" do - get api("/groups", admin), params: { skip_groups: [group2.id] } + it "returns all groups excluding skipped groups", :aggregate_failures do + get api("/groups", admin, admin_mode: true), params: { skip_groups: [group2.id] } expect(response).to have_gitlab_http_status(:ok) expect(response).to include_pagination_headers @@ -326,7 +310,7 @@ RSpec.describe API::Groups, feature_category: :subgroups do context "when using all_available in request" do let(:response_groups) { json_response.map { |group| 
group['name'] } } - it "returns all groups you have access to" do + it "returns all groups you have access to", :aggregate_failures do public_group = create :group, :public get api("/groups", user1), params: { all_available: true } @@ -348,7 +332,7 @@ RSpec.describe API::Groups, feature_category: :subgroups do subgroup.add_owner(user1) end - it "doesn't return subgroups" do + it "doesn't return subgroups", :aggregate_failures do get api("/groups", user1), params: { top_level_only: true } expect(response).to have_gitlab_http_status(:ok) @@ -373,7 +357,7 @@ RSpec.describe API::Groups, feature_category: :subgroups do group5.add_owner(user1) end - it "sorts by name ascending by default" do + it "sorts by name ascending by default", :aggregate_failures do get api("/groups", user1) expect(response).to have_gitlab_http_status(:ok) @@ -382,7 +366,7 @@ RSpec.describe API::Groups, feature_category: :subgroups do expect(response_groups).to eq(groups_visible_to_user(user1).order(:name).pluck(:name)) end - it "sorts in descending order when passed" do + it "sorts in descending order when passed", :aggregate_failures do get api("/groups", user1), params: { sort: "desc" } expect(response).to have_gitlab_http_status(:ok) @@ -391,7 +375,7 @@ RSpec.describe API::Groups, feature_category: :subgroups do expect(response_groups).to eq(groups_visible_to_user(user1).order(name: :desc).pluck(:name)) end - it "sorts by path in order_by param" do + it "sorts by path in order_by param", :aggregate_failures do get api("/groups", user1), params: { order_by: "path" } expect(response).to have_gitlab_http_status(:ok) @@ -400,7 +384,7 @@ RSpec.describe API::Groups, feature_category: :subgroups do expect(response_groups).to eq(groups_visible_to_user(user1).order(:path).pluck(:name)) end - it "sorts by id in the order_by param" do + it "sorts by id in the order_by param", :aggregate_failures do get api("/groups", user1), params: { order_by: "id" } expect(response).to have_gitlab_http_status(:ok) @@ 
-409,7 +393,7 @@ RSpec.describe API::Groups, feature_category: :subgroups do expect(response_groups).to eq(groups_visible_to_user(user1).order(:id).pluck(:name)) end - it "sorts also by descending id with pagination fix" do + it "sorts also by descending id with pagination fix", :aggregate_failures do get api("/groups", user1), params: { order_by: "id", sort: "desc" } expect(response).to have_gitlab_http_status(:ok) @@ -418,7 +402,7 @@ RSpec.describe API::Groups, feature_category: :subgroups do expect(response_groups).to eq(groups_visible_to_user(user1).order(id: :desc).pluck(:name)) end - it "sorts identical keys by id for good pagination" do + it "sorts identical keys by id for good pagination", :aggregate_failures do get api("/groups", user1), params: { search: "same-name", order_by: "name" } expect(response).to have_gitlab_http_status(:ok) @@ -427,7 +411,7 @@ RSpec.describe API::Groups, feature_category: :subgroups do expect(response_groups_ids).to eq(Group.select { |group| group['name'] == 'same-name' }.map { |group| group['id'] }.sort) end - it "sorts descending identical keys by id for good pagination" do + it "sorts descending identical keys by id for good pagination", :aggregate_failures do get api("/groups", user1), params: { search: "same-name", order_by: "name", sort: "desc" } expect(response).to have_gitlab_http_status(:ok) @@ -449,7 +433,7 @@ RSpec.describe API::Groups, feature_category: :subgroups do subject { get api('/groups', user1), params: params } - it 'sorts top level groups before subgroups with exact matches first' do + it 'sorts top level groups before subgroups with exact matches first', :aggregate_failures do subject expect(response).to have_gitlab_http_status(:ok) @@ -462,7 +446,7 @@ RSpec.describe API::Groups, feature_category: :subgroups do context 'when `search` parameter is not given' do let(:params) { { order_by: 'similarity' } } - it 'sorts items ordered by name' do + it 'sorts items ordered by name', :aggregate_failures do subject 
expect(response).to have_gitlab_http_status(:ok) @@ -480,7 +464,7 @@ RSpec.describe API::Groups, feature_category: :subgroups do end context 'when using owned in the request' do - it 'returns an array of groups the user owns' do + it 'returns an array of groups the user owns', :aggregate_failures do group1.add_maintainer(user2) get api('/groups', user2), params: { owned: true } @@ -503,7 +487,7 @@ RSpec.describe API::Groups, feature_category: :subgroups do end context 'with min_access_level parameter' do - it 'returns an array of groups the user has at least master access' do + it 'returns an array of groups the user has at least master access', :aggregate_failures do get api('/groups', user2), params: { min_access_level: 40 } expect(response).to have_gitlab_http_status(:ok) @@ -512,24 +496,15 @@ RSpec.describe API::Groups, feature_category: :subgroups do expect(response_groups).to contain_exactly(group2.id, group3.id) end - context 'distinct count with present_groups_select_all feature flag' do + context 'distinct count' do subject { get api('/groups', user2), params: { min_access_level: 40 } } + # Prevent Rails from optimizing the count query and inadvertadly creating a poor performing databse query. 
+ # https://gitlab.com/gitlab-org/gitlab/-/issues/368969 it 'counts with *' do count_sql = /#{Regexp.escape('SELECT count(*)')}/i expect { subject }.to make_queries_matching count_sql end - - context 'when present_groups_select_all feature flag is disabled' do - before do - stub_feature_flags(present_groups_select_all: false) - end - - it 'counts with count_column' do - count_sql = /#{Regexp.escape('SELECT count(count_column)')}/i - expect { subject }.to make_queries_matching count_sql - end - end end end end @@ -541,7 +516,7 @@ RSpec.describe API::Groups, feature_category: :subgroups do subject { get api('/groups', user1), params: { search: group1.path } } - it 'finds also groups with full path matching search param' do + it 'finds also groups with full path matching search param', :aggregate_failures do subject expect(response).to have_gitlab_http_status(:ok) @@ -587,7 +562,7 @@ RSpec.describe API::Groups, feature_category: :subgroups do expect(response).to have_gitlab_http_status(:not_found) end - it 'returns 200 for a public group' do + it 'returns 200 for a public group', :aggregate_failures do get api("/groups/#{group1.id}") expect(response).to have_gitlab_http_status(:ok) @@ -617,7 +592,7 @@ RSpec.describe API::Groups, feature_category: :subgroups do end context "when authenticated as user" do - it "returns one of user1's groups" do + it "returns one of user1's groups", :aggregate_failures do project = create(:project, namespace: group2, path: 'Foo') create(:project_group_link, project: project, group: group1) group = create(:group) @@ -661,7 +636,7 @@ RSpec.describe API::Groups, feature_category: :subgroups do expect(json_response['shared_projects'][0]['id']).to eq(project.id) end - it "returns one of user1's groups without projects when with_projects option is set to false" do + it "returns one of user1's groups without projects when with_projects option is set to false", :aggregate_failures do project = create(:project, namespace: group2, path: 'Foo') 
create(:project_group_link, project: project, group: group1) @@ -673,14 +648,14 @@ RSpec.describe API::Groups, feature_category: :subgroups do expect(json_response).not_to include('runners_token') end - it "doesn't return runners_token if the user is not the owner of the group" do + it "doesn't return runners_token if the user is not the owner of the group", :aggregate_failures do get api("/groups/#{group1.id}", user3) expect(response).to have_gitlab_http_status(:ok) expect(json_response).not_to include('runners_token') end - it "returns runners_token if the user is the owner of the group" do + it "returns runners_token if the user is the owner of the group", :aggregate_failures do group1.add_owner(user3) get api("/groups/#{group1.id}", user3) @@ -720,8 +695,9 @@ RSpec.describe API::Groups, feature_category: :subgroups do .to contain_exactly(projects[:public].id, projects[:internal].id) end - it 'avoids N+1 queries with project links' do + it 'avoids N+1 queries with project links', :aggregate_failures do get api("/groups/#{group1.id}", user1) + expect(response).to have_gitlab_http_status(:ok) control_count = ActiveRecord::QueryRecorder.new do get api("/groups/#{group1.id}", user1) @@ -754,25 +730,25 @@ RSpec.describe API::Groups, feature_category: :subgroups do end context "when authenticated as admin" do - it "returns any existing group" do - get api("/groups/#{group2.id}", admin) + it "returns any existing group", :aggregate_failures do + get api("/groups/#{group2.id}", admin, admin_mode: true) expect(response).to have_gitlab_http_status(:ok) expect(json_response['name']).to eq(group2.name) end - it "returns information of the runners_token for the group" do - get api("/groups/#{group2.id}", admin) + it "returns information of the runners_token for the group", :aggregate_failures do + get api("/groups/#{group2.id}", admin, admin_mode: true) expect(response).to have_gitlab_http_status(:ok) expect(json_response).to include('runners_token') end - it "returns 
runners_token and no projects when with_projects option is set to false" do + it "returns runners_token and no projects when with_projects option is set to false", :aggregate_failures do project = create(:project, namespace: group2, path: 'Foo') create(:project_group_link, project: project, group: group1) - get api("/groups/#{group2.id}", admin), params: { with_projects: false } + get api("/groups/#{group2.id}", admin, admin_mode: true), params: { with_projects: false } expect(response).to have_gitlab_http_status(:ok) expect(json_response['projects']).to be_nil @@ -781,14 +757,14 @@ RSpec.describe API::Groups, feature_category: :subgroups do end it "does not return a non existing group" do - get api("/groups/#{non_existing_record_id}", admin) + get api("/groups/#{non_existing_record_id}", admin, admin_mode: true) expect(response).to have_gitlab_http_status(:not_found) end end context 'when using group path in URL' do - it 'returns any existing group' do + it 'returns any existing group', :aggregate_failures do get api("/groups/#{group1.path}", admin) expect(response).to have_gitlab_http_status(:ok) @@ -796,7 +772,7 @@ RSpec.describe API::Groups, feature_category: :subgroups do end it 'does not return a non existing group' do - get api('/groups/unknown', admin) + get api('/groups/unknown', admin, admin_mode: true) expect(response).to have_gitlab_http_status(:not_found) end @@ -826,7 +802,7 @@ RSpec.describe API::Groups, feature_category: :subgroups do end end - it 'limits projects and shared_projects' do + it 'limits projects and shared_projects', :aggregate_failures do get api("/groups/#{group1.id}") expect(json_response['projects'].count).to eq(limit) @@ -843,8 +819,8 @@ RSpec.describe API::Groups, feature_category: :subgroups do subject(:shared_with_groups) { json_response['shared_with_groups'].map { _1['group_id']} } context 'when authenticated as admin' do - it 'returns all groups that share the group' do - get api("/groups/#{shared_group.id}", admin) + it 
'returns all groups that share the group', :aggregate_failures do + get api("/groups/#{shared_group.id}", admin, admin_mode: true) expect(response).to have_gitlab_http_status(:ok) expect(shared_with_groups).to contain_exactly(group_link_1.shared_with_group_id, group_link_2.shared_with_group_id) @@ -852,7 +828,7 @@ RSpec.describe API::Groups, feature_category: :subgroups do end context 'when unauthenticated' do - it 'returns only public groups that share the group' do + it 'returns only public groups that share the group', :aggregate_failures do get api("/groups/#{shared_group.id}") expect(response).to have_gitlab_http_status(:ok) @@ -861,7 +837,7 @@ RSpec.describe API::Groups, feature_category: :subgroups do end context 'when authenticated as a member of a parent group that has shared the group' do - it 'returns private group if direct member' do + it 'returns private group if direct member', :aggregate_failures do group2_sub.add_guest(user3) get api("/groups/#{shared_group.id}", user3) @@ -870,7 +846,7 @@ RSpec.describe API::Groups, feature_category: :subgroups do expect(shared_with_groups).to contain_exactly(group_link_1.shared_with_group_id, group_link_2.shared_with_group_id) end - it 'returns private group if inherited member' do + it 'returns private group if inherited member', :aggregate_failures do inherited_guest_member = create(:user) group2.add_guest(inherited_guest_member) @@ -902,7 +878,7 @@ RSpec.describe API::Groups, feature_category: :subgroups do end context 'when authenticated as the group owner' do - it 'updates the group' do + it 'updates the group', :aggregate_failures do workhorse_form_with_file( api("/groups/#{group1.id}", user1), method: :put, @@ -942,7 +918,7 @@ RSpec.describe API::Groups, feature_category: :subgroups do expect(json_response['prevent_sharing_groups_outside_hierarchy']).to eq(true) end - it 'removes the group avatar' do + it 'removes the group avatar', :aggregate_failures do put api("/groups/#{group1.id}", user1), params: { 
avatar: '' } aggregate_failures "testing response" do @@ -952,7 +928,7 @@ RSpec.describe API::Groups, feature_category: :subgroups do end end - it 'does not update visibility_level if it is restricted' do + it 'does not update visibility_level if it is restricted', :aggregate_failures do stub_application_setting(restricted_visibility_levels: [Gitlab::VisibilityLevel::INTERNAL]) put api("/groups/#{group1.id}", user1), params: { visibility: 'internal' } @@ -967,7 +943,7 @@ RSpec.describe API::Groups, feature_category: :subgroups do end context 'for users who have the ability to update default_branch_protection' do - it 'updates the attribute' do + it 'updates the attribute', :aggregate_failures do subject expect(response).to have_gitlab_http_status(:ok) @@ -976,7 +952,7 @@ RSpec.describe API::Groups, feature_category: :subgroups do end context 'for users who does not have the ability to update default_branch_protection`' do - it 'does not update the attribute' do + it 'does not update the attribute', :aggregate_failures do allow(Ability).to receive(:allowed?).and_call_original allow(Ability).to receive(:allowed?).with(user1, :update_default_branch_protection, group1) { false } @@ -1016,21 +992,21 @@ RSpec.describe API::Groups, feature_category: :subgroups do group3.add_owner(user3) end - it 'does not change visibility when not requested' do + it 'does not change visibility when not requested', :aggregate_failures do put api("/groups/#{group3.id}", user3), params: { description: 'Bug #23083' } expect(response).to have_gitlab_http_status(:ok) expect(json_response['visibility']).to eq('public') end - it 'prevents making private a group containing public subgroups' do + it 'prevents making private a group containing public subgroups', :aggregate_failures do put api("/groups/#{group3.id}", user3), params: { visibility: 'private' } expect(response).to have_gitlab_http_status(:bad_request) expect(json_response['message']['visibility_level']).to contain_exactly('private is 
not allowed since there are sub-groups with higher visibility.') end - it 'does not update prevent_sharing_groups_outside_hierarchy' do + it 'does not update prevent_sharing_groups_outside_hierarchy', :aggregate_failures do put api("/groups/#{subgroup.id}", user3), params: { description: 'it works', prevent_sharing_groups_outside_hierarchy: true } expect(response).to have_gitlab_http_status(:ok) @@ -1042,17 +1018,17 @@ RSpec.describe API::Groups, feature_category: :subgroups do end context 'when authenticated as the admin' do - it 'updates the group' do - put api("/groups/#{group1.id}", admin), params: { name: new_group_name } + it 'updates the group', :aggregate_failures do + put api("/groups/#{group1.id}", admin, admin_mode: true), params: { name: new_group_name } expect(response).to have_gitlab_http_status(:ok) expect(json_response['name']).to eq(new_group_name) end - it 'ignores visibility level restrictions' do + it 'ignores visibility level restrictions', :aggregate_failures do stub_application_setting(restricted_visibility_levels: [Gitlab::VisibilityLevel::INTERNAL]) - put api("/groups/#{group1.id}", admin), params: { visibility: 'internal' } + put api("/groups/#{group1.id}", admin, admin_mode: true), params: { visibility: 'internal' } expect(response).to have_gitlab_http_status(:ok) expect(json_response['visibility']).to eq('internal') @@ -1094,7 +1070,7 @@ RSpec.describe API::Groups, feature_category: :subgroups do end end - it "returns the group's projects" do + it "returns the group's projects", :aggregate_failures do get api("/groups/#{group1.id}/projects", user1) expect(response).to have_gitlab_http_status(:ok) @@ -1106,7 +1082,7 @@ RSpec.describe API::Groups, feature_category: :subgroups do end context 'and using archived' do - it "returns the group's archived projects" do + it "returns the group's archived projects", :aggregate_failures do get api("/groups/#{group1.id}/projects?archived=true", user1) expect(response).to have_gitlab_http_status(:ok) 
@@ -1116,7 +1092,7 @@ RSpec.describe API::Groups, feature_category: :subgroups do expect(json_response.map { |project| project['id'] }).to include(archived_project.id) end - it "returns the group's non-archived projects" do + it "returns the group's non-archived projects", :aggregate_failures do get api("/groups/#{group1.id}/projects?archived=false", user1) expect(response).to have_gitlab_http_status(:ok) @@ -1126,7 +1102,7 @@ RSpec.describe API::Groups, feature_category: :subgroups do expect(json_response.map { |project| project['id'] }).not_to include(archived_project.id) end - it "returns all of the group's projects" do + it "returns all of the group's projects", :aggregate_failures do get api("/groups/#{group1.id}/projects", user1) expect(response).to have_gitlab_http_status(:ok) @@ -1150,7 +1126,7 @@ RSpec.describe API::Groups, feature_category: :subgroups do group_with_projects.add_owner(user1) end - it 'returns items based ordered by similarity' do + it 'returns items based ordered by similarity', :aggregate_failures do subject expect(response).to have_gitlab_http_status(:ok) @@ -1166,7 +1142,7 @@ RSpec.describe API::Groups, feature_category: :subgroups do params.delete(:search) end - it 'returns items ordered by name' do + it 'returns items ordered by name', :aggregate_failures do subject expect(response).to have_gitlab_http_status(:ok) @@ -1179,7 +1155,7 @@ RSpec.describe API::Groups, feature_category: :subgroups do end end - it "returns the group's projects with simple representation" do + it "returns the group's projects with simple representation", :aggregate_failures do get api("/groups/#{group1.id}/projects", user1), params: { simple: true } expect(response).to have_gitlab_http_status(:ok) @@ -1190,7 +1166,7 @@ RSpec.describe API::Groups, feature_category: :subgroups do expect(json_response.first['visibility']).not_to be_present end - it "filters the groups projects" do + it "filters the groups projects", :aggregate_failures do public_project = 
create(:project, :public, path: 'test1', group: group1) get api("/groups/#{group1.id}/projects", user1), params: { visibility: 'public' } @@ -1202,7 +1178,7 @@ RSpec.describe API::Groups, feature_category: :subgroups do expect(json_response.first['name']).to eq(public_project.name) end - it "returns projects excluding shared" do + it "returns projects excluding shared", :aggregate_failures do create(:project_group_link, project: create(:project), group: group1) create(:project_group_link, project: create(:project), group: group1) create(:project_group_link, project: create(:project), group: group1) @@ -1227,7 +1203,7 @@ RSpec.describe API::Groups, feature_category: :subgroups do group1.reload end - it "returns projects including those in subgroups" do + it "returns projects including those in subgroups", :aggregate_failures do get api("/groups/#{group1.id}/projects", user1), params: { include_subgroups: true } expect(response).to have_gitlab_http_status(:ok) @@ -1236,7 +1212,10 @@ RSpec.describe API::Groups, feature_category: :subgroups do expect(json_response.length).to eq(6) end - it 'avoids N+1 queries', :use_sql_query_cache, quarantine: 'https://gitlab.com/gitlab-org/gitlab/-/issues/383788' do + it 'avoids N+1 queries', :aggregate_failures, :use_sql_query_cache, quarantine: 'https://gitlab.com/gitlab-org/gitlab/-/issues/383788' do + get api("/groups/#{group1.id}/projects", user1), params: { include_subgroups: true } + expect(response).to have_gitlab_http_status(:ok) + control = ActiveRecord::QueryRecorder.new(skip_cached: false) do get api("/groups/#{group1.id}/projects", user1), params: { include_subgroups: true } end @@ -1250,7 +1229,7 @@ RSpec.describe API::Groups, feature_category: :subgroups do end context 'when include_ancestor_groups is true' do - it 'returns ancestors groups projects', :aggregate_failures do subgroup = create(:group, parent: group1) subgroup_project = create(:project, group: subgroup) @@ 
-1275,7 +1254,7 @@ RSpec.describe API::Groups, feature_category: :subgroups do expect(response).to have_gitlab_http_status(:not_found) end - it "only returns projects to which user has access" do + it "only returns projects to which user has access", :aggregate_failures do project3.add_developer(user3) get api("/groups/#{group1.id}/projects", user3) @@ -1286,7 +1265,7 @@ RSpec.describe API::Groups, feature_category: :subgroups do expect(json_response.first['name']).to eq(project3.name) end - it 'only returns the projects owned by user' do + it 'only returns the projects owned by user', :aggregate_failures do project2.group.add_owner(user3) get api("/groups/#{project2.group.id}/projects", user3), params: { owned: true } @@ -1296,7 +1275,7 @@ RSpec.describe API::Groups, feature_category: :subgroups do expect(json_response.first['name']).to eq(project2.name) end - it 'only returns the projects starred by user' do + it 'only returns the projects starred by user', :aggregate_failures do user1.starred_projects = [project1] get api("/groups/#{group1.id}/projects", user1), params: { starred: true } @@ -1306,8 +1285,9 @@ RSpec.describe API::Groups, feature_category: :subgroups do expect(json_response.first['name']).to eq(project1.name) end - it 'avoids N+1 queries' do + it 'avoids N+1 queries', :aggregate_failures do get api("/groups/#{group1.id}/projects", user1) + expect(response).to have_gitlab_http_status(:ok) control_count = ActiveRecord::QueryRecorder.new do get api("/groups/#{group1.id}/projects", user1) @@ -1322,8 +1302,8 @@ RSpec.describe API::Groups, feature_category: :subgroups do end context "when authenticated as admin" do - it "returns any existing group" do - get api("/groups/#{group2.id}/projects", admin) + it "returns any existing group", :aggregate_failures do + get api("/groups/#{group2.id}/projects", admin, admin_mode: true) expect(response).to have_gitlab_http_status(:ok) expect(response).to include_pagination_headers @@ -1332,15 +1312,15 @@ 
RSpec.describe API::Groups, feature_category: :subgroups do end it "does not return a non existing group" do - get api("/groups/#{non_existing_record_id}/projects", admin) + get api("/groups/#{non_existing_record_id}/projects", admin, admin_mode: true) expect(response).to have_gitlab_http_status(:not_found) end end context 'when using group path in URL' do - it 'returns any existing group' do - get api("/groups/#{group1.path}/projects", admin) + it 'returns any existing group', :aggregate_failures do + get api("/groups/#{group1.path}/projects", admin, admin_mode: true) expect(response).to have_gitlab_http_status(:ok) expect(response).to include_pagination_headers @@ -1349,7 +1329,7 @@ RSpec.describe API::Groups, feature_category: :subgroups do end it 'does not return a non existing group' do - get api('/groups/unknown/projects', admin) + get api('/groups/unknown/projects', admin, admin_mode: true) expect(response).to have_gitlab_http_status(:not_found) end @@ -1375,7 +1355,7 @@ RSpec.describe API::Groups, feature_category: :subgroups do end context 'when authenticated as user' do - it 'returns the shared projects in the group' do + it 'returns the shared projects in the group', :aggregate_failures do get api(path, user1) expect(response).to have_gitlab_http_status(:ok) @@ -1386,7 +1366,7 @@ RSpec.describe API::Groups, feature_category: :subgroups do expect(json_response.first['visibility']).to be_present end - it 'returns shared projects with min access level or higher' do + it 'returns shared projects with min access level or higher', :aggregate_failures do user = create(:user) project2.add_guest(user) @@ -1399,7 +1379,7 @@ RSpec.describe API::Groups, feature_category: :subgroups do expect(json_response.first['id']).to eq(project4.id) end - it 'returns the shared projects of the group with simple representation' do + it 'returns the shared projects of the group with simple representation', :aggregate_failures do get api(path, user1), params: { simple: true } 
expect(response).to have_gitlab_http_status(:ok) @@ -1410,7 +1390,7 @@ RSpec.describe API::Groups, feature_category: :subgroups do expect(json_response.first['visibility']).not_to be_present end - it 'filters the shared projects in the group based on visibility' do + it 'filters the shared projects in the group based on visibility', :aggregate_failures do internal_project = create(:project, :internal, namespace: create(:group)) create(:project_group_link, project: internal_project, group: group1) @@ -1424,7 +1404,7 @@ RSpec.describe API::Groups, feature_category: :subgroups do expect(json_response.first['id']).to eq(internal_project.id) end - it 'filters the shared projects in the group based on search params' do + it 'filters the shared projects in the group based on search params', :aggregate_failures do get api(path, user1), params: { search: 'test_project' } expect(response).to have_gitlab_http_status(:ok) @@ -1434,7 +1414,7 @@ RSpec.describe API::Groups, feature_category: :subgroups do expect(json_response.first['id']).to eq(project4.id) end - it 'does not return the projects owned by the group' do + it 'does not return the projects owned by the group', :aggregate_failures do get api(path, user1) expect(response).to have_gitlab_http_status(:ok) @@ -1459,7 +1439,7 @@ RSpec.describe API::Groups, feature_category: :subgroups do expect(response).to have_gitlab_http_status(:not_found) end - it 'only returns shared projects to which user has access' do + it 'only returns shared projects to which user has access', :aggregate_failures do project4.add_developer(user3) get api(path, user3) @@ -1470,7 +1450,7 @@ RSpec.describe API::Groups, feature_category: :subgroups do expect(json_response.first['id']).to eq(project4.id) end - it 'only returns the projects starred by user' do + it 'only returns the projects starred by user', :aggregate_failures do user1.starred_projects = [project2] get api(path, user1), params: { starred: true } @@ -1482,9 +1462,9 @@ RSpec.describe 
API::Groups, feature_category: :subgroups do end context "when authenticated as admin" do - subject { get api(path, admin) } + subject { get api(path, admin, admin_mode: true) } - it "returns shared projects of an existing group" do + it "returns shared projects of an existing group", :aggregate_failures do subject expect(response).to have_gitlab_http_status(:ok) @@ -1504,7 +1484,10 @@ RSpec.describe API::Groups, feature_category: :subgroups do end end - it 'avoids N+1 queries' do + it 'avoids N+1 queries', :aggregate_failures, :use_sql_query_cache do + subject + expect(response).to have_gitlab_http_status(:ok) + control_count = ActiveRecord::QueryRecorder.new do subject end.count @@ -1520,8 +1503,8 @@ RSpec.describe API::Groups, feature_category: :subgroups do context 'when using group path in URL' do let(:path) { "/groups/#{group1.path}/projects/shared" } - it 'returns the right details' do - get api(path, admin) + it 'returns the right details', :aggregate_failures do + get api(path, admin, admin_mode: true) expect(response).to have_gitlab_http_status(:ok) expect(response).to include_pagination_headers @@ -1531,7 +1514,7 @@ RSpec.describe API::Groups, feature_category: :subgroups do end it 'returns 404 for a non-existent group' do - get api('/groups/unknown/projects/shared', admin) + get api('/groups/unknown/projects/shared', admin, admin_mode: true) expect(response).to have_gitlab_http_status(:not_found) end @@ -1544,7 +1527,7 @@ RSpec.describe API::Groups, feature_category: :subgroups do let!(:subgroup3) { create(:group, :private, parent: group2) } context 'when unauthenticated' do - it 'returns only public subgroups' do + it 'returns only public subgroups', :aggregate_failures do get api("/groups/#{group1.id}/subgroups") expect(response).to have_gitlab_http_status(:ok) @@ -1562,7 +1545,7 @@ RSpec.describe API::Groups, feature_category: :subgroups do end context 'when statistics are requested' do - it 'does not include statistics' do + it 'does not include 
statistics', :aggregate_failures do get api("/groups/#{group1.id}/subgroups"), params: { statistics: true } expect(response).to have_gitlab_http_status(:ok) @@ -1575,7 +1558,7 @@ RSpec.describe API::Groups, feature_category: :subgroups do context 'when authenticated as user' do context 'when user is not member of a public group' do - it 'returns no subgroups for the public group' do + it 'returns no subgroups for the public group', :aggregate_failures do get api("/groups/#{group1.id}/subgroups", user2) expect(response).to have_gitlab_http_status(:ok) @@ -1584,7 +1567,7 @@ RSpec.describe API::Groups, feature_category: :subgroups do end context 'when using all_available in request' do - it 'returns public subgroups' do + it 'returns public subgroups', :aggregate_failures do get api("/groups/#{group1.id}/subgroups", user2), params: { all_available: true } expect(response).to have_gitlab_http_status(:ok) @@ -1609,7 +1592,7 @@ RSpec.describe API::Groups, feature_category: :subgroups do group1.add_guest(user2) end - it 'returns private subgroups' do + it 'returns private subgroups', :aggregate_failures do get api("/groups/#{group1.id}/subgroups", user2) expect(response).to have_gitlab_http_status(:ok) @@ -1623,7 +1606,7 @@ RSpec.describe API::Groups, feature_category: :subgroups do end context 'when using statistics in request' do - it 'does not include statistics' do + it 'does not include statistics', :aggregate_failures do get api("/groups/#{group1.id}/subgroups", user2), params: { statistics: true } expect(response).to have_gitlab_http_status(:ok) @@ -1638,7 +1621,7 @@ RSpec.describe API::Groups, feature_category: :subgroups do group2.add_guest(user1) end - it 'returns subgroups' do + it 'returns subgroups', :aggregate_failures do get api("/groups/#{group2.id}/subgroups", user1) expect(response).to have_gitlab_http_status(:ok) @@ -1651,32 +1634,32 @@ RSpec.describe API::Groups, feature_category: :subgroups do end context 'when authenticated as admin' do - it 'returns 
private subgroups of a public group' do - get api("/groups/#{group1.id}/subgroups", admin) + it 'returns private subgroups of a public group', :aggregate_failures do + get api("/groups/#{group1.id}/subgroups", admin, admin_mode: true) expect(response).to have_gitlab_http_status(:ok) expect(json_response).to be_an Array expect(json_response.length).to eq(2) end - it 'returns subgroups of a private group' do - get api("/groups/#{group2.id}/subgroups", admin) + it 'returns subgroups of a private group', :aggregate_failures do + get api("/groups/#{group2.id}/subgroups", admin, admin_mode: true) expect(response).to have_gitlab_http_status(:ok) expect(json_response).to be_an Array expect(json_response.length).to eq(1) end - it 'does not include statistics by default' do - get api("/groups/#{group1.id}/subgroups", admin) + it 'does not include statistics by default', :aggregate_failures do + get api("/groups/#{group1.id}/subgroups", admin, admin_mode: true) expect(response).to have_gitlab_http_status(:ok) expect(json_response).to be_an Array expect(json_response.first).not_to include('statistics') end - it 'includes statistics if requested' do - get api("/groups/#{group1.id}/subgroups", admin), params: { statistics: true } + it 'includes statistics if requested', :aggregate_failures do + get api("/groups/#{group1.id}/subgroups", admin, admin_mode: true), params: { statistics: true } expect(response).to have_gitlab_http_status(:ok) expect(json_response).to be_an Array @@ -1700,7 +1683,7 @@ RSpec.describe API::Groups, feature_category: :subgroups do let(:response_groups) { json_response.map { |group| group['name'] } } context 'when unauthenticated' do - it 'returns only public descendants' do + it 'returns only public descendants', :aggregate_failures do get api("/groups/#{group1.id}/descendant_groups") expect(response).to have_gitlab_http_status(:ok) @@ -1719,7 +1702,7 @@ RSpec.describe API::Groups, feature_category: :subgroups do context 'when authenticated as user' do 
context 'when user is not member of a public group' do - it 'returns no descendants for the public group' do + it 'returns no descendants for the public group', :aggregate_failures do get api("/groups/#{group1.id}/descendant_groups", user2) expect(response).to have_gitlab_http_status(:ok) @@ -1728,7 +1711,7 @@ RSpec.describe API::Groups, feature_category: :subgroups do end context 'when using all_available in request' do - it 'returns public descendants' do + it 'returns public descendants', :aggregate_failures do get api("/groups/#{group1.id}/descendant_groups", user2), params: { all_available: true } expect(response).to have_gitlab_http_status(:ok) @@ -1752,7 +1735,7 @@ RSpec.describe API::Groups, feature_category: :subgroups do group1.add_guest(user2) end - it 'returns private descendants' do + it 'returns private descendants', :aggregate_failures do get api("/groups/#{group1.id}/descendant_groups", user2) expect(response).to have_gitlab_http_status(:ok) @@ -1763,7 +1746,7 @@ RSpec.describe API::Groups, feature_category: :subgroups do end context 'when using statistics in request' do - it 'does not include statistics' do + it 'does not include statistics', :aggregate_failures do get api("/groups/#{group1.id}/descendant_groups", user2), params: { statistics: true } expect(response).to have_gitlab_http_status(:ok) @@ -1778,7 +1761,7 @@ RSpec.describe API::Groups, feature_category: :subgroups do group2.add_guest(user1) end - it 'returns descendants' do + it 'returns descendants', :aggregate_failures do get api("/groups/#{group2.id}/descendant_groups", user1) expect(response).to have_gitlab_http_status(:ok) @@ -1790,32 +1773,32 @@ RSpec.describe API::Groups, feature_category: :subgroups do end context 'when authenticated as admin' do - it 'returns private descendants of a public group' do - get api("/groups/#{group1.id}/descendant_groups", admin) + it 'returns private descendants of a public group', :aggregate_failures do + get 
api("/groups/#{group1.id}/descendant_groups", admin, admin_mode: true) expect(response).to have_gitlab_http_status(:ok) expect(json_response).to be_an Array expect(json_response.length).to eq(3) end - it 'returns descendants of a private group' do - get api("/groups/#{group2.id}/descendant_groups", admin) + it 'returns descendants of a private group', :aggregate_failures do + get api("/groups/#{group2.id}/descendant_groups", admin, admin_mode: true) expect(response).to have_gitlab_http_status(:ok) expect(json_response).to be_an Array expect(json_response.length).to eq(2) end - it 'does not include statistics by default' do - get api("/groups/#{group1.id}/descendant_groups", admin) + it 'does not include statistics by default', :aggregate_failures do + get api("/groups/#{group1.id}/descendant_groups", admin, admin_mode: true) expect(response).to have_gitlab_http_status(:ok) expect(json_response).to be_an Array expect(json_response.first).not_to include('statistics') end - it 'includes statistics if requested' do - get api("/groups/#{group1.id}/descendant_groups", admin), params: { statistics: true } + it 'includes statistics if requested', :aggregate_failures do + get api("/groups/#{group1.id}/descendant_groups", admin, admin_mode: true), params: { statistics: true } expect(response).to have_gitlab_http_status(:ok) expect(json_response).to be_an Array @@ -1880,7 +1863,7 @@ RSpec.describe API::Groups, feature_category: :subgroups do end context "when authenticated as user with group permissions" do - it "creates group" do + it "creates group", :aggregate_failures do group = attributes_for_group_api request_access_enabled: false post api("/groups", user3), params: group @@ -1893,7 +1876,7 @@ RSpec.describe API::Groups, feature_category: :subgroups do expect(json_response["visibility"]).to eq(Gitlab::VisibilityLevel.string_level(Gitlab::CurrentSettings.current_application_settings.default_group_visibility)) end - it "creates a nested group" do + it "creates a nested 
group", :aggregate_failures do parent = create(:group) parent.add_owner(user3) group = attributes_for_group_api parent_id: parent.id @@ -1926,7 +1909,7 @@ RSpec.describe API::Groups, feature_category: :subgroups do subject { post api("/groups", user3), params: params } context 'for users who have the ability to create a group with `default_branch_protection`' do - it 'creates group with the specified branch protection level' do + it 'creates group with the specified branch protection level', :aggregate_failures do subject expect(response).to have_gitlab_http_status(:created) @@ -1935,7 +1918,7 @@ RSpec.describe API::Groups, feature_category: :subgroups do end context 'for users who do not have the ability to create a group with `default_branch_protection`' do - it 'does not create the group with the specified branch protection level' do + it 'does not create the group with the specified branch protection level', :aggregate_failures do allow(Ability).to receive(:allowed?).and_call_original allow(Ability).to receive(:allowed?).with(user3, :create_group_with_default_branch_protection) { false } @@ -1947,7 +1930,7 @@ RSpec.describe API::Groups, feature_category: :subgroups do end end - it "does not create group, duplicate" do + it "does not create group, duplicate", :aggregate_failures do post api("/groups", user3), params: { name: 'Duplicate Test', path: group2.path } expect(response).to have_gitlab_http_status(:bad_request) @@ -2007,13 +1990,13 @@ RSpec.describe API::Groups, feature_category: :subgroups do context "when authenticated as admin" do it "removes any existing group" do - delete api("/groups/#{group2.id}", admin) + delete api("/groups/#{group2.id}", admin, admin_mode: true) expect(response).to have_gitlab_http_status(:accepted) end it "does not remove a non existing group" do - delete api("/groups/#{non_existing_record_id}", admin) + delete api("/groups/#{non_existing_record_id}", admin, admin_mode: true) expect(response).to 
have_gitlab_http_status(:not_found) end @@ -2040,7 +2023,7 @@ RSpec.describe API::Groups, feature_category: :subgroups do context "when authenticated as admin" do it "transfers project to group" do - post api("/groups/#{group1.id}/projects/#{project.id}", admin) + post api("/groups/#{group1.id}/projects/#{project.id}", admin, admin_mode: true) expect(response).to have_gitlab_http_status(:created) end @@ -2048,7 +2031,7 @@ RSpec.describe API::Groups, feature_category: :subgroups do context 'when using project path in URL' do context 'with a valid project path' do it "transfers project to group" do - post api("/groups/#{group1.id}/projects/#{project_path}", admin) + post api("/groups/#{group1.id}/projects/#{project_path}", admin, admin_mode: true) expect(response).to have_gitlab_http_status(:created) end @@ -2056,7 +2039,7 @@ RSpec.describe API::Groups, feature_category: :subgroups do context 'with a non-existent project path' do it "does not transfer project to group" do - post api("/groups/#{group1.id}/projects/nogroup%2Fnoproject", admin) + post api("/groups/#{group1.id}/projects/nogroup%2Fnoproject", admin, admin_mode: true) expect(response).to have_gitlab_http_status(:not_found) end @@ -2066,7 +2049,7 @@ RSpec.describe API::Groups, feature_category: :subgroups do context 'when using a group path in URL' do context 'with a valid group path' do it "transfers project to group" do - post api("/groups/#{group1.path}/projects/#{project_path}", admin) + post api("/groups/#{group1.path}/projects/#{project_path}", admin, admin_mode: true) expect(response).to have_gitlab_http_status(:created) end @@ -2074,7 +2057,7 @@ RSpec.describe API::Groups, feature_category: :subgroups do context 'with a non-existent group path' do it "does not transfer project to group" do - post api("/groups/noexist/projects/#{project_path}", admin) + post api("/groups/noexist/projects/#{project_path}", admin, admin_mode: true) expect(response).to have_gitlab_http_status(:not_found) end @@ -2183,7 
+2166,7 @@ RSpec.describe API::Groups, feature_category: :subgroups do context 'when promoting a subgroup to a root group' do shared_examples_for 'promotes the subgroup to a root group' do - it 'returns success' do + it 'returns success', :aggregate_failures do make_request(user) expect(response).to have_gitlab_http_status(:created) @@ -2207,7 +2190,7 @@ RSpec.describe API::Groups, feature_category: :subgroups do let(:group) { create(:group) } let(:params) { { group_id: '' } } - it 'returns error' do + it 'returns error', :aggregate_failures do make_request(user) expect(response).to have_gitlab_http_status(:bad_request) @@ -2258,7 +2241,7 @@ RSpec.describe API::Groups, feature_category: :subgroups do end end - it 'returns error' do + it 'returns error', :aggregate_failures do make_request(user) expect(response).to have_gitlab_http_status(:bad_request) @@ -2267,7 +2250,7 @@ RSpec.describe API::Groups, feature_category: :subgroups do end context 'when the transfer succceds' do - it 'returns success' do + it 'returns success', :aggregate_failures do make_request(user) expect(response).to have_gitlab_http_status(:created) @@ -2289,11 +2272,13 @@ RSpec.describe API::Groups, feature_category: :subgroups do describe "POST /groups/:id/share" do shared_examples 'shares group with group' do - it "shares group with group" do + let_it_be(:admin_mode) { false } + + it "shares group with group", :aggregate_failures do expires_at = 10.days.from_now.to_date expect do - post api("/groups/#{group.id}/share", user), params: { group_id: shared_with_group.id, group_access: Gitlab::Access::DEVELOPER, expires_at: expires_at } + post api("/groups/#{group.id}/share", user, admin_mode: admin_mode), params: { group_id: shared_with_group.id, group_access: Gitlab::Access::DEVELOPER, expires_at: expires_at } end.to change { group.shared_with_group_links.count }.by(1) expect(response).to have_gitlab_http_status(:created) @@ -2322,7 +2307,7 @@ RSpec.describe API::Groups, feature_category: 
:subgroups do expect(response).to have_gitlab_http_status(:not_found) end - it "returns a 400 error when wrong params passed" do + it "returns a 400 error when wrong params passed", :aggregate_failures do post api("/groups/#{group.id}/share", user), params: { group_id: shared_with_group.id, group_access: non_existing_record_access_level } expect(response).to have_gitlab_http_status(:bad_request) @@ -2375,15 +2360,18 @@ RSpec.describe API::Groups, feature_category: :subgroups do let(:user) { admin } let(:group) { create(:group) } let(:shared_with_group) { create(:group) } + let(:admin_mode) { true } end end end describe 'DELETE /groups/:id/share/:group_id' do shared_examples 'deletes group share' do - it 'deletes a group share' do + let_it_be(:admin_mode) { false } + + it 'deletes a group share', :aggregate_failures do expect do - delete api("/groups/#{shared_group.id}/share/#{shared_with_group.id}", user) + delete api("/groups/#{shared_group.id}/share/#{shared_with_group.id}", user, admin_mode: admin_mode) expect(response).to have_gitlab_http_status(:no_content) expect(shared_group.shared_with_group_links).to be_empty @@ -2432,7 +2420,7 @@ RSpec.describe API::Groups, feature_category: :subgroups do create(:group_group_link, shared_group: group1, shared_with_group: group_a) end - it 'does not remove group share' do + it 'does not remove group share', :aggregate_failures do expect do delete api("/groups/#{group1.id}/share/#{group_a.id}", user4) @@ -2452,6 +2440,7 @@ RSpec.describe API::Groups, feature_category: :subgroups do let(:user) { admin } let(:shared_group) { group2 } let(:shared_with_group) { group_b } + let(:admin_mode) { true } end end end diff --git a/spec/requests/api/import_github_spec.rb b/spec/requests/api/import_github_spec.rb index 0d75bb94144..9b5ae72526c 100644 --- a/spec/requests/api/import_github_spec.rb +++ b/spec/requests/api/import_github_spec.rb @@ -174,72 +174,54 @@ RSpec.describe API::ImportGithub, feature_category: :importers do 
let_it_be(:user) { create(:user) } let(:params) { { personal_access_token: token } } - context 'when feature github_import_gists is enabled' do + context 'when gists import was started' do before do - stub_feature_flags(github_import_gists: true) + allow(Import::Github::GistsImportService) + .to receive(:new).with(user, client, access_params) + .and_return(double(execute: { status: :success })) end - context 'when gists import was started' do - before do - allow(Import::Github::GistsImportService) - .to receive(:new).with(user, client, access_params) - .and_return(double(execute: { status: :success })) - end - - it 'returns 202' do - post api('/import/github/gists', user), params: params + it 'returns 202' do + post api('/import/github/gists', user), params: params - expect(response).to have_gitlab_http_status(:accepted) - end + expect(response).to have_gitlab_http_status(:accepted) end + end - context 'when gists import is in progress' do - before do - allow(Import::Github::GistsImportService) - .to receive(:new).with(user, client, access_params) - .and_return(double(execute: { status: :error, message: 'Import already in progress', http_status: :unprocessable_entity })) - end - - it 'returns 422 error' do - post api('/import/github/gists', user), params: params - - expect(response).to have_gitlab_http_status(:unprocessable_entity) - expect(json_response['errors']).to eq('Import already in progress') - end + context 'when gists import is in progress' do + before do + allow(Import::Github::GistsImportService) + .to receive(:new).with(user, client, access_params) + .and_return(double(execute: { status: :error, message: 'Import already in progress', http_status: :unprocessable_entity })) end - context 'when unauthenticated user' do - it 'returns 403 error' do - post api('/import/github/gists'), params: params + it 'returns 422 error' do + post api('/import/github/gists', user), params: params - expect(response).to have_gitlab_http_status(:unauthorized) - end + 
expect(response).to have_gitlab_http_status(:unprocessable_entity) + expect(json_response['errors']).to eq('Import already in progress') end + end - context 'when rate limit reached' do - before do - allow(Import::Github::GistsImportService) - .to receive(:new).with(user, client, access_params) - .and_raise(Gitlab::GithubImport::RateLimitError) - end - - it 'returns 429 error' do - post api('/import/github/gists', user), params: params + context 'when unauthenticated user' do + it 'returns 403 error' do + post api('/import/github/gists'), params: params - expect(response).to have_gitlab_http_status(:too_many_requests) - end + expect(response).to have_gitlab_http_status(:unauthorized) end end - context 'when feature github_import_gists is disabled' do + context 'when rate limit reached' do before do - stub_feature_flags(github_import_gists: false) + allow(Import::Github::GistsImportService) + .to receive(:new).with(user, client, access_params) + .and_raise(Gitlab::GithubImport::RateLimitError) end - it 'returns 404 error' do + it 'returns 429 error' do post api('/import/github/gists', user), params: params - expect(response).to have_gitlab_http_status(:not_found) + expect(response).to have_gitlab_http_status(:too_many_requests) end end end diff --git a/spec/requests/api/integrations_spec.rb b/spec/requests/api/integrations_spec.rb index c35b9bab0ec..de5cb81190f 100644 --- a/spec/requests/api/integrations_spec.rb +++ b/spec/requests/api/integrations_spec.rb @@ -62,7 +62,7 @@ RSpec.describe API::Integrations, feature_category: :integrations do datadog: %i[archive_trace_events], discord: %i[branches_to_be_notified notify_only_broken_pipelines], hangouts_chat: %i[notify_only_broken_pipelines], - jira: %i[issues_enabled project_key vulnerabilities_enabled vulnerabilities_issuetype], + jira: %i[issues_enabled project_key jira_issue_regex jira_issue_prefix vulnerabilities_enabled vulnerabilities_issuetype], mattermost: %i[deployment_channel labels_to_be_notified], mock_ci: 
%i[enable_ssl_verification], prometheus: %i[manual_configuration], diff --git a/spec/requests/api/internal/kubernetes_spec.rb b/spec/requests/api/internal/kubernetes_spec.rb index 547b9071f94..6d7f2657e1e 100644 --- a/spec/requests/api/internal/kubernetes_spec.rb +++ b/spec/requests/api/internal/kubernetes_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe API::Internal::Kubernetes, feature_category: :kubernetes_management do +RSpec.describe API::Internal::Kubernetes, feature_category: :deployment_management do let(:jwt_auth_headers) do jwt_token = JWT.encode({ 'iss' => Gitlab::Kas::JWT_ISSUER }, Gitlab::Kas.secret, 'HS256') @@ -147,6 +147,14 @@ RSpec.describe API::Internal::Kubernetes, feature_category: :kubernetes_manageme projects: [ { id: project.full_path, default_namespace: 'staging' } ] + }, + user_access: { + groups: [ + { id: group.full_path } + ], + projects: [ + { id: project.full_path } + ] } } end @@ -158,8 +166,10 @@ RSpec.describe API::Internal::Kubernetes, feature_category: :kubernetes_manageme send_request(params: { agent_id: agent.id, agent_config: config }) expect(response).to have_gitlab_http_status(:no_content) - expect(agent.authorized_groups).to contain_exactly(group) - expect(agent.authorized_projects).to contain_exactly(project) + expect(agent.ci_access_authorized_groups).to contain_exactly(group) + expect(agent.ci_access_authorized_projects).to contain_exactly(project) + expect(agent.user_access_authorized_groups).to contain_exactly(group) + expect(agent.user_access_authorized_projects).to contain_exactly(project) end end diff --git a/spec/requests/api/internal/pages_spec.rb b/spec/requests/api/internal/pages_spec.rb index 20fb9100ebb..a816b3870a1 100644 --- a/spec/requests/api/internal/pages_spec.rb +++ b/spec/requests/api/internal/pages_spec.rb @@ -117,7 +117,8 @@ RSpec.describe API::Internal::Pages, feature_category: :pages do 'file_size' => deployment.size, 'file_count' => deployment.file_count }, - 'unique_domain' => nil + 
'unique_host' => nil, + 'root_directory' => deployment.root_directory } ] ) @@ -206,7 +207,8 @@ RSpec.describe API::Internal::Pages, feature_category: :pages do 'file_size' => deployment.size, 'file_count' => deployment.file_count }, - 'unique_domain' => 'unique-domain' + 'unique_host' => 'unique-domain.example.com', + 'root_directory' => 'public' } ] ) @@ -262,7 +264,8 @@ RSpec.describe API::Internal::Pages, feature_category: :pages do 'file_size' => deployment.size, 'file_count' => deployment.file_count }, - 'unique_domain' => nil + 'unique_host' => nil, + 'root_directory' => 'public' } ] ) @@ -310,7 +313,8 @@ RSpec.describe API::Internal::Pages, feature_category: :pages do 'file_size' => deployment.size, 'file_count' => deployment.file_count }, - 'unique_domain' => nil + 'unique_host' => nil, + 'root_directory' => 'public' } ] ) diff --git a/spec/requests/api/issues/get_project_issues_spec.rb b/spec/requests/api/issues/get_project_issues_spec.rb index 915b8fff75e..137fba66eaa 100644 --- a/spec/requests/api/issues/get_project_issues_spec.rb +++ b/spec/requests/api/issues/get_project_issues_spec.rb @@ -638,6 +638,12 @@ RSpec.describe API::Issues, feature_category: :team_planning do end describe 'GET /projects/:id/issues/:issue_iid' do + let(:path) { "/projects/#{project.id}/issues/#{confidential_issue.iid}" } + + it_behaves_like 'GET request permissions for admin mode' do + let(:failed_status_code) { :not_found } + end + context 'when unauthenticated' do it 'returns public issues' do get api("/projects/#{project.id}/issues/#{issue.iid}") @@ -727,19 +733,19 @@ RSpec.describe API::Issues, feature_category: :team_planning do context 'confidential issues' do it 'returns 404 for non project members' do - get api("/projects/#{project.id}/issues/#{confidential_issue.iid}", non_member) + get api(path, non_member) expect(response).to have_gitlab_http_status(:not_found) end it 'returns 404 for project members with guest role' do - get 
api("/projects/#{project.id}/issues/#{confidential_issue.iid}", guest) + get api(path, guest) expect(response).to have_gitlab_http_status(:not_found) end it 'returns confidential issue for project members', :aggregate_failures do - get api("/projects/#{project.id}/issues/#{confidential_issue.iid}", user) + get api(path, user) expect(response).to have_gitlab_http_status(:ok) expect(json_response['title']).to eq(confidential_issue.title) @@ -747,7 +753,7 @@ RSpec.describe API::Issues, feature_category: :team_planning do end it 'returns confidential issue for author', :aggregate_failures do - get api("/projects/#{project.id}/issues/#{confidential_issue.iid}", author) + get api(path, author) expect(response).to have_gitlab_http_status(:ok) expect(json_response['title']).to eq(confidential_issue.title) @@ -755,7 +761,7 @@ RSpec.describe API::Issues, feature_category: :team_planning do end it 'returns confidential issue for assignee', :aggregate_failures do - get api("/projects/#{project.id}/issues/#{confidential_issue.iid}", assignee) + get api(path, assignee) expect(response).to have_gitlab_http_status(:ok) expect(json_response['title']).to eq(confidential_issue.title) @@ -763,7 +769,7 @@ RSpec.describe API::Issues, feature_category: :team_planning do end it 'returns confidential issue for admin', :aggregate_failures do - get api("/projects/#{project.id}/issues/#{confidential_issue.iid}", admin, admin_mode: true) + get api(path, admin, admin_mode: true) expect(response).to have_gitlab_http_status(:ok) expect(json_response['title']).to eq(confidential_issue.title) @@ -890,6 +896,10 @@ RSpec.describe API::Issues, feature_category: :team_planning do describe 'GET /projects/:id/issues/:issue_iid/user_agent_detail' do let!(:user_agent_detail) { create(:user_agent_detail, subject: issue) } + it_behaves_like 'GET request permissions for admin mode' do + let(:path) { "/projects/#{project.id}/issues/#{issue.iid}/user_agent_detail" } + end + context 'when unauthenticated' do it 
'returns unauthorized' do get api("/projects/#{project.id}/issues/#{issue.iid}/user_agent_detail") diff --git a/spec/requests/api/issues/issues_spec.rb b/spec/requests/api/issues/issues_spec.rb index 33f49cefc69..15a89527677 100644 --- a/spec/requests/api/issues/issues_spec.rb +++ b/spec/requests/api/issues/issues_spec.rb @@ -90,9 +90,13 @@ RSpec.describe API::Issues, feature_category: :team_planning do end describe 'GET /issues/:id' do + let(:path) { "/issues/#{issue.id}" } + + it_behaves_like 'GET request permissions for admin mode' + context 'when unauthorized' do it 'returns unauthorized' do - get api("/issues/#{issue.id}") + get api(path) expect(response).to have_gitlab_http_status(:unauthorized) end @@ -101,7 +105,7 @@ RSpec.describe API::Issues, feature_category: :team_planning do context 'when authorized' do context 'as a normal user' do it 'returns forbidden' do - get api("/issues/#{issue.id}", user) + get api(path, user) expect(response).to have_gitlab_http_status(:forbidden) end @@ -110,7 +114,7 @@ RSpec.describe API::Issues, feature_category: :team_planning do context 'as an admin' do context 'when issue exists' do it 'returns the issue', :aggregate_failures do - get api("/issues/#{issue.id}", admin, admin_mode: true) + get api(path, admin, admin_mode: true) expect(response).to have_gitlab_http_status(:ok) expect(json_response.dig('author', 'id')).to eq(issue.author.id) @@ -121,7 +125,7 @@ RSpec.describe API::Issues, feature_category: :team_planning do context 'when issue does not exist' do it 'returns 404' do - get api("/issues/0", admin, admin_mode: true) + get api("/issues/#{non_existing_record_id}", admin, admin_mode: true) expect(response).to have_gitlab_http_status(:not_found) end @@ -1169,6 +1173,11 @@ RSpec.describe API::Issues, feature_category: :team_planning do let(:entity) { issue } end + it_behaves_like 'PUT request permissions for admin mode' do + let(:path) { "/projects/#{project.id}/issues/#{issue.iid}" } + let(:params) { { labels: 
'label1', updated_at: Time.new(2000, 1, 1) } } + end + describe 'updated_at param' do let(:fixed_time) { Time.new(2001, 1, 1) } let(:updated_at) { Time.new(2000, 1, 1) } diff --git a/spec/requests/api/issues/post_projects_issues_spec.rb b/spec/requests/api/issues/post_projects_issues_spec.rb index a17c1389e83..5a15a0b6dad 100644 --- a/spec/requests/api/issues/post_projects_issues_spec.rb +++ b/spec/requests/api/issues/post_projects_issues_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe API::Issues, feature_category: :team_planning do +RSpec.describe API::Issues, :aggregate_failures, feature_category: :team_planning do let_it_be(:user) { create(:user) } let_it_be(:project, reload: true) do create(:project, :public, creator_id: user.id, namespace: user.namespace) @@ -75,7 +75,7 @@ RSpec.describe API::Issues, feature_category: :team_planning do describe 'POST /projects/:id/issues' do context 'support for deprecated assignee_id' do - it 'creates a new project issue', :aggregate_failures do + it 'creates a new project issue' do post api("/projects/#{project.id}/issues", user), params: { title: 'new issue', assignee_id: user2.id } @@ -85,7 +85,7 @@ RSpec.describe API::Issues, feature_category: :team_planning do expect(json_response['assignees'].first['name']).to eq(user2.name) end - it 'creates a new project issue when assignee_id is empty', :aggregate_failures do + it 'creates a new project issue when assignee_id is empty' do post api("/projects/#{project.id}/issues", user), params: { title: 'new issue', assignee_id: '' } @@ -96,7 +96,7 @@ RSpec.describe API::Issues, feature_category: :team_planning do end context 'single assignee restrictions' do - it 'creates a new project issue with no more than one assignee', :aggregate_failures do + it 'creates a new project issue with no more than one assignee' do post api("/projects/#{project.id}/issues", user), params: { title: 'new issue', assignee_ids: [user2.id, guest.id] } @@ -122,7 +122,7 @@ RSpec.describe 
API::Issues, feature_category: :team_planning do context 'an internal ID is provided' do context 'by an admin' do - it 'sets the internal ID on the new issue', :aggregate_failures do + it 'sets the internal ID on the new issue' do post api("/projects/#{project.id}/issues", admin, admin_mode: true), params: { title: 'new issue', iid: 9001 } @@ -132,7 +132,7 @@ RSpec.describe API::Issues, feature_category: :team_planning do end context 'by an owner' do - it 'sets the internal ID on the new issue', :aggregate_failures do + it 'sets the internal ID on the new issue' do post api("/projects/#{project.id}/issues", user), params: { title: 'new issue', iid: 9001 } @@ -145,7 +145,7 @@ RSpec.describe API::Issues, feature_category: :team_planning do let(:group) { create(:group) } let(:group_project) { create(:project, :public, namespace: group) } - it 'sets the internal ID on the new issue', :aggregate_failures do + it 'sets the internal ID on the new issue' do group.add_owner(user2) post api("/projects/#{group_project.id}/issues", user2), params: { title: 'new issue', iid: 9001 } @@ -156,7 +156,7 @@ RSpec.describe API::Issues, feature_category: :team_planning do end context 'by another user' do - it 'ignores the given internal ID', :aggregate_failures do + it 'ignores the given internal ID' do post api("/projects/#{project.id}/issues", user2), params: { title: 'new issue', iid: 9001 } @@ -166,7 +166,7 @@ RSpec.describe API::Issues, feature_category: :team_planning do end context 'when an issue with the same IID exists on database' do - it 'returns 409', :aggregate_failures do + it 'returns 409' do post api("/projects/#{project.id}/issues", admin, admin_mode: true), params: { title: 'new issue', iid: issue.iid } @@ -176,7 +176,7 @@ RSpec.describe API::Issues, feature_category: :team_planning do end end - it 'creates a new project issue', :aggregate_failures do + it 'creates a new project issue' do post api("/projects/#{project.id}/issues", user), params: { title: 'new issue', 
labels: 'label, label2', weight: 3, assignee_ids: [user2.id] } @@ -189,7 +189,7 @@ RSpec.describe API::Issues, feature_category: :team_planning do expect(json_response['assignees'].first['name']).to eq(user2.name) end - it 'creates a new project issue with labels param as array', :aggregate_failures do + it 'creates a new project issue with labels param as array' do post api("/projects/#{project.id}/issues", user), params: { title: 'new issue', labels: %w(label label2), weight: 3, assignee_ids: [user2.id] } @@ -202,7 +202,7 @@ RSpec.describe API::Issues, feature_category: :team_planning do expect(json_response['assignees'].first['name']).to eq(user2.name) end - it 'creates a new confidential project issue', :aggregate_failures do + it 'creates a new confidential project issue' do post api("/projects/#{project.id}/issues", user), params: { title: 'new issue', confidential: true } @@ -211,7 +211,7 @@ RSpec.describe API::Issues, feature_category: :team_planning do expect(json_response['confidential']).to be_truthy end - it 'creates a new confidential project issue with a different param', :aggregate_failures do + it 'creates a new confidential project issue with a different param' do post api("/projects/#{project.id}/issues", user), params: { title: 'new issue', confidential: 'y' } @@ -220,7 +220,7 @@ RSpec.describe API::Issues, feature_category: :team_planning do expect(json_response['confidential']).to be_truthy end - it 'creates a public issue when confidential param is false', :aggregate_failures do + it 'creates a public issue when confidential param is false' do post api("/projects/#{project.id}/issues", user), params: { title: 'new issue', confidential: false } @@ -229,7 +229,7 @@ RSpec.describe API::Issues, feature_category: :team_planning do expect(json_response['confidential']).to be_falsy end - it 'creates a public issue when confidential param is invalid', :aggregate_failures do + it 'creates a public issue when confidential param is invalid' do post 
api("/projects/#{project.id}/issues", user), params: { title: 'new issue', confidential: 'foo' } @@ -242,7 +242,7 @@ RSpec.describe API::Issues, feature_category: :team_planning do expect(response).to have_gitlab_http_status(:bad_request) end - it 'allows special label names', :aggregate_failures do + it 'allows special label names' do post api("/projects/#{project.id}/issues", user), params: { title: 'new issue', @@ -256,7 +256,7 @@ RSpec.describe API::Issues, feature_category: :team_planning do expect(json_response['labels']).to include '&' end - it 'allows special label names with labels param as array', :aggregate_failures do + it 'allows special label names with labels param as array' do post api("/projects/#{project.id}/issues", user), params: { title: 'new issue', @@ -270,7 +270,7 @@ RSpec.describe API::Issues, feature_category: :team_planning do expect(json_response['labels']).to include '&' end - it 'returns 400 if title is too long', :aggregate_failures do + it 'returns 400 if title is too long' do post api("/projects/#{project.id}/issues", user), params: { title: 'g' * 256 } expect(response).to have_gitlab_http_status(:bad_request) @@ -313,7 +313,7 @@ RSpec.describe API::Issues, feature_category: :team_planning do end context 'with due date' do - it 'creates a new project issue', :aggregate_failures do + it 'creates a new project issue' do due_date = 2.weeks.from_now.strftime('%Y-%m-%d') post api("/projects/#{project.id}/issues", user), @@ -336,7 +336,7 @@ RSpec.describe API::Issues, feature_category: :team_planning do end context 'by an admin' do - it 'sets the creation time on the new issue', :aggregate_failures do + it 'sets the creation time on the new issue' do post api("/projects/#{project.id}/issues", admin, admin_mode: true), params: params expect(response).to have_gitlab_http_status(:created) @@ -346,7 +346,7 @@ RSpec.describe API::Issues, feature_category: :team_planning do end context 'by a project owner' do - it 'sets the creation time on the 
new issue', :aggregate_failures do + it 'sets the creation time on the new issue' do post api("/projects/#{project.id}/issues", user), params: params expect(response).to have_gitlab_http_status(:created) @@ -356,7 +356,7 @@ RSpec.describe API::Issues, feature_category: :team_planning do end context 'by a group owner' do - it 'sets the creation time on the new issue', :aggregate_failures do + it 'sets the creation time on the new issue' do group = create(:group) group_project = create(:project, :public, namespace: group) group.add_owner(user2) @@ -370,7 +370,7 @@ RSpec.describe API::Issues, feature_category: :team_planning do end context 'by another user' do - it 'ignores the given creation time', :aggregate_failures do + it 'ignores the given creation time' do project.add_developer(user2) post api("/projects/#{project.id}/issues", user2), params: params @@ -397,7 +397,7 @@ RSpec.describe API::Issues, feature_category: :team_planning do end context 'when request exceeds the rate limit' do - it 'prevents users from creating more issues', :aggregate_failures do + it 'prevents users from creating more issues' do allow(::Gitlab::ApplicationRateLimiter).to receive(:throttled?).and_return(true) post api("/projects/#{project.id}/issues", user), @@ -437,7 +437,7 @@ RSpec.describe API::Issues, feature_category: :team_planning do expect { post_issue }.not_to change(Issue, :count) end - it 'returns correct status and message', :aggregate_failures do + it 'returns correct status and message' do post_issue expect(response).to have_gitlab_http_status(:bad_request) @@ -475,9 +475,15 @@ RSpec.describe API::Issues, feature_category: :team_planning do describe '/projects/:id/issues/:issue_iid/move' do let!(:target_project) { create(:project, creator_id: user.id, namespace: user.namespace) } let!(:target_project2) { create(:project, creator_id: non_member.id, namespace: non_member.namespace) } + let(:path) { "/projects/#{project.id}/issues/#{issue.iid}/move" } - it 'moves an issue', 
:aggregate_failures do - post api("/projects/#{project.id}/issues/#{issue.iid}/move", user), + it_behaves_like 'POST request permissions for admin mode' do + let(:params) { { to_project_id: target_project2.id } } + let(:failed_status_code) { 400 } + end + + it 'moves an issue' do + post api(path, user), params: { to_project_id: target_project.id } expect(response).to have_gitlab_http_status(:created) @@ -485,8 +491,8 @@ RSpec.describe API::Issues, feature_category: :team_planning do end context 'when source and target projects are the same' do - it 'returns 400 when trying to move an issue', :aggregate_failures do - post api("/projects/#{project.id}/issues/#{issue.iid}/move", user), + it 'returns 400 when trying to move an issue' do + post api(path, user), params: { to_project_id: project.id } expect(response).to have_gitlab_http_status(:bad_request) @@ -495,8 +501,8 @@ RSpec.describe API::Issues, feature_category: :team_planning do end context 'when the user does not have the permission to move issues' do - it 'returns 400 when trying to move an issue', :aggregate_failures do - post api("/projects/#{project.id}/issues/#{issue.iid}/move", user), + it 'returns 400 when trying to move an issue' do + post api(path, user), params: { to_project_id: target_project2.id } expect(response).to have_gitlab_http_status(:bad_request) @@ -504,8 +510,8 @@ RSpec.describe API::Issues, feature_category: :team_planning do end end - it 'moves the issue to another namespace if I am admin', :aggregate_failures do - post api("/projects/#{project.id}/issues/#{issue.iid}/move", admin, admin_mode: true), + it 'moves the issue to another namespace if I am admin' do + post api(path, admin, admin_mode: true), params: { to_project_id: target_project2.id } expect(response).to have_gitlab_http_status(:created) @@ -513,7 +519,7 @@ RSpec.describe API::Issues, feature_category: :team_planning do end context 'when using the issue ID instead of iid' do - it 'returns 404 when trying to move an issue', 
:aggregate_failures, quarantine: 'https://gitlab.com/gitlab-org/gitlab/-/issues/341520' do + it 'returns 404 when trying to move an issue', quarantine: 'https://gitlab.com/gitlab-org/gitlab/-/issues/341520' do post api("/projects/#{project.id}/issues/#{issue.id}/move", user), params: { to_project_id: target_project.id } @@ -523,7 +529,7 @@ RSpec.describe API::Issues, feature_category: :team_planning do end context 'when issue does not exist' do - it 'returns 404 when trying to move an issue', :aggregate_failures do + it 'returns 404 when trying to move an issue' do post api("/projects/#{project.id}/issues/123/move", user), params: { to_project_id: target_project.id } @@ -533,7 +539,7 @@ RSpec.describe API::Issues, feature_category: :team_planning do end context 'when source project does not exist' do - it 'returns 404 when trying to move an issue', :aggregate_failures do + it 'returns 404 when trying to move an issue' do post api("/projects/0/issues/#{issue.iid}/move", user), params: { to_project_id: target_project.id } @@ -544,7 +550,7 @@ RSpec.describe API::Issues, feature_category: :team_planning do context 'when target project does not exist' do it 'returns 404 when trying to move an issue' do - post api("/projects/#{project.id}/issues/#{issue.iid}/move", user), + post api(path, user), params: { to_project_id: 0 } expect(response).to have_gitlab_http_status(:not_found) @@ -562,7 +568,7 @@ RSpec.describe API::Issues, feature_category: :team_planning do context 'when user can admin the issue' do context 'when the user can admin the target project' do - it 'clones the issue', :aggregate_failures do + it 'clones the issue' do expect do post_clone_issue(user, issue, valid_target_project) end.to change { valid_target_project.issues.count }.by(1) @@ -577,7 +583,7 @@ RSpec.describe API::Issues, feature_category: :team_planning do end context 'when target project is the same source project' do - it 'clones the issue', :aggregate_failures do + it 'clones the issue' do 
expect do post_clone_issue(user, issue, issue.project) end.to change { issue.reset.project.issues.count }.by(1) @@ -595,7 +601,7 @@ RSpec.describe API::Issues, feature_category: :team_planning do end context 'when the user does not have the permission to clone issues' do - it 'returns 400', :aggregate_failures do + it 'returns 400' do post api("/projects/#{project.id}/issues/#{issue.iid}/clone", user), params: { to_project_id: invalid_target_project.id } @@ -605,7 +611,7 @@ RSpec.describe API::Issues, feature_category: :team_planning do end context 'when using the issue ID instead of iid' do - it 'returns 404', :aggregate_failures, quarantine: 'https://gitlab.com/gitlab-org/gitlab/-/issues/341520' do + it 'returns 404', quarantine: 'https://gitlab.com/gitlab-org/gitlab/-/issues/341520' do post api("/projects/#{project.id}/issues/#{issue.id}/clone", user), params: { to_project_id: valid_target_project.id } @@ -615,7 +621,7 @@ RSpec.describe API::Issues, feature_category: :team_planning do end context 'when issue does not exist' do - it 'returns 404', :aggregate_failures do + it 'returns 404' do post api("/projects/#{project.id}/issues/12300/clone", user), params: { to_project_id: valid_target_project.id } @@ -625,7 +631,7 @@ RSpec.describe API::Issues, feature_category: :team_planning do end context 'when source project does not exist' do - it 'returns 404', :aggregate_failures do + it 'returns 404' do post api("/projects/0/issues/#{issue.iid}/clone", user), params: { to_project_id: valid_target_project.id } @@ -635,7 +641,7 @@ RSpec.describe API::Issues, feature_category: :team_planning do end context 'when target project does not exist' do - it 'returns 404', :aggregate_failures do + it 'returns 404' do post api("/projects/#{project.id}/issues/#{issue.iid}/clone", user), params: { to_project_id: 0 } @@ -644,7 +650,7 @@ RSpec.describe API::Issues, feature_category: :team_planning do end end - it 'clones the issue with notes when with_notes is true', 
:aggregate_failures do + it 'clones the issue with notes when with_notes is true' do expect do post api("/projects/#{project.id}/issues/#{issue.iid}/clone", user), params: { to_project_id: valid_target_project.id, with_notes: true } @@ -661,7 +667,7 @@ RSpec.describe API::Issues, feature_category: :team_planning do end describe 'POST :id/issues/:issue_iid/subscribe' do - it 'subscribes to an issue', :aggregate_failures do + it 'subscribes to an issue' do post api("/projects/#{project.id}/issues/#{issue.iid}/subscribe", user2) expect(response).to have_gitlab_http_status(:created) @@ -694,7 +700,7 @@ RSpec.describe API::Issues, feature_category: :team_planning do end describe 'POST :id/issues/:issue_id/unsubscribe' do - it 'unsubscribes from an issue', :aggregate_failures do + it 'unsubscribes from an issue' do post api("/projects/#{project.id}/issues/#{issue.iid}/unsubscribe", user) expect(response).to have_gitlab_http_status(:created) diff --git a/spec/requests/api/issues/put_projects_issues_spec.rb b/spec/requests/api/issues/put_projects_issues_spec.rb index 6cc639c0bcc..217788c519f 100644 --- a/spec/requests/api/issues/put_projects_issues_spec.rb +++ b/spec/requests/api/issues/put_projects_issues_spec.rb @@ -80,6 +80,11 @@ RSpec.describe API::Issues, feature_category: :team_planning do end describe 'PUT /projects/:id/issues/:issue_iid to update only title' do + it_behaves_like 'PUT request permissions for admin mode' do + let(:path) { "/projects/#{project.id}/issues/#{confidential_issue.iid}" } + let(:params) { { title: updated_title } } + end + it 'updates a project issue', :aggregate_failures do put api_for_user, params: { title: updated_title } @@ -88,7 +93,7 @@ RSpec.describe API::Issues, feature_category: :team_planning do end it 'returns 404 error if issue iid not found' do - put api("/projects/#{project.id}/issues/44444", user), params: { title: updated_title } + put api("/projects/#{project.id}/issues/#{non_existing_record_id}", user), params: { title: 
updated_title } expect(response).to have_gitlab_http_status(:not_found) end diff --git a/spec/requests/api/keys_spec.rb b/spec/requests/api/keys_spec.rb index 0ca1a7d030f..3f600d24891 100644 --- a/spec/requests/api/keys_spec.rb +++ b/spec/requests/api/keys_spec.rb @@ -2,31 +2,35 @@ require 'spec_helper' -RSpec.describe API::Keys, feature_category: :system_access do +RSpec.describe API::Keys, :aggregate_failures, feature_category: :system_access do let_it_be(:user) { create(:user) } let_it_be(:admin) { create(:admin) } let_it_be(:email) { create(:email, user: user) } let_it_be(:key) { create(:rsa_key_4096, user: user, expires_at: 1.day.from_now) } let_it_be(:fingerprint_md5) { 'df:73:db:29:3c:a5:32:cf:09:17:7e:8e:9d:de:d7:f7' } + let_it_be(:path) { "/keys/#{key.id}" } describe 'GET /keys/:uid' do + it_behaves_like 'GET request permissions for admin mode' + context 'when unauthenticated' do it 'returns authentication error' do - get api("/keys/#{key.id}") + get api(path) expect(response).to have_gitlab_http_status(:unauthorized) end end context 'when authenticated' do it 'returns 404 for non-existing key' do - get api('/keys/0', admin) + get api('/keys/0', admin, admin_mode: true) + expect(response).to have_gitlab_http_status(:not_found) expect(json_response['message']).to eq('404 Not found') end it 'returns single ssh key with user information' do - get api("/keys/#{key.id}", admin) - expect(response).to have_gitlab_http_status(:ok) + get api(path, admin, admin_mode: true) + expect(json_response['title']).to eq(key.title) expect(Time.parse(json_response['expires_at'])).to be_like_time(key.expires_at) expect(json_response['user']['id']).to eq(user.id) @@ -34,7 +38,7 @@ RSpec.describe API::Keys, feature_category: :system_access do end it "does not include the user's `is_admin` flag" do - get api("/keys/#{key.id}", admin) + get api(path, admin, admin_mode: true) expect(json_response['user']['is_admin']).to be_nil end @@ -42,31 +46,28 @@ RSpec.describe API::Keys, 
feature_category: :system_access do end describe 'GET /keys?fingerprint=' do - it 'returns authentication error' do - get api("/keys?fingerprint=#{fingerprint_md5}") + let_it_be(:path) { "/keys?fingerprint=#{fingerprint_md5}" } - expect(response).to have_gitlab_http_status(:unauthorized) - end + it_behaves_like 'GET request permissions for admin mode' - it 'returns authentication error when authenticated as user' do - get api("/keys?fingerprint=#{fingerprint_md5}", user) + it 'returns authentication error' do + get api(path, admin_mode: true) - expect(response).to have_gitlab_http_status(:forbidden) + expect(response).to have_gitlab_http_status(:unauthorized) end context 'when authenticated as admin' do context 'MD5 fingerprint' do it 'returns 404 for non-existing SSH md5 fingerprint' do - get api("/keys?fingerprint=11:11:11:11:11:11:11:11:11:11:11:11:11:11:11:11", admin) + get api("/keys?fingerprint=11:11:11:11:11:11:11:11:11:11:11:11:11:11:11:11", admin, admin_mode: true) expect(response).to have_gitlab_http_status(:not_found) expect(json_response['message']).to eq('404 Key Not Found') end it 'returns user if SSH md5 fingerprint found' do - get api("/keys?fingerprint=#{fingerprint_md5}", admin) + get api(path, admin, admin_mode: true) - expect(response).to have_gitlab_http_status(:ok) expect(json_response['title']).to eq(key.title) expect(json_response['user']['id']).to eq(user.id) expect(json_response['user']['username']).to eq(user.username) @@ -74,14 +75,14 @@ RSpec.describe API::Keys, feature_category: :system_access do context 'with FIPS mode', :fips_mode do it 'returns 404 for non-existing SSH md5 fingerprint' do - get api("/keys?fingerprint=11:11:11:11:11:11:11:11:11:11:11:11:11:11:11:11", admin) + get api("/keys?fingerprint=11:11:11:11:11:11:11:11:11:11:11:11:11:11:11:11", admin, admin_mode: true) expect(response).to have_gitlab_http_status(:bad_request) expect(json_response['message']).to eq('Failed to return the key') end it 'returns 404 for existing 
SSH md5 fingerprint' do - get api("/keys?fingerprint=#{fingerprint_md5}", admin) + get api(path, admin, admin_mode: true) expect(response).to have_gitlab_http_status(:bad_request) expect(json_response['message']).to eq('Failed to return the key') @@ -90,14 +91,14 @@ RSpec.describe API::Keys, feature_category: :system_access do end it 'returns 404 for non-existing SSH sha256 fingerprint' do - get api("/keys?fingerprint=#{URI.encode_www_form_component("SHA256:nUhzNyftwADy8AH3wFY31tAKs7HufskYTte2aXo1lCg")}", admin) + get api("/keys?fingerprint=#{URI.encode_www_form_component("SHA256:nUhzNyftwADy8AH3wFY31tAKs7HufskYTte2aXo1lCg")}", admin, admin_mode: true) expect(response).to have_gitlab_http_status(:not_found) expect(json_response['message']).to eq('404 Key Not Found') end it 'returns user if SSH sha256 fingerprint found' do - get api("/keys?fingerprint=#{URI.encode_www_form_component("SHA256:" + key.fingerprint_sha256)}", admin) + get api("/keys?fingerprint=#{URI.encode_www_form_component("SHA256:" + key.fingerprint_sha256)}", admin, admin_mode: true) expect(response).to have_gitlab_http_status(:ok) expect(json_response['title']).to eq(key.title) @@ -106,7 +107,7 @@ RSpec.describe API::Keys, feature_category: :system_access do end it 'returns user if SSH sha256 fingerprint found' do - get api("/keys?fingerprint=#{URI.encode_www_form_component("sha256:" + key.fingerprint_sha256)}", admin) + get api("/keys?fingerprint=#{URI.encode_www_form_component("sha256:" + key.fingerprint_sha256)}", admin, admin_mode: true) expect(response).to have_gitlab_http_status(:ok) expect(json_response['title']).to eq(key.title) @@ -115,7 +116,7 @@ RSpec.describe API::Keys, feature_category: :system_access do end it "does not include the user's `is_admin` flag" do - get api("/keys?fingerprint=#{URI.encode_www_form_component("sha256:" + key.fingerprint_sha256)}", admin) + get api("/keys?fingerprint=#{URI.encode_www_form_component("sha256:" + key.fingerprint_sha256)}", admin, admin_mode: 
true) expect(json_response['user']['is_admin']).to be_nil end @@ -136,7 +137,7 @@ RSpec.describe API::Keys, feature_category: :system_access do it 'returns user and projects if SSH sha256 fingerprint for DeployKey found' do user.keys << deploy_key - get api("/keys?fingerprint=#{URI.encode_www_form_component("SHA256:" + deploy_key.fingerprint_sha256)}", admin) + get api("/keys?fingerprint=#{URI.encode_www_form_component("SHA256:" + deploy_key.fingerprint_sha256)}", admin, admin_mode: true) expect(response).to have_gitlab_http_status(:ok) expect(json_response['title']).to eq(deploy_key.title) diff --git a/spec/requests/api/lint_spec.rb b/spec/requests/api/lint_spec.rb index 3f131862a41..bc535e7e38a 100644 --- a/spec/requests/api/lint_spec.rb +++ b/spec/requests/api/lint_spec.rb @@ -4,168 +4,139 @@ require 'spec_helper' RSpec.describe API::Lint, feature_category: :pipeline_composition do describe 'POST /ci/lint' do - context 'when signup settings are disabled' do - before do - Gitlab::CurrentSettings.signup_enabled = false - end + it 'responds with a 410' do + user = create(:user) - context 'when unauthenticated' do - it 'returns authentication error' do - post api('/ci/lint'), params: { content: 'content' } + post api('/ci/lint', user), params: { content: "test_job:\n script: ls" } - expect(response).to have_gitlab_http_status(:unauthorized) - end - end - - context 'when authenticated' do - let_it_be(:api_user) { create(:user) } - - it 'returns authorized' do - post api('/ci/lint', api_user), params: { content: 'content' } + expect(response).to have_gitlab_http_status(:gone) + end - expect(response).to have_gitlab_http_status(:ok) - end + context 'when ci_remove_post_lint is disabled' do + before do + stub_feature_flags(ci_remove_post_lint: false) end - context 'when authenticated as external user' do - let(:project) { create(:project) } - let(:api_user) { create(:user, :external) } - - context 'when reporter in a project' do - before do - 
project.add_reporter(api_user) - end + context 'when signup settings are disabled' do + before do + Gitlab::CurrentSettings.signup_enabled = false + end - it 'returns authorization failure' do - post api('/ci/lint', api_user), params: { content: 'content' } + context 'when unauthenticated' do + it 'returns authentication error' do + post api('/ci/lint'), params: { content: 'content' } expect(response).to have_gitlab_http_status(:unauthorized) end end - context 'when developer in a project' do - before do - project.add_developer(api_user) - end + context 'when authenticated' do + let_it_be(:api_user) { create(:user) } - it 'returns authorization success' do + it 'returns authorized' do post api('/ci/lint', api_user), params: { content: 'content' } expect(response).to have_gitlab_http_status(:ok) end end - end - end - context 'when signup is enabled and not limited' do - before do - Gitlab::CurrentSettings.signup_enabled = true - stub_application_setting(domain_allowlist: [], email_restrictions_enabled: false, require_admin_approval_after_user_signup: false) - end + context 'when authenticated as external user' do + let(:project) { create(:project) } + let(:api_user) { create(:user, :external) } - context 'when unauthenticated' do - it 'returns authorized success' do - post api('/ci/lint'), params: { content: 'content' } + context 'when reporter in a project' do + before do + project.add_reporter(api_user) + end - expect(response).to have_gitlab_http_status(:ok) - end - end + it 'returns authorization failure' do + post api('/ci/lint', api_user), params: { content: 'content' } - context 'when authenticated' do - let_it_be(:api_user) { create(:user) } + expect(response).to have_gitlab_http_status(:unauthorized) + end + end - it 'returns authentication success' do - post api('/ci/lint', api_user), params: { content: 'content' } + context 'when developer in a project' do + before do + project.add_developer(api_user) + end - expect(response).to 
have_gitlab_http_status(:ok) + it 'returns authorization success' do + post api('/ci/lint', api_user), params: { content: 'content' } + + expect(response).to have_gitlab_http_status(:ok) + end + end end end - end - context 'when limited signup is enabled' do - before do - stub_application_setting(domain_allowlist: ['www.gitlab.com']) - Gitlab::CurrentSettings.signup_enabled = true - end + context 'when signup is enabled and not limited' do + before do + Gitlab::CurrentSettings.signup_enabled = true + stub_application_setting(domain_allowlist: [], email_restrictions_enabled: false, require_admin_approval_after_user_signup: false) + end - context 'when unauthenticated' do - it 'returns unauthorized' do - post api('/ci/lint'), params: { content: 'content' } + context 'when unauthenticated' do + it 'returns authorized success' do + post api('/ci/lint'), params: { content: 'content' } - expect(response).to have_gitlab_http_status(:unauthorized) + expect(response).to have_gitlab_http_status(:ok) + end end - end - context 'when authenticated' do - let_it_be(:api_user) { create(:user) } + context 'when authenticated' do + let_it_be(:api_user) { create(:user) } - it 'returns authentication success' do - post api('/ci/lint', api_user), params: { content: 'content' } + it 'returns authentication success' do + post api('/ci/lint', api_user), params: { content: 'content' } - expect(response).to have_gitlab_http_status(:ok) + expect(response).to have_gitlab_http_status(:ok) + end end end - end - context 'when authenticated' do - let_it_be(:api_user) { create(:user) } - - context 'with valid .gitlab-ci.yml content' do - let(:yaml_content) do - File.read(Rails.root.join('spec/support/gitlab_stubs/gitlab_ci.yml')) + context 'when limited signup is enabled' do + before do + stub_application_setting(domain_allowlist: ['www.gitlab.com']) + Gitlab::CurrentSettings.signup_enabled = true end - it 'passes validation without warnings or errors' do - post api('/ci/lint', api_user), params: 
{ content: yaml_content } + context 'when unauthenticated' do + it 'returns unauthorized' do + post api('/ci/lint'), params: { content: 'content' } - expect(response).to have_gitlab_http_status(:ok) - expect(json_response).to be_an Hash - expect(json_response['status']).to eq('valid') - expect(json_response['warnings']).to match_array([]) - expect(json_response['errors']).to match_array([]) - expect(json_response['includes']).to eq([]) + expect(response).to have_gitlab_http_status(:unauthorized) + end end - it 'outputs expanded yaml content' do - post api('/ci/lint', api_user), params: { content: yaml_content, include_merged_yaml: true } - - expect(response).to have_gitlab_http_status(:ok) - expect(json_response).to have_key('merged_yaml') - end + context 'when authenticated' do + let_it_be(:api_user) { create(:user) } - it 'outputs jobs' do - post api('/ci/lint', api_user), params: { content: yaml_content, include_jobs: true } + it 'returns authentication success' do + post api('/ci/lint', api_user), params: { content: 'content' } - expect(response).to have_gitlab_http_status(:ok) - expect(json_response).to have_key('jobs') + expect(response).to have_gitlab_http_status(:ok) + end end end - context 'with valid .gitlab-ci.yml with warnings' do - let(:yaml_content) { { job: { script: 'ls', rules: [{ when: 'always' }] } }.to_yaml } - - it 'passes validation but returns warnings' do - post api('/ci/lint', api_user), params: { content: yaml_content } - - expect(response).to have_gitlab_http_status(:ok) - expect(json_response['status']).to eq('valid') - expect(json_response['warnings']).not_to be_empty - expect(json_response['errors']).to match_array([]) - end - end + context 'when authenticated' do + let_it_be(:api_user) { create(:user) } - context 'with an invalid .gitlab-ci.yml' do - context 'with invalid syntax' do - let(:yaml_content) { 'invalid content' } + context 'with valid .gitlab-ci.yml content' do + let(:yaml_content) do + 
File.read(Rails.root.join('spec/support/gitlab_stubs/gitlab_ci.yml')) + end - it 'responds with errors about invalid syntax' do + it 'passes validation without warnings or errors' do post api('/ci/lint', api_user), params: { content: yaml_content } expect(response).to have_gitlab_http_status(:ok) - expect(json_response['status']).to eq('invalid') - expect(json_response['warnings']).to eq([]) - expect(json_response['errors']).to eq(['Invalid configuration format']) - expect(json_response['includes']).to eq(nil) + expect(json_response).to be_an Hash + expect(json_response['status']).to eq('valid') + expect(json_response['warnings']).to match_array([]) + expect(json_response['errors']).to match_array([]) + expect(json_response['includes']).to eq([]) end it 'outputs expanded yaml content' do @@ -183,41 +154,84 @@ RSpec.describe API::Lint, feature_category: :pipeline_composition do end end - context 'with invalid configuration' do - let(:yaml_content) { '{ image: "image:1.0", services: ["postgres"] }' } + context 'with valid .gitlab-ci.yml with warnings' do + let(:yaml_content) { { job: { script: 'ls', rules: [{ when: 'always' }] } }.to_yaml } - it 'responds with errors about invalid configuration' do + it 'passes validation but returns warnings' do post api('/ci/lint', api_user), params: { content: yaml_content } expect(response).to have_gitlab_http_status(:ok) - expect(json_response['status']).to eq('invalid') - expect(json_response['warnings']).to eq([]) - expect(json_response['errors']).to eq(['jobs config should contain at least one visible job']) - expect(json_response['includes']).to eq([]) + expect(json_response['status']).to eq('valid') + expect(json_response['warnings']).not_to be_empty + expect(json_response['errors']).to match_array([]) end + end - it 'outputs expanded yaml content' do - post api('/ci/lint', api_user), params: { content: yaml_content, include_merged_yaml: true } + context 'with an invalid .gitlab-ci.yml' do + context 'with invalid syntax' do 
+ let(:yaml_content) { 'invalid content' } - expect(response).to have_gitlab_http_status(:ok) - expect(json_response).to have_key('merged_yaml') + it 'responds with errors about invalid syntax' do + post api('/ci/lint', api_user), params: { content: yaml_content } + + expect(response).to have_gitlab_http_status(:ok) + expect(json_response['status']).to eq('invalid') + expect(json_response['warnings']).to eq([]) + expect(json_response['errors']).to eq(['Invalid configuration format']) + expect(json_response['includes']).to eq(nil) + end + + it 'outputs expanded yaml content' do + post api('/ci/lint', api_user), params: { content: yaml_content, include_merged_yaml: true } + + expect(response).to have_gitlab_http_status(:ok) + expect(json_response).to have_key('merged_yaml') + end + + it 'outputs jobs' do + post api('/ci/lint', api_user), params: { content: yaml_content, include_jobs: true } + + expect(response).to have_gitlab_http_status(:ok) + expect(json_response).to have_key('jobs') + end end - it 'outputs jobs' do - post api('/ci/lint', api_user), params: { content: yaml_content, include_jobs: true } + context 'with invalid configuration' do + let(:yaml_content) { '{ image: "image:1.0", services: ["postgres"] }' } - expect(response).to have_gitlab_http_status(:ok) - expect(json_response).to have_key('jobs') + it 'responds with errors about invalid configuration' do + post api('/ci/lint', api_user), params: { content: yaml_content } + + expect(response).to have_gitlab_http_status(:ok) + expect(json_response['status']).to eq('invalid') + expect(json_response['warnings']).to eq([]) + expect(json_response['errors']).to eq(['jobs config should contain at least one visible job']) + expect(json_response['includes']).to eq([]) + end + + it 'outputs expanded yaml content' do + post api('/ci/lint', api_user), params: { content: yaml_content, include_merged_yaml: true } + + expect(response).to have_gitlab_http_status(:ok) + expect(json_response).to have_key('merged_yaml') + 
end + + it 'outputs jobs' do + post api('/ci/lint', api_user), params: { content: yaml_content, include_jobs: true } + + expect(response).to have_gitlab_http_status(:ok) + expect(json_response).to have_key('jobs') + end end end - end - context 'without the content parameter' do - it 'responds with validation error about missing content' do - post api('/ci/lint', api_user) + context 'without the content parameter' do + it 'responds with validation error about missing content' do + post api('/ci/lint', api_user) - expect(response).to have_gitlab_http_status(:bad_request) - expect(json_response['error']).to eq('content is missing') + expect(response).to have_gitlab_http_status(:bad_request) + expect(json_response['error']).to eq('content is missing') + end end end end diff --git a/spec/requests/api/maven_packages_spec.rb b/spec/requests/api/maven_packages_spec.rb index 7b850fed79c..b6eb14685b4 100644 --- a/spec/requests/api/maven_packages_spec.rb +++ b/spec/requests/api/maven_packages_spec.rb @@ -921,6 +921,8 @@ RSpec.describe API::MavenPackages, feature_category: :package_registry do end it 'creates package and stores package file' do + expect_use_primary + expect { upload_file_with_token(params: params) }.to change { project.packages.count }.by(1) .and change { Packages::Maven::Metadatum.count }.by(1) .and change { Packages::PackageFile.count }.by(1) @@ -1062,6 +1064,8 @@ RSpec.describe API::MavenPackages, feature_category: :package_registry do end it 'returns no content' do + expect_use_primary + upload expect(response).to have_gitlab_http_status(:no_content) @@ -1091,6 +1095,8 @@ RSpec.describe API::MavenPackages, feature_category: :package_registry do subject { upload_file_with_token(params: params, file_extension: 'jar.md5') } it 'returns an empty body' do + expect_use_primary + subject expect(response.body).to eq('') @@ -1105,6 +1111,52 @@ RSpec.describe API::MavenPackages, feature_category: :package_registry do end end end + + context 'reading fingerprints 
from UploadedFile instance' do + let(:file) { Packages::Package.last.package_files.with_format('%.jar').last } + + subject { upload_file_with_token(params: params) } + + before do + allow_next_instance_of(UploadedFile) do |uploaded_file| + allow(uploaded_file).to receive(:size).and_return(123) + allow(uploaded_file).to receive(:sha1).and_return('sha1') + allow(uploaded_file).to receive(:md5).and_return('md5') + end + end + + context 'when feature flag is enabled' do + it 'sets size, sha1 and md5 fingerprints from uploaded file' do + subject + + expect(file.size).to eq(123) + expect(file.file_sha1).to eq('sha1') + expect(file.file_md5).to eq('md5') + end + end + + context 'when feature flag is disabled' do + before do + stub_feature_flags(read_fingerprints_from_uploaded_file_in_maven_upload: false) + end + + it 'does not read fingerprints from uploaded file' do + subject + + expect(file.size).not_to eq(123) + expect(file.file_sha1).not_to eq('sha1') + expect(file.file_md5).not_to eq('md5') + end + end + end + + def expect_use_primary + lb_session = ::Gitlab::Database::LoadBalancing::Session.current + + expect(lb_session).to receive(:use_primary).and_call_original + + allow(::Gitlab::Database::LoadBalancing::Session).to receive(:current).and_return(lb_session) + end end def upload_file(params: {}, request_headers: headers, file_extension: 'jar', file_name: 'my-app-1.0-20180724.124855-1') diff --git a/spec/requests/api/merge_requests_spec.rb b/spec/requests/api/merge_requests_spec.rb index 81815fdab62..d705234b616 100644 --- a/spec/requests/api/merge_requests_spec.rb +++ b/spec/requests/api/merge_requests_spec.rb @@ -2,7 +2,7 @@ require "spec_helper" -RSpec.describe API::MergeRequests, feature_category: :source_code_management do +RSpec.describe API::MergeRequests, :aggregate_failures, feature_category: :source_code_management do include ProjectForksHelper let_it_be(:base_time) { Time.now } @@ -50,6 +50,27 @@ RSpec.describe API::MergeRequests, feature_category: 
:source_code_management do expect_successful_response_with_paginated_array end + context 'when merge request is unchecked' do + let(:check_service_class) { MergeRequests::MergeabilityCheckService } + let(:mr_entity) { json_response.find { |mr| mr['id'] == merge_request.id } } + let(:merge_request) { create(:merge_request, :simple, author: user, source_project: project, title: "Test") } + + before do + merge_request.mark_as_unchecked! + end + + context 'with merge status recheck projection' do + it 'does not enqueue a merge status recheck' do + expect(check_service_class).not_to receive(:new) + + get(api(endpoint_path), params: { with_merge_status_recheck: true }) + + expect_successful_response_with_paginated_array + expect(mr_entity['merge_status']).to eq('unchecked') + end + end + end + it_behaves_like 'issuable API rate-limited search' do let(:url) { endpoint_path } let(:issuable) { merge_request } @@ -85,28 +106,67 @@ RSpec.describe API::MergeRequests, feature_category: :source_code_management do merge_request.mark_as_unchecked! 
end - context 'with merge status recheck projection' do - it 'checks mergeability asynchronously' do - expect_next_instances_of(check_service_class, (1..2)) do |service| - expect(service).not_to receive(:execute) - expect(service).to receive(:async_execute).and_call_original + context 'with a developer+ role' do + before do + project.add_developer(user2) + end + + context 'with merge status recheck projection' do + it 'checks mergeability asynchronously' do + expect_next_instances_of(check_service_class, (1..2)) do |service| + expect(service).not_to receive(:execute) + expect(service).to receive(:async_execute).and_call_original + end + + get(api(endpoint_path, user2), params: { with_merge_status_recheck: true }) + + expect_successful_response_with_paginated_array + expect(mr_entity['merge_status']).to eq('checking') end + end - get(api(endpoint_path, user), params: { with_merge_status_recheck: true }) + context 'without merge status recheck projection' do + it 'does not enqueue a merge status recheck' do + expect(check_service_class).not_to receive(:new) - expect_successful_response_with_paginated_array - expect(mr_entity['merge_status']).to eq('checking') + get api(endpoint_path, user2) + + expect_successful_response_with_paginated_array + expect(mr_entity['merge_status']).to eq('unchecked') + end end end - context 'without merge status recheck projection' do - it 'does not enqueue a merge status recheck' do - expect(check_service_class).not_to receive(:new) + context 'with a reporter role' do + context 'with merge status recheck projection' do + it 'does not enqueue a merge status recheck' do + expect(check_service_class).not_to receive(:new) - get api(endpoint_path, user) + get(api(endpoint_path, user2), params: { with_merge_status_recheck: true }) - expect_successful_response_with_paginated_array - expect(mr_entity['merge_status']).to eq('unchecked') + expect_successful_response_with_paginated_array + expect(mr_entity['merge_status']).to eq('unchecked') + end 
+ end + + context 'when restrict_merge_status_recheck FF is disabled' do + before do + stub_feature_flags(restrict_merge_status_recheck: false) + end + + context 'with merge status recheck projection' do + it 'does enqueue a merge status recheck' do + expect_next_instances_of(check_service_class, (1..2)) do |service| + expect(service).not_to receive(:execute) + expect(service).to receive(:async_execute).and_call_original + end + + get(api(endpoint_path, user2), params: { with_merge_status_recheck: true }) + + expect_successful_response_with_paginated_array + expect(mr_entity['merge_status']).to eq('checking') + end + end end end end @@ -249,6 +309,35 @@ RSpec.describe API::MergeRequests, feature_category: :source_code_management do expect(response).to match_response_schema('public_api/v4/merge_requests') end + context 'with approved param' do + let(:approved_mr) { create(:merge_request, target_project: project, source_project: project) } + + before do + create(:approval, merge_request: approved_mr) + end + + it 'returns only approved merge requests' do + path = endpoint_path + '?approved=yes' + + get api(path, user) + + expect_paginated_array_response([approved_mr.id]) + end + + it 'returns only non-approved merge requests' do + path = endpoint_path + '?approved=no' + + get api(path, user) + + expect_paginated_array_response([ + merge_request_merged.id, + merge_request_locked.id, + merge_request_closed.id, + merge_request.id + ]) + end + end + it 'returns an empty array if no issue matches milestone' do get api(endpoint_path, user), params: { milestone: '1.0.0' } @@ -494,7 +583,7 @@ RSpec.describe API::MergeRequests, feature_category: :source_code_management do create(:label_link, label: label2, target: merge_request2) end - it 'returns merge requests without any of the labels given', :aggregate_failures do + it 'returns merge requests without any of the labels given' do get api(endpoint_path, user), params: { not: { labels: ["#{label.title}, #{label2.title}"] } } 
expect(response).to have_gitlab_http_status(:ok) @@ -505,7 +594,7 @@ RSpec.describe API::MergeRequests, feature_category: :source_code_management do end end - it 'returns merge requests without any of the milestones given', :aggregate_failures do + it 'returns merge requests without any of the milestones given' do get api(endpoint_path, user), params: { not: { milestone: milestone.title } } expect(response).to have_gitlab_http_status(:ok) @@ -516,7 +605,7 @@ RSpec.describe API::MergeRequests, feature_category: :source_code_management do end end - it 'returns merge requests without the author given', :aggregate_failures do + it 'returns merge requests without the author given' do get api(endpoint_path, user), params: { not: { author_id: user2.id } } expect(response).to have_gitlab_http_status(:ok) @@ -527,7 +616,7 @@ RSpec.describe API::MergeRequests, feature_category: :source_code_management do end end - it 'returns merge requests without the assignee given', :aggregate_failures do + it 'returns merge requests without the assignee given' do get api(endpoint_path, user), params: { not: { assignee_id: user2.id } } expect(response).to have_gitlab_http_status(:ok) @@ -3448,8 +3537,13 @@ RSpec.describe API::MergeRequests, feature_category: :source_code_management do end describe 'POST :id/merge_requests/:merge_request_iid/subscribe' do + it_behaves_like 'POST request permissions for admin mode' do + let(:path) { "/projects/#{project.id}/merge_requests/#{merge_request.iid}/subscribe" } + let(:params) { {} } + end + it 'subscribes to a merge request' do - post api("/projects/#{project.id}/merge_requests/#{merge_request.iid}/subscribe", admin) + post api("/projects/#{project.id}/merge_requests/#{merge_request.iid}/subscribe", admin, admin_mode: true) expect(response).to have_gitlab_http_status(:created) expect(json_response['subscribed']).to eq(true) @@ -3492,7 +3586,7 @@ RSpec.describe API::MergeRequests, feature_category: :source_code_management do end it 'returns 304 if 
not subscribed' do - post api("/projects/#{project.id}/merge_requests/#{merge_request.iid}/unsubscribe", admin) + post api("/projects/#{project.id}/merge_requests/#{merge_request.iid}/unsubscribe", admin, admin_mode: true) expect(response).to have_gitlab_http_status(:not_modified) end diff --git a/spec/requests/api/metrics/dashboard/annotations_spec.rb b/spec/requests/api/metrics/dashboard/annotations_spec.rb index 7932dd29e4d..cefd5896158 100644 --- a/spec/requests/api/metrics/dashboard/annotations_spec.rb +++ b/spec/requests/api/metrics/dashboard/annotations_spec.rb @@ -35,7 +35,7 @@ RSpec.describe API::Metrics::Dashboard::Annotations, feature_category: :metrics end context 'with invalid parameters' do - it 'returns error messsage' do + it 'returns error message' do post api(url, user), params: { dashboard_path: '', starting_at: nil, description: nil } expect(response).to have_gitlab_http_status(:bad_request) diff --git a/spec/requests/api/ml/mlflow_spec.rb b/spec/requests/api/ml/mlflow_spec.rb index fdf115f7e92..5c6289948cc 100644 --- a/spec/requests/api/ml/mlflow_spec.rb +++ b/spec/requests/api/ml/mlflow_spec.rb @@ -18,7 +18,7 @@ RSpec.describe API::Ml::Mlflow, feature_category: :mlops do let_it_be(:candidate) do create(:ml_candidates, :with_metrics_and_params, :with_metadata, - user: experiment.user, start_time: 1234, experiment: experiment) + user: experiment.user, start_time: 1234, experiment: experiment, project: project) end let_it_be(:tokens) do @@ -402,14 +402,14 @@ RSpec.describe API::Ml::Mlflow, feature_category: :mlops do describe 'GET /projects/:id/ml/mlflow/api/2.0/mlflow/runs/get' do let(:route) { "/projects/#{project_id}/ml/mlflow/api/2.0/mlflow/runs/get" } - let(:default_params) { { 'run_id' => candidate.iid } } + let(:default_params) { { 'run_id' => candidate.eid } } it 'gets the run', :aggregate_failures do expected_properties = { 'experiment_id' => candidate.experiment.iid.to_s, 'user_id' => candidate.user.id.to_s, 'start_time' => 
candidate.start_time, - 'artifact_uri' => "http://www.example.com/api/v4/projects/#{project_id}/packages/generic/ml_candidate_#{candidate.id}/-/", + 'artifact_uri' => "http://www.example.com/api/v4/projects/#{project_id}/packages/generic/ml_experiment_#{experiment.iid}/#{candidate.iid}/", 'status' => "RUNNING", 'lifecycle_stage' => "active" } @@ -442,7 +442,7 @@ RSpec.describe API::Ml::Mlflow, feature_category: :mlops do end describe 'POST /projects/:id/ml/mlflow/api/2.0/mlflow/runs/update' do - let(:default_params) { { run_id: candidate.iid.to_s, status: 'FAILED', end_time: Time.now.to_i } } + let(:default_params) { { run_id: candidate.eid.to_s, status: 'FAILED', end_time: Time.now.to_i } } let(:request) { post api(route), params: params, headers: headers } let(:route) { "/projects/#{project_id}/ml/mlflow/api/2.0/mlflow/runs/update" } @@ -452,7 +452,7 @@ RSpec.describe API::Ml::Mlflow, feature_category: :mlops do 'user_id' => candidate.user.id.to_s, 'start_time' => candidate.start_time, 'end_time' => params[:end_time], - 'artifact_uri' => "http://www.example.com/api/v4/projects/#{project_id}/packages/generic/ml_candidate_#{candidate.id}/-/", + 'artifact_uri' => "http://www.example.com/api/v4/projects/#{project_id}/packages/generic/ml_experiment_#{experiment.iid}/#{candidate.iid}/", 'status' => 'FAILED', 'lifecycle_stage' => 'active' } @@ -483,7 +483,7 @@ RSpec.describe API::Ml::Mlflow, feature_category: :mlops do describe 'POST /projects/:id/ml/mlflow/api/2.0/mlflow/runs/log-metric' do let(:route) { "/projects/#{project_id}/ml/mlflow/api/2.0/mlflow/runs/log-metric" } - let(:default_params) { { run_id: candidate.iid.to_s, key: 'some_key', value: 10.0, timestamp: Time.now.to_i } } + let(:default_params) { { run_id: candidate.eid.to_s, key: 'some_key', value: 10.0, timestamp: Time.now.to_i } } let(:request) { post api(route), params: params, headers: headers } it 'logs the metric', :aggregate_failures do @@ -504,7 +504,7 @@ RSpec.describe API::Ml::Mlflow, 
feature_category: :mlops do describe 'POST /projects/:id/ml/mlflow/api/2.0/mlflow/runs/log-parameter' do let(:route) { "/projects/#{project_id}/ml/mlflow/api/2.0/mlflow/runs/log-parameter" } - let(:default_params) { { run_id: candidate.iid.to_s, key: 'some_key', value: 'value' } } + let(:default_params) { { run_id: candidate.eid.to_s, key: 'some_key', value: 'value' } } let(:request) { post api(route), params: params, headers: headers } it 'logs the parameter', :aggregate_failures do @@ -531,7 +531,7 @@ RSpec.describe API::Ml::Mlflow, feature_category: :mlops do describe 'POST /projects/:id/ml/mlflow/api/2.0/mlflow/runs/set-tag' do let(:route) { "/projects/#{project_id}/ml/mlflow/api/2.0/mlflow/runs/set-tag" } - let(:default_params) { { run_id: candidate.iid.to_s, key: 'some_key', value: 'value' } } + let(:default_params) { { run_id: candidate.eid.to_s, key: 'some_key', value: 'value' } } let(:request) { post api(route), params: params, headers: headers } it 'logs the tag', :aggregate_failures do @@ -556,13 +556,13 @@ RSpec.describe API::Ml::Mlflow, feature_category: :mlops do describe 'POST /projects/:id/ml/mlflow/api/2.0/mlflow/runs/log-batch' do let(:candidate2) do - create(:ml_candidates, user: experiment.user, start_time: 1234, experiment: experiment) + create(:ml_candidates, user: experiment.user, start_time: 1234, experiment: experiment, project: project) end let(:route) { "/projects/#{project_id}/ml/mlflow/api/2.0/mlflow/runs/log-batch" } let(:default_params) do { - run_id: candidate2.iid.to_s, + run_id: candidate2.eid.to_s, metrics: [ { key: 'mae', value: 2.5, timestamp: 1552550804 }, { key: 'rmse', value: 2.7, timestamp: 1552550804 } diff --git a/spec/requests/api/namespaces_spec.rb b/spec/requests/api/namespaces_spec.rb index 44574caf54a..f268a092034 100644 --- a/spec/requests/api/namespaces_spec.rb +++ b/spec/requests/api/namespaces_spec.rb @@ -2,25 +2,27 @@ require 'spec_helper' -RSpec.describe API::Namespaces, feature_category: :subgroups do 
+RSpec.describe API::Namespaces, :aggregate_failures, feature_category: :subgroups do let_it_be(:admin) { create(:admin) } let_it_be(:user) { create(:user) } let_it_be(:group1) { create(:group, name: 'group.one') } let_it_be(:group2) { create(:group, :nested) } let_it_be(:project) { create(:project, namespace: group2, name: group2.name, path: group2.path) } let_it_be(:project_namespace) { project.project_namespace } + let_it_be(:path) { "/namespaces" } describe "GET /namespaces" do context "when unauthenticated" do it "returns authentication error" do - get api("/namespaces") + get api(path) + expect(response).to have_gitlab_http_status(:unauthorized) end end context "when authenticated as admin" do it "returns correct attributes" do - get api("/namespaces", admin) + get api(path, admin, admin_mode: true) group_kind_json_response = json_response.find { |resource| resource['kind'] == 'group' } user_kind_json_response = json_response.find { |resource| resource['kind'] == 'user' } @@ -34,7 +36,7 @@ RSpec.describe API::Namespaces, feature_category: :subgroups do end it "admin: returns an array of all namespaces" do - get api("/namespaces", admin) + get api(path, admin, admin_mode: true) expect(response).to have_gitlab_http_status(:ok) expect(response).to include_pagination_headers @@ -44,7 +46,7 @@ RSpec.describe API::Namespaces, feature_category: :subgroups do end it "admin: returns an array of matched namespaces" do - get api("/namespaces?search=#{group2.name}", admin) + get api("/namespaces?search=#{group2.name}", admin, admin_mode: true) expect(response).to have_gitlab_http_status(:ok) expect(response).to include_pagination_headers @@ -59,7 +61,7 @@ RSpec.describe API::Namespaces, feature_category: :subgroups do it "returns correct attributes when user can admin group" do group1.add_owner(user) - get api("/namespaces", user) + get api(path, user) owned_group_response = json_response.find { |resource| resource['id'] == group1.id } @@ -70,7 +72,7 @@ RSpec.describe 
API::Namespaces, feature_category: :subgroups do it "returns correct attributes when user cannot admin group" do group1.add_guest(user) - get api("/namespaces", user) + get api(path, user) guest_group_response = json_response.find { |resource| resource['id'] == group1.id } @@ -78,7 +80,7 @@ RSpec.describe API::Namespaces, feature_category: :subgroups do end it "user: returns an array of namespaces" do - get api("/namespaces", user) + get api(path, user) expect(response).to have_gitlab_http_status(:ok) expect(response).to include_pagination_headers @@ -115,9 +117,19 @@ RSpec.describe API::Namespaces, feature_category: :subgroups do let_it_be(:user2) { create(:user) } - shared_examples 'can access namespace' do + it_behaves_like 'GET request permissions for admin mode' do + let(:path) { "/namespaces/#{group2.id}" } + let(:failed_status_code) { :not_found } + end + + it_behaves_like 'GET request permissions for admin mode' do + let(:path) { "/namespaces/#{user2.namespace.id}" } + let(:failed_status_code) { :not_found } + end + + shared_examples 'can access namespace' do |admin_mode: false| it 'returns namespace details' do - get api("/namespaces/#{namespace_id}", request_actor) + get api("#{path}/#{namespace_id}", request_actor, admin_mode: admin_mode) expect(response).to have_gitlab_http_status(:ok) @@ -153,7 +165,7 @@ RSpec.describe API::Namespaces, feature_category: :subgroups do let(:namespace_id) { project_namespace.id } it 'returns not-found' do - get api("/namespaces/#{namespace_id}", request_actor) + get api("#{path}/#{namespace_id}", request_actor) expect(response).to have_gitlab_http_status(:not_found) end @@ -188,7 +200,7 @@ RSpec.describe API::Namespaces, feature_category: :subgroups do context "when namespace doesn't exist" do it 'returns not-found' do - get api('/namespaces/0', request_actor) + get api("#{path}/0", request_actor) expect(response).to have_gitlab_http_status(:not_found) end @@ -197,13 +209,13 @@ RSpec.describe API::Namespaces, 
feature_category: :subgroups do context 'when unauthenticated' do it 'returns authentication error' do - get api("/namespaces/#{group1.id}") + get api("#{path}/#{group1.id}") expect(response).to have_gitlab_http_status(:unauthorized) end it 'returns authentication error' do - get api("/namespaces/#{project_namespace.id}") + get api("#{path}/#{project_namespace.id}") expect(response).to have_gitlab_http_status(:unauthorized) end @@ -215,7 +227,7 @@ RSpec.describe API::Namespaces, feature_category: :subgroups do context 'when requested namespace is not owned by user' do context 'when requesting group' do it 'returns not-found' do - get api("/namespaces/#{group2.id}", request_actor) + get api("#{path}/#{group2.id}", request_actor) expect(response).to have_gitlab_http_status(:not_found) end @@ -223,7 +235,7 @@ RSpec.describe API::Namespaces, feature_category: :subgroups do context 'when requesting personal namespace' do it 'returns not-found' do - get api("/namespaces/#{user2.namespace.id}", request_actor) + get api("#{path}/#{user2.namespace.id}", request_actor) expect(response).to have_gitlab_http_status(:not_found) end @@ -243,14 +255,14 @@ RSpec.describe API::Namespaces, feature_category: :subgroups do let(:namespace_id) { group2.id } let(:requested_namespace) { group2 } - it_behaves_like 'can access namespace' + it_behaves_like 'can access namespace', admin_mode: true end context 'when requesting personal namespace' do let(:namespace_id) { user2.namespace.id } let(:requested_namespace) { user2.namespace } - it_behaves_like 'can access namespace' + it_behaves_like 'can access namespace', admin_mode: true end end @@ -269,7 +281,7 @@ RSpec.describe API::Namespaces, feature_category: :subgroups do context 'when unauthenticated' do it 'returns authentication error' do - get api("/namespaces/#{namespace1.path}/exists") + get api("#{path}/#{namespace1.path}/exists") expect(response).to have_gitlab_http_status(:unauthorized) end @@ -278,7 +290,7 @@ RSpec.describe 
API::Namespaces, feature_category: :subgroups do let(:namespace_id) { project_namespace.id } it 'returns authentication error' do - get api("/namespaces/#{project_namespace.path}/exists"), params: { parent_id: group2.id } + get api("#{path}/#{project_namespace.path}/exists"), params: { parent_id: group2.id } expect(response).to have_gitlab_http_status(:unauthorized) end @@ -290,12 +302,12 @@ RSpec.describe API::Namespaces, feature_category: :subgroups do let(:current_user) { user } def request - get api("/namespaces/#{namespace1.path}/exists", current_user) + get api("#{path}/#{namespace1.path}/exists", current_user) end end it 'returns JSON indicating the namespace exists and a suggestion' do - get api("/namespaces/#{namespace1.path}/exists", user) + get api("#{path}/#{namespace1.path}/exists", user) expected_json = { exists: true, suggests: ["#{namespace1.path}1"] }.to_json expect(response).to have_gitlab_http_status(:ok) @@ -303,7 +315,7 @@ RSpec.describe API::Namespaces, feature_category: :subgroups do end it 'supports dot in namespace path' do - get api("/namespaces/#{namespace_with_dot.path}/exists", user) + get api("#{path}/#{namespace_with_dot.path}/exists", user) expected_json = { exists: true, suggests: ["#{namespace_with_dot.path}1"] }.to_json expect(response).to have_gitlab_http_status(:ok) @@ -311,7 +323,7 @@ RSpec.describe API::Namespaces, feature_category: :subgroups do end it 'returns JSON indicating the namespace does not exist without a suggestion' do - get api("/namespaces/non-existing-namespace/exists", user) + get api("#{path}/non-existing-namespace/exists", user) expected_json = { exists: false, suggests: [] }.to_json expect(response).to have_gitlab_http_status(:ok) @@ -319,7 +331,7 @@ RSpec.describe API::Namespaces, feature_category: :subgroups do end it 'checks the existence of a namespace in case-insensitive manner' do - get api("/namespaces/#{namespace1.path.upcase}/exists", user) + get api("#{path}/#{namespace1.path.upcase}/exists", user) 
expected_json = { exists: true, suggests: ["#{namespace1.path.upcase}1"] }.to_json expect(response).to have_gitlab_http_status(:ok) @@ -327,7 +339,7 @@ RSpec.describe API::Namespaces, feature_category: :subgroups do end it 'checks the existence within the parent namespace only' do - get api("/namespaces/#{namespace1sub.path}/exists", user), params: { parent_id: namespace1.id } + get api("#{path}/#{namespace1sub.path}/exists", user), params: { parent_id: namespace1.id } expected_json = { exists: true, suggests: ["#{namespace1sub.path}1"] }.to_json expect(response).to have_gitlab_http_status(:ok) @@ -335,7 +347,7 @@ RSpec.describe API::Namespaces, feature_category: :subgroups do end it 'ignores nested namespaces when checking for top-level namespace' do - get api("/namespaces/#{namespace1sub.path}/exists", user) + get api("#{path}/#{namespace1sub.path}/exists", user) expected_json = { exists: false, suggests: [] }.to_json expect(response).to have_gitlab_http_status(:ok) @@ -349,7 +361,7 @@ RSpec.describe API::Namespaces, feature_category: :subgroups do create(:group, name: 'mygroup', path: 'mygroup', parent: namespace1) - get api("/namespaces/mygroup/exists", user), params: { parent_id: namespace1.id } + get api("#{path}/mygroup/exists", user), params: { parent_id: namespace1.id } # if the paths of groups present in hierachies aren't ignored, the suggestion generated would have # been `mygroup3`, just because groups with path `mygroup1` and `mygroup2` exists somewhere else. 
@@ -361,7 +373,7 @@ RSpec.describe API::Namespaces, feature_category: :subgroups do end it 'ignores top-level namespaces when checking with parent_id' do - get api("/namespaces/#{namespace1.path}/exists", user), params: { parent_id: namespace1.id } + get api("#{path}/#{namespace1.path}/exists", user), params: { parent_id: namespace1.id } expected_json = { exists: false, suggests: [] }.to_json expect(response).to have_gitlab_http_status(:ok) @@ -369,7 +381,7 @@ RSpec.describe API::Namespaces, feature_category: :subgroups do end it 'ignores namespaces of other parent namespaces when checking with parent_id' do - get api("/namespaces/#{namespace2sub.path}/exists", user), params: { parent_id: namespace1.id } + get api("#{path}/#{namespace2sub.path}/exists", user), params: { parent_id: namespace1.id } expected_json = { exists: false, suggests: [] }.to_json expect(response).to have_gitlab_http_status(:ok) @@ -380,7 +392,7 @@ RSpec.describe API::Namespaces, feature_category: :subgroups do let(:namespace_id) { project_namespace.id } it 'returns JSON indicating the namespace does not exist without a suggestion' do - get api("/namespaces/#{project_namespace.path}/exists", user), params: { parent_id: group2.id } + get api("#{path}/#{project_namespace.path}/exists", user), params: { parent_id: group2.id } expected_json = { exists: false, suggests: [] }.to_json expect(response).to have_gitlab_http_status(:ok) diff --git a/spec/requests/api/notes_spec.rb b/spec/requests/api/notes_spec.rb index c0276e02eb7..d535629ea0d 100644 --- a/spec/requests/api/notes_spec.rb +++ b/spec/requests/api/notes_spec.rb @@ -70,7 +70,7 @@ RSpec.describe API::Notes, feature_category: :team_planning do describe "GET /projects/:id/noteable/:noteable_id/notes" do context "current user cannot view the notes" do - it "returns an empty array" do + it "returns an empty array", :aggregate_failures do get api("/projects/#{ext_proj.id}/issues/#{ext_issue.iid}/notes", user) expect(response).to 
have_gitlab_http_status(:ok) @@ -93,7 +93,7 @@ RSpec.describe API::Notes, feature_category: :team_planning do end context "current user can view the note" do - it "returns a non-empty array" do + it "returns a non-empty array", :aggregate_failures do get api("/projects/#{ext_proj.id}/issues/#{ext_issue.iid}/notes", private_user) expect(response).to have_gitlab_http_status(:ok) @@ -114,7 +114,7 @@ RSpec.describe API::Notes, feature_category: :team_planning do let(:test_url) { "/projects/#{ext_proj.id}/issues/#{ext_issue.iid}/notes" } shared_examples 'a notes request' do - it 'is a note array response' do + it 'is a note array response', :aggregate_failures do expect(response).to have_gitlab_http_status(:ok) expect(response).to include_pagination_headers expect(json_response).to be_an Array @@ -164,7 +164,7 @@ RSpec.describe API::Notes, feature_category: :team_planning do it_behaves_like 'a notes request' - it "properly filters the returned notables" do + it "properly filters the returned notables", :aggregate_failures do expect(json_response.count).to eq(count) expect(json_response.first["system"]).to be system_notable end @@ -195,7 +195,7 @@ RSpec.describe API::Notes, feature_category: :team_planning do end context "current user can view the note" do - it "returns an issue note by id" do + it "returns an issue note by id", :aggregate_failures do get api("/projects/#{ext_proj.id}/issues/#{ext_issue.iid}/notes/#{cross_reference_note.id}", private_user) expect(response).to have_gitlab_http_status(:ok) diff --git a/spec/requests/api/npm_project_packages_spec.rb b/spec/requests/api/npm_project_packages_spec.rb index 2f67e1e8eea..f621af5d968 100644 --- a/spec/requests/api/npm_project_packages_spec.rb +++ b/spec/requests/api/npm_project_packages_spec.rb @@ -3,6 +3,8 @@ require 'spec_helper' RSpec.describe API::NpmProjectPackages, feature_category: :package_registry do + include ExclusiveLeaseHelpers + include_context 'npm api setup' shared_examples 'accept get request on 
private project with access to package registry for everyone' do @@ -224,15 +226,7 @@ RSpec.describe API::NpmProjectPackages, feature_category: :package_registry do context 'with access token' do it_behaves_like 'a package tracking event', 'API::NpmPackages', 'push_package' - it 'creates npm package with file' do - expect { subject } - .to change { project.packages.count }.by(1) - .and change { Packages::PackageFile.count }.by(1) - .and change { Packages::Tag.count }.by(1) - .and change { Packages::Npm::Metadatum.count }.by(1) - - expect(response).to have_gitlab_http_status(:ok) - end + it_behaves_like 'a successful package creation' end it 'creates npm package with file with job token' do @@ -368,12 +362,13 @@ RSpec.describe API::NpmProjectPackages, feature_category: :package_registry do end end - context 'with a too large metadata structure' do - let(:package_name) { "@#{group.path}/my_package_name" } - let(:params) do - upload_params(package_name: package_name, package_version: '1.2.3').tap do |h| - h['versions']['1.2.3']['test'] = 'test' * 10000 - end + context 'when the lease to create a package is already taken' do + let(:version) { '1.0.1' } + let(:params) { upload_params(package_name: package_name, package_version: version) } + let(:lease_key) { "packages:npm:create_package_service:packages:#{project.id}_#{package_name}_#{version}" } + + before do + stub_exclusive_lease_taken(lease_key, timeout: Packages::Npm::CreatePackageService::DEFAULT_LEASE_TIMEOUT) end it_behaves_like 'not a package tracking event' @@ -383,7 +378,95 @@ RSpec.describe API::NpmProjectPackages, feature_category: :package_registry do .not_to change { project.packages.count } expect(response).to have_gitlab_http_status(:bad_request) - expect(response.body).to include('Validation failed: Package json structure is too large') + expect(response.body).to include('Could not obtain package lease.') + end + end + + context 'with a too large metadata structure' do + let(:package_name) { 
"@#{group.path}/my_package_name" } + + ::Packages::Npm::CreatePackageService::PACKAGE_JSON_NOT_ALLOWED_FIELDS.each do |field| + context "when a large value for #{field} is set" do + let(:params) do + upload_params(package_name: package_name, package_version: '1.2.3').tap do |h| + h['versions']['1.2.3'][field] = 'test' * 10000 + end + end + + it_behaves_like 'a successful package creation' + end + end + + context 'when the large field is not one of the ignored fields' do + let(:params) do + upload_params(package_name: package_name, package_version: '1.2.3').tap do |h| + h['versions']['1.2.3']['test'] = 'test' * 10000 + end + end + + it_behaves_like 'not a package tracking event' + + it 'returns an error' do + expect { upload_package_with_token } + .not_to change { project.packages.count } + + expect(response).to have_gitlab_http_status(:bad_request) + expect(response.body).to include('Validation failed: Package json structure is too large') + end + end + end + + context 'when the Npm-Command in headers is deprecate' do + let(:package_name) { "@#{group.path}/my_package_name" } + let(:headers) { build_token_auth_header(token.plaintext_token).merge('Npm-Command' => 'deprecate') } + let(:params) do + { + 'id' => project.id.to_s, + 'package_name' => package_name, + 'versions' => { + '1.0.1' => { + 'name' => package_name, + 'deprecated' => 'This version is deprecated' + }, + '1.0.2' => { + 'name' => package_name + } + } + } + end + + subject(:request) { put api("/projects/#{project.id}/packages/npm/#{package_name.sub('/', '%2f')}"), params: params, headers: headers } + + context 'when the user is not authorized to destroy the package' do + before do + project.add_developer(user) + end + + it 'does not call DeprecatePackageService' do + expect(::Packages::Npm::DeprecatePackageService).not_to receive(:new) + + request + + expect(response).to have_gitlab_http_status(:forbidden) + end + end + + context 'when the user is authorized to destroy the package' do + before do + 
project.add_maintainer(user) + end + + it 'calls DeprecatePackageService with the correct arguments' do + expect(::Packages::Npm::DeprecatePackageService).to receive(:new).with(project, params) do + double.tap do |service| + expect(service).to receive(:execute).with(async: true) + end + end + + request + + expect(response).to have_gitlab_http_status(:ok) + end end end end diff --git a/spec/requests/api/pages/pages_spec.rb b/spec/requests/api/pages/pages_spec.rb index 0f6675799ad..aa1869eaa84 100644 --- a/spec/requests/api/pages/pages_spec.rb +++ b/spec/requests/api/pages/pages_spec.rb @@ -13,13 +13,23 @@ RSpec.describe API::Pages, feature_category: :pages do end describe 'DELETE /projects/:id/pages' do + let(:path) { "/projects/#{project.id}/pages" } + + it_behaves_like 'DELETE request permissions for admin mode' do + before do + allow(Gitlab.config.pages).to receive(:enabled).and_return(true) + end + + let(:success_status_code) { :no_content } + end + context 'when Pages is disabled' do before do allow(Gitlab.config.pages).to receive(:enabled).and_return(false) end it_behaves_like '404 response' do - let(:request) { delete api("/projects/#{project.id}/pages", admin, admin_mode: true) } + let(:request) { delete api(path, admin, admin_mode: true) } end end @@ -30,13 +40,13 @@ RSpec.describe API::Pages, feature_category: :pages do context 'when Pages are deployed' do it 'returns 204' do - delete api("/projects/#{project.id}/pages", admin, admin_mode: true) + delete api(path, admin, admin_mode: true) expect(response).to have_gitlab_http_status(:no_content) end it 'removes the pages' do - delete api("/projects/#{project.id}/pages", admin, admin_mode: true) + delete api(path, admin, admin_mode: true) expect(project.reload.pages_metadatum.deployed?).to be(false) end @@ -48,7 +58,7 @@ RSpec.describe API::Pages, feature_category: :pages do end it 'returns 204' do - delete api("/projects/#{project.id}/pages", admin, admin_mode:
true) expect(response).to have_gitlab_http_status(:no_content) end diff --git a/spec/requests/api/pages_domains_spec.rb b/spec/requests/api/pages_domains_spec.rb index ea83fa384af..9ca027c2edc 100644 --- a/spec/requests/api/pages_domains_spec.rb +++ b/spec/requests/api/pages_domains_spec.rb @@ -35,6 +35,10 @@ RSpec.describe API::PagesDomains, feature_category: :pages do end describe 'GET /pages/domains' do + it_behaves_like 'GET request permissions for admin mode' do + let(:path) { '/pages/domains' } + end + context 'when pages is disabled' do before do allow(Gitlab.config.pages).to receive(:enabled).and_return(false) diff --git a/spec/requests/api/personal_access_tokens/self_information_spec.rb b/spec/requests/api/personal_access_tokens/self_information_spec.rb index 2a7af350054..3cfaaaf7d3f 100644 --- a/spec/requests/api/personal_access_tokens/self_information_spec.rb +++ b/spec/requests/api/personal_access_tokens/self_information_spec.rb @@ -12,7 +12,7 @@ RSpec.describe API::PersonalAccessTokens::SelfInformation, feature_category: :sy subject(:delete_token) { delete api(path, personal_access_token: token) } shared_examples 'revoking token succeeds' do - it 'revokes token' do + it 'revokes token', :aggregate_failures do delete_token expect(response).to have_gitlab_http_status(:no_content) @@ -72,7 +72,7 @@ RSpec.describe API::PersonalAccessTokens::SelfInformation, feature_category: :sy context "with a '#{scope}' scoped token" do let(:token) { create(:personal_access_token, scopes: [scope], user: current_user) } - it 'shows token info' do + it 'shows token info', :aggregate_failures do get api(path, personal_access_token: token) expect(response).to have_gitlab_http_status(:ok) diff --git a/spec/requests/api/personal_access_tokens_spec.rb b/spec/requests/api/personal_access_tokens_spec.rb index cca94c7a012..487cbdacb03 100644 --- a/spec/requests/api/personal_access_tokens_spec.rb +++ b/spec/requests/api/personal_access_tokens_spec.rb @@ -2,7 +2,7 @@ require 
'spec_helper' -RSpec.describe API::PersonalAccessTokens, feature_category: :system_access do +RSpec.describe API::PersonalAccessTokens, :aggregate_failures, feature_category: :system_access do let_it_be(:path) { '/personal_access_tokens' } describe 'GET /personal_access_tokens' do @@ -30,9 +30,13 @@ RSpec.describe API::PersonalAccessTokens, feature_category: :system_access do end end + # Since all user types pass the same test successfully, we can avoid using + # shared examples and test each user type separately for its expected + # returned value. + context 'logged in as an Administrator' do let_it_be(:current_user) { create(:admin) } - let_it_be(:current_users_token) { create(:personal_access_token, user: current_user) } + let_it_be(:current_users_token) { create(:personal_access_token, :admin_mode, user: current_user) } it 'returns all PATs by default' do get api(path, current_user) @@ -46,7 +50,7 @@ RSpec.describe API::PersonalAccessTokens, feature_category: :system_access do let_it_be(:token_impersonated) { create(:personal_access_token, impersonation: true, user: token.user) } it 'returns only PATs belonging to that user' do - get api(path, current_user), params: { user_id: token.user.id } + get api(path, current_user, admin_mode: true), params: { user_id: token.user.id } expect(response).to have_gitlab_http_status(:ok) expect(json_response.count).to eq(2) diff --git a/spec/requests/api/project_attributes.yml b/spec/requests/api/project_attributes.yml index 60406f380a5..22a61adfae2 100644 --- a/spec/requests/api/project_attributes.yml +++ b/spec/requests/api/project_attributes.yml @@ -164,6 +164,8 @@ project_setting: - emails_enabled - pages_unique_domain_enabled - pages_unique_domain + - runner_registration_enabled + - product_analytics_instrumentation_key build_service_desk_setting: # service_desk_setting unexposed_attributes: diff --git a/spec/requests/api/project_clusters_spec.rb b/spec/requests/api/project_clusters_spec.rb index 895192252da..c52948a4cb0 
100644 --- a/spec/requests/api/project_clusters_spec.rb +++ b/spec/requests/api/project_clusters_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe API::ProjectClusters, feature_category: :kubernetes_management do +RSpec.describe API::ProjectClusters, feature_category: :deployment_management do include KubernetesHelpers let_it_be(:maintainer_user) { create(:user) } diff --git a/spec/requests/api/project_export_spec.rb b/spec/requests/api/project_export_spec.rb index 096f0b73b4c..22d7ea36f6c 100644 --- a/spec/requests/api/project_export_spec.rb +++ b/spec/requests/api/project_export_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe API::ProjectExport, :clean_gitlab_redis_cache, feature_category: :importers do +RSpec.describe API::ProjectExport, :aggregate_failures, :clean_gitlab_redis_cache, feature_category: :importers do let_it_be(:project) { create(:project) } let_it_be(:project_none) { create(:project) } let_it_be(:project_started) { create(:project) } @@ -45,21 +45,27 @@ RSpec.describe API::ProjectExport, :clean_gitlab_redis_cache, feature_category: end describe 'GET /projects/:project_id/export' do + it_behaves_like 'GET request permissions for admin mode' do + let(:failed_status_code) { :not_found } + end + shared_examples_for 'get project export status not found' do it_behaves_like '404 response' do - let(:request) { get api(path, user) } + subject(:request) { get api(path, user) } end end shared_examples_for 'get project export status denied' do it_behaves_like '403 response' do - let(:request) { get api(path, user) } + subject(:request) { get api(path, user) } end end shared_examples_for 'get project export status ok' do + let_it_be(:admin_mode) { false } + it 'is none' do - get api(path_none, user) + get api(path_none, user, admin_mode: admin_mode) expect(response).to have_gitlab_http_status(:ok) expect(response).to match_response_schema('public_api/v4/project/export_status') @@ -72,7 +78,7 @@ RSpec.describe API::ProjectExport, 
:clean_gitlab_redis_cache, feature_category: end it 'returns status started' do - get api(path_started, user) + get api(path_started, user, admin_mode: admin_mode) expect(response).to have_gitlab_http_status(:ok) expect(response).to match_response_schema('public_api/v4/project/export_status') @@ -82,7 +88,7 @@ RSpec.describe API::ProjectExport, :clean_gitlab_redis_cache, feature_category: context 'when project export has finished' do it 'returns status finished' do - get api(path_finished, user) + get api(path_finished, user, admin_mode: admin_mode) expect(response).to have_gitlab_http_status(:ok) expect(response).to match_response_schema('public_api/v4/project/export_status') @@ -96,7 +102,7 @@ RSpec.describe API::ProjectExport, :clean_gitlab_redis_cache, feature_category: end it 'returns status regeneration_in_progress' do - get api(path_finished, user) + get api(path_finished, user, admin_mode: admin_mode) expect(response).to have_gitlab_http_status(:ok) expect(response).to match_response_schema('public_api/v4/project/export_status') @@ -106,14 +112,16 @@ RSpec.describe API::ProjectExport, :clean_gitlab_redis_cache, feature_category: end it_behaves_like 'when project export is disabled' do - let(:request) { get api(path, admin) } + subject(:request) { get api(path, admin, admin_mode: true) } end context 'when project export is enabled' do context 'when user is an admin' do let(:user) { admin } - it_behaves_like 'get project export status ok' + it_behaves_like 'get project export status ok' do + let(:admin_mode) { true } + end end context 'when user is a maintainer' do @@ -159,29 +167,34 @@ RSpec.describe API::ProjectExport, :clean_gitlab_redis_cache, feature_category: end describe 'GET /projects/:project_id/export/download' do + it_behaves_like 'GET request permissions for admin mode' do + let(:path) { download_path_finished } + let(:failed_status_code) { :not_found } + end + shared_examples_for 'get project export download not found' do it_behaves_like '404 
response' do - let(:request) { get api(download_path, user) } + subject(:request) { get api(download_path, user) } end end shared_examples_for 'get project export download denied' do it_behaves_like '403 response' do - let(:request) { get api(download_path, user) } + subject(:request) { get api(download_path, user) } end end shared_examples_for 'get project export download' do it_behaves_like '404 response' do - let(:request) { get api(download_path_none, user) } + subject(:request) { get api(download_path_none, user, admin_mode: admin_mode) } end it_behaves_like '404 response' do - let(:request) { get api(download_path_started, user) } + subject(:request) { get api(download_path_started, user, admin_mode: admin_mode) } end it 'downloads' do - get api(download_path_finished, user) + get api(download_path_finished, user, admin_mode: admin_mode) expect(response).to have_gitlab_http_status(:ok) end @@ -190,7 +203,7 @@ RSpec.describe API::ProjectExport, :clean_gitlab_redis_cache, feature_category: shared_examples_for 'get project export upload after action' do context 'and is uploading' do it 'downloads' do - get api(download_path_export_action, user) + get api(download_path_export_action, user, admin_mode: admin_mode) expect(response).to have_gitlab_http_status(:ok) end @@ -202,7 +215,7 @@ RSpec.describe API::ProjectExport, :clean_gitlab_redis_cache, feature_category: end it 'returns 404' do - get api(download_path_export_action, user) + get api(download_path_export_action, user, admin_mode: admin_mode) expect(response).to have_gitlab_http_status(:not_found) expect(json_response['message']).to eq('The project export file is not available yet') @@ -219,12 +232,14 @@ RSpec.describe API::ProjectExport, :clean_gitlab_redis_cache, feature_category: end it_behaves_like '404 response' do - let(:request) { get api(download_path_export_action, user) } + subject(:request) { get api(download_path_export_action, user, admin_mode: admin_mode) } end end end shared_examples_for 'get 
project download by strategy' do + let_it_be(:admin_mode) { false } + context 'when upload strategy set' do it_behaves_like 'get project export upload after action' end @@ -235,17 +250,19 @@ RSpec.describe API::ProjectExport, :clean_gitlab_redis_cache, feature_category: end it_behaves_like 'when project export is disabled' do - let(:request) { get api(download_path, admin) } + subject(:request) { get api(download_path, admin, admin_mode: true) } end context 'when project export is enabled' do context 'when user is an admin' do let(:user) { admin } - it_behaves_like 'get project download by strategy' + it_behaves_like 'get project download by strategy' do + let(:admin_mode) { true } + end context 'when rate limit is exceeded' do - let(:request) { get api(download_path, admin) } + subject(:request) { get api(download_path, admin, admin_mode: true) } before do allow_next_instance_of(Gitlab::ApplicationRateLimiter::BaseStrategy) do |strategy| @@ -271,7 +288,7 @@ RSpec.describe API::ProjectExport, :clean_gitlab_redis_cache, feature_category: # simulate prior request to the same namespace, which increments the rate limit counter for that scope Gitlab::ApplicationRateLimiter.throttled?(:project_download_export, scope: [user, project_finished.namespace]) - get api(download_path_finished, user) + get api(download_path_finished, user, admin_mode: true) expect(response).to have_gitlab_http_status(:too_many_requests) end @@ -280,7 +297,7 @@ RSpec.describe API::ProjectExport, :clean_gitlab_redis_cache, feature_category: Gitlab::ApplicationRateLimiter.throttled?(:project_download_export, scope: [user, create(:project, :with_export).namespace]) - get api(download_path_finished, user) + get api(download_path_finished, user, admin_mode: true) expect(response).to have_gitlab_http_status(:ok) end end @@ -345,30 +362,41 @@ RSpec.describe API::ProjectExport, :clean_gitlab_redis_cache, feature_category: end describe 'POST /projects/:project_id/export' do + let(:admin_mode) { false } + 
let(:params) { {} } + + it_behaves_like 'POST request permissions for admin mode' do + let(:params) { { 'upload[url]' => 'http://gitlab.com' } } + let(:failed_status_code) { :not_found } + let(:success_status_code) { :accepted } + end + + subject(:request) { post api(path, user, admin_mode: admin_mode), params: params } + shared_examples_for 'post project export start not found' do - it_behaves_like '404 response' do - let(:request) { post api(path, user) } - end + it_behaves_like '404 response' end shared_examples_for 'post project export start denied' do - it_behaves_like '403 response' do - let(:request) { post api(path, user) } - end + it_behaves_like '403 response' end shared_examples_for 'post project export start' do + let_it_be(:admin_mode) { false } + context 'with upload strategy' do context 'when params invalid' do it_behaves_like '400 response' do - let(:request) { post(api(path, user), params: { 'upload[url]' => 'whatever' }) } + let(:params) { { 'upload[url]' => 'whatever' } } end end it 'starts' do allow_any_instance_of(Gitlab::ImportExport::AfterExportStrategies::WebUploadStrategy).to receive(:send_file) - post(api(path, user), params: { 'upload[url]' => 'http://gitlab.com' }) + request do + let(:params) { { 'upload[url]' => 'http://gitlab.com' } } + end expect(response).to have_gitlab_http_status(:accepted) end @@ -388,7 +416,7 @@ RSpec.describe API::ProjectExport, :clean_gitlab_redis_cache, feature_category: it 'starts' do expect_any_instance_of(Gitlab::ImportExport::AfterExportStrategies::WebUploadStrategy).not_to receive(:send_file) - post api(path, user) + request expect(response).to have_gitlab_http_status(:accepted) end @@ -396,20 +424,21 @@ RSpec.describe API::ProjectExport, :clean_gitlab_redis_cache, feature_category: it 'removes previously exported archive file' do expect(project).to receive(:remove_exports).once - post api(path, user) + request end end end - it_behaves_like 'when project export is disabled' do - let(:request) { post 
api(path, admin) } - end + it_behaves_like 'when project export is disabled' context 'when project export is enabled' do context 'when user is an admin' do let(:user) { admin } + let(:admin_mode) { true } - it_behaves_like 'post project export start' + it_behaves_like 'post project export start' do + let(:admin_mode) { true } + end context 'with project export size limit' do before do @@ -417,7 +446,7 @@ RSpec.describe API::ProjectExport, :clean_gitlab_redis_cache, feature_category: end it 'starts if limit not exceeded' do - post api(path, user) + request expect(response).to have_gitlab_http_status(:accepted) end @@ -425,7 +454,7 @@ RSpec.describe API::ProjectExport, :clean_gitlab_redis_cache, feature_category: it '400 response if limit exceeded' do project.statistics.update!(lfs_objects_size: 2.megabytes, repository_size: 2.megabytes) - post api(path, user) + request expect(response).to have_gitlab_http_status(:bad_request) expect(json_response["message"]).to include('The project size exceeds the export limit.') @@ -441,7 +470,7 @@ RSpec.describe API::ProjectExport, :clean_gitlab_redis_cache, feature_category: end it 'prevents requesting project export' do - post api(path, admin) + request expect(response).to have_gitlab_http_status(:too_many_requests) expect(json_response['message']['error']).to eq('This endpoint has been requested too many times. 
Try again later.') @@ -559,7 +588,7 @@ RSpec.describe API::ProjectExport, :clean_gitlab_redis_cache, feature_category: let(:relation) { ::BulkImports::FileTransfer::ProjectConfig.new(project).skipped_relations.first } it_behaves_like '400 response' do - let(:request) { get api(download_path, user) } + subject(:request) { get api(download_path, user) } end end @@ -595,7 +624,7 @@ RSpec.describe API::ProjectExport, :clean_gitlab_redis_cache, feature_category: describe 'POST /projects/:id/export_relations' do it_behaves_like '404 response' do - let(:request) { post api(path, user) } + subject(:request) { post api(path, user) } end end @@ -608,13 +637,13 @@ RSpec.describe API::ProjectExport, :clean_gitlab_redis_cache, feature_category: end it_behaves_like '404 response' do - let(:request) { post api(path, user) } + subject(:request) { post api(path, user) } end end describe 'GET /projects/:id/export_relations/status' do it_behaves_like '404 response' do - let(:request) { get api(status_path, user) } + subject(:request) { get api(status_path, user) } end end end @@ -629,26 +658,26 @@ RSpec.describe API::ProjectExport, :clean_gitlab_redis_cache, feature_category: describe 'POST /projects/:id/export_relations' do it_behaves_like '403 response' do - let(:request) { post api(path, developer) } + subject(:request) { post api(path, developer) } end end describe 'GET /projects/:id/export_relations/download' do it_behaves_like '403 response' do - let(:request) { get api(download_path, developer) } + subject(:request) { get api(download_path, developer) } end end describe 'GET /projects/:id/export_relations/status' do it_behaves_like '403 response' do - let(:request) { get api(status_path, developer) } + subject(:request) { get api(status_path, developer) } end end end context 'when bulk import is disabled' do it_behaves_like '404 response' do - let(:request) { get api(path, user) } + subject(:request) { get api(path, user) } end end end diff --git 
a/spec/requests/api/project_import_spec.rb b/spec/requests/api/project_import_spec.rb index 027c61bb9e1..78b83356675 100644 --- a/spec/requests/api/project_import_spec.rb +++ b/spec/requests/api/project_import_spec.rb @@ -403,63 +403,49 @@ RSpec.describe API::ProjectImport, :aggregate_failures, feature_category: :impor it_behaves_like 'requires authentication' - it 'returns NOT FOUND when the feature is disabled' do - stub_feature_flags(import_project_from_remote_file_s3: false) - - subject - - expect(response).to have_gitlab_http_status(:not_found) - end - - context 'when the feature flag is enabled' do - before do - stub_feature_flags(import_project_from_remote_file_s3: true) - end - - context 'when the response is successful' do - it 'schedules the import successfully' do - project = create( - :project, - namespace: user.namespace, - name: 'test-import', - path: 'test-import' - ) + context 'when the response is successful' do + it 'schedules the import successfully' do + project = create( + :project, + namespace: user.namespace, + name: 'test-import', + path: 'test-import' + ) - service_response = ServiceResponse.success(payload: project) - expect_next(::Import::GitlabProjects::CreateProjectService) - .to receive(:execute) - .and_return(service_response) + service_response = ServiceResponse.success(payload: project) + expect_next(::Import::GitlabProjects::CreateProjectService) + .to receive(:execute) + .and_return(service_response) - subject + subject - expect(response).to have_gitlab_http_status(:created) - expect(json_response).to include({ - 'id' => project.id, - 'name' => 'test-import', - 'name_with_namespace' => "#{user.namespace.name} / test-import", - 'path' => 'test-import', - 'path_with_namespace' => "#{user.namespace.path}/test-import" - }) - end + expect(response).to have_gitlab_http_status(:created) + expect(json_response).to include({ + 'id' => project.id, + 'name' => 'test-import', + 'name_with_namespace' => "#{user.namespace.name} / test-import", 
+ 'path' => 'test-import', + 'path_with_namespace' => "#{user.namespace.path}/test-import" + }) end + end - context 'when the service returns an error' do - it 'fails to schedule the import' do - service_response = ServiceResponse.error( - message: 'Failed to import', - http_status: :bad_request - ) - expect_next(::Import::GitlabProjects::CreateProjectService) - .to receive(:execute) - .and_return(service_response) + context 'when the service returns an error' do + it 'fails to schedule the import' do + service_response = ServiceResponse.error( + message: 'Failed to import', + http_status: :bad_request + ) + expect_next(::Import::GitlabProjects::CreateProjectService) + .to receive(:execute) + .and_return(service_response) - subject + subject - expect(response).to have_gitlab_http_status(:bad_request) - expect(json_response).to eq({ - 'message' => 'Failed to import' - }) - end + expect(response).to have_gitlab_http_status(:bad_request) + expect(json_response).to eq({ + 'message' => 'Failed to import' + }) end end end diff --git a/spec/requests/api/project_snapshots_spec.rb b/spec/requests/api/project_snapshots_spec.rb index 5d3c596e605..cbf6907f9a3 100644 --- a/spec/requests/api/project_snapshots_spec.rb +++ b/spec/requests/api/project_snapshots_spec.rb @@ -2,11 +2,12 @@ require 'spec_helper' -RSpec.describe API::ProjectSnapshots, feature_category: :source_code_management do +RSpec.describe API::ProjectSnapshots, :aggregate_failures, feature_category: :source_code_management do include WorkhorseHelpers let(:project) { create(:project) } let(:admin) { create(:admin) } + let(:path) { "/projects/#{project.id}/snapshot" } before do allow(Feature::Gitaly).to receive(:server_feature_flags).and_return({ @@ -32,27 +33,29 @@ RSpec.describe API::ProjectSnapshots, feature_category: :source_code_management expect(response.parsed_body).to be_empty end + it_behaves_like 'GET request permissions for admin mode' + it 'returns authentication error as project owner' do - get 
api("/projects/#{project.id}/snapshot", project.first_owner) + get api(path, project.first_owner) expect(response).to have_gitlab_http_status(:forbidden) end it 'returns authentication error as unauthenticated user' do - get api("/projects/#{project.id}/snapshot", nil) + get api(path, nil) expect(response).to have_gitlab_http_status(:unauthorized) end it 'requests project repository raw archive as administrator' do - get api("/projects/#{project.id}/snapshot", admin), params: { wiki: '0' } + get api(path, admin, admin_mode: true), params: { wiki: '0' } expect(response).to have_gitlab_http_status(:ok) expect_snapshot_response_for(project.repository) end it 'requests wiki repository raw archive as administrator' do - get api("/projects/#{project.id}/snapshot", admin), params: { wiki: '1' } + get api(path, admin, admin_mode: true), params: { wiki: '1' } expect(response).to have_gitlab_http_status(:ok) expect_snapshot_response_for(project.wiki.repository) diff --git a/spec/requests/api/project_snippets_spec.rb b/spec/requests/api/project_snippets_spec.rb index 267557b8137..f0aa61c688b 100644 --- a/spec/requests/api/project_snippets_spec.rb +++ b/spec/requests/api/project_snippets_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe API::ProjectSnippets, feature_category: :source_code_management do +RSpec.describe API::ProjectSnippets, :aggregate_failures, feature_category: :source_code_management do include SnippetHelpers let_it_be(:project) { create(:project, :public) } @@ -14,8 +14,12 @@ RSpec.describe API::ProjectSnippets, feature_category: :source_code_management d describe "GET /projects/:project_id/snippets/:id/user_agent_detail" do let_it_be(:user_agent_detail) { create(:user_agent_detail, subject: public_snippet) } + it_behaves_like 'GET request permissions for admin mode' do + let(:path) { "/projects/#{public_snippet.project.id}/snippets/#{public_snippet.id}/user_agent_detail" } + end + it 'exposes known attributes' do - get 
api("/projects/#{project.id}/snippets/#{public_snippet.id}/user_agent_detail", admin) + get api("/projects/#{project.id}/snippets/#{public_snippet.id}/user_agent_detail", admin, admin_mode: true) expect(response).to have_gitlab_http_status(:ok) expect(json_response['user_agent']).to eq(user_agent_detail.user_agent) @@ -26,7 +30,7 @@ RSpec.describe API::ProjectSnippets, feature_category: :source_code_management d it 'respects project scoping' do other_project = create(:project) - get api("/projects/#{other_project.id}/snippets/#{public_snippet.id}/user_agent_detail", admin) + get api("/projects/#{other_project.id}/snippets/#{public_snippet.id}/user_agent_detail", admin, admin_mode: true) expect(response).to have_gitlab_http_status(:not_found) end @@ -38,7 +42,7 @@ RSpec.describe API::ProjectSnippets, feature_category: :source_code_management d context 'with snippets disabled' do it_behaves_like '403 response' do - let(:request) { get api("/projects/#{project_no_snippets.id}/snippets/#{non_existing_record_id}/user_agent_detail", admin) } + subject(:request) { get api("/projects/#{project_no_snippets.id}/snippets/#{non_existing_record_id}/user_agent_detail", admin, admin_mode: true) } end end end @@ -72,7 +76,7 @@ RSpec.describe API::ProjectSnippets, feature_category: :source_code_management d context 'with snippets disabled' do it_behaves_like '403 response' do - let(:request) { get api("/projects/#{project_no_snippets.id}/snippets", user) } + subject(:request) { get api("/projects/#{project_no_snippets.id}/snippets", user) } end end end @@ -83,16 +87,14 @@ RSpec.describe API::ProjectSnippets, feature_category: :source_code_management d it 'returns snippet json' do get api("/projects/#{project.id}/snippets/#{snippet.id}", user) - aggregate_failures do - expect(response).to have_gitlab_http_status(:ok) + expect(response).to have_gitlab_http_status(:ok) - expect(json_response['title']).to eq(snippet.title) - expect(json_response['description']).to 
eq(snippet.description) - expect(json_response['file_name']).to eq(snippet.file_name_on_repo) - expect(json_response['files']).to eq(snippet.blobs.map { |blob| snippet_blob_file(blob) }) - expect(json_response['ssh_url_to_repo']).to eq(snippet.ssh_url_to_repo) - expect(json_response['http_url_to_repo']).to eq(snippet.http_url_to_repo) - end + expect(json_response['title']).to eq(snippet.title) + expect(json_response['description']).to eq(snippet.description) + expect(json_response['file_name']).to eq(snippet.file_name_on_repo) + expect(json_response['files']).to eq(snippet.blobs.map { |blob| snippet_blob_file(blob) }) + expect(json_response['ssh_url_to_repo']).to eq(snippet.ssh_url_to_repo) + expect(json_response['http_url_to_repo']).to eq(snippet.http_url_to_repo) end it 'returns 404 for invalid snippet id' do @@ -104,7 +106,7 @@ RSpec.describe API::ProjectSnippets, feature_category: :source_code_management d context 'with snippets disabled' do it_behaves_like '403 response' do - let(:request) { get api("/projects/#{project_no_snippets.id}/snippets/#{non_existing_record_id}", user) } + subject(:request) { get api("/projects/#{project_no_snippets.id}/snippets/#{non_existing_record_id}", user) } end end @@ -126,22 +128,25 @@ RSpec.describe API::ProjectSnippets, feature_category: :source_code_management d let(:file_content) { 'puts "hello world"' } let(:file_params) { { files: [{ file_path: file_path, content: file_content }] } } let(:params) { base_params.merge(file_params) } + let(:admin_mode) { false } + + subject(:request) { post api("/projects/#{project.id}/snippets/", actor, admin_mode: admin_mode), params: params } - subject { post api("/projects/#{project.id}/snippets/", actor), params: params } + it_behaves_like 'POST request permissions for admin mode' do + let(:path) { "/projects/#{project.id}/snippets/" } + end shared_examples 'project snippet repository actions' do let(:snippet) { ProjectSnippet.find(json_response['id']) } it 'commit the files to the 
repository' do - subject + request - aggregate_failures do - expect(snippet.repository.exists?).to be_truthy + expect(snippet.repository.exists?).to be_truthy - blob = snippet.repository.blob_at(snippet.default_branch, file_path) + blob = snippet.repository.blob_at(snippet.default_branch, file_path) - expect(blob.data).to eq file_content - end + expect(blob.data).to eq file_content end end @@ -152,7 +157,7 @@ RSpec.describe API::ProjectSnippets, feature_category: :source_code_management d it 'creates a new snippet' do project.add_developer(actor) - subject + request expect(response).to have_gitlab_http_status(:created) end @@ -160,7 +165,7 @@ RSpec.describe API::ProjectSnippets, feature_category: :source_code_management d context 'that does not belong to the project' do it 'does not create a new snippet' do - subject + request expect(response).to have_gitlab_http_status(:forbidden) end @@ -180,7 +185,7 @@ RSpec.describe API::ProjectSnippets, feature_category: :source_code_management d end it 'creates a new snippet' do - subject + request expect(response).to have_gitlab_http_status(:created) snippet = ProjectSnippet.find(json_response['id']) @@ -196,9 +201,10 @@ RSpec.describe API::ProjectSnippets, feature_category: :source_code_management d context 'with an admin' do let(:actor) { admin } + let(:admin_mode) { true } it 'creates a new snippet' do - subject + request expect(response).to have_gitlab_http_status(:created) snippet = ProjectSnippet.find(json_response['id']) @@ -214,7 +220,7 @@ RSpec.describe API::ProjectSnippets, feature_category: :source_code_management d it 'returns 400 for missing parameters' do params.delete(:title) - subject + request expect(response).to have_gitlab_http_status(:bad_request) end @@ -226,7 +232,7 @@ RSpec.describe API::ProjectSnippets, feature_category: :source_code_management d it 'returns 400 if title is blank' do params[:title] = '' - subject + request expect(response).to have_gitlab_http_status(:bad_request) 
expect(json_response['error']).to eq 'title is empty' @@ -235,6 +241,7 @@ RSpec.describe API::ProjectSnippets, feature_category: :source_code_management d context 'when save fails because the repository could not be created' do let(:actor) { admin } + let(:admin_mode) { true } before do allow_next_instance_of(Snippets::CreateService) do |instance| @@ -243,7 +250,7 @@ RSpec.describe API::ProjectSnippets, feature_category: :source_code_management d end it 'returns 400' do - subject + request expect(response).to have_gitlab_http_status(:bad_request) end @@ -264,7 +271,7 @@ RSpec.describe API::ProjectSnippets, feature_category: :source_code_management d it 'creates the snippet' do params['visibility'] = 'private' - expect { subject }.to change { Snippet.count }.by(1) + expect { request }.to change { Snippet.count }.by(1) end end @@ -274,13 +281,13 @@ RSpec.describe API::ProjectSnippets, feature_category: :source_code_management d end it 'rejects the snippet' do - expect { subject }.not_to change { Snippet.count } + expect { request }.not_to change { Snippet.count } expect(response).to have_gitlab_http_status(:bad_request) expect(json_response['message']['error']).to match(/snippet has been recognized as spam/) end it 'creates a spam log' do - expect { subject } + expect { request } .to log_spam(title: 'Test Title', user_id: user.id, noteable_type: 'ProjectSnippet') end end @@ -288,7 +295,7 @@ RSpec.describe API::ProjectSnippets, feature_category: :source_code_management d context 'with snippets disabled' do it_behaves_like '403 response' do - let(:request) { post api("/projects/#{project_no_snippets.id}/snippets", user), params: params } + subject(:request) { post api("/projects/#{project_no_snippets.id}/snippets", user), params: params } end end end @@ -296,6 +303,11 @@ RSpec.describe API::ProjectSnippets, feature_category: :source_code_management d describe 'PUT /projects/:project_id/snippets/:id/' do let(:visibility_level) { Snippet::PUBLIC } let(:snippet) { 
create(:project_snippet, :repository, author: admin, visibility_level: visibility_level, project: project) } + let(:params) { { title: 'Foo' } } + + it_behaves_like 'PUT request permissions for admin mode' do + let(:path) { "/projects/#{snippet.project.id}/snippets/#{snippet.id}" } + end it_behaves_like 'snippet file updates' it_behaves_like 'snippet non-file updates' @@ -317,7 +329,7 @@ RSpec.describe API::ProjectSnippets, feature_category: :source_code_management d let(:visibility_level) { Snippet::PRIVATE } it 'creates the snippet' do - expect { update_snippet(params: { title: 'Foo' }) } + expect { update_snippet(admin_mode: true, params: params) } .to change { snippet.reload.title }.to('Foo') end end @@ -326,12 +338,12 @@ RSpec.describe API::ProjectSnippets, feature_category: :source_code_management d let(:visibility_level) { Snippet::PUBLIC } it 'rejects the snippet' do - expect { update_snippet(params: { title: 'Foo' }) } + expect { update_snippet(params: params) } .not_to change { snippet.reload.title } end it 'creates a spam log' do - expect { update_snippet(params: { title: 'Foo' }) } + expect { update_snippet(params: params) } .to log_spam(title: 'Foo', user_id: admin.id, noteable_type: 'ProjectSnippet') end end @@ -340,7 +352,7 @@ RSpec.describe API::ProjectSnippets, feature_category: :source_code_management d let(:visibility_level) { Snippet::PRIVATE } it 'rejects the snippet' do - expect { update_snippet(params: { title: 'Foo', visibility: 'public' }) } + expect { update_snippet(admin_mode: true, params: { title: 'Foo', visibility: 'public' }) } .not_to change { snippet.reload.title } expect(response).to have_gitlab_http_status(:bad_request) @@ -348,7 +360,7 @@ RSpec.describe API::ProjectSnippets, feature_category: :source_code_management d end it 'creates a spam log' do - expect { update_snippet(params: { title: 'Foo', visibility: 'public' }) } + expect { update_snippet(admin_mode: true, params: { title: 'Foo', visibility: 'public' }) } .to 
log_spam(title: 'Foo', user_id: admin.id, noteable_type: 'ProjectSnippet') end end @@ -356,47 +368,58 @@ RSpec.describe API::ProjectSnippets, feature_category: :source_code_management d context 'with snippets disabled' do it_behaves_like '403 response' do - let(:request) { put api("/projects/#{project_no_snippets.id}/snippets/#{non_existing_record_id}", admin), params: { description: 'foo' } } + subject(:request) { put api("/projects/#{project_no_snippets.id}/snippets/#{non_existing_record_id}", admin, admin_mode: true), params: { description: 'foo' } } end end - def update_snippet(snippet_id: snippet.id, params: {}) - put api("/projects/#{snippet.project.id}/snippets/#{snippet_id}", admin), params: params + def update_snippet(snippet_id: snippet.id, admin_mode: false, params: {}) + put api("/projects/#{snippet.project.id}/snippets/#{snippet_id}", admin, admin_mode: admin_mode), params: params end end describe 'DELETE /projects/:project_id/snippets/:id/' do let_it_be(:snippet, refind: true) { public_snippet } + let(:path) { "/projects/#{snippet.project.id}/snippets/#{snippet.id}/" } + + it_behaves_like 'DELETE request permissions for admin mode' it 'deletes snippet' do - delete api("/projects/#{snippet.project.id}/snippets/#{snippet.id}/", admin) + delete api(path, admin, admin_mode: true) expect(response).to have_gitlab_http_status(:no_content) end it 'returns 404 for invalid snippet id' do - delete api("/projects/#{snippet.project.id}/snippets/#{non_existing_record_id}", admin) + delete api("/projects/#{snippet.project.id}/snippets/#{non_existing_record_id}", admin, admin_mode: true) expect(response).to have_gitlab_http_status(:not_found) expect(json_response['message']).to eq('404 Snippet Not Found') end it_behaves_like '412 response' do - let(:request) { api("/projects/#{snippet.project.id}/snippets/#{snippet.id}/", admin) } + subject(:request) { api(path, admin, admin_mode: true) } end context 'with snippets disabled' do it_behaves_like '403 response' do - 
let(:request) { delete api("/projects/#{project_no_snippets.id}/snippets/#{non_existing_record_id}", admin) } + subject(:request) { delete api("/projects/#{project_no_snippets.id}/snippets/#{non_existing_record_id}", admin, admin_mode: true) } end end end describe 'GET /projects/:project_id/snippets/:id/raw' do let_it_be(:snippet) { create(:project_snippet, :repository, :public, author: admin, project: project) } + let(:path) { "/projects/#{snippet.project.id}/snippets/#{snippet.id}/raw" } + + it_behaves_like 'GET request permissions for admin mode' do + let_it_be(:snippet_with_empty_repo) { create(:project_snippet, :empty_repo, author: admin, project: project) } + + let(:snippet) { snippet_with_empty_repo } + let(:failed_status_code) { :not_found } + end it 'returns raw text' do - get api("/projects/#{snippet.project.id}/snippets/#{snippet.id}/raw", admin) + get api(path, admin) expect(response).to have_gitlab_http_status(:ok) expect(response.media_type).to eq 'text/plain' @@ -404,38 +427,41 @@ RSpec.describe API::ProjectSnippets, feature_category: :source_code_management d end it 'returns 404 for invalid snippet id' do - get api("/projects/#{snippet.project.id}/snippets/#{non_existing_record_id}/raw", admin) + get api("/projects/#{snippet.project.id}/snippets/#{non_existing_record_id}/raw", admin, admin_mode: true) expect(response).to have_gitlab_http_status(:not_found) expect(json_response['message']).to eq('404 Snippet Not Found') end - it_behaves_like 'project snippet access levels' do - let(:path) { "/projects/#{snippet.project.id}/snippets/#{snippet.id}/raw" } - end + it_behaves_like 'project snippet access levels' context 'with snippets disabled' do it_behaves_like '403 response' do - let(:request) { get api("/projects/#{project_no_snippets.id}/snippets/#{non_existing_record_id}/raw", admin) } + subject(:request) { get api("/projects/#{project_no_snippets.id}/snippets/#{non_existing_record_id}/raw", admin, admin_mode: true) } end end it_behaves_like 
'snippet blob content' do let_it_be(:snippet_with_empty_repo) { create(:project_snippet, :empty_repo, author: admin, project: project) } + let_it_be(:admin_mode) { snippet.author.admin? } - subject { get api("/projects/#{snippet.project.id}/snippets/#{snippet.id}/raw", snippet.author) } + subject { get api(path, snippet.author, admin_mode: admin_mode) } end end describe 'GET /projects/:project_id/snippets/:id/files/:ref/:file_path/raw' do let_it_be(:snippet) { create(:project_snippet, :repository, author: admin, project: project) } + let(:path) { "/projects/#{snippet.project.id}/snippets/#{snippet.id}/files/master/%2Egitattributes/raw" } + + it_behaves_like 'GET request permissions for admin mode' do + let(:failed_status_code) { :not_found } + end + it_behaves_like 'raw snippet files' do let(:api_path) { "/projects/#{snippet.project.id}/snippets/#{snippet_id}/files/#{ref}/#{file_path}/raw" } end - it_behaves_like 'project snippet access levels' do - let(:path) { "/projects/#{snippet.project.id}/snippets/#{snippet.id}/files/master/%2Egitattributes/raw" } - end + it_behaves_like 'project snippet access levels' end end diff --git a/spec/requests/api/projects_spec.rb b/spec/requests/api/projects_spec.rb index d755a4231da..bf30052069d 100644 --- a/spec/requests/api/projects_spec.rb +++ b/spec/requests/api/projects_spec.rb @@ -15,7 +15,7 @@ RSpec.shared_examples 'languages and percentages JSON response' do end context "when the languages haven't been detected yet" do - it 'returns expected language values', :sidekiq_might_not_need_inline do + it 'returns expected language values', :aggregate_failures, :sidekiq_might_not_need_inline do get api("/projects/#{project.id}/languages", user) expect(response).to have_gitlab_http_status(:ok) @@ -33,7 +33,7 @@ RSpec.shared_examples 'languages and percentages JSON response' do Projects::DetectRepositoryLanguagesService.new(project, project.first_owner).execute end - it 'returns the detection from the database' do + it 'returns the 
detection from the database', :aggregate_failures do # Allow this to happen once, so the expected languages can be determined expect(project.repository).to receive(:languages).once @@ -46,7 +46,7 @@ RSpec.shared_examples 'languages and percentages JSON response' do end end -RSpec.describe API::Projects, feature_category: :projects do +RSpec.describe API::Projects, :aggregate_failures, feature_category: :projects do include ProjectForksHelper include WorkhorseHelpers include StubRequests @@ -55,8 +55,8 @@ RSpec.describe API::Projects, feature_category: :projects do let_it_be(:user2) { create(:user) } let_it_be(:user3) { create(:user) } let_it_be(:admin) { create(:admin) } - let_it_be(:project, reload: true) { create(:project, :repository, create_branch: 'something_else', namespace: user.namespace) } - let_it_be(:project2, reload: true) { create(:project, namespace: user.namespace) } + let_it_be(:project, reload: true) { create(:project, :repository, create_branch: 'something_else', namespace: user.namespace, updated_at: 5.days.ago) } + let_it_be(:project2, reload: true) { create(:project, namespace: user.namespace, updated_at: 4.days.ago) } let_it_be(:project_member) { create(:project_member, :developer, user: user3, project: project) } let_it_be(:user4) { create(:user, username: 'user.withdot') } let_it_be(:project3, reload: true) do @@ -149,9 +149,15 @@ RSpec.describe API::Projects, feature_category: :projects do end describe 'GET /projects' do + let(:path) { '/projects' } + + let_it_be(:public_project) { create(:project, :public, name: 'public_project') } + shared_examples_for 'projects response' do + let_it_be(:admin_mode) { false } + it 'returns an array of projects' do - get api('/projects', current_user), params: filter + get api(path, current_user, admin_mode: admin_mode), params: filter expect(response).to have_gitlab_http_status(:ok) expect(response).to include_pagination_headers @@ -160,7 +166,7 @@ RSpec.describe API::Projects, feature_category: :projects 
do end it 'returns the proper security headers' do - get api('/projects', current_user), params: filter + get api(path, current_user, admin_mode: admin_mode), params: filter expect(response).to include_security_headers end @@ -171,19 +177,17 @@ RSpec.describe API::Projects, feature_category: :projects do it 'avoids N + 1 queries', :use_sql_query_cache do control = ActiveRecord::QueryRecorder.new(skip_cached: false) do - get api('/projects', current_user) + get api(path, current_user) end additional_project expect do - get api('/projects', current_user) + get api(path, current_user) end.not_to exceed_all_query_limit(control).with_threshold(threshold) end end - let_it_be(:public_project) { create(:project, :public, name: 'public_project') } - context 'when unauthenticated' do it_behaves_like 'projects response' do let(:filter) { { search: project.name } } @@ -208,10 +212,10 @@ RSpec.describe API::Projects, feature_category: :projects do end shared_examples 'includes container_registry_access_level' do - it do + specify do project.project_feature.update!(container_registry_access_level: ProjectFeature::DISABLED) - get api('/projects', user) + get api(path, user) project_response = json_response.find { |p| p['id'] == project.id } expect(response).to have_gitlab_http_status(:ok) @@ -231,8 +235,8 @@ RSpec.describe API::Projects, feature_category: :projects do include_examples 'includes container_registry_access_level' end - it 'includes various project feature fields', :aggregate_failures do - get api('/projects', user) + it 'includes various project feature fields' do + get api(path, user) project_response = json_response.find { |p| p['id'] == project.id } expect(response).to have_gitlab_http_status(:ok) @@ -254,10 +258,10 @@ RSpec.describe API::Projects, feature_category: :projects do end end - it 'includes correct value of container_registry_enabled', :aggregate_failures do + it 'includes correct value of container_registry_enabled' do 
project.project_feature.update!(container_registry_access_level: ProjectFeature::DISABLED) - get api('/projects', user) + get api(path, user) project_response = json_response.find { |p| p['id'] == project.id } expect(response).to have_gitlab_http_status(:ok) @@ -266,7 +270,7 @@ RSpec.describe API::Projects, feature_category: :projects do end it 'includes project topics' do - get api('/projects', user) + get api(path, user) expect(response).to have_gitlab_http_status(:ok) expect(response).to include_pagination_headers @@ -276,7 +280,7 @@ RSpec.describe API::Projects, feature_category: :projects do end it 'includes open_issues_count' do - get api('/projects', user) + get api(path, user) expect(response).to have_gitlab_http_status(:ok) expect(response).to include_pagination_headers @@ -287,7 +291,7 @@ RSpec.describe API::Projects, feature_category: :projects do it 'does not include projects marked for deletion' do project.update!(pending_delete: true) - get api('/projects', user) + get api(path, user) expect(response).to have_gitlab_http_status(:ok) expect(json_response).to be_an Array @@ -297,7 +301,7 @@ RSpec.describe API::Projects, feature_category: :projects do it 'does not include open_issues_count if issues are disabled' do project.project_feature.update_attribute(:issues_access_level, ProjectFeature::DISABLED) - get api('/projects', user) + get api(path, user) expect(response).to have_gitlab_http_status(:ok) expect(response).to include_pagination_headers @@ -311,7 +315,7 @@ RSpec.describe API::Projects, feature_category: :projects do end it 'returns no projects' do - get api('/projects', user), params: { topic: 'foo' } + get api(path, user), params: { topic: 'foo' } expect(response).to have_gitlab_http_status(:ok) expect(response).to include_pagination_headers @@ -319,7 +323,7 @@ RSpec.describe API::Projects, feature_category: :projects do end it 'returns matching project for a single topic' do - get api('/projects', user), params: { topic: 'ruby' } + get 
api(path, user), params: { topic: 'ruby' } expect(response).to have_gitlab_http_status(:ok) expect(response).to include_pagination_headers @@ -327,7 +331,7 @@ RSpec.describe API::Projects, feature_category: :projects do end it 'returns matching project for multiple topics' do - get api('/projects', user), params: { topic: 'ruby, javascript' } + get api(path, user), params: { topic: 'ruby, javascript' } expect(response).to have_gitlab_http_status(:ok) expect(response).to include_pagination_headers @@ -335,7 +339,7 @@ RSpec.describe API::Projects, feature_category: :projects do end it 'returns no projects if project match only some topic' do - get api('/projects', user), params: { topic: 'ruby, foo' } + get api(path, user), params: { topic: 'ruby, foo' } expect(response).to have_gitlab_http_status(:ok) expect(response).to include_pagination_headers @@ -343,7 +347,7 @@ RSpec.describe API::Projects, feature_category: :projects do end it 'ignores topic if it is empty' do - get api('/projects', user), params: { topic: '' } + get api(path, user), params: { topic: '' } expect(response).to have_gitlab_http_status(:ok) expect(response).to include_pagination_headers @@ -404,7 +408,7 @@ RSpec.describe API::Projects, feature_category: :projects do end it "does not include statistics by default" do - get api('/projects', user) + get api(path, user) expect(response).to have_gitlab_http_status(:ok) expect(response).to include_pagination_headers @@ -413,7 +417,7 @@ RSpec.describe API::Projects, feature_category: :projects do end it "includes statistics if requested" do - get api('/projects', user), params: { statistics: true } + get api(path, user), params: { statistics: true } expect(response).to have_gitlab_http_status(:ok) expect(response).to include_pagination_headers @@ -425,7 +429,7 @@ RSpec.describe API::Projects, feature_category: :projects do end it "does not include license by default" do - get api('/projects', user) + get api(path, user) expect(response).to 
have_gitlab_http_status(:ok) expect(response).to include_pagination_headers @@ -434,7 +438,7 @@ RSpec.describe API::Projects, feature_category: :projects do end it "does not include license if requested" do - get api('/projects', user), params: { license: true } + get api(path, user), params: { license: true } expect(response).to have_gitlab_http_status(:ok) expect(response).to include_pagination_headers @@ -446,7 +450,7 @@ RSpec.describe API::Projects, feature_category: :projects do let!(:jira_integration) { create(:jira_integration, project: project) } it 'includes open_issues_count' do - get api('/projects', user) + get api(path, user) expect(response).to have_gitlab_http_status(:ok) expect(response).to include_pagination_headers @@ -458,7 +462,7 @@ RSpec.describe API::Projects, feature_category: :projects do it 'does not include open_issues_count if issues are disabled' do project.project_feature.update_attribute(:issues_access_level, ProjectFeature::DISABLED) - get api('/projects', user) + get api(path, user) expect(response).to have_gitlab_http_status(:ok) expect(response).to include_pagination_headers @@ -501,7 +505,7 @@ RSpec.describe API::Projects, feature_category: :projects do end it 'returns every project' do - get api('/projects', user) + get api(path, user) expect(response).to have_gitlab_http_status(:ok) expect(response).to include_pagination_headers @@ -510,6 +514,35 @@ RSpec.describe API::Projects, feature_category: :projects do end end + context 'filter by updated_at' do + let(:filter) { { updated_before: 2.days.ago.iso8601, updated_after: 6.days.ago, order_by: :updated_at } } + + it_behaves_like 'projects response' do + let(:current_user) { user } + let(:projects) { [project2, project] } + end + + it 'returns projects sorted by updated_at' do + get api(path, user), params: filter + + expect(response).to have_gitlab_http_status(:ok) + expect(json_response.map { |p| p['id'] }).to match([project2, project].map(&:id)) + end + + context 'when 
filtering by updated_at and sorting by a different column' do + let(:filter) { { updated_before: 2.days.ago.iso8601, updated_after: 6.days.ago, order_by: 'id' } } + + it 'returns an error' do + get api(path, user), params: filter + + expect(response).to have_gitlab_http_status(:bad_request) + expect(json_response['message']).to eq( + '400 Bad request - `updated_at` filter and `updated_at` sorting must be paired' + ) + end + end + end + context 'and using search' do it_behaves_like 'projects response' do let(:filter) { { search: project.name } } @@ -583,7 +616,7 @@ RSpec.describe API::Projects, feature_category: :projects do context 'and using the visibility filter' do it 'filters based on private visibility param' do - get api('/projects', user), params: { visibility: 'private' } + get api(path, user), params: { visibility: 'private' } expect(response).to have_gitlab_http_status(:ok) expect(response).to include_pagination_headers @@ -594,7 +627,7 @@ RSpec.describe API::Projects, feature_category: :projects do it 'filters based on internal visibility param' do project2.update_attribute(:visibility_level, Gitlab::VisibilityLevel::INTERNAL) - get api('/projects', user), params: { visibility: 'internal' } + get api(path, user), params: { visibility: 'internal' } expect(response).to have_gitlab_http_status(:ok) expect(response).to include_pagination_headers @@ -603,7 +636,7 @@ RSpec.describe API::Projects, feature_category: :projects do end it 'filters based on public visibility param' do - get api('/projects', user), params: { visibility: 'public' } + get api(path, user), params: { visibility: 'public' } expect(response).to have_gitlab_http_status(:ok) expect(response).to include_pagination_headers @@ -616,7 +649,7 @@ RSpec.describe API::Projects, feature_category: :projects do include_context 'with language detection' it 'filters case-insensitively by programming language' do - get api('/projects', user), params: { with_programming_language: 'javascript' } + get 
api(path, user), params: { with_programming_language: 'javascript' } expect(response).to have_gitlab_http_status(:ok) expect(response).to include_pagination_headers @@ -627,7 +660,7 @@ RSpec.describe API::Projects, feature_category: :projects do context 'and using sorting' do it 'returns the correct order when sorted by id' do - get api('/projects', user), params: { order_by: 'id', sort: 'desc' } + get api(path, user), params: { order_by: 'id', sort: 'desc' } expect(response).to have_gitlab_http_status(:ok) expect(response).to include_pagination_headers @@ -638,7 +671,7 @@ RSpec.describe API::Projects, feature_category: :projects do context 'and with owned=true' do it 'returns an array of projects the user owns' do - get api('/projects', user4), params: { owned: true } + get api(path, user4), params: { owned: true } expect(response).to have_gitlab_http_status(:ok) expect(response).to include_pagination_headers @@ -659,7 +692,7 @@ RSpec.describe API::Projects, feature_category: :projects do end it 'does not list as owned project for admin' do - get api('/projects', admin), params: { owned: true } + get api(path, admin, admin_mode: true), params: { owned: true } expect(response).to have_gitlab_http_status(:ok) expect(json_response).to be_empty @@ -675,7 +708,7 @@ RSpec.describe API::Projects, feature_category: :projects do end it 'returns the starred projects viewable by the user' do - get api('/projects', user3), params: { starred: true } + get api(path, user3), params: { starred: true } expect(response).to have_gitlab_http_status(:ok) expect(response).to include_pagination_headers @@ -697,7 +730,7 @@ RSpec.describe API::Projects, feature_category: :projects do context 'including owned filter' do it 'returns only projects that satisfy all query parameters' do - get api('/projects', user), params: { visibility: 'public', owned: true, starred: true, search: 'gitlab' } + get api(path, user), params: { visibility: 'public', owned: true, starred: true, search: 'gitlab' } 
expect(response).to have_gitlab_http_status(:ok) expect(response).to include_pagination_headers @@ -716,7 +749,7 @@ RSpec.describe API::Projects, feature_category: :projects do end it 'returns only projects that satisfy all query parameters' do - get api('/projects', user), params: { visibility: 'public', membership: true, starred: true, search: 'gitlab' } + get api(path, user), params: { visibility: 'public', membership: true, starred: true, search: 'gitlab' } expect(response).to have_gitlab_http_status(:ok) expect(response).to include_pagination_headers @@ -735,7 +768,7 @@ RSpec.describe API::Projects, feature_category: :projects do end it 'returns an array of projects the user has at least developer access' do - get api('/projects', user2), params: { min_access_level: 30 } + get api(path, user2), params: { min_access_level: 30 } expect(response).to have_gitlab_http_status(:ok) expect(response).to include_pagination_headers @@ -797,6 +830,7 @@ RSpec.describe API::Projects, feature_category: :projects do it_behaves_like 'projects response' do let(:filter) { {} } let(:current_user) { admin } + let(:admin_mode) { true } let(:projects) { Project.all } end end @@ -810,7 +844,7 @@ RSpec.describe API::Projects, feature_category: :projects do let(:current_user) { user } let(:params) { {} } - subject { get api('/projects', current_user), params: params } + subject(:request) { get api(path, current_user), params: params } before do group_with_projects.add_owner(current_user) if current_user @@ -818,7 +852,7 @@ RSpec.describe API::Projects, feature_category: :projects do it 'orders by id desc instead' do projects_ordered_by_id_desc = /SELECT "projects".+ORDER BY "projects"."id" DESC/i - expect { subject }.to make_queries_matching projects_ordered_by_id_desc + expect { request }.to make_queries_matching projects_ordered_by_id_desc expect(response).to have_gitlab_http_status(:ok) expect(response).to include_pagination_headers @@ -842,7 +876,7 @@ RSpec.describe API::Projects, 
feature_category: :projects do context "when sorting by #{order_by} ascendingly" do it 'returns a properly sorted list of projects' do - get api('/projects', current_user), params: { order_by: order_by, sort: :asc } + get api(path, current_user, admin_mode: true), params: { order_by: order_by, sort: :asc } expect(response).to have_gitlab_http_status(:ok) expect(response).to include_pagination_headers @@ -853,7 +887,7 @@ RSpec.describe API::Projects, feature_category: :projects do context "when sorting by #{order_by} descendingly" do it 'returns a properly sorted list of projects' do - get api('/projects', current_user), params: { order_by: order_by, sort: :desc } + get api(path, current_user, admin_mode: true), params: { order_by: order_by, sort: :desc } expect(response).to have_gitlab_http_status(:ok) expect(response).to include_pagination_headers @@ -867,7 +901,7 @@ RSpec.describe API::Projects, feature_category: :projects do let(:current_user) { user } it 'returns projects ordered normally' do - get api('/projects', current_user), params: { order_by: order_by } + get api(path, current_user), params: { order_by: order_by } expect(response).to have_gitlab_http_status(:ok) expect(response).to include_pagination_headers @@ -879,7 +913,7 @@ RSpec.describe API::Projects, feature_category: :projects do end end - context 'by similarity', :aggregate_failures do + context 'by similarity' do let_it_be(:group_with_projects) { create(:group) } let_it_be(:project_1) { create(:project, name: 'Project', path: 'project', group: group_with_projects) } let_it_be(:project_2) { create(:project, name: 'Test Project', path: 'test-project', group: group_with_projects) } @@ -889,14 +923,14 @@ RSpec.describe API::Projects, feature_category: :projects do let(:current_user) { user } let(:params) { { order_by: 'similarity', search: 'test' } } - subject { get api('/projects', current_user), params: params } + subject(:request) { get api(path, current_user), params: params } before do 
group_with_projects.add_owner(current_user) if current_user end it 'returns non-public items based ordered by similarity' do - subject + request expect(response).to have_gitlab_http_status(:ok) expect(response).to include_pagination_headers @@ -910,7 +944,7 @@ RSpec.describe API::Projects, feature_category: :projects do let(:params) { { order_by: 'similarity' } } it 'returns items ordered by created_at descending' do - subject + request expect(response).to have_gitlab_http_status(:ok) expect(response).to include_pagination_headers @@ -925,7 +959,7 @@ RSpec.describe API::Projects, feature_category: :projects do let(:current_user) { nil } it 'returns items ordered by created_at descending' do - subject + request expect(response).to have_gitlab_http_status(:ok) expect(response).to include_pagination_headers @@ -952,6 +986,7 @@ RSpec.describe API::Projects, feature_category: :projects do it_behaves_like 'projects response' do let(:filter) { { repository_storage: 'nfs-11' } } let(:current_user) { admin } + let(:admin_mode) { true } let(:projects) { [project, project3] } end end @@ -974,7 +1009,7 @@ RSpec.describe API::Projects, feature_category: :projects do let(:params) { { pagination: 'keyset', order_by: :id, sort: :asc, per_page: 1 } } it 'includes a pagination header with link to the next page' do - get api('/projects', current_user), params: params + get api(path, current_user), params: params expect(response.header).to include('Link') expect(response.header['Link']).to include('pagination=keyset') @@ -982,7 +1017,7 @@ RSpec.describe API::Projects, feature_category: :projects do end it 'contains only the first project with per_page = 1' do - get api('/projects', current_user), params: params + get api(path, current_user), params: params expect(response).to have_gitlab_http_status(:ok) expect(json_response).to be_an Array @@ -990,7 +1025,7 @@ RSpec.describe API::Projects, feature_category: :projects do end it 'still includes a link if the end has reached and there 
is no more data after this page' do - get api('/projects', current_user), params: params.merge(id_after: project2.id) + get api(path, current_user), params: params.merge(id_after: project2.id) expect(response.header).to include('Link') expect(response.header['Link']).to include('pagination=keyset') @@ -998,20 +1033,20 @@ RSpec.describe API::Projects, feature_category: :projects do end it 'does not include a next link when the page does not have any records' do - get api('/projects', current_user), params: params.merge(id_after: Project.maximum(:id)) + get api(path, current_user), params: params.merge(id_after: Project.maximum(:id)) expect(response.header).not_to include('Link') end it 'returns an empty array when the page does not have any records' do - get api('/projects', current_user), params: params.merge(id_after: Project.maximum(:id)) + get api(path, current_user), params: params.merge(id_after: Project.maximum(:id)) expect(response).to have_gitlab_http_status(:ok) expect(json_response).to eq([]) end it 'responds with 501 if order_by is different from id' do - get api('/projects', current_user), params: params.merge(order_by: :created_at) + get api(path, current_user), params: params.merge(order_by: :created_at) expect(response).to have_gitlab_http_status(:method_not_allowed) end @@ -1021,7 +1056,7 @@ RSpec.describe API::Projects, feature_category: :projects do let(:params) { { pagination: 'keyset', order_by: :id, sort: :desc, per_page: 1 } } it 'includes a pagination header with link to the next page' do - get api('/projects', current_user), params: params + get api(path, current_user), params: params expect(response.header).to include('Link') expect(response.header['Link']).to include('pagination=keyset') @@ -1029,7 +1064,7 @@ RSpec.describe API::Projects, feature_category: :projects do end it 'contains only the last project with per_page = 1' do - get api('/projects', current_user), params: params + get api(path, current_user), params: params 
expect(response).to have_gitlab_http_status(:ok) expect(json_response).to be_an Array @@ -1041,7 +1076,7 @@ RSpec.describe API::Projects, feature_category: :projects do let(:params) { { pagination: 'keyset', order_by: :id, sort: :desc, per_page: 2 } } it 'returns all projects' do - url = '/projects' + url = path requests = 0 ids = [] @@ -1067,8 +1102,11 @@ RSpec.describe API::Projects, feature_category: :projects do let_it_be(:admin) { create(:admin) } + subject(:request) { get api(path, admin) } + it 'avoids N+1 queries', :use_sql_query_cache do - get api('/projects', admin) + request + expect(response).to have_gitlab_http_status(:ok) base_project = create(:project, :public, namespace: admin.namespace) @@ -1076,37 +1114,40 @@ RSpec.describe API::Projects, feature_category: :projects do fork_project2 = fork_project(fork_project1, admin, namespace: create(:user).namespace) control = ActiveRecord::QueryRecorder.new(skip_cached: false) do - get api('/projects', admin) + request end fork_project(fork_project2, admin, namespace: create(:user).namespace) expect do - get api('/projects', admin) - end.not_to exceed_query_limit(control.count) + request + end.not_to exceed_all_query_limit(control.count) end end context 'when service desk is enabled', :use_clean_rails_memory_store_caching do let_it_be(:admin) { create(:admin) } + subject(:request) { get api(path, admin) } + it 'avoids N+1 queries' do - allow(Gitlab::ServiceDeskEmail).to receive(:enabled?).and_return(true) - allow(Gitlab::IncomingEmail).to receive(:enabled?).and_return(true) + allow(Gitlab::Email::ServiceDeskEmail).to receive(:enabled?).and_return(true) + allow(Gitlab::Email::IncomingEmail).to receive(:enabled?).and_return(true) - get api('/projects', admin) + request + expect(response).to have_gitlab_http_status(:ok) create(:project, :public, :service_desk_enabled, namespace: admin.namespace) control = ActiveRecord::QueryRecorder.new do - get api('/projects', admin) + request end create_list(:project, 2, 
:public, :service_desk_enabled, namespace: admin.namespace) expect do - get api('/projects', admin) - end.not_to exceed_query_limit(control) + request + end.not_to exceed_all_query_limit(control) end end @@ -1130,7 +1171,7 @@ RSpec.describe API::Projects, feature_category: :projects do context 'when the user is signed in' do it_behaves_like 'does not log request and does not block the request' do def request - get api('/projects', current_user) + get api(path, current_user) end end end @@ -1140,7 +1181,7 @@ RSpec.describe API::Projects, feature_category: :projects do it_behaves_like 'rate limited endpoint', rate_limit_key: :projects_api_rate_limit_unauthenticated do def request - get api('/projects', current_user) + get api(path, current_user) end end end @@ -1155,7 +1196,7 @@ RSpec.describe API::Projects, feature_category: :projects do it_behaves_like 'does not log request and does not block the request' do def request - get api('/projects', current_user) + get api(path, current_user) end end end @@ -1163,7 +1204,7 @@ RSpec.describe API::Projects, feature_category: :projects do context 'when the user is signed in' do it_behaves_like 'does not log request and does not block the request' do def request - get api('/projects', current_user) + get api(path, current_user) end end end @@ -1172,17 +1213,19 @@ RSpec.describe API::Projects, feature_category: :projects do end describe 'POST /projects' do + let(:path) { '/projects' } + context 'maximum number of projects reached' do it 'does not create new project and respond with 403' do allow_any_instance_of(User).to receive(:projects_limit_left).and_return(0) - expect { post api('/projects', user2), params: { name: 'foo' } } + expect { post api(path, user2), params: { name: 'foo' } } .to change { Project.count }.by(0) expect(response).to have_gitlab_http_status(:forbidden) end end it 'creates new project without path but with name and returns 201' do - expect { post api('/projects', user), params: { name: 'Foo Project' } } 
+ expect { post api(path, user), params: { name: 'Foo Project' } } .to change { Project.count }.by(1) expect(response).to have_gitlab_http_status(:created) @@ -1193,7 +1236,7 @@ RSpec.describe API::Projects, feature_category: :projects do end it 'creates new project without name but with path and returns 201' do - expect { post api('/projects', user), params: { path: 'foo_project' } } + expect { post api(path, user), params: { path: 'foo_project' } } .to change { Project.count }.by(1) expect(response).to have_gitlab_http_status(:created) @@ -1204,7 +1247,7 @@ RSpec.describe API::Projects, feature_category: :projects do end it 'creates new project with name and path and returns 201' do - expect { post api('/projects', user), params: { path: 'path-project-Foo', name: 'Foo Project' } } + expect { post api(path, user), params: { path: 'path-project-Foo', name: 'Foo Project' } } .to change { Project.count }.by(1) expect(response).to have_gitlab_http_status(:created) @@ -1215,21 +1258,21 @@ RSpec.describe API::Projects, feature_category: :projects do end it_behaves_like 'create project with default branch parameter' do - let(:request) { post api('/projects', user), params: params } + subject(:request) { post api(path, user), params: params } end it 'creates last project before reaching project limit' do allow_any_instance_of(User).to receive(:projects_limit_left).and_return(1) - post api('/projects', user2), params: { name: 'foo' } + post api(path, user2), params: { name: 'foo' } expect(response).to have_gitlab_http_status(:created) end it 'does not create new project without name or path and returns 400' do - expect { post api('/projects', user) }.not_to change { Project.count } + expect { post api(path, user) }.not_to change { Project.count } expect(response).to have_gitlab_http_status(:bad_request) end - it 'assigns attributes to project', :aggregate_failures do + it 'assigns attributes to project' do project = attributes_for(:project, { path: 'camelCasePath', 
issues_enabled: false, @@ -1265,7 +1308,7 @@ RSpec.describe API::Projects, feature_category: :projects do attrs[:issues_access_level] = 'disabled' end - post api('/projects', user), params: project + post api(path, user), params: project expect(response).to have_gitlab_http_status(:created) @@ -1300,10 +1343,10 @@ RSpec.describe API::Projects, feature_category: :projects do expect(project.project_feature.snippets_access_level).to eq(ProjectFeature::DISABLED) end - it 'assigns container_registry_enabled to project', :aggregate_failures do + it 'assigns container_registry_enabled to project' do project = attributes_for(:project, { container_registry_enabled: true }) - post api('/projects', user), params: project + post api(path, user), params: project expect(response).to have_gitlab_http_status(:created) expect(json_response['container_registry_enabled']).to eq(true) @@ -1314,7 +1357,7 @@ RSpec.describe API::Projects, feature_category: :projects do it 'assigns container_registry_enabled to project' do project = attributes_for(:project, { container_registry_enabled: true }) - post api('/projects', user), params: project + post api(path, user), params: project expect(response).to have_gitlab_http_status(:created) expect(json_response['container_registry_enabled']).to eq(true) @@ -1322,7 +1365,7 @@ RSpec.describe API::Projects, feature_category: :projects do end it 'creates a project using a template' do - expect { post api('/projects', user), params: { template_name: 'rails', name: 'rails-test' } } + expect { post api(path, user), params: { template_name: 'rails', name: 'rails-test' } } .to change { Project.count }.by(1) expect(response).to have_gitlab_http_status(:created) @@ -1333,7 +1376,7 @@ RSpec.describe API::Projects, feature_category: :projects do end it 'returns 400 for an invalid template' do - expect { post api('/projects', user), params: { template_name: 'unknown', name: 'rails-test' } } + expect { post api(path, user), params: { template_name: 'unknown', 
name: 'rails-test' } } .not_to change { Project.count } expect(response).to have_gitlab_http_status(:bad_request) @@ -1342,7 +1385,7 @@ RSpec.describe API::Projects, feature_category: :projects do it 'disallows creating a project with an import_url and template' do project_params = { import_url: 'http://example.com', template_name: 'rails', name: 'rails-test' } - expect { post api('/projects', user), params: project_params } + expect { post api(path, user), params: project_params } .not_to change { Project.count } expect(response).to have_gitlab_http_status(:bad_request) @@ -1359,34 +1402,34 @@ RSpec.describe API::Projects, feature_category: :projects do headers: { 'Content-Type': 'application/x-git-upload-pack-advertisement' } }) project_params = { import_url: url, path: 'path-project-Foo', name: 'Foo Project' } - expect { post api('/projects', user), params: project_params } + expect { post api(path, user), params: project_params } .not_to change { Project.count } expect(response).to have_gitlab_http_status(:forbidden) end - it 'allows creating a project without an import_url when git import source is disabled', :aggregate_failures do + it 'allows creating a project without an import_url when git import source is disabled' do stub_application_setting(import_sources: nil) project_params = { path: 'path-project-Foo' } - expect { post api('/projects', user), params: project_params }.to change { Project.count }.by(1) + expect { post api(path, user), params: project_params }.to change { Project.count }.by(1) expect(response).to have_gitlab_http_status(:created) end - it 'disallows creating a project with an import_url that is not reachable', :aggregate_failures do + it 'disallows creating a project with an import_url that is not reachable' do url = 'http://example.com' endpoint_url = "#{url}/info/refs?service=git-upload-pack" stub_full_request(endpoint_url, method: :get).to_return({ status: 301, body: '', headers: nil }) project_params = { import_url: url, path: 
'path-project-Foo', name: 'Foo Project' } - expect { post api('/projects', user), params: project_params }.not_to change { Project.count } + expect { post api(path, user), params: project_params }.not_to change { Project.count } expect(response).to have_gitlab_http_status(:unprocessable_entity) expect(json_response['message']).to eq("#{url} is not a valid HTTP Git repository") end - it 'creates a project with an import_url that is valid', :aggregate_failures do + it 'creates a project with an import_url that is valid' do url = 'http://example.com' endpoint_url = "#{url}/info/refs?service=git-upload-pack" git_response = { @@ -1397,7 +1440,7 @@ RSpec.describe API::Projects, feature_category: :projects do stub_full_request(endpoint_url, method: :get).to_return(git_response) project_params = { import_url: url, path: 'path-project-Foo', name: 'Foo Project' } - expect { post api('/projects', user), params: project_params }.to change { Project.count }.by(1) + expect { post api(path, user), params: project_params }.to change { Project.count }.by(1) expect(response).to have_gitlab_http_status(:created) end @@ -1405,7 +1448,7 @@ RSpec.describe API::Projects, feature_category: :projects do it 'sets a project as public' do project = attributes_for(:project, visibility: 'public') - post api('/projects', user), params: project + post api(path, user), params: project expect(json_response['visibility']).to eq('public') end @@ -1413,7 +1456,7 @@ RSpec.describe API::Projects, feature_category: :projects do it 'sets a project as internal' do project = attributes_for(:project, visibility: 'internal') - post api('/projects', user), params: project + post api(path, user), params: project expect(json_response['visibility']).to eq('internal') end @@ -1421,7 +1464,7 @@ RSpec.describe API::Projects, feature_category: :projects do it 'sets a project as private' do project = attributes_for(:project, visibility: 'private') - post api('/projects', user), params: project + post api(path, user), 
params: project expect(json_response['visibility']).to eq('private') end @@ -1429,7 +1472,7 @@ RSpec.describe API::Projects, feature_category: :projects do it 'creates a new project initialized with a README.md' do project = attributes_for(:project, initialize_with_readme: 1, name: 'somewhere') - post api('/projects', user), params: project + post api(path, user), params: project expect(json_response['readme_url']).to eql("#{Gitlab.config.gitlab.url}/#{json_response['namespace']['full_path']}/somewhere/-/blob/master/README.md") end @@ -1437,7 +1480,7 @@ RSpec.describe API::Projects, feature_category: :projects do it 'sets tag list to a project (deprecated)' do project = attributes_for(:project, tag_list: %w[tagFirst tagSecond]) - post api('/projects', user), params: project + post api(path, user), params: project expect(json_response['topics']).to eq(%w[tagFirst tagSecond]) end @@ -1445,7 +1488,7 @@ RSpec.describe API::Projects, feature_category: :projects do it 'sets topics to a project' do project = attributes_for(:project, topics: %w[topic1 topics2]) - post api('/projects', user), params: project + post api(path, user), params: project expect(json_response['topics']).to eq(%w[topic1 topics2]) end @@ -1454,7 +1497,7 @@ RSpec.describe API::Projects, feature_category: :projects do project = attributes_for(:project, avatar: fixture_file_upload('spec/fixtures/banana_sample.gif', 'image/gif')) workhorse_form_with_file( - api('/projects', user), + api(path, user), method: :post, file_key: :avatar, params: project @@ -1467,7 +1510,7 @@ RSpec.describe API::Projects, feature_category: :projects do it 'sets a project as not allowing outdated diff discussions to automatically resolve' do project = attributes_for(:project, resolve_outdated_diff_discussions: false) - post api('/projects', user), params: project + post api(path, user), params: project expect(json_response['resolve_outdated_diff_discussions']).to be_falsey end @@ -1475,7 +1518,7 @@ RSpec.describe API::Projects, 
feature_category: :projects do it 'sets a project as allowing outdated diff discussions to automatically resolve' do project = attributes_for(:project, resolve_outdated_diff_discussions: true) - post api('/projects', user), params: project + post api(path, user), params: project expect(json_response['resolve_outdated_diff_discussions']).to be_truthy end @@ -1483,7 +1526,7 @@ RSpec.describe API::Projects, feature_category: :projects do it 'sets a project as not removing source branches' do project = attributes_for(:project, remove_source_branch_after_merge: false) - post api('/projects', user), params: project + post api(path, user), params: project expect(json_response['remove_source_branch_after_merge']).to be_falsey end @@ -1491,7 +1534,7 @@ RSpec.describe API::Projects, feature_category: :projects do it 'sets a project as removing source branches' do project = attributes_for(:project, remove_source_branch_after_merge: true) - post api('/projects', user), params: project + post api(path, user), params: project expect(json_response['remove_source_branch_after_merge']).to be_truthy end @@ -1499,7 +1542,7 @@ RSpec.describe API::Projects, feature_category: :projects do it 'sets a project as allowing merge even if build fails' do project = attributes_for(:project, only_allow_merge_if_pipeline_succeeds: false) - post api('/projects', user), params: project + post api(path, user), params: project expect(json_response['only_allow_merge_if_pipeline_succeeds']).to be_falsey end @@ -1507,7 +1550,7 @@ RSpec.describe API::Projects, feature_category: :projects do it 'sets a project as allowing merge only if merge_when_pipeline_succeeds' do project = attributes_for(:project, only_allow_merge_if_pipeline_succeeds: true) - post api('/projects', user), params: project + post api(path, user), params: project expect(json_response['only_allow_merge_if_pipeline_succeeds']).to be_truthy end @@ -1515,7 +1558,7 @@ RSpec.describe API::Projects, feature_category: :projects do it 'sets a 
project as not allowing merge when pipeline is skipped' do project_params = attributes_for(:project, allow_merge_on_skipped_pipeline: false) - post api('/projects', user), params: project_params + post api(path, user), params: project_params expect(json_response['allow_merge_on_skipped_pipeline']).to be_falsey end @@ -1523,7 +1566,7 @@ RSpec.describe API::Projects, feature_category: :projects do it 'sets a project as allowing merge when pipeline is skipped' do project_params = attributes_for(:project, allow_merge_on_skipped_pipeline: true) - post api('/projects', user), params: project_params + post api(path, user), params: project_params expect(json_response['allow_merge_on_skipped_pipeline']).to be_truthy end @@ -1531,7 +1574,7 @@ RSpec.describe API::Projects, feature_category: :projects do it 'sets a project as allowing merge even if discussions are unresolved' do project = attributes_for(:project, only_allow_merge_if_all_discussions_are_resolved: false) - post api('/projects', user), params: project + post api(path, user), params: project expect(json_response['only_allow_merge_if_all_discussions_are_resolved']).to be_falsey end @@ -1539,7 +1582,7 @@ RSpec.describe API::Projects, feature_category: :projects do it 'sets a project as allowing merge if only_allow_merge_if_all_discussions_are_resolved is nil' do project = attributes_for(:project, only_allow_merge_if_all_discussions_are_resolved: nil) - post api('/projects', user), params: project + post api(path, user), params: project expect(json_response['only_allow_merge_if_all_discussions_are_resolved']).to be_falsey end @@ -1547,7 +1590,7 @@ RSpec.describe API::Projects, feature_category: :projects do it 'sets a project as allowing merge only if all discussions are resolved' do project = attributes_for(:project, only_allow_merge_if_all_discussions_are_resolved: true) - post api('/projects', user), params: project + post api(path, user), params: project 
expect(json_response['only_allow_merge_if_all_discussions_are_resolved']).to be_truthy end @@ -1555,7 +1598,7 @@ RSpec.describe API::Projects, feature_category: :projects do it 'sets a project as enabling auto close referenced issues' do project = attributes_for(:project, autoclose_referenced_issues: true) - post api('/projects', user), params: project + post api(path, user), params: project expect(json_response['autoclose_referenced_issues']).to be_truthy end @@ -1563,7 +1606,7 @@ RSpec.describe API::Projects, feature_category: :projects do it 'sets a project as disabling auto close referenced issues' do project = attributes_for(:project, autoclose_referenced_issues: false) - post api('/projects', user), params: project + post api(path, user), params: project expect(json_response['autoclose_referenced_issues']).to be_falsey end @@ -1571,7 +1614,7 @@ RSpec.describe API::Projects, feature_category: :projects do it 'sets the merge method of a project to rebase merge' do project = attributes_for(:project, merge_method: 'rebase_merge') - post api('/projects', user), params: project + post api(path, user), params: project expect(json_response['merge_method']).to eq('rebase_merge') end @@ -1579,7 +1622,7 @@ RSpec.describe API::Projects, feature_category: :projects do it 'rejects invalid values for merge_method' do project = attributes_for(:project, merge_method: 'totally_not_valid_method') - post api('/projects', user), params: project + post api(path, user), params: project expect(response).to have_gitlab_http_status(:bad_request) end @@ -1587,7 +1630,7 @@ RSpec.describe API::Projects, feature_category: :projects do it 'ignores import_url when it is nil' do project = attributes_for(:project, import_url: nil) - post api('/projects', user), params: project + post api(path, user), params: project expect(response).to have_gitlab_http_status(:created) end @@ -1600,7 +1643,7 @@ RSpec.describe API::Projects, feature_category: :projects do end it 'does not allow a non-admin to 
use a restricted visibility level' do - post api('/projects', user), params: project_param + post api(path, user), params: project_param expect(response).to have_gitlab_http_status(:bad_request) expect(json_response['message']['visibility_level'].first).to( @@ -1609,7 +1652,7 @@ RSpec.describe API::Projects, feature_category: :projects do end it 'allows an admin to override restricted visibility settings' do - post api('/projects', admin), params: project_param + post api(path, admin), params: project_param expect(json_response['visibility']).to eq('public') end @@ -1635,7 +1678,7 @@ RSpec.describe API::Projects, feature_category: :projects do expect(json_response.map { |project| project['id'] }).to contain_exactly(public_project.id) end - it 'includes container_registry_access_level', :aggregate_failures do + it 'includes container_registry_access_level' do get api("/users/#{user4.id}/projects/", user) expect(response).to have_gitlab_http_status(:ok) @@ -1643,6 +1686,16 @@ RSpec.describe API::Projects, feature_category: :projects do expect(json_response.first.keys).to include('container_registry_access_level') end + context 'filter by updated_at' do + it 'returns only projects updated on the given timeframe' do + get api("/users/#{user.id}/projects", user), + params: { updated_before: 2.days.ago.iso8601, updated_after: 6.days.ago } + + expect(response).to have_gitlab_http_status(:ok) + expect(json_response.map { |project| project['id'] }).to contain_exactly(project2.id, project.id) + end + end + context 'and using id_after' do let!(:another_public_project) { create(:project, :public, name: 'another_public_project', creator_id: user4.id, namespace: user4.namespace) } @@ -1723,7 +1776,7 @@ RSpec.describe API::Projects, feature_category: :projects do expect(json_response.map { |project| project['id'] }).to contain_exactly(private_project1.id) end - context 'and using an admin to search', :enable_admin_mode, :aggregate_errors do + context 'and using an admin to 
search', :enable_admin_mode do it 'returns users projects when authenticated as admin' do private_project1 = create(:project, :private, name: 'private_project1', creator_id: user4.id, namespace: user4.namespace) @@ -1757,6 +1810,8 @@ RSpec.describe API::Projects, feature_category: :projects do user3.reload end + let(:path) { "/users/#{user3.id}/starred_projects/" } + it 'returns error when user not found' do get api("/users/#{non_existing_record_id}/starred_projects/") @@ -1766,7 +1821,7 @@ RSpec.describe API::Projects, feature_category: :projects do context 'with a public profile' do it 'returns projects filtered by user' do - get api("/users/#{user3.id}/starred_projects/", user) + get api(path, user) expect(response).to have_gitlab_http_status(:ok) expect(response).to include_pagination_headers @@ -1774,6 +1829,16 @@ RSpec.describe API::Projects, feature_category: :projects do expect(json_response.map { |project| project['id'] }) .to contain_exactly(project.id, project2.id, project3.id) end + + context 'filter by updated_at' do + it 'returns only projects updated on the given timeframe' do + get api(path, user), + params: { updated_before: 2.days.ago.iso8601, updated_after: 6.days.ago } + + expect(response).to have_gitlab_http_status(:ok) + expect(json_response.map { |project| project['id'] }).to contain_exactly(project2.id, project.id) + end + end end context 'with a private profile' do @@ -1784,7 +1849,7 @@ RSpec.describe API::Projects, feature_category: :projects do context 'user does not have access to view the private profile' do it 'returns no projects' do - get api("/users/#{user3.id}/starred_projects/", user) + get api(path, user) expect(response).to have_gitlab_http_status(:ok) expect(response).to include_pagination_headers @@ -1795,7 +1860,7 @@ RSpec.describe API::Projects, feature_category: :projects do context 'user has access to view the private profile' do it 'returns projects filtered by user' do - get api("/users/#{user3.id}/starred_projects/", 
admin) + get api(path, admin, admin_mode: true) expect(response).to have_gitlab_http_status(:ok) expect(response).to include_pagination_headers @@ -1808,8 +1873,14 @@ RSpec.describe API::Projects, feature_category: :projects do end describe 'POST /projects/user/:id' do + let(:path) { "/projects/user/#{user.id}" } + + it_behaves_like 'POST request permissions for admin mode' do + let(:params) { { name: 'Foo Project' } } + end + it 'creates new project without path but with name and return 201' do - expect { post api("/projects/user/#{user.id}", admin), params: { name: 'Foo Project' } }.to change { Project.count }.by(1) + expect { post api(path, admin, admin_mode: true), params: { name: 'Foo Project' } }.to change { Project.count }.by(1) expect(response).to have_gitlab_http_status(:created) project = Project.find(json_response['id']) @@ -1819,7 +1890,7 @@ RSpec.describe API::Projects, feature_category: :projects do end it 'creates new project with name and path and returns 201' do - expect { post api("/projects/user/#{user.id}", admin), params: { path: 'path-project-Foo', name: 'Foo Project' } } + expect { post api(path, admin, admin_mode: true), params: { path: 'path-project-Foo', name: 'Foo Project' } } .to change { Project.count }.by(1) expect(response).to have_gitlab_http_status(:created) @@ -1830,11 +1901,11 @@ RSpec.describe API::Projects, feature_category: :projects do end it_behaves_like 'create project with default branch parameter' do - let(:request) { post api("/projects/user/#{user.id}", admin), params: params } + subject(:request) { post api(path, admin, admin_mode: true), params: params } end it 'responds with 400 on failure and not project' do - expect { post api("/projects/user/#{user.id}", admin) } + expect { post api(path, admin, admin_mode: true) } .not_to change { Project.count } expect(response).to have_gitlab_http_status(:bad_request) @@ -1846,7 +1917,7 @@ RSpec.describe API::Projects, feature_category: :projects do 
attrs[:container_registry_enabled] = true end - post api("/projects/user/#{user.id}", admin), params: project + post api(path, admin, admin_mode: true), params: project expect(response).to have_gitlab_http_status(:created) expect(json_response['container_registry_enabled']).to eq(true) @@ -1862,7 +1933,7 @@ RSpec.describe API::Projects, feature_category: :projects do jobs_enabled: true }) - post api("/projects/user/#{user.id}", admin), params: project + post api(path, admin, admin_mode: true), params: project expect(response).to have_gitlab_http_status(:created) @@ -1876,7 +1947,7 @@ RSpec.describe API::Projects, feature_category: :projects do it 'sets a project as public' do project = attributes_for(:project, visibility: 'public') - post api("/projects/user/#{user.id}", admin), params: project + post api(path, admin, admin_mode: true), params: project expect(response).to have_gitlab_http_status(:created) expect(json_response['visibility']).to eq('public') @@ -1885,7 +1956,7 @@ RSpec.describe API::Projects, feature_category: :projects do it 'sets a project as internal' do project = attributes_for(:project, visibility: 'internal') - post api("/projects/user/#{user.id}", admin), params: project + post api(path, admin, admin_mode: true), params: project expect(response).to have_gitlab_http_status(:created) expect(json_response['visibility']).to eq('internal') @@ -1894,7 +1965,7 @@ RSpec.describe API::Projects, feature_category: :projects do it 'sets a project as private' do project = attributes_for(:project, visibility: 'private') - post api("/projects/user/#{user.id}", admin), params: project + post api(path, admin, admin_mode: true), params: project expect(json_response['visibility']).to eq('private') end @@ -1902,7 +1973,7 @@ RSpec.describe API::Projects, feature_category: :projects do it 'sets a project as not allowing outdated diff discussions to automatically resolve' do project = attributes_for(:project, resolve_outdated_diff_discussions: false) - post 
api("/projects/user/#{user.id}", admin), params: project + post api(path, admin, admin_mode: true), params: project expect(json_response['resolve_outdated_diff_discussions']).to be_falsey end @@ -1910,7 +1981,7 @@ RSpec.describe API::Projects, feature_category: :projects do it 'sets a project as allowing outdated diff discussions to automatically resolve' do project = attributes_for(:project, resolve_outdated_diff_discussions: true) - post api("/projects/user/#{user.id}", admin), params: project + post api(path, admin, admin_mode: true), params: project expect(json_response['resolve_outdated_diff_discussions']).to be_truthy end @@ -1918,7 +1989,7 @@ RSpec.describe API::Projects, feature_category: :projects do it 'sets a project as not removing source branches' do project = attributes_for(:project, remove_source_branch_after_merge: false) - post api("/projects/user/#{user.id}", admin), params: project + post api(path, admin, admin_mode: true), params: project expect(json_response['remove_source_branch_after_merge']).to be_falsey end @@ -1926,7 +1997,7 @@ RSpec.describe API::Projects, feature_category: :projects do it 'sets a project as removing source branches' do project = attributes_for(:project, remove_source_branch_after_merge: true) - post api("/projects/user/#{user.id}", admin), params: project + post api(path, admin, admin_mode: true), params: project expect(json_response['remove_source_branch_after_merge']).to be_truthy end @@ -1934,7 +2005,7 @@ RSpec.describe API::Projects, feature_category: :projects do it 'sets a project as allowing merge even if build fails' do project = attributes_for(:project, only_allow_merge_if_pipeline_succeeds: false) - post api("/projects/user/#{user.id}", admin), params: project + post api(path, admin, admin_mode: true), params: project expect(json_response['only_allow_merge_if_pipeline_succeeds']).to be_falsey end @@ -1942,7 +2013,7 @@ RSpec.describe API::Projects, feature_category: :projects do it 'sets a project as allowing 
merge only if pipeline succeeds' do project = attributes_for(:project, only_allow_merge_if_pipeline_succeeds: true) - post api("/projects/user/#{user.id}", admin), params: project + post api(path, admin, admin_mode: true), params: project expect(json_response['only_allow_merge_if_pipeline_succeeds']).to be_truthy end @@ -1950,7 +2021,7 @@ RSpec.describe API::Projects, feature_category: :projects do it 'sets a project as not allowing merge when pipeline is skipped' do project = attributes_for(:project, allow_merge_on_skipped_pipeline: false) - post api("/projects/user/#{user.id}", admin), params: project + post api(path, admin, admin_mode: true), params: project expect(json_response['allow_merge_on_skipped_pipeline']).to be_falsey end @@ -1958,7 +2029,7 @@ RSpec.describe API::Projects, feature_category: :projects do it 'sets a project as allowing merge when pipeline is skipped' do project = attributes_for(:project, allow_merge_on_skipped_pipeline: true) - post api("/projects/user/#{user.id}", admin), params: project + post api(path, admin, admin_mode: true), params: project expect(json_response['allow_merge_on_skipped_pipeline']).to be_truthy end @@ -1966,7 +2037,7 @@ RSpec.describe API::Projects, feature_category: :projects do it 'sets a project as allowing merge even if discussions are unresolved' do project = attributes_for(:project, only_allow_merge_if_all_discussions_are_resolved: false) - post api("/projects/user/#{user.id}", admin), params: project + post api(path, admin, admin_mode: true), params: project expect(json_response['only_allow_merge_if_all_discussions_are_resolved']).to be_falsey end @@ -1974,7 +2045,7 @@ RSpec.describe API::Projects, feature_category: :projects do it 'sets a project as allowing merge only if all discussions are resolved' do project = attributes_for(:project, only_allow_merge_if_all_discussions_are_resolved: true) - post api("/projects/user/#{user.id}", admin), params: project + post api(path, admin, admin_mode: true), params: 
project expect(json_response['only_allow_merge_if_all_discussions_are_resolved']).to be_truthy end @@ -1988,12 +2059,12 @@ RSpec.describe API::Projects, feature_category: :projects do end with_them do - it 'setting container_registry_enabled also sets container_registry_access_level', :aggregate_failures do + it 'setting container_registry_enabled also sets container_registry_access_level' do project_attributes = attributes_for(:project).tap do |attrs| attrs[:container_registry_enabled] = container_registry_enabled end - post api("/projects/user/#{user.id}", admin), params: project_attributes + post api(path, admin, admin_mode: true), params: project_attributes project = Project.find_by(path: project_attributes[:path]) expect(response).to have_gitlab_http_status(:created) @@ -2015,12 +2086,12 @@ RSpec.describe API::Projects, feature_category: :projects do end with_them do - it 'setting container_registry_access_level also sets container_registry_enabled', :aggregate_failures do + it 'setting container_registry_access_level also sets container_registry_enabled' do project_attributes = attributes_for(:project).tap do |attrs| attrs[:container_registry_access_level] = container_registry_access_level end - post api("/projects/user/#{user.id}", admin), params: project_attributes + post api(path, admin, admin_mode: true), params: project_attributes project = Project.find_by(path: project_attributes[:path]) expect(response).to have_gitlab_http_status(:created) @@ -2035,10 +2106,11 @@ RSpec.describe API::Projects, feature_category: :projects do describe "POST /projects/:id/uploads/authorize" do let(:headers) { workhorse_internal_api_request_header.merge({ 'HTTP_GITLAB_WORKHORSE' => 1 }) } + let(:path) { "/projects/#{project.id}/uploads/authorize" } context 'with authorized user' do it "returns 200" do - post api("/projects/#{project.id}/uploads/authorize", user), headers: headers + post api(path, user), headers: headers expect(response).to have_gitlab_http_status(:ok) 
expect(json_response['MaximumSize']).to eq(project.max_attachment_size) @@ -2047,7 +2119,7 @@ RSpec.describe API::Projects, feature_category: :projects do context 'with unauthorized user' do it "returns 404" do - post api("/projects/#{project.id}/uploads/authorize", user2), headers: headers + post api(path, user2), headers: headers expect(response).to have_gitlab_http_status(:not_found) end @@ -2059,7 +2131,7 @@ RSpec.describe API::Projects, feature_category: :projects do end it "returns 200" do - post api("/projects/#{project.id}/uploads/authorize", user), headers: headers + post api(path, user), headers: headers expect(response).to have_gitlab_http_status(:ok) expect(json_response['MaximumSize']).to eq(1.gigabyte) @@ -2068,7 +2140,7 @@ RSpec.describe API::Projects, feature_category: :projects do context 'with no Workhorse headers' do it "returns 403" do - post api("/projects/#{project.id}/uploads/authorize", user) + post api(path, user) expect(response).to have_gitlab_http_status(:forbidden) end @@ -2077,6 +2149,7 @@ RSpec.describe API::Projects, feature_category: :projects do describe "POST /projects/:id/uploads" do let(:file) { fixture_file_upload("spec/fixtures/dk.png", "image/png") } + let(:path) { "/projects/#{project.id}/uploads" } before do project @@ -2087,7 +2160,7 @@ RSpec.describe API::Projects, feature_category: :projects do expect(instance).to receive(:override_max_attachment_size=).with(project.max_attachment_size).and_call_original end - post api("/projects/#{project.id}/uploads", user), params: { file: file } + post api(path, user), params: { file: file } expect(response).to have_gitlab_http_status(:created) expect(json_response['alt']).to eq("dk") @@ -2107,7 +2180,7 @@ RSpec.describe API::Projects, feature_category: :projects do expect(path).not_to be(nil) expect(Rack::Multipart::Parser::TEMPFILE_FACTORY).to receive(:call).and_return(tempfile) - post api("/projects/#{project.id}/uploads", user), params: { file: 
fixture_file_upload("spec/fixtures/dk.png", "image/png") } + post api(path, user), params: { file: fixture_file_upload("spec/fixtures/dk.png", "image/png") } expect(tempfile.path).to be(nil) expect(File.exist?(path)).to be(false) @@ -2119,7 +2192,7 @@ RSpec.describe API::Projects, feature_category: :projects do expect(instance).to receive(:override_max_attachment_size=).with(1.gigabyte).and_call_original end - post api("/projects/#{project.id}/uploads", user), params: { file: file } + post api(path, user), params: { file: file } expect(response).to have_gitlab_http_status(:created) end @@ -2131,7 +2204,7 @@ RSpec.describe API::Projects, feature_category: :projects do hash_including(message: 'File exceeds maximum size', upload_allowed: upload_allowed)) .and_call_original - post api("/projects/#{project.id}/uploads", user), params: { file: file } + post api(path, user), params: { file: file } end end @@ -2152,33 +2225,37 @@ RSpec.describe API::Projects, feature_category: :projects do let_it_be(:private_project) { create(:project, :private, group: project_group) } let_it_be(:public_project) { create(:project, :public, group: project_group) } + let(:path) { "/projects/#{private_project.id}/groups" } + before_all do create(:project_group_link, :developer, group: shared_group_with_dev_access, project: private_project) create(:project_group_link, :reporter, group: shared_group_with_reporter_access, project: private_project) end + it_behaves_like 'GET request permissions for admin mode' do + let(:failed_status_code) { :not_found } + end + shared_examples_for 'successful groups response' do it 'returns an array of groups' do request - aggregate_failures do - expect(response).to have_gitlab_http_status(:ok) - expect(response).to include_pagination_headers - expect(json_response).to be_an Array - expect(json_response.map { |g| g['name'] }).to match_array(expected_groups.map(&:name)) - end + expect(response).to have_gitlab_http_status(:ok) + expect(response).to 
include_pagination_headers + expect(json_response).to be_an Array + expect(json_response.map { |g| g['name'] }).to match_array(expected_groups.map(&:name)) end end context 'when unauthenticated' do it 'does not return groups for private projects' do - get api("/projects/#{private_project.id}/groups") + get api(path) expect(response).to have_gitlab_http_status(:not_found) end context 'for public projects' do - let(:request) { get api("/projects/#{public_project.id}/groups") } + subject(:request) { get api("/projects/#{public_project.id}/groups") } it_behaves_like 'successful groups response' do let(:expected_groups) { [root_group, project_group] } @@ -2189,14 +2266,15 @@ RSpec.describe API::Projects, feature_category: :projects do context 'when authenticated as user' do context 'when user does not have access to the project' do it 'does not return groups' do - get api("/projects/#{private_project.id}/groups", user) + get api(path, user) expect(response).to have_gitlab_http_status(:not_found) end end context 'when user has access to the project' do - let(:request) { get api("/projects/#{private_project.id}/groups", user), params: params } + subject(:request) { get api(path, user), params: params } + let(:params) { {} } before do @@ -2258,7 +2336,7 @@ RSpec.describe API::Projects, feature_category: :projects do end context 'when authenticated as admin' do - let(:request) { get api("/projects/#{private_project.id}/groups", admin) } + subject(:request) { get api(path, admin, admin_mode: true) } it_behaves_like 'successful groups response' do let(:expected_groups) { [root_group, project_group] } @@ -2271,23 +2349,26 @@ RSpec.describe API::Projects, feature_category: :projects do let_it_be(:project_group1) { create(:group, :public, parent: root_group, name: 'group1', path: 'group-1-path') } let_it_be(:project_group2) { create(:group, :public, parent: root_group, name: 'group2', path: 'group-2-path') } let_it_be(:project) { create(:project, :private, group: project_group1) 
} + let(:path) { "/projects/#{project.id}/share_locations" } + + it_behaves_like 'GET request permissions for admin mode' do + let(:failed_status_code) { :not_found } + end shared_examples_for 'successful groups response' do it 'returns an array of groups' do request - aggregate_failures do - expect(response).to have_gitlab_http_status(:ok) - expect(response).to include_pagination_headers - expect(json_response).to be_an Array - expect(json_response.map { |g| g['name'] }).to match_array(expected_groups.map(&:name)) - end + expect(response).to have_gitlab_http_status(:ok) + expect(response).to include_pagination_headers + expect(json_response).to be_an Array + expect(json_response.map { |g| g['name'] }).to match_array(expected_groups.map(&:name)) end end context 'when unauthenticated' do it 'does not return the groups for the given project' do - get api("/projects/#{project.id}/share_locations") + get api(path) expect(response).to have_gitlab_http_status(:not_found) end @@ -2296,14 +2377,15 @@ RSpec.describe API::Projects, feature_category: :projects do context 'when authenticated' do context 'when user is not the owner of the project' do it 'does not return the groups' do - get api("/projects/#{project.id}/share_locations", user) + get api(path, user) expect(response).to have_gitlab_http_status(:not_found) end end context 'when user is the owner of the project' do - let(:request) { get api("/projects/#{project.id}/share_locations", user), params: params } + subject(:request) { get api(path, user), params: params } + let(:params) { {} } before do @@ -2314,15 +2396,15 @@ RSpec.describe API::Projects, feature_category: :projects do context 'with default search' do it_behaves_like 'successful groups response' do - let(:expected_groups) { [project_group1, project_group2] } + let(:expected_groups) { [project_group2] } end end context 'when searching by group name' do context 'searching by group name' do it_behaves_like 'successful groups response' do - let(:params) { { 
search: 'group1' } } - let(:expected_groups) { [project_group1] } + let(:params) { { search: 'group2' } } + let(:expected_groups) { [project_group2] } end end @@ -2341,11 +2423,11 @@ RSpec.describe API::Projects, feature_category: :projects do end context 'when authenticated as admin' do - let(:request) { get api("/projects/#{project.id}/share_locations", admin), params: {} } + subject(:request) { get api(path, admin, admin_mode: true), params: {} } context 'without share_with_group_lock' do it_behaves_like 'successful groups response' do - let(:expected_groups) { [root_group, project_group1, project_group2] } + let(:expected_groups) { [project_group2] } end end @@ -2362,6 +2444,12 @@ RSpec.describe API::Projects, feature_category: :projects do end describe 'GET /projects/:id' do + let(:path) { "/projects/#{project.id}" } + + it_behaves_like 'GET request permissions for admin mode' do + let(:failed_status_code) { :not_found } + end + context 'when unauthenticated' do it 'does not return private projects' do private_project = create(:project, :private) @@ -2401,7 +2489,7 @@ RSpec.describe API::Projects, feature_category: :projects do let(:protected_attributes) { %w(default_branch ci_config_path) } it 'hides protected attributes of private repositories if user is not a member' do - get api("/projects/#{project.id}", user) + get api(path, user) expect(response).to have_gitlab_http_status(:ok) protected_attributes.each do |attribute| @@ -2412,7 +2500,7 @@ RSpec.describe API::Projects, feature_category: :projects do it 'exposes protected attributes of private repositories if user is a member' do project.add_developer(user) - get api("/projects/#{project.id}", user) + get api(path, user) expect(response).to have_gitlab_http_status(:ok) protected_attributes.each do |attribute| @@ -2459,13 +2547,13 @@ RSpec.describe API::Projects, feature_category: :projects do keys end - it 'returns a project by id', :aggregate_failures do + it 'returns a project by id' do project 
project_member group = create(:group) link = create(:project_group_link, project: project, group: group) - get api("/projects/#{project.id}", admin) + get api(path, admin, admin_mode: true) expect(response).to have_gitlab_http_status(:ok) expect(json_response['id']).to eq(project.id) @@ -2521,19 +2609,19 @@ RSpec.describe API::Projects, feature_category: :projects do it 'exposes all necessary attributes' do create(:project_group_link, project: project) - get api("/projects/#{project.id}", admin) + get api(path, admin, admin_mode: true) diff = Set.new(json_response.keys) ^ Set.new(expected_keys) expect(diff).to be_empty, failure_message(diff) end - def failure_message(diff) + def failure_message(_diff) <<~MSG It looks like project's set of exposed attributes is different from the expected set. The following attributes are missing or newly added: - #{diff.to_a.to_sentence} + {diff.to_a.to_sentence} Please update #{project_attributes_file} file" MSG @@ -2547,11 +2635,11 @@ RSpec.describe API::Projects, feature_category: :projects do stub_container_registry_config(enabled: true, host_port: 'registry.example.org:5000') end - it 'returns a project by id', :aggregate_failures do + it 'returns a project by id' do group = create(:group) link = create(:project_group_link, project: project, group: group) - get api("/projects/#{project.id}", user) + get api(path, user) expect(response).to have_gitlab_http_status(:ok) expect(json_response['id']).to eq(project.id) @@ -2635,7 +2723,7 @@ RSpec.describe API::Projects, feature_category: :projects do expires_at = 5.days.from_now.to_date link = create(:project_group_link, project: project, group: group, expires_at: expires_at) - get api("/projects/#{project.id}", user) + get api(path, user) expect(json_response['shared_with_groups']).to be_an Array expect(json_response['shared_with_groups'].length).to eq(1) @@ -2647,7 +2735,7 @@ RSpec.describe API::Projects, feature_category: :projects do end it 'returns a project by path name' do - 
get api("/projects/#{project.id}", user) + get api(path, user) expect(response).to have_gitlab_http_status(:ok) expect(json_response['name']).to eq(project.name) end @@ -2660,7 +2748,7 @@ RSpec.describe API::Projects, feature_category: :projects do it 'returns a 404 error if user is not a member' do other_user = create(:user) - get api("/projects/#{project.id}", other_user) + get api(path, other_user) expect(response).to have_gitlab_http_status(:not_found) end @@ -2674,7 +2762,7 @@ RSpec.describe API::Projects, feature_category: :projects do end it 'exposes namespace fields' do - get api("/projects/#{project.id}", user) + get api(path, user) expect(response).to have_gitlab_http_status(:ok) expect(json_response['namespace']).to eq({ @@ -2690,14 +2778,14 @@ RSpec.describe API::Projects, feature_category: :projects do end it "does not include license fields by default" do - get api("/projects/#{project.id}", user) + get api(path, user) expect(response).to have_gitlab_http_status(:ok) expect(json_response).not_to include('license', 'license_url') end it 'includes license fields when requested' do - get api("/projects/#{project.id}", user), params: { license: true } + get api(path, user), params: { license: true } expect(response).to have_gitlab_http_status(:ok) expect(json_response['license']).to eq({ @@ -2710,14 +2798,14 @@ RSpec.describe API::Projects, feature_category: :projects do end it "does not include statistics by default" do - get api("/projects/#{project.id}", user) + get api(path, user) expect(response).to have_gitlab_http_status(:ok) expect(json_response).not_to include 'statistics' end it "includes statistics if requested" do - get api("/projects/#{project.id}", user), params: { statistics: true } + get api(path, user), params: { statistics: true } expect(response).to have_gitlab_http_status(:ok) expect(json_response).to include 'statistics' @@ -2727,7 +2815,7 @@ RSpec.describe API::Projects, feature_category: :projects do let(:project) { create(:project, 
:public, :repository, :repository_private) } it "does not include statistics if user is not a member" do - get api("/projects/#{project.id}", user), params: { statistics: true } + get api(path, user), params: { statistics: true } expect(response).to have_gitlab_http_status(:ok) expect(json_response).not_to include 'statistics' @@ -2736,7 +2824,7 @@ RSpec.describe API::Projects, feature_category: :projects do it "includes statistics if user is a member" do project.add_developer(user) - get api("/projects/#{project.id}", user), params: { statistics: true } + get api(path, user), params: { statistics: true } expect(response).to have_gitlab_http_status(:ok) expect(json_response).to include 'statistics' @@ -2746,7 +2834,7 @@ RSpec.describe API::Projects, feature_category: :projects do project.add_developer(user) project.project_feature.update_attribute(:repository_access_level, ProjectFeature::DISABLED) - get api("/projects/#{project.id}", user), params: { statistics: true } + get api(path, user), params: { statistics: true } expect(response).to have_gitlab_http_status(:ok) expect(json_response).to include 'statistics' @@ -2754,14 +2842,14 @@ RSpec.describe API::Projects, feature_category: :projects do end it "includes import_error if user can admin project" do - get api("/projects/#{project.id}", user) + get api(path, user) expect(response).to have_gitlab_http_status(:ok) expect(json_response).to include("import_error") end it "does not include import_error if user cannot admin project" do - get api("/projects/#{project.id}", user3) + get api(path, user3) expect(response).to have_gitlab_http_status(:ok) expect(json_response).not_to include("import_error") @@ -2770,7 +2858,7 @@ RSpec.describe API::Projects, feature_category: :projects do it 'returns 404 when project is marked for deletion' do project.update!(pending_delete: true) - get api("/projects/#{project.id}", user) + get api(path, user) expect(response).to have_gitlab_http_status(:not_found) 
expect(json_response['message']).to eq('404 Project Not Found') @@ -2778,7 +2866,7 @@ RSpec.describe API::Projects, feature_category: :projects do context 'links exposure' do it 'exposes related resources full URIs' do - get api("/projects/#{project.id}", user) + get api(path, user) links = json_response['_links'] @@ -2852,7 +2940,7 @@ RSpec.describe API::Projects, feature_category: :projects do context 'personal project' do it 'sets project access and returns 200' do project.add_maintainer(user) - get api("/projects/#{project.id}", user) + get api(path, user) expect(response).to have_gitlab_http_status(:ok) expect(json_response['permissions']['project_access']['access_level']) @@ -2919,7 +3007,7 @@ RSpec.describe API::Projects, feature_category: :projects do let!(:project_member) { create(:project_member, :developer, user: user, project: project) } it 'returns group web_url and avatar_url' do - get api("/projects/#{project.id}", user) + get api(path, user) expect(response).to have_gitlab_http_status(:ok) @@ -2934,7 +3022,7 @@ RSpec.describe API::Projects, feature_category: :projects do let(:project) { create(:project, namespace: user.namespace) } it 'returns user web_url and avatar_url' do - get api("/projects/#{project.id}", user) + get api(path, user) expect(response).to have_gitlab_http_status(:ok) @@ -2950,16 +3038,19 @@ RSpec.describe API::Projects, feature_category: :projects do let_it_be(:project) { create(:project, :public) } let(:expected_params) { { user: user.username, project: project.full_path } } - subject { get api("/projects/#{project.id}", user) } + subject { get api(path, user) } end describe 'repository_storage attribute' do + let_it_be(:admin_mode) { false } + before do - get api("/projects/#{project.id}", user) + get api(path, user, admin_mode: admin_mode) end context 'when authenticated as an admin' do let(:user) { create(:admin) } + let_it_be(:admin_mode) { true } it 'returns repository_storage attribute' do expect(response).to 
have_gitlab_http_status(:ok) @@ -2975,31 +3066,34 @@ RSpec.describe API::Projects, feature_category: :projects do end it 'exposes service desk attributes' do - get api("/projects/#{project.id}", user) + get api(path, user) expect(json_response).to have_key 'service_desk_enabled' expect(json_response).to have_key 'service_desk_address' end context 'when project is shared to multiple groups' do - it 'avoids N+1 queries' do + it 'avoids N+1 queries', :use_sql_query_cache do create(:project_group_link, project: project) - get api("/projects/#{project.id}", user) + get api(path, user) + expect(response).to have_gitlab_http_status(:ok) control = ActiveRecord::QueryRecorder.new do - get api("/projects/#{project.id}", user) + get api(path, user) end create(:project_group_link, project: project) expect do - get api("/projects/#{project.id}", user) + get api(path, user) end.not_to exceed_query_limit(control) end end end describe 'GET /projects/:id/users' do + let(:path) { "/projects/#{project.id}/users" } + shared_examples_for 'project users response' do let(:reporter_1) { create(:user) } let(:reporter_2) { create(:user) } @@ -3010,7 +3104,7 @@ RSpec.describe API::Projects, feature_category: :projects do end it 'returns the project users' do - get api("/projects/#{project.id}/users", current_user) + get api(path, current_user) user = project.namespace.first_owner @@ -3029,6 +3123,10 @@ RSpec.describe API::Projects, feature_category: :projects do end end + it_behaves_like 'GET request permissions for admin mode' do + let(:failed_status_code) { :not_found } + end + context 'when unauthenticated' do it_behaves_like 'project users response' do let(:project) { create(:project, :public) } @@ -3054,7 +3152,7 @@ RSpec.describe API::Projects, feature_category: :projects do it 'returns a 404 error if user is not a member' do other_user = create(:user) - get api("/projects/#{project.id}/users", other_user) + get api(path, other_user) expect(response).to 
have_gitlab_http_status(:not_found) end @@ -3073,18 +3171,25 @@ RSpec.describe API::Projects, feature_category: :projects do end describe 'fork management' do - let(:project_fork_target) { create(:project) } - let(:project_fork_source) { create(:project, :public) } - let(:private_project_fork_source) { create(:project, :private) } + let_it_be_with_refind(:project_fork_target) { create(:project) } + let_it_be_with_refind(:project_fork_source) { create(:project, :public) } + let_it_be_with_refind(:private_project_fork_source) { create(:project, :private) } describe 'POST /projects/:id/fork/:forked_from_id' do + let(:path) { "/projects/#{project_fork_target.id}/fork/#{project_fork_source.id}" } + + it_behaves_like 'POST request permissions for admin mode' do + let(:params) { {} } + let(:failed_status_code) { :not_found } + end + context 'user is a developer' do before do project_fork_target.add_developer(user) end it 'denies project to be forked from an existing project' do - post api("/projects/#{project_fork_target.id}/fork/#{project_fork_source.id}", user) + post api(path, user) expect(response).to have_gitlab_http_status(:forbidden) end @@ -3102,7 +3207,7 @@ RSpec.describe API::Projects, feature_category: :projects do it 'allows project to be forked from an existing project' do expect(project_fork_target).not_to be_forked - post api("/projects/#{project_fork_target.id}/fork/#{project_fork_source.id}", user) + post api(path, user) project_fork_target.reload expect(response).to have_gitlab_http_status(:created) @@ -3114,7 +3219,7 @@ RSpec.describe API::Projects, feature_category: :projects do it 'fails without permission from forked_from project' do project_fork_source.project_feature.update_attribute(:forking_access_level, ProjectFeature::PRIVATE) - post api("/projects/#{project_fork_target.id}/fork/#{project_fork_source.id}", user) + post api(path, user) expect(response).to have_gitlab_http_status(:forbidden) expect(project_fork_target.forked_from_project).to 
be_nil @@ -3133,25 +3238,25 @@ RSpec.describe API::Projects, feature_category: :projects do it 'allows project to be forked from an existing project' do expect(project_fork_target).not_to be_forked - post api("/projects/#{project_fork_target.id}/fork/#{project_fork_source.id}", admin) + post api(path, admin, admin_mode: true) expect(response).to have_gitlab_http_status(:created) end it 'allows project to be forked from a private project' do - post api("/projects/#{project_fork_target.id}/fork/#{private_project_fork_source.id}", admin) + post api("/projects/#{project_fork_target.id}/fork/#{private_project_fork_source.id}", admin, admin_mode: true) expect(response).to have_gitlab_http_status(:created) end it 'refreshes the forks count cachce' do expect do - post api("/projects/#{project_fork_target.id}/fork/#{project_fork_source.id}", admin) + post api(path, admin, admin_mode: true) end.to change(project_fork_source, :forks_count).by(1) end it 'fails if forked_from project which does not exist' do - post api("/projects/#{project_fork_target.id}/fork/#{non_existing_record_id}", admin) + post api("/projects/#{project_fork_target.id}/fork/#{non_existing_record_id}", admin, admin_mode: true) expect(response).to have_gitlab_http_status(:not_found) end @@ -3160,7 +3265,7 @@ RSpec.describe API::Projects, feature_category: :projects do Projects::ForkService.new(project_fork_source, admin).execute(project_fork_target) - post api("/projects/#{project_fork_target.id}/fork/#{other_project_fork_source.id}", admin) + post api("/projects/#{project_fork_target.id}/fork/#{other_project_fork_source.id}", admin, admin_mode: true) project_fork_target.reload expect(response).to have_gitlab_http_status(:conflict) @@ -3171,8 +3276,10 @@ RSpec.describe API::Projects, feature_category: :projects do end describe 'DELETE /projects/:id/fork' do + let(:path) { "/projects/#{project_fork_target.id}/fork" } + it "is not visible to users outside group" do - delete 
api("/projects/#{project_fork_target.id}/fork", user) + delete api(path, user) expect(response).to have_gitlab_http_status(:not_found) end @@ -3186,14 +3293,19 @@ RSpec.describe API::Projects, feature_category: :projects do context 'for a forked project' do before do - post api("/projects/#{project_fork_target.id}/fork/#{project_fork_source.id}", admin) + post api("/projects/#{project_fork_target.id}/fork/#{project_fork_source.id}", admin, admin_mode: true) project_fork_target.reload expect(project_fork_target.forked_from_project).to be_present expect(project_fork_target).to be_forked end + it_behaves_like 'DELETE request permissions for admin mode' do + let(:success_status_code) { :no_content } + let(:failed_status_code) { :not_found } + end + it 'makes forked project unforked' do - delete api("/projects/#{project_fork_target.id}/fork", admin) + delete api(path, admin, admin_mode: true) expect(response).to have_gitlab_http_status(:no_content) project_fork_target.reload @@ -3202,18 +3314,18 @@ RSpec.describe API::Projects, feature_category: :projects do end it_behaves_like '412 response' do - let(:request) { api("/projects/#{project_fork_target.id}/fork", admin) } + subject(:request) { api(path, admin, admin_mode: true) } end end it 'is forbidden to non-owner users' do - delete api("/projects/#{project_fork_target.id}/fork", user2) + delete api(path, user2) expect(response).to have_gitlab_http_status(:forbidden) end it 'is idempotent if not forked' do expect(project_fork_target.forked_from_project).to be_nil - delete api("/projects/#{project_fork_target.id}/fork", admin) + delete api(path, admin, admin_mode: true) expect(response).to have_gitlab_http_status(:not_modified) expect(project_fork_target.reload.forked_from_project).to be_nil end @@ -3221,17 +3333,17 @@ RSpec.describe API::Projects, feature_category: :projects do end describe 'GET /projects/:id/forks' do - let(:private_fork) { create(:project, :private, :empty_repo) } - let(:member) { create(:user) } - 
let(:non_member) { create(:user) } + let_it_be_with_refind(:private_fork) { create(:project, :private, :empty_repo) } + let_it_be(:member) { create(:user) } + let_it_be(:non_member) { create(:user) } - before do + before_all do private_fork.add_developer(member) end context 'for a forked project' do before do - post api("/projects/#{private_fork.id}/fork/#{project_fork_source.id}", admin) + post api("/projects/#{private_fork.id}/fork/#{project_fork_source.id}", admin, admin_mode: true) private_fork.reload expect(private_fork.forked_from_project).to be_present expect(private_fork).to be_forked @@ -3249,6 +3361,20 @@ RSpec.describe API::Projects, feature_category: :projects do expect(json_response.length).to eq(1) expect(json_response[0]['name']).to eq(private_fork.name) end + + context 'filter by updated_at' do + before do + private_fork.update!(updated_at: 4.days.ago) + end + + it 'returns only forks updated on the given timeframe' do + get api("/projects/#{project_fork_source.id}/forks", member), + params: { updated_before: 2.days.ago.iso8601, updated_after: 6.days.ago } + + expect(response).to have_gitlab_http_status(:ok) + expect(json_response.map { |project| project['id'] }).to contain_exactly(private_fork.id) + end + end end context 'for a user that cannot access the forks' do @@ -3277,6 +3403,7 @@ RSpec.describe API::Projects, feature_category: :projects do describe "POST /projects/:id/share" do let_it_be(:group) { create(:group, :private) } let_it_be(:group_user) { create(:user) } + let(:path) { "/projects/#{project.id}/share" } before do group.add_developer(user) @@ -3287,7 +3414,7 @@ RSpec.describe API::Projects, feature_category: :projects do expires_at = 10.days.from_now.to_date expect do - post api("/projects/#{project.id}/share", user), params: { group_id: group.id, group_access: Gitlab::Access::DEVELOPER, expires_at: expires_at } + post api(path, user), params: { group_id: group.id, group_access: Gitlab::Access::DEVELOPER, expires_at: expires_at } 
end.to change { ProjectGroupLink.count }.by(1) expect(response).to have_gitlab_http_status(:created) @@ -3298,51 +3425,51 @@ RSpec.describe API::Projects, feature_category: :projects do it 'updates project authorization', :sidekiq_inline do expect do - post api("/projects/#{project.id}/share", user), params: { group_id: group.id, group_access: Gitlab::Access::DEVELOPER } + post api(path, user), params: { group_id: group.id, group_access: Gitlab::Access::DEVELOPER } end.to( change { group_user.can?(:read_project, project) }.from(false).to(true) ) end it "returns a 400 error when group id is not given" do - post api("/projects/#{project.id}/share", user), params: { group_access: Gitlab::Access::DEVELOPER } + post api(path, user), params: { group_access: Gitlab::Access::DEVELOPER } expect(response).to have_gitlab_http_status(:bad_request) end it "returns a 400 error when access level is not given" do - post api("/projects/#{project.id}/share", user), params: { group_id: group.id } + post api(path, user), params: { group_id: group.id } expect(response).to have_gitlab_http_status(:bad_request) end it "returns a 400 error when sharing is disabled" do project.namespace.update!(share_with_group_lock: true) - post api("/projects/#{project.id}/share", user), params: { group_id: group.id, group_access: Gitlab::Access::DEVELOPER } + post api(path, user), params: { group_id: group.id, group_access: Gitlab::Access::DEVELOPER } expect(response).to have_gitlab_http_status(:bad_request) end it 'returns a 404 error when user cannot read group' do private_group = create(:group, :private) - post api("/projects/#{project.id}/share", user), params: { group_id: private_group.id, group_access: Gitlab::Access::DEVELOPER } + post api(path, user), params: { group_id: private_group.id, group_access: Gitlab::Access::DEVELOPER } expect(response).to have_gitlab_http_status(:not_found) end it 'returns a 404 error when group does not exist' do - post api("/projects/#{project.id}/share", user), 
params: { group_id: non_existing_record_id, group_access: Gitlab::Access::DEVELOPER } + post api(path, user), params: { group_id: non_existing_record_id, group_access: Gitlab::Access::DEVELOPER } expect(response).to have_gitlab_http_status(:not_found) end it "returns a 400 error when wrong params passed" do - post api("/projects/#{project.id}/share", user), params: { group_id: group.id, group_access: non_existing_record_access_level } + post api(path, user), params: { group_id: group.id, group_access: non_existing_record_access_level } expect(response).to have_gitlab_http_status(:bad_request) expect(json_response['error']).to eq 'group_access does not have a valid value' end it "returns a 400 error when the project-group share is created with an OWNER access level" do - post api("/projects/#{project.id}/share", user), params: { group_id: group.id, group_access: Gitlab::Access::OWNER } + post api(path, user), params: { group_id: group.id, group_access: Gitlab::Access::OWNER } expect(response).to have_gitlab_http_status(:bad_request) expect(json_response['error']).to eq 'group_access does not have a valid value' @@ -3352,7 +3479,7 @@ RSpec.describe API::Projects, feature_category: :projects do allow(::Projects::GroupLinks::CreateService).to receive_message_chain(:new, :execute) .and_return({ status: :error, http_status: 409, message: 'error' }) - post api("/projects/#{project.id}/share", user), params: { group_id: group.id, group_access: Gitlab::Access::DEVELOPER } + post api(path, user), params: { group_id: group.id, group_access: Gitlab::Access::DEVELOPER } expect(response).to have_gitlab_http_status(:conflict) end @@ -3385,7 +3512,7 @@ RSpec.describe API::Projects, feature_category: :projects do end it_behaves_like '412 response' do - let(:request) { api("/projects/#{project.id}/share/#{group.id}", user) } + subject(:request) { api("/projects/#{project.id}/share/#{group.id}", user) } end end @@ -3411,6 +3538,7 @@ RSpec.describe API::Projects, feature_category: 
:projects do describe 'POST /projects/:id/import_project_members/:project_id' do let_it_be(:project2) { create(:project) } let_it_be(:project2_user) { create(:user) } + let(:path) { "/projects/#{project.id}/import_project_members/#{project2.id}" } before_all do project.add_maintainer(user) @@ -3419,7 +3547,8 @@ RSpec.describe API::Projects, feature_category: :projects do end it 'records the query', :request_store, :use_sql_query_cache do - post api("/projects/#{project.id}/import_project_members/#{project2.id}", user) + post api(path, user) + expect(response).to have_gitlab_http_status(:created) control_project = create(:project) control_project.add_maintainer(user) @@ -3443,7 +3572,7 @@ RSpec.describe API::Projects, feature_category: :projects do it 'returns 200 when it successfully imports members from another project' do expect do - post api("/projects/#{project.id}/import_project_members/#{project2.id}", user) + post api(path, user) end.to change { project.members.count }.by(2) expect(response).to have_gitlab_http_status(:created) @@ -3486,7 +3615,7 @@ RSpec.describe API::Projects, feature_category: :projects do project2.add_developer(user2) expect do - post api("/projects/#{project.id}/import_project_members/#{project2.id}", user2) + post api(path, user2) end.not_to change { project.members.count } expect(response).to have_gitlab_http_status(:forbidden) @@ -3499,7 +3628,7 @@ RSpec.describe API::Projects, feature_category: :projects do end expect do - post api("/projects/#{project.id}/import_project_members/#{project2.id}", user) + post api(path, user) end.not_to change { project.members.count } expect(response).to have_gitlab_http_status(:unprocessable_entity) @@ -3508,6 +3637,8 @@ RSpec.describe API::Projects, feature_category: :projects do end describe 'PUT /projects/:id' do + let(:path) { "/projects/#{project.id}" } + before do expect(project).to be_persisted expect(user).to be_persisted @@ -3519,13 +3650,18 @@ RSpec.describe API::Projects, 
feature_category: :projects do expect(project_member).to be_persisted end + it_behaves_like 'PUT request permissions for admin mode' do + let(:params) { { visibility: 'internal' } } + let(:failed_status_code) { :not_found } + end + describe 'updating packages_enabled attribute' do it 'is enabled by default' do expect(project.packages_enabled).to be true end it 'disables project packages feature' do - put(api("/projects/#{project.id}", user), params: { packages_enabled: false }) + put(api(path, user), params: { packages_enabled: false }) expect(response).to have_gitlab_http_status(:ok) expect(project.reload.packages_enabled).to be false @@ -3533,8 +3669,8 @@ RSpec.describe API::Projects, feature_category: :projects do end end - it 'sets container_registry_access_level', :aggregate_failures do - put api("/projects/#{project.id}", user), params: { container_registry_access_level: 'private' } + it 'sets container_registry_access_level' do + put api(path, user), params: { container_registry_access_level: 'private' } expect(response).to have_gitlab_http_status(:ok) expect(json_response['container_registry_access_level']).to eq('private') @@ -3544,31 +3680,31 @@ RSpec.describe API::Projects, feature_category: :projects do it 'sets container_registry_enabled' do project.project_feature.update!(container_registry_access_level: ProjectFeature::DISABLED) - put(api("/projects/#{project.id}", user), params: { container_registry_enabled: true }) + put(api(path, user), params: { container_registry_enabled: true }) expect(response).to have_gitlab_http_status(:ok) expect(json_response['container_registry_enabled']).to eq(true) expect(project.reload.container_registry_access_level).to eq(ProjectFeature::ENABLED) end - it 'sets security_and_compliance_access_level', :aggregate_failures do - put api("/projects/#{project.id}", user), params: { security_and_compliance_access_level: 'private' } + it 'sets security_and_compliance_access_level' do + put api(path, user), params: { 
security_and_compliance_access_level: 'private' } expect(response).to have_gitlab_http_status(:ok) expect(json_response['security_and_compliance_access_level']).to eq('private') expect(Project.find_by(path: project[:path]).security_and_compliance_access_level).to eq(ProjectFeature::PRIVATE) end - it 'sets operations_access_level', :aggregate_failures do - put api("/projects/#{project.id}", user), params: { operations_access_level: 'private' } + it 'sets operations_access_level' do + put api(path, user), params: { operations_access_level: 'private' } expect(response).to have_gitlab_http_status(:ok) expect(json_response['operations_access_level']).to eq('private') expect(Project.find_by(path: project[:path]).operations_access_level).to eq(ProjectFeature::PRIVATE) end - it 'sets analytics_access_level', :aggregate_failures do - put api("/projects/#{project.id}", user), params: { analytics_access_level: 'private' } + it 'sets analytics_access_level' do + put api(path, user), params: { analytics_access_level: 'private' } expect(response).to have_gitlab_http_status(:ok) expect(json_response['analytics_access_level']).to eq('private') @@ -3576,8 +3712,8 @@ RSpec.describe API::Projects, feature_category: :projects do end %i(releases_access_level environments_access_level feature_flags_access_level infrastructure_access_level monitor_access_level).each do |field| - it "sets #{field}", :aggregate_failures do - put api("/projects/#{project.id}", user), params: { field => 'private' } + it "sets #{field}" do + put api(path, user), params: { field => 'private' } expect(response).to have_gitlab_http_status(:ok) expect(json_response[field.to_s]).to eq('private') @@ -3588,7 +3724,7 @@ RSpec.describe API::Projects, feature_category: :projects do it 'returns 400 when nothing sent' do project_param = {} - put api("/projects/#{project.id}", user), params: project_param + put api(path, user), params: project_param expect(response).to have_gitlab_http_status(:bad_request) 
expect(json_response['error']).to match('at least one parameter must be provided') @@ -3598,7 +3734,7 @@ RSpec.describe API::Projects, feature_category: :projects do it 'returns authentication error' do project_param = { name: 'bar' } - put api("/projects/#{project.id}"), params: project_param + put api(path), params: project_param expect(response).to have_gitlab_http_status(:unauthorized) end @@ -3644,7 +3780,7 @@ RSpec.describe API::Projects, feature_category: :projects do it 'does not update name to existing name' do project_param = { name: project3.name } - put api("/projects/#{project.id}", user), params: project_param + put api(path, user), params: project_param expect(response).to have_gitlab_http_status(:bad_request) expect(json_response['message']['name']).to eq(['has already been taken']) @@ -3653,7 +3789,7 @@ RSpec.describe API::Projects, feature_category: :projects do it 'updates request_access_enabled' do project_param = { request_access_enabled: false } - put api("/projects/#{project.id}", user), params: project_param + put api(path, user), params: project_param expect(response).to have_gitlab_http_status(:ok) expect(json_response['request_access_enabled']).to eq(false) @@ -3674,7 +3810,7 @@ RSpec.describe API::Projects, feature_category: :projects do it 'updates default_branch' do project_param = { default_branch: 'something_else' } - put api("/projects/#{project.id}", user), params: project_param + put api(path, user), params: project_param expect(response).to have_gitlab_http_status(:ok) @@ -3763,7 +3899,7 @@ RSpec.describe API::Projects, feature_category: :projects do expect(response).to have_gitlab_http_status(:bad_request) end - it 'updates restrict_user_defined_variables', :aggregate_failures do + it 'updates restrict_user_defined_variables' do project_param = { restrict_user_defined_variables: true } put api("/projects/#{project3.id}", user), params: project_param @@ -3965,7 +4101,7 @@ RSpec.describe API::Projects, feature_category: :projects 
do it 'updates name' do project_param = { name: 'bar' } - put api("/projects/#{project.id}", user), params: project_param + put api(path, user), params: project_param expect(response).to have_gitlab_http_status(:ok) @@ -4040,7 +4176,7 @@ RSpec.describe API::Projects, feature_category: :projects do merge_requests_enabled: true, description: 'new description', request_access_enabled: true } - put api("/projects/#{project.id}", user3), params: project_param + put api(path, user3), params: project_param expect(response).to have_gitlab_http_status(:forbidden) end end @@ -4051,7 +4187,7 @@ RSpec.describe API::Projects, feature_category: :projects do it 'ignores visibility level restrictions' do stub_application_setting(restricted_visibility_levels: [Gitlab::VisibilityLevel::INTERNAL]) - put api("/projects/#{project3.id}", admin), params: { visibility: 'internal' } + put api("/projects/#{project3.id}", admin, admin_mode: true), params: { visibility: 'internal' } expect(response).to have_gitlab_http_status(:ok) expect(json_response['visibility']).to eq('internal') @@ -4082,7 +4218,7 @@ RSpec.describe API::Projects, feature_category: :projects do let(:admin) { create(:admin) } it 'returns 400 when repository storage is unknown' do - put(api("/projects/#{new_project.id}", admin), params: { repository_storage: unknown_storage }) + put(api("/projects/#{new_project.id}", admin, admin_mode: true), params: { repository_storage: unknown_storage }) expect(response).to have_gitlab_http_status(:bad_request) expect(json_response['message']['repository_storage_moves']).to eq(['is invalid']) @@ -4093,7 +4229,7 @@ RSpec.describe API::Projects, feature_category: :projects do expect do Sidekiq::Testing.fake! 
do - put(api("/projects/#{new_project.id}", admin), params: { repository_storage: 'test_second_storage' }) + put(api("/projects/#{new_project.id}", admin, admin_mode: true), params: { repository_storage: 'test_second_storage' }) end end.to change(Projects::UpdateRepositoryStorageWorker.jobs, :size).by(1) @@ -4103,40 +4239,42 @@ RSpec.describe API::Projects, feature_category: :projects do end context 'when updating service desk' do - subject { put(api("/projects/#{project.id}", user), params: { service_desk_enabled: true }) } + let(:params) { { service_desk_enabled: true } } + + subject(:request) { put(api(path, user), params: params) } before do project.update!(service_desk_enabled: false) - allow(::Gitlab::IncomingEmail).to receive(:enabled?).and_return(true) + allow(::Gitlab::Email::IncomingEmail).to receive(:enabled?).and_return(true) end it 'returns 200' do - subject + request expect(response).to have_gitlab_http_status(:ok) end it 'enables the service_desk' do - expect { subject }.to change { project.reload.service_desk_enabled }.to(true) + expect { request }.to change { project.reload.service_desk_enabled }.to(true) end end context 'when updating keep latest artifact' do - subject { put(api("/projects/#{project.id}", user), params: { keep_latest_artifact: true }) } + subject(:request) { put(api(path, user), params: { keep_latest_artifact: true }) } before do project.update!(keep_latest_artifact: false) end it 'returns 200' do - subject + request expect(response).to have_gitlab_http_status(:ok) end it 'enables keep_latest_artifact' do - expect { subject }.to change { project.reload.keep_latest_artifact }.to(true) + expect { request }.to change { project.reload.keep_latest_artifact }.to(true) end end @@ -4182,9 +4320,11 @@ RSpec.describe API::Projects, feature_category: :projects do end describe 'POST /projects/:id/archive' do + let(:path) { "/projects/#{project.id}/archive" } + context 'on an unarchived project' do it 'archives the project' do - post 
api("/projects/#{project.id}/archive", user) + post api(path, user) expect(response).to have_gitlab_http_status(:created) expect(json_response['archived']).to be_truthy @@ -4197,7 +4337,7 @@ RSpec.describe API::Projects, feature_category: :projects do end it 'remains archived' do - post api("/projects/#{project.id}/archive", user) + post api(path, user) expect(response).to have_gitlab_http_status(:created) expect(json_response['archived']).to be_truthy @@ -4210,7 +4350,7 @@ RSpec.describe API::Projects, feature_category: :projects do end it 'rejects the action' do - post api("/projects/#{project.id}/archive", user3) + post api(path, user3) expect(response).to have_gitlab_http_status(:forbidden) end @@ -4218,9 +4358,11 @@ RSpec.describe API::Projects, feature_category: :projects do end describe 'POST /projects/:id/unarchive' do + let(:path) { "/projects/#{project.id}/unarchive" } + context 'on an unarchived project' do it 'remains unarchived' do - post api("/projects/#{project.id}/unarchive", user) + post api(path, user) expect(response).to have_gitlab_http_status(:created) expect(json_response['archived']).to be_falsey @@ -4233,7 +4375,7 @@ RSpec.describe API::Projects, feature_category: :projects do end it 'unarchives the project' do - post api("/projects/#{project.id}/unarchive", user) + post api(path, user) expect(response).to have_gitlab_http_status(:created) expect(json_response['archived']).to be_falsey @@ -4246,7 +4388,7 @@ RSpec.describe API::Projects, feature_category: :projects do end it 'rejects the action' do - post api("/projects/#{project.id}/unarchive", user3) + post api(path, user3) expect(response).to have_gitlab_http_status(:forbidden) end @@ -4254,9 +4396,11 @@ RSpec.describe API::Projects, feature_category: :projects do end describe 'POST /projects/:id/star' do + let(:path) { "/projects/#{project.id}/star" } + context 'on an unstarred project' do it 'stars the project' do - expect { post api("/projects/#{project.id}/star", user) }.to change { 
project.reload.star_count }.by(1) + expect { post api(path, user) }.to change { project.reload.star_count }.by(1) expect(response).to have_gitlab_http_status(:created) expect(json_response['star_count']).to eq(1) @@ -4270,7 +4414,7 @@ RSpec.describe API::Projects, feature_category: :projects do end it 'does not modify the star count' do - expect { post api("/projects/#{project.id}/star", user) }.not_to change { project.reload.star_count } + expect { post api(path, user) }.not_to change { project.reload.star_count } expect(response).to have_gitlab_http_status(:not_modified) end @@ -4278,6 +4422,8 @@ RSpec.describe API::Projects, feature_category: :projects do end describe 'POST /projects/:id/unstar' do + let(:path) { "/projects/#{project.id}/unstar" } + context 'on a starred project' do before do user.toggle_star(project) @@ -4285,7 +4431,7 @@ RSpec.describe API::Projects, feature_category: :projects do end it 'unstars the project' do - expect { post api("/projects/#{project.id}/unstar", user) }.to change { project.reload.star_count }.by(-1) + expect { post api(path, user) }.to change { project.reload.star_count }.by(-1) expect(response).to have_gitlab_http_status(:created) expect(json_response['star_count']).to eq(0) @@ -4294,7 +4440,7 @@ RSpec.describe API::Projects, feature_category: :projects do context 'on an unstarred project' do it 'does not modify the star count' do - expect { post api("/projects/#{project.id}/unstar", user) }.not_to change { project.reload.star_count } + expect { post api(path, user) }.not_to change { project.reload.star_count } expect(response).to have_gitlab_http_status(:not_modified) end @@ -4302,9 +4448,13 @@ RSpec.describe API::Projects, feature_category: :projects do end describe 'GET /projects/:id/starrers' do + let(:path) { "/projects/#{public_project.id}/starrers" } + let(:public_project) { create(:project, :public) } + let(:private_user) { create(:user, private_profile: true) } + shared_examples_for 'project starrers response' do 
it 'returns an array of starrers' do - get api("/projects/#{public_project.id}/starrers", current_user) + get api(path, current_user) expect(response).to have_gitlab_http_status(:ok) expect(response).to include_pagination_headers @@ -4314,15 +4464,12 @@ RSpec.describe API::Projects, feature_category: :projects do end it 'returns the proper security headers' do - get api("/projects/#{public_project.id}/starrers", current_user) + get api(path, current_user) expect(response).to include_security_headers end end - let(:public_project) { create(:project, :public) } - let(:private_user) { create(:user, private_profile: true) } - before do user.update!(starred_projects: [public_project]) private_user.update!(starred_projects: [public_project]) @@ -4340,7 +4487,7 @@ RSpec.describe API::Projects, feature_category: :projects do end it 'returns only starrers with a public profile' do - get api("/projects/#{public_project.id}/starrers", nil) + get api(path, nil) user_ids = json_response.map { |s| s['user']['id'] } expect(user_ids).to include(user.id) @@ -4354,7 +4501,7 @@ RSpec.describe API::Projects, feature_category: :projects do end it 'returns current user with a private profile' do - get api("/projects/#{public_project.id}/starrers", private_user) + get api(path, private_user) user_ids = json_response.map { |s| s['user']['id'] } expect(user_ids).to include(user.id, private_user.id) @@ -4417,9 +4564,16 @@ RSpec.describe API::Projects, feature_category: :projects do end describe 'DELETE /projects/:id' do + let(:path) { "/projects/#{project.id}" } + + it_behaves_like 'DELETE request permissions for admin mode' do + let(:success_status_code) { :accepted } + let(:failed_status_code) { :not_found } + end + context 'when authenticated as user' do it 'removes project' do - delete api("/projects/#{project.id}", user) + delete api(path, user) expect(response).to have_gitlab_http_status(:accepted) expect(json_response['message']).to eql('202 Accepted') @@ -4427,13 +4581,13 @@ 
RSpec.describe API::Projects, feature_category: :projects do it_behaves_like '412 response' do let(:success_status) { 202 } - let(:request) { api("/projects/#{project.id}", user) } + subject(:request) { api(path, user) } end it 'does not remove a project if not an owner' do user3 = create(:user) project.add_developer(user3) - delete api("/projects/#{project.id}", user3) + delete api(path, user3) expect(response).to have_gitlab_http_status(:forbidden) end @@ -4443,27 +4597,27 @@ RSpec.describe API::Projects, feature_category: :projects do end it 'does not remove a project not attached to user' do - delete api("/projects/#{project.id}", user2) + delete api(path, user2) expect(response).to have_gitlab_http_status(:not_found) end end context 'when authenticated as admin' do it 'removes any existing project' do - delete api("/projects/#{project.id}", admin) + delete api("/projects/#{project.id}", admin, admin_mode: true) expect(response).to have_gitlab_http_status(:accepted) expect(json_response['message']).to eql('202 Accepted') end it 'does not remove a non existing project' do - delete api("/projects/#{non_existing_record_id}", admin) + delete api("/projects/#{non_existing_record_id}", admin, admin_mode: true) expect(response).to have_gitlab_http_status(:not_found) end it_behaves_like '412 response' do let(:success_status) { 202 } - let(:request) { api("/projects/#{project.id}", admin) } + subject(:request) { api("/projects/#{project.id}", admin, admin_mode: true) } end end end @@ -4473,6 +4627,8 @@ RSpec.describe API::Projects, feature_category: :projects do create(:project, :repository, creator: user, namespace: user.namespace) end + let(:path) { "/projects/#{project.id}/fork" } + let(:project2) do create(:project, :repository, creator: user, namespace: user.namespace) end @@ -4489,9 +4645,14 @@ RSpec.describe API::Projects, feature_category: :projects do project2.add_reporter(user2) end + it_behaves_like 'POST request permissions for admin mode' do + let(:params) 
{ {} } + let(:failed_status_code) { :not_found } + end + context 'when authenticated' do it 'forks if user has sufficient access to project' do - post api("/projects/#{project.id}/fork", user2) + post api(path, user2) expect(response).to have_gitlab_http_status(:created) expect(json_response['name']).to eq(project.name) @@ -4504,7 +4665,7 @@ RSpec.describe API::Projects, feature_category: :projects do end it 'forks if user is admin' do - post api("/projects/#{project.id}/fork", admin) + post api(path, admin, admin_mode: true) expect(response).to have_gitlab_http_status(:created) expect(json_response['name']).to eq(project.name) @@ -4518,7 +4679,7 @@ RSpec.describe API::Projects, feature_category: :projects do it 'fails on missing project access for the project to fork' do new_user = create(:user) - post api("/projects/#{project.id}/fork", new_user) + post api(path, new_user) expect(response).to have_gitlab_http_status(:not_found) expect(json_response['message']).to eq('404 Project Not Found') @@ -4543,41 +4704,41 @@ RSpec.describe API::Projects, feature_category: :projects do end it 'forks with explicit own user namespace id' do - post api("/projects/#{project.id}/fork", user2), params: { namespace: user2.namespace.id } + post api(path, user2), params: { namespace: user2.namespace.id } expect(response).to have_gitlab_http_status(:created) expect(json_response['owner']['id']).to eq(user2.id) end it 'forks with explicit own user name as namespace' do - post api("/projects/#{project.id}/fork", user2), params: { namespace: user2.username } + post api(path, user2), params: { namespace: user2.username } expect(response).to have_gitlab_http_status(:created) expect(json_response['owner']['id']).to eq(user2.id) end it 'forks to another user when admin' do - post api("/projects/#{project.id}/fork", admin), params: { namespace: user2.username } + post api(path, admin, admin_mode: true), params: { namespace: user2.username } expect(response).to 
have_gitlab_http_status(:created) expect(json_response['owner']['id']).to eq(user2.id) end it 'fails if trying to fork to another user when not admin' do - post api("/projects/#{project.id}/fork", user2), params: { namespace: admin.namespace.id } + post api(path, user2), params: { namespace: admin.namespace.id } expect(response).to have_gitlab_http_status(:not_found) end it 'fails if trying to fork to non-existent namespace' do - post api("/projects/#{project.id}/fork", user2), params: { namespace: non_existing_record_id } + post api(path, user2), params: { namespace: non_existing_record_id } expect(response).to have_gitlab_http_status(:not_found) expect(json_response['message']).to eq('404 Namespace Not Found') end it 'forks to owned group' do - post api("/projects/#{project.id}/fork", user2), params: { namespace: group2.name } + post api(path, user2), params: { namespace: group2.name } expect(response).to have_gitlab_http_status(:created) expect(json_response['namespace']['name']).to eq(group2.name) @@ -4594,7 +4755,7 @@ RSpec.describe API::Projects, feature_category: :projects do context 'and namespace_id is specified alone' do before do - post api("/projects/#{project.id}/fork", user2), params: { namespace_id: user2.namespace.id } + post api(path, user2), params: { namespace_id: user2.namespace.id } end it_behaves_like 'forking to specified namespace_id' @@ -4602,7 +4763,7 @@ RSpec.describe API::Projects, feature_category: :projects do context 'and namespace_id and namespace are both specified' do before do - post api("/projects/#{project.id}/fork", user2), params: { namespace_id: user2.namespace.id, namespace: admin.namespace.id } + post api(path, user2), params: { namespace_id: user2.namespace.id, namespace: admin.namespace.id } end it_behaves_like 'forking to specified namespace_id' @@ -4610,7 +4771,7 @@ RSpec.describe API::Projects, feature_category: :projects do context 'and namespace_id and namespace_path are both specified' do before do - post 
api("/projects/#{project.id}/fork", user2), params: { namespace_id: user2.namespace.id, namespace_path: admin.namespace.path } + post api(path, user2), params: { namespace_id: user2.namespace.id, namespace_path: admin.namespace.path } end it_behaves_like 'forking to specified namespace_id' @@ -4628,7 +4789,7 @@ RSpec.describe API::Projects, feature_category: :projects do context 'and namespace_path is specified alone' do before do - post api("/projects/#{project.id}/fork", user2), params: { namespace_path: user2.namespace.path } + post api(path, user2), params: { namespace_path: user2.namespace.path } end it_behaves_like 'forking to specified namespace_path' @@ -4636,7 +4797,7 @@ RSpec.describe API::Projects, feature_category: :projects do context 'and namespace_path and namespace are both specified' do before do - post api("/projects/#{project.id}/fork", user2), params: { namespace_path: user2.namespace.path, namespace: admin.namespace.path } + post api(path, user2), params: { namespace_path: user2.namespace.path, namespace: admin.namespace.path } end it_behaves_like 'forking to specified namespace_path' @@ -4645,7 +4806,7 @@ RSpec.describe API::Projects, feature_category: :projects do it 'forks to owned subgroup' do full_path = "#{group2.path}/#{group3.path}" - post api("/projects/#{project.id}/fork", user2), params: { namespace: full_path } + post api(path, user2), params: { namespace: full_path } expect(response).to have_gitlab_http_status(:created) expect(json_response['namespace']['name']).to eq(group3.name) @@ -4653,21 +4814,21 @@ RSpec.describe API::Projects, feature_category: :projects do end it 'fails to fork to not owned group' do - post api("/projects/#{project.id}/fork", user2), params: { namespace: group.name } + post api(path, user2), params: { namespace: group.name } expect(response).to have_gitlab_http_status(:not_found) expect(json_response['message']).to eq("404 Target Namespace Not Found") end it 'forks to not owned group when admin' do - post 
api("/projects/#{project.id}/fork", admin), params: { namespace: group.name } + post api(path, admin, admin_mode: true), params: { namespace: group.name } expect(response).to have_gitlab_http_status(:created) expect(json_response['namespace']['name']).to eq(group.name) end it 'accepts a path for the target project' do - post api("/projects/#{project.id}/fork", user2), params: { path: 'foobar' } + post api(path, user2), params: { path: 'foobar' } expect(response).to have_gitlab_http_status(:created) expect(json_response['name']).to eq(project.name) @@ -4680,7 +4841,7 @@ RSpec.describe API::Projects, feature_category: :projects do end it 'fails to fork if path is already taken' do - post api("/projects/#{project.id}/fork", user2), params: { path: 'foobar' } + post api(path, user2), params: { path: 'foobar' } post api("/projects/#{project2.id}/fork", user2), params: { path: 'foobar' } expect(response).to have_gitlab_http_status(:conflict) @@ -4688,7 +4849,7 @@ RSpec.describe API::Projects, feature_category: :projects do end it 'accepts custom parameters for the target project' do - post api("/projects/#{project.id}/fork", user2), + post api(path, user2), params: { name: 'My Random Project', description: 'A description', @@ -4710,7 +4871,7 @@ RSpec.describe API::Projects, feature_category: :projects do end it 'fails to fork if name is already taken' do - post api("/projects/#{project.id}/fork", user2), params: { name: 'My Random Project' } + post api(path, user2), params: { name: 'My Random Project' } post api("/projects/#{project2.id}/fork", user2), params: { name: 'My Random Project' } expect(response).to have_gitlab_http_status(:conflict) @@ -4718,7 +4879,7 @@ RSpec.describe API::Projects, feature_category: :projects do end it 'forks to the same namespace with alternative path and name' do - post api("/projects/#{project.id}/fork", user), params: { path: 'path_2', name: 'name_2' } + post api(path, user), params: { path: 'path_2', name: 'name_2' } expect(response).to 
have_gitlab_http_status(:created) expect(json_response['name']).to eq('name_2') @@ -4730,7 +4891,7 @@ RSpec.describe API::Projects, feature_category: :projects do end it 'fails to fork to the same namespace without alternative path and name' do - post api("/projects/#{project.id}/fork", user) + post api(path, user) expect(response).to have_gitlab_http_status(:conflict) expect(json_response['message']['path']).to eq(['has already been taken']) @@ -4738,7 +4899,7 @@ RSpec.describe API::Projects, feature_category: :projects do end it 'fails to fork with an unknown visibility level' do - post api("/projects/#{project.id}/fork", user2), params: { visibility: 'something' } + post api(path, user2), params: { visibility: 'something' } expect(response).to have_gitlab_http_status(:bad_request) expect(json_response['error']).to eq('visibility does not have a valid value') @@ -4747,7 +4908,7 @@ RSpec.describe API::Projects, feature_category: :projects do context 'when unauthenticated' do it 'returns authentication error' do - post api("/projects/#{project.id}/fork") + post api(path) expect(response).to have_gitlab_http_status(:unauthorized) expect(json_response['message']).to eq('401 Unauthorized') @@ -4761,7 +4922,7 @@ RSpec.describe API::Projects, feature_category: :projects do end it 'denies project to be forked' do - post api("/projects/#{project.id}/fork", admin) + post api(path, admin, admin_mode: true) expect(response).to have_gitlab_http_status(:not_found) end @@ -4771,8 +4932,9 @@ RSpec.describe API::Projects, feature_category: :projects do describe 'POST /projects/:id/housekeeping' do let(:housekeeping) { Repositories::HousekeepingService.new(project) } let(:params) { {} } + let(:path) { "/projects/#{project.id}/housekeeping" } - subject { post api("/projects/#{project.id}/housekeeping", user), params: params } + subject(:request) { post api(path, user), params: params } before do allow(Repositories::HousekeepingService).to receive(:new).with(project, 
:eager).and_return(housekeeping) @@ -4782,7 +4944,7 @@ RSpec.describe API::Projects, feature_category: :projects do it 'starts the housekeeping process' do expect(housekeeping).to receive(:execute).once - subject + request expect(response).to have_gitlab_http_status(:created) end @@ -4797,7 +4959,7 @@ RSpec.describe API::Projects, feature_category: :projects do message: "Housekeeping task: eager" )) - subject + request end context 'when requesting prune' do @@ -4807,7 +4969,7 @@ RSpec.describe API::Projects, feature_category: :projects do expect(Repositories::HousekeepingService).to receive(:new).with(project, :prune).and_return(housekeeping) expect(housekeeping).to receive(:execute).once - subject + request expect(response).to have_gitlab_http_status(:created) end @@ -4819,7 +4981,7 @@ RSpec.describe API::Projects, feature_category: :projects do it 'responds with bad_request' do expect(Repositories::HousekeepingService).not_to receive(:new) - subject + request expect(response).to have_gitlab_http_status(:bad_request) end @@ -4829,7 +4991,7 @@ RSpec.describe API::Projects, feature_category: :projects do it 'returns conflict' do expect(housekeeping).to receive(:execute).once.and_raise(Repositories::HousekeepingService::LeaseTaken) - subject + request expect(response).to have_gitlab_http_status(:conflict) expect(json_response['message']).to match(/Somebody already triggered housekeeping for this resource/) @@ -4843,7 +5005,7 @@ RSpec.describe API::Projects, feature_category: :projects do end it 'returns forbidden error' do - post api("/projects/#{project.id}/housekeeping", user3) + post api(path, user3) expect(response).to have_gitlab_http_status(:forbidden) end @@ -4851,7 +5013,7 @@ RSpec.describe API::Projects, feature_category: :projects do context 'when unauthenticated' do it 'returns authentication error' do - post api("/projects/#{project.id}/housekeeping") + post api(path) expect(response).to have_gitlab_http_status(:unauthorized) end @@ -4860,6 +5022,7 @@ 
RSpec.describe API::Projects, feature_category: :projects do describe 'POST /projects/:id/repository_size' do let(:update_statistics_service) { Projects::UpdateStatisticsService.new(project, nil, statistics: [:repository_size, :lfs_objects_size]) } + let(:path) { "/projects/#{project.id}/repository_size" } before do allow(Projects::UpdateStatisticsService).to receive(:new).with(project, nil, statistics: [:repository_size, :lfs_objects_size]).and_return(update_statistics_service) @@ -4869,7 +5032,7 @@ RSpec.describe API::Projects, feature_category: :projects do it 'starts the housekeeping process' do expect(update_statistics_service).to receive(:execute).once - post api("/projects/#{project.id}/repository_size", user) + post api(path, user) expect(response).to have_gitlab_http_status(:created) end @@ -4881,7 +5044,7 @@ RSpec.describe API::Projects, feature_category: :projects do end it 'returns forbidden error' do - post api("/projects/#{project.id}/repository_size", user3) + post api(path, user3) expect(response).to have_gitlab_http_status(:forbidden) end @@ -4889,7 +5052,7 @@ RSpec.describe API::Projects, feature_category: :projects do context 'when unauthenticated' do it 'returns authentication error' do - post api("/projects/#{project.id}/repository_size") + post api(path) expect(response).to have_gitlab_http_status(:unauthorized) end @@ -4897,31 +5060,33 @@ RSpec.describe API::Projects, feature_category: :projects do end describe 'PUT /projects/:id/transfer' do + let(:path) { "/projects/#{project.id}/transfer" } + context 'when authenticated as owner' do let(:group) { create :group } it 'transfers the project to the new namespace' do group.add_owner(user) - put api("/projects/#{project.id}/transfer", user), params: { namespace: group.id } + put api(path, user), params: { namespace: group.id } expect(response).to have_gitlab_http_status(:ok) end it 'fails when transferring to a non owned namespace' do - put api("/projects/#{project.id}/transfer", user), params: 
{ namespace: group.id } + put api(path, user), params: { namespace: group.id } expect(response).to have_gitlab_http_status(:not_found) end it 'fails when transferring to an unknown namespace' do - put api("/projects/#{project.id}/transfer", user), params: { namespace: 'unknown' } + put api(path, user), params: { namespace: 'unknown' } expect(response).to have_gitlab_http_status(:not_found) end it 'fails on missing namespace' do - put api("/projects/#{project.id}/transfer", user) + put api(path, user) expect(response).to have_gitlab_http_status(:bad_request) end @@ -4936,7 +5101,7 @@ RSpec.describe API::Projects, feature_category: :projects do let(:group) { create(:group, project_creation_level: ::Gitlab::Access::DEVELOPER_MAINTAINER_PROJECT_ACCESS) } it 'fails transferring the project to the target namespace' do - put api("/projects/#{project.id}/transfer", user), params: { namespace: group.id } + put api(path, user), params: { namespace: group.id } expect(response).to have_gitlab_http_status(:bad_request) end @@ -5039,16 +5204,20 @@ RSpec.describe API::Projects, feature_category: :projects do end describe 'GET /projects/:id/storage' do + let(:path) { "/projects/#{project.id}/storage" } + + it_behaves_like 'GET request permissions for admin mode' + context 'when unauthenticated' do it 'does not return project storage data' do - get api("/projects/#{project.id}/storage") + get api(path) expect(response).to have_gitlab_http_status(:unauthorized) end end it 'returns project storage data when user is admin' do - get api("/projects/#{project.id}/storage", create(:admin)) + get api(path, create(:admin), admin_mode: true) expect(response).to have_gitlab_http_status(:ok) expect(json_response['project_id']).to eq(project.id) @@ -5058,7 +5227,7 @@ RSpec.describe API::Projects, feature_category: :projects do end it 'does not return project storage data when user is not admin' do - get api("/projects/#{project.id}/storage", user3) + get api(path, user3) expect(response).to 
have_gitlab_http_status(:forbidden) end diff --git a/spec/requests/api/protected_branches_spec.rb b/spec/requests/api/protected_branches_spec.rb index 463893afd13..622e57edf6a 100644 --- a/spec/requests/api/protected_branches_spec.rb +++ b/spec/requests/api/protected_branches_spec.rb @@ -5,6 +5,7 @@ require 'spec_helper' RSpec.describe API::ProtectedBranches, feature_category: :source_code_management do let_it_be_with_reload(:project) { create(:project, :repository) } let_it_be(:maintainer) { create(:user) } + let_it_be(:developer) { create(:user) } let_it_be(:guest) { create(:user) } let(:protected_name) { 'feature' } @@ -16,12 +17,14 @@ RSpec.describe API::ProtectedBranches, feature_category: :source_code_management before_all do project.add_maintainer(maintainer) + project.add_developer(developer) project.add_guest(guest) end describe "GET /projects/:id/protected_branches" do let(:params) { {} } let(:route) { "/projects/#{project.id}/protected_branches" } + let(:expected_branch_names) { project.protected_branches.map { |x| x['name'] } } shared_examples_for 'protected branches' do it 'returns the protected branches' do @@ -39,9 +42,7 @@ RSpec.describe API::ProtectedBranches, feature_category: :source_code_management let(:user) { maintainer } context 'when search param is not present' do - it_behaves_like 'protected branches' do - let(:expected_branch_names) { project.protected_branches.map { |x| x['name'] } } - end + it_behaves_like 'protected branches' end context 'when search param is present' do @@ -53,6 +54,12 @@ RSpec.describe API::ProtectedBranches, feature_category: :source_code_management end end + context 'when authenticated as a developer' do + let(:user) { developer } + + it_behaves_like 'protected branches' + end + context 'when authenticated as a guest' do let(:user) { guest } @@ -105,6 +112,12 @@ RSpec.describe API::ProtectedBranches, feature_category: :source_code_management end end + context 'when authenticated as a developer' do + let(:user) { 
developer } + + it_behaves_like 'protected branch' + end + context 'when authenticated as a guest' do let(:user) { guest } @@ -243,10 +256,20 @@ RSpec.describe API::ProtectedBranches, feature_category: :source_code_management end end + context 'when authenticated as a developer' do + let(:user) { developer } + + it "returns a 403 error" do + post post_endpoint, params: { name: branch_name } + + expect(response).to have_gitlab_http_status(:forbidden) + end + end + context 'when authenticated as a guest' do let(:user) { guest } - it "returns a 403 error if guest" do + it "returns a 403 error" do post post_endpoint, params: { name: branch_name } expect(response).to have_gitlab_http_status(:forbidden) @@ -295,6 +318,16 @@ RSpec.describe API::ProtectedBranches, feature_category: :source_code_management end end + context 'when authenticated as a developer' do + let(:user) { developer } + + it "returns a 403 error" do + patch api(route, user), params: { allow_force_push: true } + + expect(response).to have_gitlab_http_status(:forbidden) + end + end + context 'when authenticated as a guest' do let(:user) { guest } @@ -307,42 +340,65 @@ RSpec.describe API::ProtectedBranches, feature_category: :source_code_management end describe "DELETE /projects/:id/protected_branches/unprotect/:branch" do - let(:user) { maintainer } let(:delete_endpoint) { api("/projects/#{project.id}/protected_branches/#{branch_name}", user) } - it "unprotects a single branch" do - delete delete_endpoint + context "when authenticated as a maintainer" do + let(:user) { maintainer } + + it "unprotects a single branch" do + delete delete_endpoint - expect(response).to have_gitlab_http_status(:no_content) - end + expect(response).to have_gitlab_http_status(:no_content) + end - it_behaves_like '412 response' do - let(:request) { delete_endpoint } - end + it_behaves_like '412 response' do + let(:request) { delete_endpoint } + end - it "returns 404 if branch does not exist" do - delete 
api("/projects/#{project.id}/protected_branches/barfoo", user) + it "returns 404 if branch does not exist" do + delete api("/projects/#{project.id}/protected_branches/barfoo", user) - expect(response).to have_gitlab_http_status(:not_found) + expect(response).to have_gitlab_http_status(:not_found) + end + + context 'when a policy restricts rule deletion' do + it "prevents deletion of the protected branch rule" do + disallow(:destroy_protected_branch, protected_branch) + + delete delete_endpoint + + expect(response).to have_gitlab_http_status(:forbidden) + end + end + + context 'when branch has a wildcard in its name' do + let(:protected_name) { 'feature*' } + + it "unprotects a wildcard branch" do + delete delete_endpoint + + expect(response).to have_gitlab_http_status(:no_content) + end + end end - context 'when a policy restricts rule deletion' do - it "prevents deletion of the protected branch rule" do - disallow(:destroy_protected_branch, protected_branch) + context 'when authenticated as a developer' do + let(:user) { developer } + it "returns a 403 error" do delete delete_endpoint expect(response).to have_gitlab_http_status(:forbidden) end end - context 'when branch has a wildcard in its name' do - let(:protected_name) { 'feature*' } + context 'when authenticated as a guest' do + let(:user) { guest } - it "unprotects a wildcard branch" do + it "returns a 403 error" do delete delete_endpoint - expect(response).to have_gitlab_http_status(:no_content) + expect(response).to have_gitlab_http_status(:forbidden) end end end diff --git a/spec/requests/api/releases_spec.rb b/spec/requests/api/releases_spec.rb index c3f99872cef..2f7d516900d 100644 --- a/spec/requests/api/releases_spec.rb +++ b/spec/requests/api/releases_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe API::Releases, feature_category: :release_orchestration do +RSpec.describe API::Releases, :aggregate_failures, feature_category: :release_orchestration do let(:project) { create(:project, 
:repository, :private) } let(:maintainer) { create(:user) } let(:reporter) { create(:user) } @@ -480,7 +480,7 @@ RSpec.describe API::Releases, feature_category: :release_orchestration do end context 'when specified tag is not found in the project' do - it 'returns 404 for maintater' do + it 'returns 404 for maintainer' do get api("/projects/#{project.id}/releases/non_exist_tag", maintainer) expect(response).to have_gitlab_http_status(:not_found) @@ -1665,7 +1665,11 @@ RSpec.describe API::Releases, feature_category: :release_orchestration do let_it_be(:release2) { create(:release, project: project2) } let_it_be(:release3) { create(:release, project: project3) } - context 'when authenticated as owner' do + it_behaves_like 'GET request permissions for admin mode' do + let(:path) { "/groups/#{group1.id}/releases" } + end + + context 'when authenticated as owner', :enable_admin_mode do it 'gets releases from all projects in the group' do get api("/groups/#{group1.id}/releases", admin) @@ -1715,9 +1719,14 @@ RSpec.describe API::Releases, feature_category: :release_orchestration do context 'with subgroups' do let(:group) { create(:group) } - it 'include_subgroups avoids N+1 queries' do + subject { get api("/groups/#{group.id}/releases", admin, admin_mode: true), params: query_params.merge({ include_subgroups: true }) } + + it 'include_subgroups avoids N+1 queries', :use_sql_query_cache do + subject + expect(response).to have_gitlab_http_status(:ok) + control_count = ActiveRecord::QueryRecorder.new(skip_cached: false) do - get api("/groups/#{group.id}/releases", admin), params: query_params.merge({ include_subgroups: true }) + subject end.count subgroups = create_list(:group, 10, parent: group1) @@ -1725,7 +1734,7 @@ RSpec.describe API::Releases, feature_category: :release_orchestration do create_list(:release, 10, project: projects[0], author: admin) expect do - get api("/groups/#{group.id}/releases", admin), params: query_params.merge({ include_subgroups: true }) + 
subject end.not_to exceed_all_query_limit(control_count) end end diff --git a/spec/requests/api/search_spec.rb b/spec/requests/api/search_spec.rb index eb0f3b3eaee..b4818f79ec7 100644 --- a/spec/requests/api/search_spec.rb +++ b/spec/requests/api/search_spec.rb @@ -141,7 +141,7 @@ RSpec.describe API::Search, feature_category: :global_search do end end - context 'when DB timeouts occur from global searches', :aggregate_errors do + context 'when DB timeouts occur from global searches', :aggregate_failures do %w( issues merge_requests diff --git a/spec/requests/api/settings_spec.rb b/spec/requests/api/settings_spec.rb index e91d777bfb0..e161cc7e03f 100644 --- a/spec/requests/api/settings_spec.rb +++ b/spec/requests/api/settings_spec.rb @@ -68,6 +68,7 @@ RSpec.describe API::Settings, 'Settings', :do_not_mock_admin_mode_setting, featu expect(json_response['user_defaults_to_private_profile']).to eq(false) expect(json_response['default_syntax_highlighting_theme']).to eq(1) expect(json_response['projects_api_rate_limit_unauthenticated']).to eq(400) + expect(json_response['silent_mode_enabled']).to be(false) end end @@ -173,7 +174,8 @@ RSpec.describe API::Settings, 'Settings', :do_not_mock_admin_mode_setting, featu allow_runner_registration_token: true, user_defaults_to_private_profile: true, default_syntax_highlighting_theme: 2, - projects_api_rate_limit_unauthenticated: 100 + projects_api_rate_limit_unauthenticated: 100, + silent_mode_enabled: true } expect(response).to have_gitlab_http_status(:ok) @@ -243,6 +245,7 @@ RSpec.describe API::Settings, 'Settings', :do_not_mock_admin_mode_setting, featu expect(json_response['user_defaults_to_private_profile']).to be(true) expect(json_response['default_syntax_highlighting_theme']).to eq(2) expect(json_response['projects_api_rate_limit_unauthenticated']).to be(100) + expect(json_response['silent_mode_enabled']).to be(true) end end diff --git a/spec/requests/api/sidekiq_metrics_spec.rb b/spec/requests/api/sidekiq_metrics_spec.rb 
index 32c4c323923..1ac065f0c0c 100644 --- a/spec/requests/api/sidekiq_metrics_spec.rb +++ b/spec/requests/api/sidekiq_metrics_spec.rb @@ -2,12 +2,19 @@ require 'spec_helper' -RSpec.describe API::SidekiqMetrics, feature_category: :shared do +RSpec.describe API::SidekiqMetrics, :aggregate_failures, feature_category: :shared do let(:admin) { create(:user, :admin) } describe 'GET sidekiq/*' do + %w[/sidekiq/queue_metrics /sidekiq/process_metrics /sidekiq/job_stats + /sidekiq/compound_metrics].each do |path| + it_behaves_like 'GET request permissions for admin mode' do + let(:path) { path } + end + end + it 'defines the `queue_metrics` endpoint' do - get api('/sidekiq/queue_metrics', admin) + get api('/sidekiq/queue_metrics', admin, admin_mode: true) expect(response).to have_gitlab_http_status(:ok) expect(json_response).to match a_hash_including( @@ -25,14 +32,14 @@ RSpec.describe API::SidekiqMetrics, feature_category: :shared do end it 'defines the `process_metrics` endpoint' do - get api('/sidekiq/process_metrics', admin) + get api('/sidekiq/process_metrics', admin, admin_mode: true) expect(response).to have_gitlab_http_status(:ok) expect(json_response['processes']).to be_an Array end it 'defines the `job_stats` endpoint' do - get api('/sidekiq/job_stats', admin) + get api('/sidekiq/job_stats', admin, admin_mode: true) expect(response).to have_gitlab_http_status(:ok) expect(json_response).to be_a Hash @@ -43,7 +50,7 @@ RSpec.describe API::SidekiqMetrics, feature_category: :shared do end it 'defines the `compound_metrics` endpoint' do - get api('/sidekiq/compound_metrics', admin) + get api('/sidekiq/compound_metrics', admin, admin_mode: true) expect(response).to have_gitlab_http_status(:ok) expect(json_response).to be_a Hash diff --git a/spec/requests/api/snippets_spec.rb b/spec/requests/api/snippets_spec.rb index 2bc4c177bc9..4ba2a768e01 100644 --- a/spec/requests/api/snippets_spec.rb +++ b/spec/requests/api/snippets_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' 
-RSpec.describe API::Snippets, factory_default: :keep, feature_category: :source_code_management do +RSpec.describe API::Snippets, :aggregate_failures, factory_default: :keep, feature_category: :source_code_management do include SnippetHelpers let_it_be(:admin) { create(:user, :admin) } @@ -448,7 +448,7 @@ RSpec.describe API::Snippets, factory_default: :keep, feature_category: :source_ end context "when admin" do - let_it_be(:token) { create(:personal_access_token, user: admin, scopes: [:sudo]) } + let_it_be(:token) { create(:personal_access_token, :admin_mode, user: admin, scopes: [:sudo]) } subject do put api("/snippets/#{snippet.id}", personal_access_token: token), params: { visibility: 'private', sudo: user.id } @@ -499,23 +499,19 @@ RSpec.describe API::Snippets, factory_default: :keep, feature_category: :source_ end describe "GET /snippets/:id/user_agent_detail" do - let(:snippet) { public_snippet } + let(:path) { "/snippets/#{public_snippet.id}/user_agent_detail" } - it 'exposes known attributes' do - user_agent_detail = create(:user_agent_detail, subject: snippet) + let_it_be(:user_agent_detail) { create(:user_agent_detail, subject: public_snippet) } + + it_behaves_like 'GET request permissions for admin mode' - get api("/snippets/#{snippet.id}/user_agent_detail", admin) + it 'exposes known attributes' do + get api(path, admin, admin_mode: true) expect(response).to have_gitlab_http_status(:ok) expect(json_response['user_agent']).to eq(user_agent_detail.user_agent) expect(json_response['ip_address']).to eq(user_agent_detail.ip_address) expect(json_response['akismet_submitted']).to eq(user_agent_detail.submitted) end - - it "returns unauthorized for non-admin users" do - get api("/snippets/#{snippet.id}/user_agent_detail", user) - - expect(response).to have_gitlab_http_status(:forbidden) - end end end diff --git a/spec/requests/api/statistics_spec.rb b/spec/requests/api/statistics_spec.rb index 85fed48a077..baac39abf2c 100644 --- 
a/spec/requests/api/statistics_spec.rb +++ b/spec/requests/api/statistics_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe API::Statistics, 'Statistics', feature_category: :devops_reports do +RSpec.describe API::Statistics, 'Statistics', :aggregate_failures, feature_category: :devops_reports do include ProjectForksHelper tables_to_analyze = %w[ projects @@ -21,6 +21,8 @@ RSpec.describe API::Statistics, 'Statistics', feature_category: :devops_reports let(:path) { "/application/statistics" } describe "GET /application/statistics" do + it_behaves_like 'GET request permissions for admin mode' + context 'when no user' do it "returns authentication error" do get api(path, nil) @@ -43,7 +45,7 @@ RSpec.describe API::Statistics, 'Statistics', feature_category: :devops_reports let(:admin) { create(:admin) } it 'matches the response schema' do - get api(path, admin) + get api(path, admin, admin_mode: true) expect(response).to have_gitlab_http_status(:ok) expect(response).to match_response_schema('statistics') @@ -66,7 +68,7 @@ RSpec.describe API::Statistics, 'Statistics', feature_category: :devops_reports ApplicationRecord.connection.execute("ANALYZE #{table}") end - get api(path, admin) + get api(path, admin, admin_mode: true) expected_statistics = { issues: 2, diff --git a/spec/requests/api/tags_spec.rb b/spec/requests/api/tags_spec.rb index ab5e04246e8..604631bbf7f 100644 --- a/spec/requests/api/tags_spec.rb +++ b/spec/requests/api/tags_spec.rb @@ -178,7 +178,7 @@ RSpec.describe API::Tags, feature_category: :source_code_management do end end - context 'with keyset pagination option', :aggregate_errors do + context 'with keyset pagination option', :aggregate_failures do let(:base_params) { { pagination: 'keyset' } } context 'with gitaly pagination params' do diff --git a/spec/requests/api/terraform/state_spec.rb b/spec/requests/api/terraform/state_spec.rb index c94643242c9..4c9f930df2f 100644 --- a/spec/requests/api/terraform/state_spec.rb +++ 
b/spec/requests/api/terraform/state_spec.rb @@ -114,17 +114,6 @@ RSpec.describe API::Terraform::State, :snowplow, feature_category: :infrastructu end end - context 'allow_dots_on_tf_state_names is disabled, and the state name contains a dot' do - let(:state_name) { 'state-name-with-dot' } - let(:state_path) { "/projects/#{project_id}/terraform/state/#{state_name}.tfstate" } - - before do - stub_feature_flags(allow_dots_on_tf_state_names: false) - end - - it_behaves_like 'can access terraform state' - end - context 'for a project that does not exist' do let(:project_id) { '0000' } @@ -277,21 +266,6 @@ RSpec.describe API::Terraform::State, :snowplow, feature_category: :infrastructu expect(Gitlab::Json.parse(response.body)).to be_empty end end - - context 'allow_dots_on_tf_state_names is disabled, and the state name contains a dot' do - let(:non_existing_state_name) { 'state-name-with-dot.tfstate' } - - before do - stub_feature_flags(allow_dots_on_tf_state_names: false) - end - - it 'strips characters after the dot' do - expect { request }.to change { Terraform::State.count }.by(1) - - expect(response).to have_gitlab_http_status(:ok) - expect(Terraform::State.last.name).to eq('state-name-with-dot') - end - end end context 'without body' do @@ -399,18 +373,6 @@ RSpec.describe API::Terraform::State, :snowplow, feature_category: :infrastructu it_behaves_like 'schedules the state for deletion' end - context 'allow_dots_on_tf_state_names is disabled, and the state name contains a dot' do - let(:state_name) { 'state-name-with-dot' } - let(:state_name_with_dot) { "#{state_name}.tfstate" } - let(:state_path) { "/projects/#{project_id}/terraform/state/#{state_name_with_dot}" } - - before do - stub_feature_flags(allow_dots_on_tf_state_names: false) - end - - it_behaves_like 'schedules the state for deletion' - end - context 'with invalid state name' do let(:state_name) { 'foo/bar' } @@ -472,6 +434,7 @@ RSpec.describe API::Terraform::State, :snowplow, feature_category: 
:infrastructu request expect(response).to have_gitlab_http_status(:conflict) + expect(Gitlab::Json.parse(response.body)).to include('Who' => current_user.username) end end @@ -499,30 +462,10 @@ RSpec.describe API::Terraform::State, :snowplow, feature_category: :infrastructu context 'with a dot in the state name' do let(:state_name) { 'test.state' } - context 'with allow_dots_on_tf_state_names ff enabled' do - before do - stub_feature_flags(allow_dots_on_tf_state_names: true) - end - - let(:state_name) { 'test.state' } - - it 'locks the terraform state' do - request - - expect(response).to have_gitlab_http_status(:ok) - end - end - - context 'with allow_dots_on_tf_state_names ff disabled' do - before do - stub_feature_flags(allow_dots_on_tf_state_names: false) - end - - it 'returns 404' do - request + it 'locks the terraform state' do + request - expect(response).to have_gitlab_http_status(:not_found) - end + expect(response).to have_gitlab_http_status(:ok) end end end @@ -543,7 +486,6 @@ RSpec.describe API::Terraform::State, :snowplow, feature_category: :infrastructu before do state.lock_xid = '123.456' state.save! 
- stub_feature_flags(allow_dots_on_tf_state_names: true) end subject(:request) { delete api("#{state_path}/lock"), headers: auth_header, params: params } @@ -574,23 +516,6 @@ RSpec.describe API::Terraform::State, :snowplow, feature_category: :infrastructu end end - context 'with allow_dots_on_tf_state_names ff disabled' do - before do - stub_feature_flags(allow_dots_on_tf_state_names: false) - end - - context 'with dots in the state name' do - let(:lock_id) { '123.456' } - let(:state_name) { 'test.state' } - - it 'returns 404' do - request - - expect(response).to have_gitlab_http_status(:not_found) - end - end - end - context 'with no lock id (force-unlock)' do let(:params) { {} } diff --git a/spec/requests/api/terraform/state_version_spec.rb b/spec/requests/api/terraform/state_version_spec.rb index 24b3ca94581..94fd2984435 100644 --- a/spec/requests/api/terraform/state_version_spec.rb +++ b/spec/requests/api/terraform/state_version_spec.rb @@ -10,7 +10,7 @@ RSpec.describe API::Terraform::StateVersion, feature_category: :infrastructure_a let_it_be(:maintainer) { create(:user, maintainer_projects: [project]) } let_it_be(:user_without_access) { create(:user) } - let_it_be(:state) { create(:terraform_state, project: project) } + let_it_be_with_reload(:state) { create(:terraform_state, project: project) } let!(:versions) { create_list(:terraform_state_version, 3, terraform_state: state) } diff --git a/spec/requests/api/topics_spec.rb b/spec/requests/api/topics_spec.rb index 14719292557..560f22c94be 100644 --- a/spec/requests/api/topics_spec.rb +++ b/spec/requests/api/topics_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe API::Topics, feature_category: :projects do +RSpec.describe API::Topics, :aggregate_failures, feature_category: :projects do include WorkhorseHelpers let_it_be(:file) { fixture_file_upload('spec/fixtures/dk.png') } @@ -14,9 +14,11 @@ RSpec.describe API::Topics, feature_category: :projects do let_it_be(:admin) { create(:user, :admin) } 
let_it_be(:user) { create(:user) } - describe 'GET /topics', :aggregate_failures do + let(:path) { '/topics' } + + describe 'GET /topics' do it 'returns topics ordered by total_projects_count' do - get api('/topics') + get api(path) expect(response).to have_gitlab_http_status(:ok) expect(response).to include_pagination_headers @@ -40,13 +42,13 @@ RSpec.describe API::Topics, feature_category: :projects do let_it_be(:topic_4) { create(:topic, name: 'unassigned topic', total_projects_count: 0) } it 'returns topics without assigned projects' do - get api('/topics'), params: { without_projects: true } + get api(path), params: { without_projects: true } expect(json_response.map { |t| t['id'] }).to contain_exactly(topic_4.id) end it 'returns topics without assigned projects' do - get api('/topics'), params: { without_projects: false } + get api(path), params: { without_projects: false } expect(json_response.map { |t| t['id'] }).to contain_exactly(topic_1.id, topic_2.id, topic_3.id, topic_4.id) end @@ -66,7 +68,7 @@ RSpec.describe API::Topics, feature_category: :projects do with_them do it 'returns filtered topics' do - get api('/topics'), params: { search: search } + get api(path), params: { search: search } expect(json_response.map { |t| t['name'] }).to eq(result) end @@ -97,7 +99,7 @@ RSpec.describe API::Topics, feature_category: :projects do with_them do it 'returns paginated topics' do - get api('/topics'), params: params + get api(path), params: params expect(json_response.map { |t| t['name'] }).to eq(result) end @@ -105,7 +107,7 @@ RSpec.describe API::Topics, feature_category: :projects do end end - describe 'GET /topic/:id', :aggregate_failures do + describe 'GET /topic/:id' do it 'returns topic' do get api("/topics/#{topic_2.id}") @@ -130,10 +132,14 @@ RSpec.describe API::Topics, feature_category: :projects do end end - describe 'POST /topics', :aggregate_failures do + describe 'POST /topics' do + let(:params) { { name: 'my-topic', title: 'My Topic' } } + + 
it_behaves_like 'POST request permissions for admin mode' + context 'as administrator' do it 'creates a topic' do - post api('/topics/', admin), params: { name: 'my-topic', title: 'My Topic' } + post api('/topics/', admin, admin_mode: true), params: params expect(response).to have_gitlab_http_status(:created) expect(json_response['name']).to eq('my-topic') @@ -142,7 +148,7 @@ RSpec.describe API::Topics, feature_category: :projects do it 'creates a topic with avatar and description' do workhorse_form_with_file( - api('/topics/', admin), + api('/topics/', admin, admin_mode: true), file_key: :avatar, params: { name: 'my-topic', title: 'My Topic', description: 'my description...', avatar: file } ) @@ -160,14 +166,14 @@ RSpec.describe API::Topics, feature_category: :projects do end it 'returns 400 if name is not unique (case insensitive)' do - post api('/topics/', admin), params: { name: topic_1.name.downcase, title: 'My Topic' } + post api('/topics/', admin, admin_mode: true), params: { name: topic_1.name.downcase, title: 'My Topic' } expect(response).to have_gitlab_http_status(:bad_request) expect(json_response['message']['name']).to eq(['has already been taken']) end it 'returns 400 if title is missing' do - post api('/topics/', admin), params: { name: 'my-topic' } + post api('/topics/', admin, admin_mode: true), params: { name: 'my-topic' } expect(response).to have_gitlab_http_status(:bad_request) expect(json_response['error']).to eql('title is missing') @@ -176,7 +182,7 @@ RSpec.describe API::Topics, feature_category: :projects do context 'as normal user' do it 'returns 403 Forbidden' do - post api('/topics/', user), params: { name: 'my-topic', title: 'My Topic' } + post api('/topics/', user), params: params expect(response).to have_gitlab_http_status(:forbidden) end @@ -184,17 +190,23 @@ RSpec.describe API::Topics, feature_category: :projects do context 'as anonymous' do it 'returns 401 Unauthorized' do - post api('/topics/'), params: { name: 'my-topic', title: 
'My Topic' } + post api('/topics/'), params: params expect(response).to have_gitlab_http_status(:unauthorized) end end end - describe 'PUT /topics', :aggregate_failures do + describe 'PUT /topics' do + let(:params) { { name: 'my-topic' } } + + it_behaves_like 'PUT request permissions for admin mode' do + let(:path) { "/topics/#{topic_3.id}" } + end + context 'as administrator' do it 'updates a topic' do - put api("/topics/#{topic_3.id}", admin), params: { name: 'my-topic' } + put api("/topics/#{topic_3.id}", admin, admin_mode: true), params: params expect(response).to have_gitlab_http_status(:ok) expect(json_response['name']).to eq('my-topic') @@ -203,7 +215,7 @@ RSpec.describe API::Topics, feature_category: :projects do it 'updates a topic with avatar and description' do workhorse_form_with_file( - api("/topics/#{topic_3.id}", admin), + api("/topics/#{topic_3.id}", admin, admin_mode: true), method: :put, file_key: :avatar, params: { description: 'my description...', avatar: file } @@ -215,7 +227,7 @@ RSpec.describe API::Topics, feature_category: :projects do end it 'keeps avatar when updating other fields' do - put api("/topics/#{topic_1.id}", admin), params: { name: 'my-topic' } + put api("/topics/#{topic_1.id}", admin, admin_mode: true), params: params expect(response).to have_gitlab_http_status(:ok) expect(json_response['name']).to eq('my-topic') @@ -223,13 +235,13 @@ RSpec.describe API::Topics, feature_category: :projects do end it 'returns 404 for non existing id' do - put api("/topics/#{non_existing_record_id}", admin), params: { name: 'my-topic' } + put api("/topics/#{non_existing_record_id}", admin, admin_mode: true), params: params expect(response).to have_gitlab_http_status(:not_found) end it 'returns 400 for invalid `id` parameter' do - put api('/topics/invalid', admin), params: { name: 'my-topic' } + put api('/topics/invalid', admin, admin_mode: true), params: params expect(response).to have_gitlab_http_status(:bad_request) 
expect(json_response['error']).to eql('id is invalid') @@ -237,7 +249,7 @@ RSpec.describe API::Topics, feature_category: :projects do context 'with blank avatar' do it 'removes avatar' do - put api("/topics/#{topic_1.id}", admin), params: { avatar: '' } + put api("/topics/#{topic_1.id}", admin, admin_mode: true), params: { avatar: '' } expect(response).to have_gitlab_http_status(:ok) expect(json_response['avatar_url']).to be_nil @@ -245,7 +257,7 @@ RSpec.describe API::Topics, feature_category: :projects do end it 'removes avatar besides other changes' do - put api("/topics/#{topic_1.id}", admin), params: { name: 'new-topic-name', avatar: '' } + put api("/topics/#{topic_1.id}", admin, admin_mode: true), params: { name: 'new-topic-name', avatar: '' } expect(response).to have_gitlab_http_status(:ok) expect(json_response['name']).to eq('new-topic-name') @@ -254,7 +266,7 @@ RSpec.describe API::Topics, feature_category: :projects do end it 'does not remove avatar in case of other errors' do - put api("/topics/#{topic_1.id}", admin), params: { name: topic_2.name, avatar: '' } + put api("/topics/#{topic_1.id}", admin, admin_mode: true), params: { name: topic_2.name, avatar: '' } expect(response).to have_gitlab_http_status(:bad_request) expect(topic_1.reload.avatar_url).not_to be_nil @@ -264,7 +276,7 @@ RSpec.describe API::Topics, feature_category: :projects do context 'as normal user' do it 'returns 403 Forbidden' do - put api("/topics/#{topic_3.id}", user), params: { name: 'my-topic' } + put api("/topics/#{topic_3.id}", user), params: params expect(response).to have_gitlab_http_status(:forbidden) end @@ -272,29 +284,37 @@ RSpec.describe API::Topics, feature_category: :projects do context 'as anonymous' do it 'returns 401 Unauthorized' do - put api("/topics/#{topic_3.id}"), params: { name: 'my-topic' } + put api("/topics/#{topic_3.id}"), params: params expect(response).to have_gitlab_http_status(:unauthorized) end end end - describe 'DELETE /topics', :aggregate_failures do 
+ describe 'DELETE /topics/:id' do + let(:params) { { name: 'my-topic' } } + context 'as administrator' do - it 'deletes a topic' do - delete api("/topics/#{topic_3.id}", admin), params: { name: 'my-topic' } + it 'deletes a topic with admin mode' do + delete api("/topics/#{topic_3.id}", admin, admin_mode: true), params: params expect(response).to have_gitlab_http_status(:no_content) end + it 'deletes a topic without admin mode' do + delete api("/topics/#{topic_3.id}", admin, admin_mode: false), params: params + + expect(response).to have_gitlab_http_status(:forbidden) + end + it 'returns 404 for non existing id' do - delete api("/topics/#{non_existing_record_id}", admin), params: { name: 'my-topic' } + delete api("/topics/#{non_existing_record_id}", admin, admin_mode: true), params: params expect(response).to have_gitlab_http_status(:not_found) end it 'returns 400 for invalid `id` parameter' do - delete api('/topics/invalid', admin), params: { name: 'my-topic' } + delete api('/topics/invalid', admin, admin_mode: true), params: params expect(response).to have_gitlab_http_status(:bad_request) expect(json_response['error']).to eql('id is invalid') @@ -303,7 +323,7 @@ RSpec.describe API::Topics, feature_category: :projects do context 'as normal user' do it 'returns 403 Forbidden' do - delete api("/topics/#{topic_3.id}", user), params: { name: 'my-topic' } + delete api("/topics/#{topic_3.id}", user), params: params expect(response).to have_gitlab_http_status(:forbidden) end @@ -311,16 +331,21 @@ RSpec.describe API::Topics, feature_category: :projects do context 'as anonymous' do it 'returns 401 Unauthorized' do - delete api("/topics/#{topic_3.id}"), params: { name: 'my-topic' } + delete api("/topics/#{topic_3.id}"), params: params expect(response).to have_gitlab_http_status(:unauthorized) end end end - describe 'POST /topics/merge', :aggregate_failures do + describe 'POST /topics/merge' do + it_behaves_like 'POST request permissions for admin mode' do + let(:path) { 
'/topics/merge' } + let(:params) { { source_topic_id: topic_3.id, target_topic_id: topic_2.id } } + end + context 'as administrator' do - let_it_be(:api_url) { api('/topics/merge', admin) } + let_it_be(:api_url) { api('/topics/merge', admin, admin_mode: true) } it 'merge topics' do post api_url, params: { source_topic_id: topic_3.id, target_topic_id: topic_2.id } diff --git a/spec/requests/api/usage_data_non_sql_metrics_spec.rb b/spec/requests/api/usage_data_non_sql_metrics_spec.rb index 0a6f248af2c..b2929caf676 100644 --- a/spec/requests/api/usage_data_non_sql_metrics_spec.rb +++ b/spec/requests/api/usage_data_non_sql_metrics_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe API::UsageDataNonSqlMetrics, feature_category: :service_ping do +RSpec.describe API::UsageDataNonSqlMetrics, :aggregate_failures, feature_category: :service_ping do include UsageDataHelpers let_it_be(:admin) { create(:user, admin: true) } @@ -21,8 +21,12 @@ RSpec.describe API::UsageDataNonSqlMetrics, feature_category: :service_ping do stub_database_flavor_check end + it_behaves_like 'GET request permissions for admin mode' do + let(:path) { endpoint } + end + it 'returns non sql metrics if user is admin' do - get api(endpoint, admin) + get api(endpoint, admin, admin_mode: true) expect(response).to have_gitlab_http_status(:ok) expect(json_response['counts']).to be_a(Hash) @@ -53,7 +57,7 @@ RSpec.describe API::UsageDataNonSqlMetrics, feature_category: :service_ping do end it 'returns not_found for admin' do - get api(endpoint, admin) + get api(endpoint, admin, admin_mode: true) expect(response).to have_gitlab_http_status(:not_found) end diff --git a/spec/requests/api/usage_data_queries_spec.rb b/spec/requests/api/usage_data_queries_spec.rb index e556064025c..ab3c38adb81 100644 --- a/spec/requests/api/usage_data_queries_spec.rb +++ b/spec/requests/api/usage_data_queries_spec.rb @@ -3,7 +3,7 @@ require 'spec_helper' require 'rake_helper' -RSpec.describe API::UsageDataQueries, 
feature_category: :service_ping do +RSpec.describe API::UsageDataQueries, :aggregate_failures, feature_category: :service_ping do include UsageDataHelpers let_it_be(:admin) { create(:user, admin: true) } @@ -22,8 +22,12 @@ RSpec.describe API::UsageDataQueries, feature_category: :service_ping do stub_feature_flags(usage_data_queries_api: true) end + it_behaves_like 'GET request permissions for admin mode' do + let(:path) { endpoint } + end + it 'returns queries if user is admin' do - get api(endpoint, admin) + get api(endpoint, admin, admin_mode: true) expect(response).to have_gitlab_http_status(:ok) expect(json_response['active_user_count']).to start_with('SELECT COUNT("users"."id") FROM "users"') @@ -54,7 +58,7 @@ RSpec.describe API::UsageDataQueries, feature_category: :service_ping do end it 'returns not_found for admin' do - get api(endpoint, admin) + get api(endpoint, admin, admin_mode: true) expect(response).to have_gitlab_http_status(:not_found) end @@ -81,7 +85,7 @@ RSpec.describe API::UsageDataQueries, feature_category: :service_ping do it 'matches the generated query' do travel_to(Time.utc(2021, 1, 1)) do - get api(endpoint, admin) + get api(endpoint, admin, admin_mode: true) end data = Gitlab::Json.parse(File.read(file)) diff --git a/spec/requests/api/users_spec.rb b/spec/requests/api/users_spec.rb index c924f529e11..6d1c25360e8 100644 --- a/spec/requests/api/users_spec.rb +++ b/spec/requests/api/users_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe API::Users, feature_category: :user_profile do +RSpec.describe API::Users, :aggregate_failures, feature_category: :user_profile do include WorkhorseHelpers let_it_be(:admin) { create(:admin) } @@ -41,7 +41,7 @@ RSpec.describe API::Users, feature_category: :user_profile do optional_attributes = { note: 'Awesome Note' } attributes = attributes_for(:user).merge(optional_attributes) - post api('/users', admin), params: attributes + post api('/users', admin, admin_mode: true), params: attributes 
expect(response).to have_gitlab_http_status(:created) expect(json_response['note']).to eq(optional_attributes[:note]) @@ -64,7 +64,7 @@ RSpec.describe API::Users, feature_category: :user_profile do new_note = '2019-07-07 | Email changed | user requested | www.gitlab.com' expect do - put api("/users/#{user.id}", admin), params: { note: new_note } + put api("/users/#{user.id}", admin, admin_mode: true), params: { note: new_note } end.to change { user.reload.note } .from('2018-11-05 | 2FA removed | user requested | www.gitlab.com') .to(new_note) @@ -89,7 +89,7 @@ RSpec.describe API::Users, feature_category: :user_profile do context "when current user is an admin" do it "returns a 204 when 2FA is disabled for the target user" do expect do - patch api("/users/#{user_with_2fa.id}/disable_two_factor", admin) + patch api("/users/#{user_with_2fa.id}/disable_two_factor", admin, admin_mode: true) end.to change { user_with_2fa.reload.two_factor_enabled? } .from(true) .to(false) @@ -103,14 +103,14 @@ RSpec.describe API::Users, feature_category: :user_profile do .and_return(destroy_service) expect(destroy_service).to receive(:execute) - patch api("/users/#{user_with_2fa.id}/disable_two_factor", admin) + patch api("/users/#{user_with_2fa.id}/disable_two_factor", admin, admin_mode: true) end it "returns a 400 if 2FA is not enabled for the target user" do expect(TwoFactor::DestroyService).to receive(:new).and_call_original expect do - patch api("/users/#{user.id}/disable_two_factor", admin) + patch api("/users/#{user.id}/disable_two_factor", admin, admin_mode: true) end.not_to change { user.reload.two_factor_enabled? 
} expect(response).to have_gitlab_http_status(:bad_request) @@ -121,7 +121,7 @@ RSpec.describe API::Users, feature_category: :user_profile do expect(TwoFactor::DestroyService).not_to receive(:new) expect do - patch api("/users/#{admin_with_2fa.id}/disable_two_factor", admin) + patch api("/users/#{admin_with_2fa.id}/disable_two_factor", admin, admin_mode: true) end.not_to change { admin_with_2fa.reload.two_factor_enabled? } expect(response).to have_gitlab_http_status(:forbidden) @@ -131,7 +131,7 @@ RSpec.describe API::Users, feature_category: :user_profile do it "returns a 404 if the target user cannot be found" do expect(TwoFactor::DestroyService).not_to receive(:new) - patch api("/users/#{non_existing_record_id}/disable_two_factor", admin) + patch api("/users/#{non_existing_record_id}/disable_two_factor", admin, admin_mode: true) expect(response).to have_gitlab_http_status(:not_found) expect(json_response['message']).to eq("404 User Not Found") @@ -182,7 +182,7 @@ RSpec.describe API::Users, feature_category: :user_profile do context 'as an admin' do it 'contains the note of users' do - get api("/users", admin), params: { username: user.username } + get api("/users", admin, admin_mode: true), params: { username: user.username } expect(response).to have_gitlab_http_status(:success) expect(json_response.first).to have_key('note') @@ -191,7 +191,7 @@ RSpec.describe API::Users, feature_category: :user_profile do context 'with `created_by` details' do it 'has created_by as nil with a self-registered account' do - get api("/users", admin), params: { username: user.username } + get api("/users", admin, admin_mode: true), params: { username: user.username } expect(response).to have_gitlab_http_status(:success) expect(json_response.first).to have_key('created_by') @@ -201,7 +201,7 @@ RSpec.describe API::Users, feature_category: :user_profile do it 'is created_by a user and has those details' do created = create(:user, created_by_id: user.id) - get api("/users", admin), 
params: { username: created.username } + get api("/users", admin, admin_mode: true), params: { username: created.username } expect(response).to have_gitlab_http_status(:success) expect(json_response.first['created_by'].symbolize_keys) @@ -251,7 +251,7 @@ RSpec.describe API::Users, feature_category: :user_profile do context 'as an admin' do context 'accesses their own profile' do it 'contains the note of the user' do - get api("/user", admin) + get api("/user", admin, admin_mode: true) expect(json_response).to have_key('note') expect(json_response['note']).to eq(admin.note) @@ -259,7 +259,7 @@ RSpec.describe API::Users, feature_category: :user_profile do end context 'sudo' do - let(:admin_personal_access_token) { create(:personal_access_token, user: admin, scopes: %w[api sudo]).token } + let(:admin_personal_access_token) { create(:personal_access_token, :admin_mode, user: admin, scopes: %w[api sudo]).token } context 'accesses the profile of another regular user' do it 'does not contain the note of the user' do @@ -528,7 +528,7 @@ RSpec.describe API::Users, feature_category: :user_profile do context "when admin" do context 'when sudo is defined' do it 'does not return 500' do - admin_personal_access_token = create(:personal_access_token, user: admin, scopes: [:sudo]) + admin_personal_access_token = create(:personal_access_token, :admin_mode, user: admin, scopes: [:sudo]) get api("/users?sudo=#{user.id}", admin, personal_access_token: admin_personal_access_token) expect(response).to have_gitlab_http_status(:success) @@ -536,14 +536,14 @@ RSpec.describe API::Users, feature_category: :user_profile do end it "returns an array of users" do - get api("/users", admin) + get api("/users", admin, admin_mode: true) expect(response).to match_response_schema('public_api/v4/user/admins') expect(response).to include_pagination_headers end it "users contain the `namespace_id` field" do - get api("/users", admin) + get api("/users", admin, admin_mode: true) expect(response).to 
have_gitlab_http_status(:success) expect(response).to match_response_schema('public_api/v4/user/admins') @@ -554,7 +554,7 @@ RSpec.describe API::Users, feature_category: :user_profile do it "returns an array of external users" do create(:user, external: true) - get api("/users?external=true", admin) + get api("/users?external=true", admin, admin_mode: true) expect(response).to match_response_schema('public_api/v4/user/admins') expect(response).to include_pagination_headers @@ -562,7 +562,7 @@ RSpec.describe API::Users, feature_category: :user_profile do end it "returns one user by external UID" do - get api("/users?extern_uid=#{omniauth_user.identities.first.extern_uid}&provider=#{omniauth_user.identities.first.provider}", admin) + get api("/users?extern_uid=#{omniauth_user.identities.first.extern_uid}&provider=#{omniauth_user.identities.first.provider}", admin, admin_mode: true) expect(response).to match_response_schema('public_api/v4/user/admins') expect(json_response.size).to eq(1) @@ -570,13 +570,13 @@ RSpec.describe API::Users, feature_category: :user_profile do end it "returns 400 error if provider with no extern_uid" do - get api("/users?extern_uid=#{omniauth_user.identities.first.extern_uid}", admin) + get api("/users?extern_uid=#{omniauth_user.identities.first.extern_uid}", admin, admin_mode: true) expect(response).to have_gitlab_http_status(:bad_request) end it "returns 400 error if provider with no extern_uid" do - get api("/users?provider=#{omniauth_user.identities.first.provider}", admin) + get api("/users?provider=#{omniauth_user.identities.first.provider}", admin, admin_mode: true) expect(response).to have_gitlab_http_status(:bad_request) end @@ -584,7 +584,7 @@ RSpec.describe API::Users, feature_category: :user_profile do it "returns a user created before a specific date" do user = create(:user, created_at: Date.new(2000, 1, 1)) - get api("/users?created_before=2000-01-02T00:00:00.060Z", admin) + get 
api("/users?created_before=2000-01-02T00:00:00.060Z", admin, admin_mode: true) expect(response).to match_response_schema('public_api/v4/user/admins') expect(json_response.size).to eq(1) @@ -594,7 +594,7 @@ RSpec.describe API::Users, feature_category: :user_profile do it "returns no users created before a specific date" do create(:user, created_at: Date.new(2001, 1, 1)) - get api("/users?created_before=2000-01-02T00:00:00.060Z", admin) + get api("/users?created_before=2000-01-02T00:00:00.060Z", admin, admin_mode: true) expect(response).to match_response_schema('public_api/v4/user/admins') expect(json_response.size).to eq(0) @@ -603,7 +603,7 @@ RSpec.describe API::Users, feature_category: :user_profile do it "returns users created before and after a specific date" do user = create(:user, created_at: Date.new(2001, 1, 1)) - get api("/users?created_before=2001-01-02T00:00:00.060Z&created_after=1999-01-02T00:00:00.060", admin) + get api("/users?created_before=2001-01-02T00:00:00.060Z&created_after=1999-01-02T00:00:00.060", admin, admin_mode: true) expect(response).to match_response_schema('public_api/v4/user/admins') expect(json_response.size).to eq(1) @@ -615,7 +615,7 @@ RSpec.describe API::Users, feature_category: :user_profile do # - admin # - user - get api('/users', admin), params: { order_by: 'id', sort: 'asc' } + get api('/users', admin, admin_mode: true), params: { order_by: 'id', sort: 'asc' } expect(response).to match_response_schema('public_api/v4/user/admins') expect(json_response.size).to eq(2) @@ -626,7 +626,7 @@ RSpec.describe API::Users, feature_category: :user_profile do it 'returns users with 2fa enabled' do user_with_2fa = create(:user, :two_factor_via_otp) - get api('/users', admin), params: { two_factor: 'enabled' } + get api('/users', admin, admin_mode: true), params: { two_factor: 'enabled' } expect(response).to match_response_schema('public_api/v4/user/admins') expect(json_response.size).to eq(1) @@ -638,7 +638,7 @@ RSpec.describe API::Users, 
feature_category: :user_profile do create(:project, namespace: user.namespace) create(:project, namespace: admin.namespace) - get api('/users', admin), params: { without_projects: true } + get api('/users', admin, admin_mode: true), params: { without_projects: true } expect(response).to match_response_schema('public_api/v4/user/admins') expect(json_response.size).to eq(1) @@ -646,7 +646,7 @@ RSpec.describe API::Users, feature_category: :user_profile do end it 'returns 400 when provided incorrect sort params' do - get api('/users', admin), params: { order_by: 'magic', sort: 'asc' } + get api('/users', admin, admin_mode: true), params: { order_by: 'magic', sort: 'asc' } expect(response).to have_gitlab_http_status(:bad_request) end @@ -654,7 +654,7 @@ RSpec.describe API::Users, feature_category: :user_profile do context 'admins param' do it 'returns only admins' do - get api("/users?admins=true", admin) + get api("/users?admins=true", admin, admin_mode: true) expect(response).to match_response_schema('public_api/v4/user/basics') expect(json_response.size).to eq(1) @@ -794,7 +794,7 @@ RSpec.describe API::Users, feature_category: :user_profile do expect(Gitlab::ApplicationRateLimiter) .not_to receive(:throttled?) 
- get api("/users/#{user.id}", admin) + get api("/users/#{user.id}", admin, admin_mode: true) expect(response).to have_gitlab_http_status(:ok) end @@ -836,7 +836,7 @@ RSpec.describe API::Users, feature_category: :user_profile do context 'when authenticated as admin' do it 'contains the note of the user' do - get api("/users/#{user.id}", admin) + get api("/users/#{user.id}", admin, admin_mode: true) expect(json_response).to have_key('note') expect(json_response['note']).to eq(user.note) @@ -844,28 +844,28 @@ RSpec.describe API::Users, feature_category: :user_profile do end it 'includes the `is_admin` field' do - get api("/users/#{user.id}", admin) + get api("/users/#{user.id}", admin, admin_mode: true) expect(response).to match_response_schema('public_api/v4/user/admin') expect(json_response['is_admin']).to be(false) end it "includes the `created_at` field for private users" do - get api("/users/#{private_user.id}", admin) + get api("/users/#{private_user.id}", admin, admin_mode: true) expect(response).to match_response_schema('public_api/v4/user/admin') expect(json_response.keys).to include 'created_at' end it 'includes the `highest_role` field' do - get api("/users/#{user.id}", admin) + get api("/users/#{user.id}", admin, admin_mode: true) expect(response).to match_response_schema('public_api/v4/user/admin') expect(json_response['highest_role']).to be(0) end it 'includes the `namespace_id` field' do - get api("/users/#{user.id}", admin) + get api("/users/#{user.id}", admin, admin_mode: true) expect(response).to have_gitlab_http_status(:success) expect(response).to match_response_schema('public_api/v4/user/admin') @@ -874,13 +874,13 @@ RSpec.describe API::Users, feature_category: :user_profile do if Gitlab.ee? 
it 'does not include values for plan or trial' do - get api("/users/#{user.id}", admin) + get api("/users/#{user.id}", admin, admin_mode: true) expect(response).to match_response_schema('public_api/v4/user/basic') end else it 'does not include plan or trial data' do - get api("/users/#{user.id}", admin) + get api("/users/#{user.id}", admin, admin_mode: true) expect(response).to match_response_schema('public_api/v4/user/basic') expect(json_response.keys).not_to include 'plan' @@ -890,7 +890,7 @@ RSpec.describe API::Users, feature_category: :user_profile do context 'when user has not logged in' do it 'does not include the sign in IPs' do - get api("/users/#{user.id}", admin) + get api("/users/#{user.id}", admin, admin_mode: true) expect(response).to match_response_schema('public_api/v4/user/admin') expect(json_response).to include('current_sign_in_ip' => nil, 'last_sign_in_ip' => nil) @@ -901,7 +901,7 @@ RSpec.describe API::Users, feature_category: :user_profile do let_it_be(:signed_in_user) { create(:user, :with_sign_ins) } it 'includes the sign in IPs' do - get api("/users/#{signed_in_user.id}", admin) + get api("/users/#{signed_in_user.id}", admin, admin_mode: true) expect(response).to match_response_schema('public_api/v4/user/admin') expect(json_response['current_sign_in_ip']).to eq('127.0.0.1') @@ -1104,12 +1104,12 @@ RSpec.describe API::Users, feature_category: :user_profile do describe "POST /users" do it "creates user" do expect do - post api("/users", admin), params: attributes_for(:user, projects_limit: 3) + post api("/users", admin, admin_mode: true), params: attributes_for(:user, projects_limit: 3) end.to change { User.count }.by(1) end it "creates user with correct attributes" do - post api('/users', admin), params: attributes_for(:user, admin: true, can_create_group: true) + post api('/users', admin, admin_mode: true), params: attributes_for(:user, admin: true, can_create_group: true) expect(response).to have_gitlab_http_status(:created) user_id = 
json_response['id'] new_user = User.find(user_id) @@ -1121,13 +1121,13 @@ RSpec.describe API::Users, feature_category: :user_profile do optional_attributes = { confirm: true, theme_id: 2, color_scheme_id: 4 } attributes = attributes_for(:user).merge(optional_attributes) - post api('/users', admin), params: attributes + post api('/users', admin, admin_mode: true), params: attributes expect(response).to have_gitlab_http_status(:created) end it "creates non-admin user" do - post api('/users', admin), params: attributes_for(:user, admin: false, can_create_group: false) + post api('/users', admin, admin_mode: true), params: attributes_for(:user, admin: false, can_create_group: false) expect(response).to have_gitlab_http_status(:created) user_id = json_response['id'] new_user = User.find(user_id) @@ -1136,7 +1136,7 @@ RSpec.describe API::Users, feature_category: :user_profile do end it "creates non-admin users by default" do - post api('/users', admin), params: attributes_for(:user) + post api('/users', admin, admin_mode: true), params: attributes_for(:user) expect(response).to have_gitlab_http_status(:created) user_id = json_response['id'] new_user = User.find(user_id) @@ -1144,13 +1144,13 @@ RSpec.describe API::Users, feature_category: :user_profile do end it "returns 201 Created on success" do - post api("/users", admin), params: attributes_for(:user, projects_limit: 3) + post api("/users", admin, admin_mode: true), params: attributes_for(:user, projects_limit: 3) expect(response).to match_response_schema('public_api/v4/user/admin') expect(response).to have_gitlab_http_status(:created) end it 'creates non-external users by default' do - post api("/users", admin), params: attributes_for(:user) + post api("/users", admin, admin_mode: true), params: attributes_for(:user) expect(response).to have_gitlab_http_status(:created) user_id = json_response['id'] @@ -1159,7 +1159,7 @@ RSpec.describe API::Users, feature_category: :user_profile do end it 'allows an external user to 
be created' do - post api("/users", admin), params: attributes_for(:user, external: true) + post api("/users", admin, admin_mode: true), params: attributes_for(:user, external: true) expect(response).to have_gitlab_http_status(:created) user_id = json_response['id'] @@ -1168,7 +1168,7 @@ RSpec.describe API::Users, feature_category: :user_profile do end it "creates user with reset password" do - post api('/users', admin), params: attributes_for(:user, reset_password: true).except(:password) + post api('/users', admin, admin_mode: true), params: attributes_for(:user, reset_password: true).except(:password) expect(response).to have_gitlab_http_status(:created) @@ -1181,7 +1181,7 @@ RSpec.describe API::Users, feature_category: :user_profile do it "creates user with random password" do params = attributes_for(:user, force_random_password: true) params.delete(:password) - post api('/users', admin), params: params + post api('/users', admin, admin_mode: true), params: params expect(response).to have_gitlab_http_status(:created) @@ -1192,7 +1192,7 @@ RSpec.describe API::Users, feature_category: :user_profile do end it "creates user with private profile" do - post api('/users', admin), params: attributes_for(:user, private_profile: true) + post api('/users', admin, admin_mode: true), params: attributes_for(:user, private_profile: true) expect(response).to have_gitlab_http_status(:created) @@ -1204,7 +1204,7 @@ RSpec.describe API::Users, feature_category: :user_profile do end it "creates user with view_diffs_file_by_file" do - post api('/users', admin), params: attributes_for(:user, view_diffs_file_by_file: true) + post api('/users', admin, admin_mode: true), params: attributes_for(:user, view_diffs_file_by_file: true) expect(response).to have_gitlab_http_status(:created) @@ -1217,7 +1217,7 @@ RSpec.describe API::Users, feature_category: :user_profile do it "creates user with avatar" do workhorse_form_with_file( - api('/users', admin), + api('/users', admin, admin_mode: 
true), method: :post, file_key: :avatar, params: attributes_for(:user, avatar: fixture_file_upload('spec/fixtures/banana_sample.gif', 'image/gif')) @@ -1232,7 +1232,7 @@ RSpec.describe API::Users, feature_category: :user_profile do end it "does not create user with invalid email" do - post api('/users', admin), + post api('/users', admin, admin_mode: true), params: { email: 'invalid email', password: User.random_password, @@ -1242,22 +1242,22 @@ RSpec.describe API::Users, feature_category: :user_profile do end it 'returns 400 error if name not given' do - post api('/users', admin), params: attributes_for(:user).except(:name) + post api('/users', admin, admin_mode: true), params: attributes_for(:user).except(:name) expect(response).to have_gitlab_http_status(:bad_request) end it 'returns 400 error if password not given' do - post api('/users', admin), params: attributes_for(:user).except(:password) + post api('/users', admin, admin_mode: true), params: attributes_for(:user).except(:password) expect(response).to have_gitlab_http_status(:bad_request) end it 'returns 400 error if email not given' do - post api('/users', admin), params: attributes_for(:user).except(:email) + post api('/users', admin, admin_mode: true), params: attributes_for(:user).except(:email) expect(response).to have_gitlab_http_status(:bad_request) end it 'returns 400 error if username not given' do - post api('/users', admin), params: attributes_for(:user).except(:username) + post api('/users', admin, admin_mode: true), params: attributes_for(:user).except(:username) expect(response).to have_gitlab_http_status(:bad_request) end @@ -1265,13 +1265,13 @@ RSpec.describe API::Users, feature_category: :user_profile do optional_attributes = { theme_id: 50, color_scheme_id: 50 } attributes = attributes_for(:user).merge(optional_attributes) - post api('/users', admin), params: attributes + post api('/users', admin, admin_mode: true), params: attributes expect(response).to 
have_gitlab_http_status(:bad_request) end it 'returns 400 error if user does not validate' do - post api('/users', admin), + post api('/users', admin, admin_mode: true), params: { password: 'pass', email: 'test@example.com', @@ -1293,7 +1293,7 @@ RSpec.describe API::Users, feature_category: :user_profile do it 'tracks weak password errors' do attributes = attributes_for(:user).merge({ password: "password" }) - post api('/users', admin), params: attributes + post api('/users', admin, admin_mode: true), params: attributes expect(json_response['message']['password']) .to eq(['must not contain commonly used combinations of words and letters']) @@ -1312,7 +1312,7 @@ RSpec.describe API::Users, feature_category: :user_profile do context 'with existing user' do before do - post api('/users', admin), + post api('/users', admin, admin_mode: true), params: { email: 'test@example.com', password: User.random_password, @@ -1323,7 +1323,7 @@ RSpec.describe API::Users, feature_category: :user_profile do it 'returns 409 conflict error if user with same email exists' do expect do - post api('/users', admin), + post api('/users', admin, admin_mode: true), params: { name: 'foo', email: 'test@example.com', @@ -1337,7 +1337,7 @@ RSpec.describe API::Users, feature_category: :user_profile do it 'returns 409 conflict error if same username exists' do expect do - post api('/users', admin), + post api('/users', admin, admin_mode: true), params: { name: 'foo', email: 'foo@example.com', @@ -1351,7 +1351,7 @@ RSpec.describe API::Users, feature_category: :user_profile do it 'returns 409 conflict error if same username exists (case insensitive)' do expect do - post api('/users', admin), + post api('/users', admin, admin_mode: true), params: { name: 'foo', email: 'foo@example.com', @@ -1364,7 +1364,7 @@ RSpec.describe API::Users, feature_category: :user_profile do end it 'creates user with new identity' do - post api("/users", admin), params: attributes_for(:user, provider: 'github', extern_uid: 
'67890') + post api("/users", admin, admin_mode: true), params: attributes_for(:user, provider: 'github', extern_uid: '67890') expect(response).to have_gitlab_http_status(:created) expect(json_response['identities'].first['extern_uid']).to eq('67890') @@ -1378,7 +1378,7 @@ RSpec.describe API::Users, feature_category: :user_profile do it 'returns 409 conflict error' do expect do - post api('/users', admin), + post api('/users', admin, admin_mode: true), params: { name: 'foo', email: confirmed_user.email, @@ -1396,7 +1396,7 @@ RSpec.describe API::Users, feature_category: :user_profile do it 'returns 409 conflict error' do expect do - post api('/users', admin), + post api('/users', admin, admin_mode: true), params: { name: 'foo', email: unconfirmed_user.email, @@ -1416,7 +1416,7 @@ RSpec.describe API::Users, feature_category: :user_profile do it 'returns 409 conflict error' do expect do - post api('/users', admin), + post api('/users', admin, admin_mode: true), params: { name: 'foo', email: email.email, @@ -1434,7 +1434,7 @@ RSpec.describe API::Users, feature_category: :user_profile do it 'does not create user' do expect do - post api('/users', admin), + post api('/users', admin, admin_mode: true), params: { name: 'foo', email: email.email, @@ -1465,7 +1465,7 @@ RSpec.describe API::Users, feature_category: :user_profile do shared_examples_for 'creates the user with the value of `private_profile` based on the application setting' do specify do - post api("/users", admin), params: params + post api("/users", admin, admin_mode: true), params: params expect(response).to have_gitlab_http_status(:created) user = User.find_by(id: json_response['id'], private_profile: true) @@ -1479,7 +1479,7 @@ RSpec.describe API::Users, feature_category: :user_profile do context 'when the attribute is overridden in params' do it 'creates the user with the value of `private_profile` same as the value of the overridden param' do - post api("/users", admin), params: 
params.merge(private_profile: false) + post api("/users", admin, admin_mode: true), params: params.merge(private_profile: false) expect(response).to have_gitlab_http_status(:created) user = User.find_by(id: json_response['id'], private_profile: false) @@ -1498,7 +1498,7 @@ RSpec.describe API::Users, feature_category: :user_profile do describe "PUT /users/:id" do it "returns 200 OK on success" do - put api("/users/#{user.id}", admin), params: { bio: 'new test bio' } + put api("/users/#{user.id}", admin, admin_mode: true), params: { bio: 'new test bio' } expect(response).to match_response_schema('public_api/v4/user/admin') expect(response).to have_gitlab_http_status(:ok) @@ -1506,7 +1506,7 @@ RSpec.describe API::Users, feature_category: :user_profile do context 'updating password' do def update_password(user, admin, password = User.random_password) - put api("/users/#{user.id}", admin), params: { password: password } + put api("/users/#{user.id}", admin, admin_mode: true), params: { password: password } end context 'admin updates their own password' do @@ -1564,7 +1564,7 @@ RSpec.describe API::Users, feature_category: :user_profile do end it "updates user with new bio" do - put api("/users/#{user.id}", admin), params: { bio: 'new test bio' } + put api("/users/#{user.id}", admin, admin_mode: true), params: { bio: 'new test bio' } expect(response).to have_gitlab_http_status(:ok) expect(json_response['bio']).to eq('new test bio') @@ -1574,7 +1574,7 @@ RSpec.describe API::Users, feature_category: :user_profile do it "updates user with empty bio" do user.update!(bio: 'previous bio') - put api("/users/#{user.id}", admin), params: { bio: '' } + put api("/users/#{user.id}", admin, admin_mode: true), params: { bio: '' } expect(response).to have_gitlab_http_status(:ok) expect(json_response['bio']).to eq('') @@ -1582,7 +1582,7 @@ RSpec.describe API::Users, feature_category: :user_profile do end it 'updates user with nil bio' do - put api("/users/#{user.id}", admin), params: { 
bio: nil } + put api("/users/#{user.id}", admin, admin_mode: true), params: { bio: nil } expect(response).to have_gitlab_http_status(:ok) expect(json_response['bio']).to eq('') @@ -1590,7 +1590,7 @@ RSpec.describe API::Users, feature_category: :user_profile do end it "updates user with organization" do - put api("/users/#{user.id}", admin), params: { organization: 'GitLab' } + put api("/users/#{user.id}", admin, admin_mode: true), params: { organization: 'GitLab' } expect(response).to have_gitlab_http_status(:ok) expect(json_response['organization']).to eq('GitLab') @@ -1599,7 +1599,7 @@ RSpec.describe API::Users, feature_category: :user_profile do it 'updates user with avatar' do workhorse_form_with_file( - api("/users/#{user.id}", admin), + api("/users/#{user.id}", admin, admin_mode: true), method: :put, file_key: :avatar, params: { avatar: fixture_file_upload('spec/fixtures/banana_sample.gif', 'image/gif') } @@ -1615,7 +1615,7 @@ RSpec.describe API::Users, feature_category: :user_profile do it 'updates user with a new email' do old_email = user.email old_notification_email = user.notification_email_or_default - put api("/users/#{user.id}", admin), params: { email: 'new@email.com' } + put api("/users/#{user.id}", admin, admin_mode: true), params: { email: 'new@email.com' } user.reload @@ -1627,7 +1627,7 @@ RSpec.describe API::Users, feature_category: :user_profile do end it 'skips reconfirmation when requested' do - put api("/users/#{user.id}", admin), params: { email: 'new@email.com', skip_reconfirmation: true } + put api("/users/#{user.id}", admin, admin_mode: true), params: { email: 'new@email.com', skip_reconfirmation: true } user.reload @@ -1637,7 +1637,7 @@ RSpec.describe API::Users, feature_category: :user_profile do end it 'updates user with their own username' do - put api("/users/#{user.id}", admin), params: { username: user.username } + put api("/users/#{user.id}", admin, admin_mode: true), params: { username: user.username } expect(response).to 
have_gitlab_http_status(:ok) expect(json_response['username']).to eq(user.username) @@ -1645,14 +1645,14 @@ RSpec.describe API::Users, feature_category: :user_profile do end it "updates user's existing identity" do - put api("/users/#{ldap_user.id}", admin), params: { provider: 'ldapmain', extern_uid: '654321' } + put api("/users/#{ldap_user.id}", admin, admin_mode: true), params: { provider: 'ldapmain', extern_uid: '654321' } expect(response).to have_gitlab_http_status(:ok) expect(ldap_user.reload.identities.first.extern_uid).to eq('654321') end it 'updates user with new identity' do - put api("/users/#{user.id}", admin), params: { provider: 'github', extern_uid: 'john' } + put api("/users/#{user.id}", admin, admin_mode: true), params: { provider: 'github', extern_uid: 'john' } expect(response).to have_gitlab_http_status(:ok) expect(user.reload.identities.first.extern_uid).to eq('john') @@ -1660,14 +1660,14 @@ RSpec.describe API::Users, feature_category: :user_profile do end it "updates admin status" do - put api("/users/#{user.id}", admin), params: { admin: true } + put api("/users/#{user.id}", admin, admin_mode: true), params: { admin: true } expect(response).to have_gitlab_http_status(:ok) expect(user.reload.admin).to eq(true) end it "updates external status" do - put api("/users/#{user.id}", admin), params: { external: true } + put api("/users/#{user.id}", admin, admin_mode: true), params: { external: true } expect(response).to have_gitlab_http_status(:ok) expect(json_response['external']).to eq(true) @@ -1675,14 +1675,14 @@ RSpec.describe API::Users, feature_category: :user_profile do end it "does have default values for theme and color-scheme ID" do - put api("/users/#{user.id}", admin), params: {} + put api("/users/#{user.id}", admin, admin_mode: true), params: {} expect(user.reload.theme_id).to eq(Gitlab::Themes.default.id) expect(user.reload.color_scheme_id).to eq(Gitlab::ColorSchemes.default.id) end it "updates viewing diffs file by file" do - put 
api("/users/#{user.id}", admin), params: { view_diffs_file_by_file: true } + put api("/users/#{user.id}", admin, admin_mode: true), params: { view_diffs_file_by_file: true } expect(response).to have_gitlab_http_status(:ok) expect(user.reload.user_preference.view_diffs_file_by_file?).to eq(true) @@ -1693,7 +1693,7 @@ RSpec.describe API::Users, feature_category: :user_profile do current_value = user.private_profile new_value = !current_value - put api("/users/#{user.id}", admin), params: { private_profile: new_value } + put api("/users/#{user.id}", admin, admin_mode: true), params: { private_profile: new_value } expect(response).to have_gitlab_http_status(:ok) expect(user.reload.private_profile).to eq(new_value) @@ -1707,7 +1707,7 @@ RSpec.describe API::Users, feature_category: :user_profile do it "updates private_profile to value of the application setting" do user.update!(private_profile: false) - put api("/users/#{user.id}", admin), params: { private_profile: nil } + put api("/users/#{user.id}", admin, admin_mode: true), params: { private_profile: nil } expect(response).to have_gitlab_http_status(:ok) expect(user.reload.private_profile).to eq(true) @@ -1717,7 +1717,7 @@ RSpec.describe API::Users, feature_category: :user_profile do it "does not modify private profile when field is not provided" do user.update!(private_profile: true) - put api("/users/#{user.id}", admin), params: {} + put api("/users/#{user.id}", admin, admin_mode: true), params: {} expect(response).to have_gitlab_http_status(:ok) expect(user.reload.private_profile).to eq(true) @@ -1730,7 +1730,7 @@ RSpec.describe API::Users, feature_category: :user_profile do user.update!(theme_id: theme.id, color_scheme_id: scheme.id) - put api("/users/#{user.id}", admin), params: {} + put api("/users/#{user.id}", admin, admin_mode: true), params: {} expect(response).to have_gitlab_http_status(:ok) expect(user.reload.theme_id).to eq(theme.id) @@ -1740,7 +1740,7 @@ RSpec.describe API::Users, feature_category: 
:user_profile do it "does not update admin status" do admin_user = create(:admin) - put api("/users/#{admin_user.id}", admin), params: { can_create_group: false } + put api("/users/#{admin_user.id}", admin, admin_mode: true), params: { can_create_group: false } expect(response).to have_gitlab_http_status(:ok) expect(admin_user.reload.admin).to eq(true) @@ -1748,35 +1748,35 @@ RSpec.describe API::Users, feature_category: :user_profile do end it "does not allow invalid update" do - put api("/users/#{user.id}", admin), params: { email: 'invalid email' } + put api("/users/#{user.id}", admin, admin_mode: true), params: { email: 'invalid email' } expect(response).to have_gitlab_http_status(:bad_request) expect(user.reload.email).not_to eq('invalid email') end it "updates theme id" do - put api("/users/#{user.id}", admin), params: { theme_id: 5 } + put api("/users/#{user.id}", admin, admin_mode: true), params: { theme_id: 5 } expect(response).to have_gitlab_http_status(:ok) expect(user.reload.theme_id).to eq(5) end it "does not update invalid theme id" do - put api("/users/#{user.id}", admin), params: { theme_id: 50 } + put api("/users/#{user.id}", admin, admin_mode: true), params: { theme_id: 50 } expect(response).to have_gitlab_http_status(:bad_request) expect(user.reload.theme_id).not_to eq(50) end it "updates color scheme id" do - put api("/users/#{user.id}", admin), params: { color_scheme_id: 5 } + put api("/users/#{user.id}", admin, admin_mode: true), params: { color_scheme_id: 5 } expect(response).to have_gitlab_http_status(:ok) expect(user.reload.color_scheme_id).to eq(5) end it "does not update invalid color scheme id" do - put api("/users/#{user.id}", admin), params: { color_scheme_id: 50 } + put api("/users/#{user.id}", admin, admin_mode: true), params: { color_scheme_id: 50 } expect(response).to have_gitlab_http_status(:bad_request) expect(user.reload.color_scheme_id).not_to eq(50) @@ -1793,20 +1793,20 @@ RSpec.describe API::Users, feature_category: 
:user_profile do end it "returns 404 for non-existing user" do - put api("/users/0", admin), params: { bio: 'update should fail' } + put api("/users/0", admin, admin_mode: true), params: { bio: 'update should fail' } expect(response).to have_gitlab_http_status(:not_found) expect(json_response['message']).to eq('404 User Not Found') end it "returns a 404 if invalid ID" do - put api("/users/ASDF", admin) + put api("/users/ASDF", admin, admin_mode: true) expect(response).to have_gitlab_http_status(:not_found) end it 'returns 400 error if user does not validate' do - put api("/users/#{user.id}", admin), + put api("/users/#{user.id}", admin, admin_mode: true), params: { password: 'pass', email: 'test@example.com', @@ -1827,26 +1827,26 @@ RSpec.describe API::Users, feature_category: :user_profile do end it 'returns 400 if provider is missing for identity update' do - put api("/users/#{omniauth_user.id}", admin), params: { extern_uid: '654321' } + put api("/users/#{omniauth_user.id}", admin, admin_mode: true), params: { extern_uid: '654321' } expect(response).to have_gitlab_http_status(:bad_request) end it 'returns 400 if external UID is missing for identity update' do - put api("/users/#{omniauth_user.id}", admin), params: { provider: 'ldap' } + put api("/users/#{omniauth_user.id}", admin, admin_mode: true), params: { provider: 'ldap' } expect(response).to have_gitlab_http_status(:bad_request) end context "with existing user" do before do - post api("/users", admin), params: { email: 'test@example.com', password: User.random_password, username: 'test', name: 'test' } - post api("/users", admin), params: { email: 'foo@bar.com', password: User.random_password, username: 'john', name: 'john' } + post api("/users", admin, admin_mode: true), params: { email: 'test@example.com', password: User.random_password, username: 'test', name: 'test' } + post api("/users", admin, admin_mode: true), params: { email: 'foo@bar.com', password: User.random_password, username: 'john', name: 
'john' } @user = User.all.last end it 'returns 409 conflict error if email address exists' do - put api("/users/#{@user.id}", admin), params: { email: 'test@example.com' } + put api("/users/#{@user.id}", admin, admin_mode: true), params: { email: 'test@example.com' } expect(response).to have_gitlab_http_status(:conflict) expect(@user.reload.email).to eq(@user.email) @@ -1854,7 +1854,7 @@ RSpec.describe API::Users, feature_category: :user_profile do it 'returns 409 conflict error if username taken' do @user_id = User.all.last.id - put api("/users/#{@user.id}", admin), params: { username: 'test' } + put api("/users/#{@user.id}", admin, admin_mode: true), params: { username: 'test' } expect(response).to have_gitlab_http_status(:conflict) expect(@user.reload.username).to eq(@user.username) @@ -1862,7 +1862,7 @@ RSpec.describe API::Users, feature_category: :user_profile do it 'returns 409 conflict error if username taken (case insensitive)' do @user_id = User.all.last.id - put api("/users/#{@user.id}", admin), params: { username: 'TEST' } + put api("/users/#{@user.id}", admin, admin_mode: true), params: { username: 'TEST' } expect(response).to have_gitlab_http_status(:conflict) expect(@user.reload.username).to eq(@user.username) @@ -1874,7 +1874,7 @@ RSpec.describe API::Users, feature_category: :user_profile do let!(:confirmed_user) { create(:user, email: 'foo@example.com') } it 'returns 409 conflict error' do - put api("/users/#{user.id}", admin), params: { email: confirmed_user.email } + put api("/users/#{user.id}", admin, admin_mode: true), params: { email: confirmed_user.email } expect(response).to have_gitlab_http_status(:conflict) expect(user.reload.email).not_to eq(confirmed_user.email) @@ -1885,7 +1885,7 @@ RSpec.describe API::Users, feature_category: :user_profile do let!(:unconfirmed_user) { create(:user, :unconfirmed, email: 'foo@example.com') } it 'returns 409 conflict error' do - put api("/users/#{user.id}", admin), params: { email: unconfirmed_user.email } 
+ put api("/users/#{user.id}", admin, admin_mode: true), params: { email: unconfirmed_user.email } expect(response).to have_gitlab_http_status(:conflict) expect(user.reload.email).not_to eq(unconfirmed_user.email) @@ -1898,7 +1898,7 @@ RSpec.describe API::Users, feature_category: :user_profile do let!(:email) { create(:email, :confirmed, email: 'foo@example.com') } it 'returns 409 conflict error' do - put api("/users/#{user.id}", admin), params: { email: email.email } + put api("/users/#{user.id}", admin, admin_mode: true), params: { email: email.email } expect(response).to have_gitlab_http_status(:conflict) expect(user.reload.email).not_to eq(email.email) @@ -1909,7 +1909,7 @@ RSpec.describe API::Users, feature_category: :user_profile do let!(:email) { create(:email, email: 'foo@example.com') } it 'does not update email' do - put api("/users/#{user.id}", admin), params: { email: email.email } + put api("/users/#{user.id}", admin, admin_mode: true), params: { email: email.email } expect(response).to have_gitlab_http_status(:bad_request) expect(user.reload.email).not_to eq(email.email) @@ -1941,7 +1941,7 @@ RSpec.describe API::Users, feature_category: :user_profile do end context 'when authenticated as non-admin' do - it "does not allow updating user's credit card validation", :aggregate_failures do + it "does not allow updating user's credit card validation" do put api("/user/#{user.id}/credit_card_validation", user), params: params expect(response).to have_gitlab_http_status(:forbidden) @@ -1949,8 +1949,8 @@ RSpec.describe API::Users, feature_category: :user_profile do end context 'when authenticated as admin' do - it "updates user's credit card validation", :aggregate_failures do - put api("/user/#{user.id}/credit_card_validation", admin), params: params + it "updates user's credit card validation" do + put api("/user/#{user.id}/credit_card_validation", admin, admin_mode: true), params: params user.reload @@ -1965,13 +1965,13 @@ RSpec.describe API::Users, 
feature_category: :user_profile do end it "returns 400 error if credit_card_validated_at is missing" do - put api("/user/#{user.id}/credit_card_validation", admin), params: {} + put api("/user/#{user.id}/credit_card_validation", admin, admin_mode: true), params: {} expect(response).to have_gitlab_http_status(:bad_request) end it 'returns 404 error if user not found' do - put api("/user/#{non_existing_record_id}/credit_card_validation", admin), params: params + put api("/user/#{non_existing_record_id}/credit_card_validation", admin, admin_mode: true), params: params expect(response).to have_gitlab_http_status(:not_found) expect(json_response['message']).to eq('404 User Not Found') @@ -1993,24 +1993,24 @@ RSpec.describe API::Users, feature_category: :user_profile do context 'when authenticated' do it 'deletes identity of given provider' do expect do - delete api("/users/#{test_user.id}/identities/ldapmain", admin) + delete api("/users/#{test_user.id}/identities/ldapmain", admin, admin_mode: true) end.to change { test_user.identities.count }.by(-1) expect(response).to have_gitlab_http_status(:no_content) end it_behaves_like '412 response' do - let(:request) { api("/users/#{test_user.id}/identities/ldapmain", admin) } + let(:request) { api("/users/#{test_user.id}/identities/ldapmain", admin, admin_mode: true) } end it 'returns 404 error if user not found' do - delete api("/users/0/identities/ldapmain", admin) + delete api("/users/0/identities/ldapmain", admin, admin_mode: true) expect(response).to have_gitlab_http_status(:not_found) expect(json_response['message']).to eq('404 User Not Found') end it 'returns 404 error if identity not found' do - delete api("/users/#{test_user.id}/identities/saml", admin) + delete api("/users/#{test_user.id}/identities/saml", admin, admin_mode: true) expect(response).to have_gitlab_http_status(:not_found) expect(json_response['message']).to eq('404 Identity Not Found') @@ -2020,24 +2020,24 @@ RSpec.describe API::Users, feature_category: 
:user_profile do describe "POST /users/:id/keys" do it "does not create invalid ssh key" do - post api("/users/#{user.id}/keys", admin), params: { title: "invalid key" } + post api("/users/#{user.id}/keys", admin, admin_mode: true), params: { title: "invalid key" } expect(response).to have_gitlab_http_status(:bad_request) expect(json_response['error']).to eq('key is missing') end it 'does not create key without title' do - post api("/users/#{user.id}/keys", admin), params: { key: 'some key' } + post api("/users/#{user.id}/keys", admin, admin_mode: true), params: { key: 'some key' } expect(response).to have_gitlab_http_status(:bad_request) expect(json_response['error']).to eq('title is missing') end - it "creates ssh key", :aggregate_failures do + it "creates ssh key" do key_attrs = attributes_for(:key, usage_type: :signing) expect do - post api("/users/#{user.id}/keys", admin), params: key_attrs + post api("/users/#{user.id}/keys", admin, admin_mode: true), params: key_attrs end.to change { user.keys.count }.by(1) expect(response).to have_gitlab_http_status(:created) @@ -2052,14 +2052,14 @@ RSpec.describe API::Users, feature_category: :user_profile do optional_attributes = { expires_at: 3.weeks.from_now } attributes = attributes_for(:key).merge(optional_attributes) - post api("/users/#{user.id}/keys", admin), params: attributes + post api("/users/#{user.id}/keys", admin, admin_mode: true), params: attributes expect(response).to have_gitlab_http_status(:created) expect(json_response['expires_at'].to_date).to eq(optional_attributes[:expires_at].to_date) end it "returns 400 for invalid ID" do - post api("/users/0/keys", admin) + post api("/users/0/keys", admin, admin_mode: true) expect(response).to have_gitlab_http_status(:bad_request) end end @@ -2240,7 +2240,7 @@ RSpec.describe API::Users, feature_category: :user_profile do end describe 'GET /user/:id/keys/:key_id' do - it 'gets existing key', :aggregate_failures do + it 'gets existing key' do user.keys << key get 
api("/users/#{user.id}/keys/#{key.id}") @@ -2249,7 +2249,7 @@ RSpec.describe API::Users, feature_category: :user_profile do expect(json_response['title']).to eq(key.title) end - it 'returns 404 error if user not found', :aggregate_failures do + it 'returns 404 error if user not found' do user.keys << key get api("/users/0/keys/#{key.id}") @@ -2258,7 +2258,7 @@ RSpec.describe API::Users, feature_category: :user_profile do expect(json_response['message']).to eq('404 User Not Found') end - it 'returns 404 error if key not found', :aggregate_failures do + it 'returns 404 error if key not found' do get api("/users/#{user.id}/keys/#{non_existing_record_id}") expect(response).to have_gitlab_http_status(:not_found) @@ -2279,26 +2279,26 @@ RSpec.describe API::Users, feature_category: :user_profile do user.keys << key expect do - delete api("/users/#{user.id}/keys/#{key.id}", admin) + delete api("/users/#{user.id}/keys/#{key.id}", admin, admin_mode: true) expect(response).to have_gitlab_http_status(:no_content) end.to change { user.keys.count }.by(-1) end it_behaves_like '412 response' do - let(:request) { api("/users/#{user.id}/keys/#{key.id}", admin) } + let(:request) { api("/users/#{user.id}/keys/#{key.id}", admin, admin_mode: true) } end it 'returns 404 error if user not found' do user.keys << key - delete api("/users/0/keys/#{key.id}", admin) + delete api("/users/0/keys/#{key.id}", admin, admin_mode: true) expect(response).to have_gitlab_http_status(:not_found) expect(json_response['message']).to eq('404 User Not Found') end it 'returns 404 error if key not foud' do - delete api("/users/#{user.id}/keys/#{non_existing_record_id}", admin) + delete api("/users/#{user.id}/keys/#{non_existing_record_id}", admin, admin_mode: true) expect(response).to have_gitlab_http_status(:not_found) expect(json_response['message']).to eq('404 Key Not Found') end @@ -2307,7 +2307,7 @@ RSpec.describe API::Users, feature_category: :user_profile do describe 'POST /users/:id/gpg_keys' do it 
'does not create invalid GPG key' do - post api("/users/#{user.id}/gpg_keys", admin) + post api("/users/#{user.id}/gpg_keys", admin, admin_mode: true) expect(response).to have_gitlab_http_status(:bad_request) expect(json_response['error']).to eq('key is missing') @@ -2317,14 +2317,14 @@ RSpec.describe API::Users, feature_category: :user_profile do key_attrs = attributes_for :gpg_key, key: GpgHelpers::User2.public_key expect do - post api("/users/#{user.id}/gpg_keys", admin), params: key_attrs + post api("/users/#{user.id}/gpg_keys", admin, admin_mode: true), params: key_attrs expect(response).to have_gitlab_http_status(:created) end.to change { user.gpg_keys.count }.by(1) end it 'returns 400 for invalid ID' do - post api('/users/0/gpg_keys', admin) + post api('/users/0/gpg_keys', admin, admin_mode: true) expect(response).to have_gitlab_http_status(:bad_request) end @@ -2389,7 +2389,7 @@ RSpec.describe API::Users, feature_category: :user_profile do user.gpg_keys << gpg_key expect do - delete api("/users/#{user.id}/gpg_keys/#{gpg_key.id}", admin) + delete api("/users/#{user.id}/gpg_keys/#{gpg_key.id}", admin, admin_mode: true) expect(response).to have_gitlab_http_status(:no_content) end.to change { user.gpg_keys.count }.by(-1) @@ -2398,14 +2398,14 @@ RSpec.describe API::Users, feature_category: :user_profile do it 'returns 404 error if user not found' do user.keys << key - delete api("/users/0/gpg_keys/#{gpg_key.id}", admin) + delete api("/users/0/gpg_keys/#{gpg_key.id}", admin, admin_mode: true) expect(response).to have_gitlab_http_status(:not_found) expect(json_response['message']).to eq('404 User Not Found') end it 'returns 404 error if key not foud' do - delete api("/users/#{user.id}/gpg_keys/#{non_existing_record_id}", admin) + delete api("/users/#{user.id}/gpg_keys/#{non_existing_record_id}", admin, admin_mode: true) expect(response).to have_gitlab_http_status(:not_found) expect(json_response['message']).to eq('404 GPG Key Not Found') @@ -2427,7 +2427,7 @@ 
RSpec.describe API::Users, feature_category: :user_profile do user.gpg_keys << gpg_key expect do - post api("/users/#{user.id}/gpg_keys/#{gpg_key.id}/revoke", admin) + post api("/users/#{user.id}/gpg_keys/#{gpg_key.id}/revoke", admin, admin_mode: true) expect(response).to have_gitlab_http_status(:accepted) end.to change { user.gpg_keys.count }.by(-1) @@ -2436,14 +2436,14 @@ RSpec.describe API::Users, feature_category: :user_profile do it 'returns 404 error if user not found' do user.gpg_keys << gpg_key - post api("/users/0/gpg_keys/#{gpg_key.id}/revoke", admin) + post api("/users/0/gpg_keys/#{gpg_key.id}/revoke", admin, admin_mode: true) expect(response).to have_gitlab_http_status(:not_found) expect(json_response['message']).to eq('404 User Not Found') end it 'returns 404 error if key not foud' do - post api("/users/#{user.id}/gpg_keys/#{non_existing_record_id}/revoke", admin) + post api("/users/#{user.id}/gpg_keys/#{non_existing_record_id}/revoke", admin, admin_mode: true) expect(response).to have_gitlab_http_status(:not_found) expect(json_response['message']).to eq('404 GPG Key Not Found') @@ -2453,7 +2453,7 @@ RSpec.describe API::Users, feature_category: :user_profile do describe "POST /users/:id/emails", :mailer do it "does not create invalid email" do - post api("/users/#{user.id}/emails", admin), params: {} + post api("/users/#{user.id}/emails", admin, admin_mode: true), params: {} expect(response).to have_gitlab_http_status(:bad_request) expect(json_response['error']).to eq('email is missing') @@ -2464,7 +2464,7 @@ RSpec.describe API::Users, feature_category: :user_profile do perform_enqueued_jobs do expect do - post api("/users/#{user.id}/emails", admin), params: email_attrs + post api("/users/#{user.id}/emails", admin, admin_mode: true), params: email_attrs end.to change { user.emails.count }.by(1) end @@ -2473,7 +2473,7 @@ RSpec.describe API::Users, feature_category: :user_profile do end it "returns a 400 for invalid ID" do - post api("/users/0/emails", 
admin) + post api("/users/0/emails", admin, admin_mode: true) expect(response).to have_gitlab_http_status(:bad_request) end @@ -2482,7 +2482,7 @@ RSpec.describe API::Users, feature_category: :user_profile do email_attrs = attributes_for :email email_attrs[:skip_confirmation] = true - post api("/users/#{user.id}/emails", admin), params: email_attrs + post api("/users/#{user.id}/emails", admin, admin_mode: true), params: email_attrs expect(response).to have_gitlab_http_status(:created) @@ -2494,7 +2494,7 @@ RSpec.describe API::Users, feature_category: :user_profile do let!(:confirmed_user) { create(:user, email: 'foo@example.com') } it 'returns 400 error' do - post api("/users/#{user.id}/emails", admin), params: { email: confirmed_user.email } + post api("/users/#{user.id}/emails", admin, admin_mode: true), params: { email: confirmed_user.email } expect(response).to have_gitlab_http_status(:bad_request) end @@ -2504,7 +2504,7 @@ RSpec.describe API::Users, feature_category: :user_profile do let!(:unconfirmed_user) { create(:user, :unconfirmed, email: 'foo@example.com') } it 'returns 400 error' do - post api("/users/#{user.id}/emails", admin), params: { email: unconfirmed_user.email } + post api("/users/#{user.id}/emails", admin, admin_mode: true), params: { email: unconfirmed_user.email } expect(response).to have_gitlab_http_status(:bad_request) end @@ -2516,7 +2516,7 @@ RSpec.describe API::Users, feature_category: :user_profile do let!(:email) { create(:email, :confirmed, email: 'foo@example.com') } it 'returns 400 error' do - post api("/users/#{user.id}/emails", admin), params: { email: email.email } + post api("/users/#{user.id}/emails", admin, admin_mode: true), params: { email: email.email } expect(response).to have_gitlab_http_status(:bad_request) end @@ -2526,7 +2526,7 @@ RSpec.describe API::Users, feature_category: :user_profile do let!(:email) { create(:email, email: 'foo@example.com') } it 'returns 400 error' do - post api("/users/#{user.id}/emails", admin), 
params: { email: email.email } + post api("/users/#{user.id}/emails", admin, admin_mode: true), params: { email: email.email } expect(response).to have_gitlab_http_status(:bad_request) end @@ -2544,7 +2544,7 @@ RSpec.describe API::Users, feature_category: :user_profile do context 'when authenticated' do it 'returns 404 for non-existing user' do - get api('/users/0/emails', admin) + get api('/users/0/emails', admin, admin_mode: true) expect(response).to have_gitlab_http_status(:not_found) expect(json_response['message']).to eq('404 User Not Found') end @@ -2552,7 +2552,7 @@ RSpec.describe API::Users, feature_category: :user_profile do it 'returns array of emails' do user.emails << email - get api("/users/#{user.id}/emails", admin) + get api("/users/#{user.id}/emails", admin, admin_mode: true) expect(response).to have_gitlab_http_status(:ok) expect(response).to include_pagination_headers @@ -2562,7 +2562,7 @@ RSpec.describe API::Users, feature_category: :user_profile do end it "returns a 404 for invalid ID" do - get api("/users/ASDF/emails", admin) + get api("/users/ASDF/emails", admin, admin_mode: true) expect(response).to have_gitlab_http_status(:not_found) end @@ -2582,26 +2582,26 @@ RSpec.describe API::Users, feature_category: :user_profile do user.emails << email expect do - delete api("/users/#{user.id}/emails/#{email.id}", admin) + delete api("/users/#{user.id}/emails/#{email.id}", admin, admin_mode: true) expect(response).to have_gitlab_http_status(:no_content) end.to change { user.emails.count }.by(-1) end it_behaves_like '412 response' do - let(:request) { api("/users/#{user.id}/emails/#{email.id}", admin) } + let(:request) { api("/users/#{user.id}/emails/#{email.id}", admin, admin_mode: true) } end it 'returns 404 error if user not found' do user.emails << email - delete api("/users/0/emails/#{email.id}", admin) + delete api("/users/0/emails/#{email.id}", admin, admin_mode: true) expect(response).to have_gitlab_http_status(:not_found) 
expect(json_response['message']).to eq('404 User Not Found') end it 'returns 404 error if email not foud' do - delete api("/users/#{user.id}/emails/#{non_existing_record_id}", admin) + delete api("/users/#{user.id}/emails/#{non_existing_record_id}", admin, admin_mode: true) expect(response).to have_gitlab_http_status(:not_found) expect(json_response['message']).to eq('404 Email Not Found') end @@ -2618,7 +2618,7 @@ RSpec.describe API::Users, feature_category: :user_profile do let_it_be(:issue) { create(:issue, author: user) } it "deletes user", :sidekiq_inline do - perform_enqueued_jobs { delete api("/users/#{user.id}", admin) } + perform_enqueued_jobs { delete api("/users/#{user.id}", admin, admin_mode: true) } expect(response).to have_gitlab_http_status(:no_content) expect(Users::GhostUserMigration.where(user: user, @@ -2630,14 +2630,14 @@ RSpec.describe API::Users, feature_category: :user_profile do context "hard delete disabled" do it "does not delete user" do - perform_enqueued_jobs { delete api("/users/#{user.id}", admin) } + perform_enqueued_jobs { delete api("/users/#{user.id}", admin, admin_mode: true) } expect(response).to have_gitlab_http_status(:conflict) end end context "hard delete enabled" do it "delete user and group", :sidekiq_inline do - perform_enqueued_jobs { delete api("/users/#{user.id}?hard_delete=true", admin) } + perform_enqueued_jobs { delete api("/users/#{user.id}?hard_delete=true", admin, admin_mode: true) } expect(response).to have_gitlab_http_status(:no_content) expect(Group.exists?(group.id)).to be_falsy end @@ -2652,7 +2652,7 @@ RSpec.describe API::Users, feature_category: :user_profile do end it "delete only user", :sidekiq_inline do - perform_enqueued_jobs { delete api("/users/#{user.id}?hard_delete=true", admin) } + perform_enqueued_jobs { delete api("/users/#{user.id}?hard_delete=true", admin, admin_mode: true) } expect(response).to have_gitlab_http_status(:no_content) expect(Group.exists?(subgroup.id)).to be_truthy end @@ 
-2661,7 +2661,7 @@ RSpec.describe API::Users, feature_category: :user_profile do end it_behaves_like '412 response' do - let(:request) { api("/users/#{user.id}", admin) } + let(:request) { api("/users/#{user.id}", admin, admin_mode: true) } end it "does not delete for unauthenticated user" do @@ -2675,20 +2675,20 @@ RSpec.describe API::Users, feature_category: :user_profile do end it "returns 404 for non-existing user" do - perform_enqueued_jobs { delete api("/users/0", admin) } + perform_enqueued_jobs { delete api("/users/0", admin, admin_mode: true) } expect(response).to have_gitlab_http_status(:not_found) expect(json_response['message']).to eq('404 User Not Found') end it "returns a 404 for invalid ID" do - perform_enqueued_jobs { delete api("/users/ASDF", admin) } + perform_enqueued_jobs { delete api("/users/ASDF", admin, admin_mode: true) } expect(response).to have_gitlab_http_status(:not_found) end context "hard delete disabled" do it "moves contributions to the ghost user", :sidekiq_might_not_need_inline do - perform_enqueued_jobs { delete api("/users/#{user.id}", admin) } + perform_enqueued_jobs { delete api("/users/#{user.id}", admin, admin_mode: true) } expect(response).to have_gitlab_http_status(:no_content) expect(issue.reload).to be_persisted @@ -2700,7 +2700,7 @@ RSpec.describe API::Users, feature_category: :user_profile do context "hard delete enabled" do it "removes contributions", :sidekiq_might_not_need_inline do - perform_enqueued_jobs { delete api("/users/#{user.id}?hard_delete=true", admin) } + perform_enqueued_jobs { delete api("/users/#{user.id}?hard_delete=true", admin, admin_mode: true) } expect(response).to have_gitlab_http_status(:no_content) expect(Users::GhostUserMigration.where(user: user, @@ -2740,7 +2740,7 @@ RSpec.describe API::Users, feature_category: :user_profile do end context 'with admin' do - let(:admin_personal_access_token) { create(:personal_access_token, user: admin).token } + let(:admin_personal_access_token) { 
create(:personal_access_token, :admin_mode, user: admin).token } context 'with personal access token' do it 'returns 403 without private token when sudo defined' do @@ -2881,13 +2881,13 @@ RSpec.describe API::Users, feature_category: :user_profile do user.keys << key admin - get api("/user/keys/#{key.id}", admin) + get api("/user/keys/#{key.id}", admin, admin_mode: true) expect(response).to have_gitlab_http_status(:not_found) expect(json_response['message']).to eq('404 Key Not Found') end it "returns 404 for invalid ID" do - get api("/users/keys/ASDF", admin) + get api("/users/keys/ASDF", admin, admin_mode: true) expect(response).to have_gitlab_http_status(:not_found) end @@ -2901,7 +2901,7 @@ RSpec.describe API::Users, feature_category: :user_profile do end describe "POST /user/keys" do - it "creates ssh key", :aggregate_failures do + it "creates ssh key" do key_attrs = attributes_for(:key, usage_type: :signing) expect do @@ -2981,7 +2981,7 @@ RSpec.describe API::Users, feature_category: :user_profile do end it "returns a 404 for invalid ID" do - delete api("/users/keys/ASDF", admin) + delete api("/users/keys/ASDF", admin, admin_mode: true) expect(response).to have_gitlab_http_status(:not_found) end @@ -3037,14 +3037,14 @@ RSpec.describe API::Users, feature_category: :user_profile do it "returns 404 error if admin accesses user's GPG key" do user.gpg_keys << gpg_key - get api("/user/gpg_keys/#{gpg_key.id}", admin) + get api("/user/gpg_keys/#{gpg_key.id}", admin, admin_mode: true) expect(response).to have_gitlab_http_status(:not_found) expect(json_response['message']).to eq('404 GPG Key Not Found') end it 'returns 404 for invalid ID' do - get api('/users/gpg_keys/ASDF', admin) + get api('/users/gpg_keys/ASDF', admin, admin_mode: true) expect(response).to have_gitlab_http_status(:not_found) end @@ -3109,7 +3109,7 @@ RSpec.describe API::Users, feature_category: :user_profile do end it 'returns a 404 for invalid ID' do - post api('/users/gpg_keys/ASDF/revoke', admin) 
+ post api('/users/gpg_keys/ASDF/revoke', admin, admin_mode: true) expect(response).to have_gitlab_http_status(:not_found) end @@ -3142,7 +3142,7 @@ RSpec.describe API::Users, feature_category: :user_profile do end it 'returns a 404 for invalid ID' do - delete api('/users/gpg_keys/ASDF', admin) + delete api('/users/gpg_keys/ASDF', admin, admin_mode: true) expect(response).to have_gitlab_http_status(:not_found) end @@ -3197,13 +3197,13 @@ RSpec.describe API::Users, feature_category: :user_profile do user.emails << email admin - get api("/user/emails/#{email.id}", admin) + get api("/user/emails/#{email.id}", admin, admin_mode: true) expect(response).to have_gitlab_http_status(:not_found) expect(json_response['message']).to eq('404 Email Not Found') end it "returns 404 for invalid ID" do - get api("/users/emails/ASDF", admin) + get api("/users/emails/ASDF", admin, admin_mode: true) expect(response).to have_gitlab_http_status(:not_found) end @@ -3268,7 +3268,7 @@ RSpec.describe API::Users, feature_category: :user_profile do end it "returns 400 for invalid ID" do - delete api("/user/emails/ASDF", admin) + delete api("/user/emails/ASDF", admin, admin_mode: true) expect(response).to have_gitlab_http_status(:bad_request) end @@ -3283,7 +3283,7 @@ RSpec.describe API::Users, feature_category: :user_profile do end describe 'POST /users/:id/activate' do - subject(:activate) { post api("/users/#{user_id}/activate", api_user) } + subject(:activate) { post api("/users/#{user_id}/activate", api_user, admin_mode: true) } let(:user_id) { user.id } @@ -3363,7 +3363,7 @@ RSpec.describe API::Users, feature_category: :user_profile do end describe 'POST /users/:id/deactivate' do - subject(:deactivate) { post api("/users/#{user_id}/deactivate", api_user) } + subject(:deactivate) { post api("/users/#{user_id}/deactivate", api_user, admin_mode: true) } let(:user_id) { user.id } @@ -3480,7 +3480,7 @@ RSpec.describe API::Users, feature_category: :user_profile do end describe 'POST 
/users/:id/approve' do - subject(:approve) { post api("/users/#{user_id}/approve", api_user) } + subject(:approve) { post api("/users/#{user_id}/approve", api_user, admin_mode: true) } context 'performed by a non-admin user' do let(:api_user) { user } @@ -3558,8 +3558,8 @@ RSpec.describe API::Users, feature_category: :user_profile do end end - describe 'POST /users/:id/reject', :aggregate_failures do - subject(:reject) { post api("/users/#{user_id}/reject", api_user) } + describe 'POST /users/:id/reject' do + subject(:reject) { post api("/users/#{user_id}/reject", api_user, admin_mode: true) } shared_examples 'returns 409' do it 'returns 409' do @@ -3648,9 +3648,9 @@ RSpec.describe API::Users, feature_category: :user_profile do end end - describe 'POST /users/:id/block', :aggregate_failures do + describe 'POST /users/:id/block' do context 'when admin' do - subject(:block_user) { post api("/users/#{user_id}/block", admin) } + subject(:block_user) { post api("/users/#{user_id}/block", admin, admin_mode: true) } context 'with an existing user' do let(:user_id) { user.id } @@ -3738,9 +3738,9 @@ RSpec.describe API::Users, feature_category: :user_profile do end end - describe 'POST /users/:id/unblock', :aggregate_failures do + describe 'POST /users/:id/unblock' do context 'when admin' do - subject(:unblock_user) { post api("/users/#{user_id}/unblock", admin) } + subject(:unblock_user) { post api("/users/#{user_id}/unblock", admin, admin_mode: true) } context 'with an existing user' do let(:user_id) { user.id } @@ -3824,9 +3824,9 @@ RSpec.describe API::Users, feature_category: :user_profile do end end - describe 'POST /users/:id/ban', :aggregate_failures do + describe 'POST /users/:id/ban' do context 'when admin' do - subject(:ban_user) { post api("/users/#{user_id}/ban", admin) } + subject(:ban_user) { post api("/users/#{user_id}/ban", admin, admin_mode: true) } context 'with an active user' do let(:user_id) { user.id } @@ -3906,9 +3906,9 @@ RSpec.describe API::Users, 
feature_category: :user_profile do end end - describe 'POST /users/:id/unban', :aggregate_failures do + describe 'POST /users/:id/unban' do context 'when admin' do - subject(:unban_user) { post api("/users/#{user_id}/unban", admin) } + subject(:unban_user) { post api("/users/#{user_id}/unban", admin, admin_mode: true) } context 'with a banned user' do let(:user_id) { banned_user.id } @@ -4008,8 +4008,10 @@ RSpec.describe API::Users, feature_category: :user_profile do context 'requested by admin user' do let(:requesting_user) { create(:user, :admin) } + subject { get api("/users/#{user.id}/memberships", requesting_user, admin_mode: true) } + it "responses successfully" do - get api("/users/#{user.id}/memberships", requesting_user) + subject aggregate_failures 'expect successful response including groups and projects' do expect(response).to have_gitlab_http_status(:ok) @@ -4024,22 +4026,23 @@ RSpec.describe API::Users, feature_category: :user_profile do it 'does not submit N+1 DB queries' do # Avoid setup queries - get api("/users/#{user.id}/memberships", requesting_user) + subject + expect(response).to have_gitlab_http_status(:ok) control = ActiveRecord::QueryRecorder.new do - get api("/users/#{user.id}/memberships", requesting_user) + subject end create_list(:project, 5).map { |project| project.add_guest(user) } expect do - get api("/users/#{user.id}/memberships", requesting_user) + subject end.not_to exceed_query_limit(control) end context 'with type filter' do it "only returns project memberships" do - get api("/users/#{user.id}/memberships?type=Project", requesting_user) + get api("/users/#{user.id}/memberships?type=Project", requesting_user, admin_mode: true) aggregate_failures do expect(json_response).to contain_exactly(a_hash_including('source_type' => 'Project')) @@ -4048,7 +4051,7 @@ RSpec.describe API::Users, feature_category: :user_profile do end it "only returns group memberships" do - get api("/users/#{user.id}/memberships?type=Namespace", 
requesting_user) + get api("/users/#{user.id}/memberships?type=Namespace", requesting_user, admin_mode: true) aggregate_failures do expect(json_response).to contain_exactly(a_hash_including('source_type' => 'Namespace')) @@ -4057,7 +4060,7 @@ RSpec.describe API::Users, feature_category: :user_profile do end it "recognizes unsupported types" do - get api("/users/#{user.id}/memberships?type=foo", requesting_user) + get api("/users/#{user.id}/memberships?type=foo", requesting_user, admin_mode: true) expect(response).to have_gitlab_http_status(:bad_request) end @@ -4079,7 +4082,7 @@ RSpec.describe API::Users, feature_category: :user_profile do context 'as admin' do it 'returns the activities from the last 6 months' do - get api("/user/activities", admin) + get api("/user/activities", admin, admin_mode: true) expect(response).to include_pagination_headers expect(json_response.size).to eq(1) @@ -4093,7 +4096,7 @@ RSpec.describe API::Users, feature_category: :user_profile do context 'passing a :from parameter' do it 'returns the activities from the given date' do - get api("/user/activities?from=2000-1-1", admin) + get api("/user/activities?from=2000-1-1", admin, admin_mode: true) expect(response).to include_pagination_headers expect(json_response.size).to eq(2) @@ -4276,14 +4279,14 @@ RSpec.describe API::Users, feature_category: :user_profile do let(:scopes) { %w(api read_user) } it 'returns error if required attributes are missing' do - post api("/users/#{user.id}/personal_access_tokens", admin) + post api("/users/#{user.id}/personal_access_tokens", admin, admin_mode: true) expect(response).to have_gitlab_http_status(:bad_request) expect(json_response['error']).to eq('name is missing, scopes is missing, scopes does not have a valid value') end it 'returns a 404 error if user not found' do - post api("/users/#{non_existing_record_id}/personal_access_tokens", admin), + post api("/users/#{non_existing_record_id}/personal_access_tokens", admin, admin_mode: true), params: { 
name: name, scopes: scopes, @@ -4319,7 +4322,7 @@ RSpec.describe API::Users, feature_category: :user_profile do end it 'creates a personal access token when authenticated as admin' do - post api("/users/#{user.id}/personal_access_tokens", admin), + post api("/users/#{user.id}/personal_access_tokens", admin, admin_mode: true), params: { name: name, expires_at: expires_at, @@ -4338,7 +4341,7 @@ RSpec.describe API::Users, feature_category: :user_profile do end context 'when an error is thrown by the model' do - let!(:admin_personal_access_token) { create(:personal_access_token, user: admin) } + let!(:admin_personal_access_token) { create(:personal_access_token, :admin_mode, user: admin) } let(:error_message) { 'error message' } before do @@ -4372,7 +4375,7 @@ RSpec.describe API::Users, feature_category: :user_profile do let_it_be(:revoked_impersonation_token) { create(:personal_access_token, :impersonation, :revoked, user: user) } it 'returns a 404 error if user not found' do - get api("/users/#{non_existing_record_id}/impersonation_tokens", admin) + get api("/users/#{non_existing_record_id}/impersonation_tokens", admin, admin_mode: true) expect(response).to have_gitlab_http_status(:not_found) expect(json_response['message']).to eq('404 User Not Found') @@ -4386,7 +4389,7 @@ RSpec.describe API::Users, feature_category: :user_profile do end it 'returns an array of all impersonated tokens' do - get api("/users/#{user.id}/impersonation_tokens", admin) + get api("/users/#{user.id}/impersonation_tokens", admin, admin_mode: true) expect(response).to have_gitlab_http_status(:ok) expect(response).to include_pagination_headers @@ -4395,7 +4398,7 @@ RSpec.describe API::Users, feature_category: :user_profile do end it 'returns an array of active impersonation tokens if state active' do - get api("/users/#{user.id}/impersonation_tokens?state=active", admin) + get api("/users/#{user.id}/impersonation_tokens?state=active", admin, admin_mode: true) expect(response).to 
have_gitlab_http_status(:ok) expect(response).to include_pagination_headers @@ -4405,7 +4408,7 @@ RSpec.describe API::Users, feature_category: :user_profile do end it 'returns an array of inactive personal access tokens if active is set to false' do - get api("/users/#{user.id}/impersonation_tokens?state=inactive", admin) + get api("/users/#{user.id}/impersonation_tokens?state=inactive", admin, admin_mode: true) expect(response).to have_gitlab_http_status(:ok) expect(json_response).to be_an Array @@ -4421,14 +4424,14 @@ RSpec.describe API::Users, feature_category: :user_profile do let(:impersonation) { true } it 'returns validation error if impersonation token misses some attributes' do - post api("/users/#{user.id}/impersonation_tokens", admin) + post api("/users/#{user.id}/impersonation_tokens", admin, admin_mode: true) expect(response).to have_gitlab_http_status(:bad_request) expect(json_response['error']).to eq('name is missing') end it 'returns a 404 error if user not found' do - post api("/users/#{non_existing_record_id}/impersonation_tokens", admin), + post api("/users/#{non_existing_record_id}/impersonation_tokens", admin, admin_mode: true), params: { name: name, expires_at: expires_at @@ -4450,7 +4453,7 @@ RSpec.describe API::Users, feature_category: :user_profile do end it 'creates a impersonation token' do - post api("/users/#{user.id}/impersonation_tokens", admin), + post api("/users/#{user.id}/impersonation_tokens", admin, admin_mode: true), params: { name: name, expires_at: expires_at, @@ -4476,21 +4479,21 @@ RSpec.describe API::Users, feature_category: :user_profile do let_it_be(:impersonation_token) { create(:personal_access_token, :impersonation, user: user) } it 'returns 404 error if user not found' do - get api("/users/#{non_existing_record_id}/impersonation_tokens/1", admin) + get api("/users/#{non_existing_record_id}/impersonation_tokens/1", admin, admin_mode: true) expect(response).to have_gitlab_http_status(:not_found) 
expect(json_response['message']).to eq('404 User Not Found') end it 'returns a 404 error if impersonation token not found' do - get api("/users/#{user.id}/impersonation_tokens/#{non_existing_record_id}", admin) + get api("/users/#{user.id}/impersonation_tokens/#{non_existing_record_id}", admin, admin_mode: true) expect(response).to have_gitlab_http_status(:not_found) expect(json_response['message']).to eq('404 Impersonation Token Not Found') end it 'returns a 404 error if token is not impersonation token' do - get api("/users/#{user.id}/impersonation_tokens/#{personal_access_token.id}", admin) + get api("/users/#{user.id}/impersonation_tokens/#{personal_access_token.id}", admin, admin_mode: true) expect(response).to have_gitlab_http_status(:not_found) expect(json_response['message']).to eq('404 Impersonation Token Not Found') @@ -4504,7 +4507,7 @@ RSpec.describe API::Users, feature_category: :user_profile do end it 'returns an impersonation token' do - get api("/users/#{user.id}/impersonation_tokens/#{impersonation_token.id}", admin) + get api("/users/#{user.id}/impersonation_tokens/#{impersonation_token.id}", admin, admin_mode: true) expect(response).to have_gitlab_http_status(:ok) expect(json_response['token']).not_to be_present @@ -4517,21 +4520,21 @@ RSpec.describe API::Users, feature_category: :user_profile do let_it_be(:impersonation_token) { create(:personal_access_token, :impersonation, user: user) } it 'returns a 404 error if user not found' do - delete api("/users/#{non_existing_record_id}/impersonation_tokens/1", admin) + delete api("/users/#{non_existing_record_id}/impersonation_tokens/1", admin, admin_mode: true) expect(response).to have_gitlab_http_status(:not_found) expect(json_response['message']).to eq('404 User Not Found') end it 'returns a 404 error if impersonation token not found' do - delete api("/users/#{user.id}/impersonation_tokens/#{non_existing_record_id}", admin) + delete 
api("/users/#{user.id}/impersonation_tokens/#{non_existing_record_id}", admin, admin_mode: true) expect(response).to have_gitlab_http_status(:not_found) expect(json_response['message']).to eq('404 Impersonation Token Not Found') end it 'returns a 404 error if token is not impersonation token' do - delete api("/users/#{user.id}/impersonation_tokens/#{personal_access_token.id}", admin) + delete api("/users/#{user.id}/impersonation_tokens/#{personal_access_token.id}", admin, admin_mode: true) expect(response).to have_gitlab_http_status(:not_found) expect(json_response['message']).to eq('404 Impersonation Token Not Found') @@ -4545,11 +4548,11 @@ RSpec.describe API::Users, feature_category: :user_profile do end it_behaves_like '412 response' do - let(:request) { api("/users/#{user.id}/impersonation_tokens/#{impersonation_token.id}", admin) } + let(:request) { api("/users/#{user.id}/impersonation_tokens/#{impersonation_token.id}", admin, admin_mode: true) } end it 'revokes a impersonation token' do - delete api("/users/#{user.id}/impersonation_tokens/#{impersonation_token.id}", admin) + delete api("/users/#{user.id}/impersonation_tokens/#{impersonation_token.id}", admin, admin_mode: true) expect(response).to have_gitlab_http_status(:no_content) expect(impersonation_token.revoked).to be_falsey @@ -4607,7 +4610,7 @@ RSpec.describe API::Users, feature_category: :user_profile do context 'as an admin user' do context 'with invalid user id' do it 'returns 404 User Not Found' do - get api("/users/#{non_existing_record_id}/associations_count", admin) + get api("/users/#{non_existing_record_id}/associations_count", admin, admin_mode: true) expect(response).to have_gitlab_http_status(:not_found) end @@ -4615,7 +4618,7 @@ RSpec.describe API::Users, feature_category: :user_profile do context 'with valid user id' do it 'returns valid JSON response' do - get api("/users/#{user.id}/associations_count", admin) + get api("/users/#{user.id}/associations_count", admin, admin_mode: true) 
expect(response).to have_gitlab_http_status(:ok) expect(json_response).to be_a Hash @@ -4629,4 +4632,168 @@ RSpec.describe API::Users, feature_category: :user_profile do let(:attributable) { user } let(:other_attributable) { admin } end + + describe 'POST /user/runners', feature_category: :runner_fleet do + subject(:request) { post api('/user/runners', current_user, **post_args), params: runner_attrs } + + let_it_be(:group_owner) { create(:user) } + let_it_be(:group) { create(:group) } + let_it_be(:project) { create(:project, namespace: group) } + + let(:post_args) { { admin_mode: true } } + let(:runner_attrs) { { runner_type: 'instance_type' } } + + before do + group.add_owner(group_owner) + end + + shared_context 'returns forbidden when user does not have sufficient permissions' do + let(:current_user) { admin } + let(:post_args) { { admin_mode: false } } + + it 'does not create a runner' do + expect do + request + + expect(response).to have_gitlab_http_status(:forbidden) + end.not_to change { Ci::Runner.count } + end + end + + shared_examples 'creates a runner' do + it 'creates a runner' do + expect do + request + + expect(response).to have_gitlab_http_status(:created) + end.to change { Ci::Runner.count }.by(1) + end + end + + shared_examples 'fails to create runner with :bad_request' do + it 'does not create runner' do + expect do + request + + expect(response).to have_gitlab_http_status(:bad_request) + expect(json_response['message']).to include(expected_error) + end.not_to change { Ci::Runner.count } + end + end + + context 'when runner_type is :instance_type' do + let(:runner_attrs) { { runner_type: 'instance_type' } } + + context 'when user has sufficient permissions' do + let(:current_user) { admin } + + it_behaves_like 'creates a runner' + end + + it_behaves_like 'returns forbidden when user does not have sufficient permissions' + + context 'when model validation fails' do + let(:runner_attrs) { { runner_type: 'instance_type', run_untagged: false, 
tag_list: [] } } + let(:current_user) { admin } + + it_behaves_like 'fails to create runner with :bad_request' do + let(:expected_error) { 'Tags list can not be empty' } + end + end + end + + context 'when runner_type is :group_type' do + let(:post_args) { {} } + + context 'when group_id is specified' do + let(:runner_attrs) { { runner_type: 'group_type', group_id: group.id } } + + context 'when user has sufficient permissions' do + let(:current_user) { group_owner } + + it_behaves_like 'creates a runner' + end + + it_behaves_like 'returns forbidden when user does not have sufficient permissions' + end + + context 'when group_id is not specified' do + let(:runner_attrs) { { runner_type: 'group_type' } } + let(:current_user) { group_owner } + + it 'fails to create runner with :bad_request' do + expect do + request + + expect(response).to have_gitlab_http_status(:bad_request) + expect(json_response['error']).to include('group_id is missing') + end.not_to change { Ci::Runner.count } + end + end + end + + context 'when runner_type is :project_type' do + let(:post_args) { {} } + + context 'when project_id is specified' do + let(:runner_attrs) { { runner_type: 'project_type', project_id: project.id } } + + context 'when user has sufficient permissions' do + let(:current_user) { group_owner } + + it_behaves_like 'creates a runner' + end + + it_behaves_like 'returns forbidden when user does not have sufficient permissions' + end + + context 'when project_id is not specified' do + let(:runner_attrs) { { runner_type: 'project_type' } } + let(:current_user) { group_owner } + + it 'fails to create runner with :bad_request' do + expect do + request + + expect(response).to have_gitlab_http_status(:bad_request) + expect(json_response['error']).to include('project_id is missing') + end.not_to change { Ci::Runner.count } + end + end + end + + context 'with missing runner_type' do + let(:runner_attrs) { {} } + let(:current_user) { admin } + + it 'fails to create runner with 
:bad_request' do + expect do + request + + expect(response).to have_gitlab_http_status(:bad_request) + expect(json_response['error']).to eq('runner_type is missing, runner_type does not have a valid value') + end.not_to change { Ci::Runner.count } + end + end + + context 'with unknown runner_type' do + let(:runner_attrs) { { runner_type: 'unknown' } } + let(:current_user) { admin } + + it 'fails to create runner with :bad_request' do + expect do + request + + expect(response).to have_gitlab_http_status(:bad_request) + expect(json_response['error']).to eq('runner_type does not have a valid value') + end.not_to change { Ci::Runner.count } + end + end + + it 'returns a 401 error if unauthorized' do + post api('/user/runners'), params: runner_attrs + + expect(response).to have_gitlab_http_status(:unauthorized) + end + end end diff --git a/spec/requests/api/v3/github_spec.rb b/spec/requests/api/v3/github_spec.rb index 0b8fac5c55c..4a7b552293c 100644 --- a/spec/requests/api/v3/github_spec.rb +++ b/spec/requests/api/v3/github_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe API::V3::Github, feature_category: :integrations do +RSpec.describe API::V3::Github, :aggregate_failures, feature_category: :integrations do let_it_be(:user) { create(:user) } let_it_be(:unauthorized_user) { create(:user) } let_it_be(:admin) { create(:user, :admin) } @@ -300,7 +300,7 @@ RSpec.describe API::V3::Github, feature_category: :integrations do context 'when instance admin' do it 'returns the requested merge request in github format' do - jira_get v3_api("/repos/#{project.namespace.path}/#{project.path}/pulls/#{merge_request.id}", admin) + jira_get v3_api("/repos/#{project.namespace.path}/#{project.path}/pulls/#{merge_request.id}", admin, admin_mode: true) expect(response).to have_gitlab_http_status(:ok) expect(response).to match_response_schema('entities/github/pull_request') @@ -312,8 +312,8 @@ RSpec.describe API::V3::Github, feature_category: :integrations do describe 'GET 
/users/:namespace/repos' do let(:group) { create(:group, name: 'foo') } - def expect_project_under_namespace(projects, namespace, user) - jira_get v3_api("/users/#{namespace.path}/repos", user) + def expect_project_under_namespace(projects, namespace, user, admin_mode = false) + jira_get v3_api("/users/#{namespace.path}/repos", user, admin_mode: admin_mode) expect(response).to have_gitlab_http_status(:ok) expect(response).to include_pagination_headers @@ -343,7 +343,7 @@ RSpec.describe API::V3::Github, feature_category: :integrations do let(:user) { create(:user, :admin) } it 'returns an array of projects belonging to group' do - expect_project_under_namespace([project, project2], group, user) + expect_project_under_namespace([project, project2], group, user, true) end context 'with a private group' do @@ -351,7 +351,7 @@ RSpec.describe API::V3::Github, feature_category: :integrations do let!(:project2) { create(:project, :private, group: group) } it 'returns an array of projects belonging to group' do - expect_project_under_namespace([project, project2], group, user) + expect_project_under_namespace([project, project2], group, user, true) end end end @@ -473,7 +473,7 @@ RSpec.describe API::V3::Github, feature_category: :integrations do expect(response).to have_gitlab_http_status(:ok) end - context 'when the project has no repository', :aggregate_failures do + context 'when the project has no repository' do let_it_be(:project) { create(:project, creator: user) } it 'returns an empty collection response' do @@ -516,7 +516,7 @@ RSpec.describe API::V3::Github, feature_category: :integrations do end context 'authenticated' do - it 'returns commit with github format', :aggregate_failures do + it 'returns commit with github format' do call_api expect(response).to have_gitlab_http_status(:ok) @@ -552,7 +552,7 @@ RSpec.describe API::V3::Github, feature_category: :integrations do .and_call_original end - it 'handles the error, logs it, and returns empty diff files', 
:aggregate_failures do + it 'handles the error, logs it, and returns empty diff files' do allow(Gitlab::GitalyClient).to receive(:call) .with(*commit_diff_args) .and_raise(GRPC::DeadlineExceeded) @@ -567,7 +567,7 @@ RSpec.describe API::V3::Github, feature_category: :integrations do expect(response_diff_files(response)).to be_blank end - it 'only calls Gitaly once for all attempts within a period of time', :aggregate_failures do + it 'only calls Gitaly once for all attempts within a period of time' do expect(Gitlab::GitalyClient).to receive(:call) .with(*commit_diff_args) .once # <- once @@ -581,7 +581,7 @@ RSpec.describe API::V3::Github, feature_category: :integrations do end end - it 'calls Gitaly again after a period of time', :aggregate_failures do + it 'calls Gitaly again after a period of time' do expect(Gitlab::GitalyClient).to receive(:call) .with(*commit_diff_args) .twice # <- twice @@ -648,13 +648,14 @@ RSpec.describe API::V3::Github, feature_category: :integrations do get path, headers: { 'User-Agent' => user_agent } end - def v3_api(path, user = nil, personal_access_token: nil, oauth_access_token: nil) + def v3_api(path, user = nil, personal_access_token: nil, oauth_access_token: nil, admin_mode: false) api( path, user, version: 'v3', personal_access_token: personal_access_token, - oauth_access_token: oauth_access_token + oauth_access_token: oauth_access_token, + admin_mode: admin_mode ) end end diff --git a/spec/requests/git_http_spec.rb b/spec/requests/git_http_spec.rb index d3d1a2a6cd0..5b50e8a1021 100644 --- a/spec/requests/git_http_spec.rb +++ b/spec/requests/git_http_spec.rb @@ -236,6 +236,11 @@ RSpec.describe 'Git HTTP requests', feature_category: :source_code_management do allow(::Users::ActivityService).to receive(:new).and_return(activity_service) allow(activity_service).to receive(:execute) + # During project creation, we need to track the project wiki + # repository. So it is over the query limit threshold, and we + # have to adjust it. 
+ allow(Gitlab::QueryLimiting::Transaction).to receive(:threshold).and_return(101) + expect do upload(path, user: user.username, password: user.password) do |response| expect(response).to have_gitlab_http_status(:ok) diff --git a/spec/requests/groups/usage_quotas_controller_spec.rb b/spec/requests/groups/usage_quotas_controller_spec.rb index a329398aab3..67aef23704a 100644 --- a/spec/requests/groups/usage_quotas_controller_spec.rb +++ b/spec/requests/groups/usage_quotas_controller_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe Groups::UsageQuotasController, :with_license, feature_category: :subscription_cost_management do +RSpec.describe Groups::UsageQuotasController, :with_license, feature_category: :consumables_cost_management do let_it_be(:group) { create(:group) } let_it_be(:subgroup) { create(:group, parent: group) } let_it_be(:user) { create(:user) } diff --git a/spec/requests/import/github_controller_spec.rb b/spec/requests/import/github_controller_spec.rb new file mode 100644 index 00000000000..5ac97e3d330 --- /dev/null +++ b/spec/requests/import/github_controller_spec.rb @@ -0,0 +1,40 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe Import::GithubController, feature_category: :importers do + describe 'GET details' do + subject { get details_import_github_path } + + let_it_be(:user) { create(:user) } + + before do + login_as(user) + end + + context 'with feature enabled' do + before do + stub_feature_flags(import_details_page: true) + + subject + end + + it 'responds with a 200 and shows the template' do + expect(response).to have_gitlab_http_status(:ok) + expect(response).to render_template(:details) + end + end + + context 'with feature disabled' do + before do + stub_feature_flags(import_details_page: false) + + subject + end + + it 'responds with a 404' do + expect(response).to have_gitlab_http_status(:not_found) + end + end + end +end diff --git a/spec/requests/jwks_controller_spec.rb 
b/spec/requests/jwks_controller_spec.rb index c6f5f7c6bea..f756c1758e4 100644 --- a/spec/requests/jwks_controller_spec.rb +++ b/spec/requests/jwks_controller_spec.rb @@ -35,6 +35,15 @@ RSpec.describe JwksController, feature_category: :system_access do expect(ids).to contain_exactly(ci_jwk['kid'], oidc_jwk['kid']) end + it 'includes the OIDC signing key ID' do + get jwks_url + + expect(response).to have_gitlab_http_status(:ok) + + ids = json_response['keys'].map { |jwk| jwk['kid'] } + expect(ids).to include(Doorkeeper::OpenidConnect.signing_key_normalized.symbolize_keys[:kid]) + end + it 'does not leak private key data' do get jwks_url diff --git a/spec/requests/openid_connect_spec.rb b/spec/requests/openid_connect_spec.rb index 2e158190734..82f972e7f94 100644 --- a/spec/requests/openid_connect_spec.rb +++ b/spec/requests/openid_connect_spec.rb @@ -276,7 +276,7 @@ RSpec.describe 'OpenID Connect requests', feature_category: :system_access do expect(response).to have_gitlab_http_status(:ok) expect(json_response['issuer']).to eq('http://localhost') expect(json_response['jwks_uri']).to eq('http://www.example.com/oauth/discovery/keys') - expect(json_response['scopes_supported']).to match_array %w[admin_mode api read_user read_api read_repository write_repository sudo openid profile email] + expect(json_response['scopes_supported']).to match_array %w[admin_mode api read_user read_api read_repository write_repository sudo openid profile email read_observability write_observability] end context 'with a cross-origin request' do @@ -286,7 +286,7 @@ RSpec.describe 'OpenID Connect requests', feature_category: :system_access do expect(response).to have_gitlab_http_status(:ok) expect(json_response['issuer']).to eq('http://localhost') expect(json_response['jwks_uri']).to eq('http://www.example.com/oauth/discovery/keys') - expect(json_response['scopes_supported']).to match_array %w[admin_mode api read_user read_api read_repository write_repository sudo openid profile email] + 
expect(json_response['scopes_supported']).to match_array %w[admin_mode api read_user read_api read_repository write_repository sudo openid profile email read_observability write_observability] end it_behaves_like 'cross-origin GET request' diff --git a/spec/requests/profiles/comment_templates_controller_spec.rb b/spec/requests/profiles/comment_templates_controller_spec.rb new file mode 100644 index 00000000000..cdbfbb0a346 --- /dev/null +++ b/spec/requests/profiles/comment_templates_controller_spec.rb @@ -0,0 +1,35 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe Profiles::CommentTemplatesController, feature_category: :user_profile do + let_it_be(:user) { create(:user) } + + before do + sign_in(user) + end + + describe 'GET #index' do + describe 'feature flag disabled' do + before do + stub_feature_flags(saved_replies: false) + + get '/-/profile/comment_templates' + end + + it { expect(response).to have_gitlab_http_status(:not_found) } + end + + describe 'feature flag enabled' do + before do + get '/-/profile/comment_templates' + end + + it { expect(response).to have_gitlab_http_status(:ok) } + + it 'sets hide search settings ivar' do + expect(assigns(:hide_search_settings)).to eq(true) + end + end + end +end diff --git a/spec/requests/profiles/saved_replies_controller_spec.rb b/spec/requests/profiles/saved_replies_controller_spec.rb deleted file mode 100644 index 27a961a201f..00000000000 --- a/spec/requests/profiles/saved_replies_controller_spec.rb +++ /dev/null @@ -1,35 +0,0 @@ -# frozen_string_literal: true - -require 'spec_helper' - -RSpec.describe Profiles::SavedRepliesController, feature_category: :user_profile do - let_it_be(:user) { create(:user) } - - before do - sign_in(user) - end - - describe 'GET #index' do - describe 'feature flag disabled' do - before do - stub_feature_flags(saved_replies: false) - - get '/-/profile/saved_replies' - end - - it { expect(response).to have_gitlab_http_status(:not_found) } - end - - describe 
'feature flag enabled' do - before do - get '/-/profile/saved_replies' - end - - it { expect(response).to have_gitlab_http_status(:ok) } - - it 'sets hide search settings ivar' do - expect(assigns(:hide_search_settings)).to eq(true) - end - end - end -end diff --git a/spec/requests/projects/cluster_agents_controller_spec.rb b/spec/requests/projects/cluster_agents_controller_spec.rb index d7c791fa0c1..643160ad9f3 100644 --- a/spec/requests/projects/cluster_agents_controller_spec.rb +++ b/spec/requests/projects/cluster_agents_controller_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe Projects::ClusterAgentsController, feature_category: :kubernetes_management do +RSpec.describe Projects::ClusterAgentsController, feature_category: :deployment_management do let_it_be(:cluster_agent) { create(:cluster_agent) } let(:project) { cluster_agent.project } diff --git a/spec/requests/projects/google_cloud/configuration_controller_spec.rb b/spec/requests/projects/google_cloud/configuration_controller_spec.rb index 1aa44d1a49a..b807ff7930e 100644 --- a/spec/requests/projects/google_cloud/configuration_controller_spec.rb +++ b/spec/requests/projects/google_cloud/configuration_controller_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe Projects::GoogleCloud::ConfigurationController, feature_category: :kubernetes_management do +RSpec.describe Projects::GoogleCloud::ConfigurationController, feature_category: :deployment_management do let_it_be(:project) { create(:project, :public) } let_it_be(:url) { project_google_cloud_configuration_path(project) } diff --git a/spec/requests/projects/google_cloud/databases_controller_spec.rb b/spec/requests/projects/google_cloud/databases_controller_spec.rb index 98e83610600..fa978a3921f 100644 --- a/spec/requests/projects/google_cloud/databases_controller_spec.rb +++ b/spec/requests/projects/google_cloud/databases_controller_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe 
Projects::GoogleCloud::DatabasesController, :snowplow, feature_category: :kubernetes_management do +RSpec.describe Projects::GoogleCloud::DatabasesController, :snowplow, feature_category: :deployment_management do shared_examples 'shared examples for database controller endpoints' do include_examples 'requires `admin_project_google_cloud` role' diff --git a/spec/requests/projects/google_cloud/deployments_controller_spec.rb b/spec/requests/projects/google_cloud/deployments_controller_spec.rb index 14214b8fdfb..e9eac1e7ecd 100644 --- a/spec/requests/projects/google_cloud/deployments_controller_spec.rb +++ b/spec/requests/projects/google_cloud/deployments_controller_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe Projects::GoogleCloud::DeploymentsController, feature_category: :kubernetes_management do +RSpec.describe Projects::GoogleCloud::DeploymentsController, feature_category: :deployment_management do let_it_be(:project) { create(:project, :public, :repository) } let_it_be(:repository) { project.repository } diff --git a/spec/requests/projects/google_cloud/gcp_regions_controller_spec.rb b/spec/requests/projects/google_cloud/gcp_regions_controller_spec.rb index de4b96a2e01..da000ec00c0 100644 --- a/spec/requests/projects/google_cloud/gcp_regions_controller_spec.rb +++ b/spec/requests/projects/google_cloud/gcp_regions_controller_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe Projects::GoogleCloud::GcpRegionsController, feature_category: :kubernetes_management do +RSpec.describe Projects::GoogleCloud::GcpRegionsController, feature_category: :deployment_management do let_it_be(:project) { create(:project, :public, :repository) } let_it_be(:repository) { project.repository } diff --git a/spec/requests/projects/google_cloud/revoke_oauth_controller_spec.rb b/spec/requests/projects/google_cloud/revoke_oauth_controller_spec.rb index 5965953cf6f..427eff8cd76 100644 --- a/spec/requests/projects/google_cloud/revoke_oauth_controller_spec.rb +++ 
b/spec/requests/projects/google_cloud/revoke_oauth_controller_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe Projects::GoogleCloud::RevokeOauthController, feature_category: :kubernetes_management do +RSpec.describe Projects::GoogleCloud::RevokeOauthController, feature_category: :deployment_management do include SessionHelpers describe 'POST #create', :snowplow, :clean_gitlab_redis_sessions, :aggregate_failures do diff --git a/spec/requests/projects/google_cloud/service_accounts_controller_spec.rb b/spec/requests/projects/google_cloud/service_accounts_controller_spec.rb index 9b048f814ef..29d4154329f 100644 --- a/spec/requests/projects/google_cloud/service_accounts_controller_spec.rb +++ b/spec/requests/projects/google_cloud/service_accounts_controller_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe Projects::GoogleCloud::ServiceAccountsController, feature_category: :kubernetes_management do +RSpec.describe Projects::GoogleCloud::ServiceAccountsController, feature_category: :deployment_management do let_it_be(:project) { create(:project, :public) } describe 'GET index', :snowplow do diff --git a/spec/requests/projects/ml/candidates_controller_spec.rb b/spec/requests/projects/ml/candidates_controller_spec.rb index d3f9d92bc44..78c8e99e3f3 100644 --- a/spec/requests/projects/ml/candidates_controller_spec.rb +++ b/spec/requests/projects/ml/candidates_controller_spec.rb @@ -6,7 +6,7 @@ RSpec.describe Projects::Ml::CandidatesController, feature_category: :mlops do let_it_be(:project) { create(:project, :repository) } let_it_be(:user) { project.first_owner } let_it_be(:experiment) { create(:ml_experiments, project: project, user: user) } - let_it_be(:candidate) { create(:ml_candidates, experiment: experiment, user: user) } + let_it_be(:candidate) { create(:ml_candidates, experiment: experiment, user: user, project: project) } let(:ff_value) { true } let(:candidate_iid) { candidate.iid } @@ -18,19 +18,29 @@ RSpec.describe 
Projects::Ml::CandidatesController, feature_category: :mlops do sign_in(user) end + shared_examples 'renders 404' do + it 'renders 404' do + expect(response).to have_gitlab_http_status(:not_found) + end + end + + shared_examples '404 if candidate does not exist' do + context 'when experiment does not exist' do + let(:candidate_iid) { non_existing_record_id } + + it_behaves_like 'renders 404' + end + end + shared_examples '404 if feature flag disabled' do context 'when :ml_experiment_tracking disabled' do let(:ff_value) { false } - it 'is 404' do - expect(response).to have_gitlab_http_status(:not_found) - end + it_behaves_like 'renders 404' end end describe 'GET show' do - let(:params) { basic_params.merge(id: experiment.iid) } - before do show_candidate end @@ -48,20 +58,39 @@ RSpec.describe Projects::Ml::CandidatesController, feature_category: :mlops do expect { show_candidate }.not_to exceed_all_query_limit(control_count) end - context 'when candidate does not exist' do - let(:candidate_iid) { non_existing_record_id.to_s } + it_behaves_like '404 if candidate does not exist' + it_behaves_like '404 if feature flag disabled' + end + + describe 'DELETE #destroy' do + let_it_be(:candidate_for_deletion) do + create(:ml_candidates, project: project, experiment: experiment, user: user) + end + + let(:candidate_iid) { candidate_for_deletion.iid } - it 'returns 404' do - expect(response).to have_gitlab_http_status(:not_found) - end + before do + destroy_candidate end + it 'deletes the experiment', :aggregate_failures do + expect(response).to have_gitlab_http_status(:found) + expect(flash[:notice]).to eq('Candidate removed') + expect(response).to redirect_to("/#{project.full_path}/-/ml/experiments/#{experiment.iid}") + expect { Ml::Candidate.find(id: candidate_for_deletion.id) }.to raise_error(ActiveRecord::RecordNotFound) + end + + it_behaves_like '404 if candidate does not exist' it_behaves_like '404 if feature flag disabled' end private def show_candidate - get 
project_ml_candidate_path(project, candidate_iid) + get project_ml_candidate_path(project, iid: candidate_iid) + end + + def destroy_candidate + delete project_ml_candidate_path(project, candidate_iid) end end diff --git a/spec/requests/projects/ml/experiments_controller_spec.rb b/spec/requests/projects/ml/experiments_controller_spec.rb index 9b071efc1f1..5a8496a250a 100644 --- a/spec/requests/projects/ml/experiments_controller_spec.rb +++ b/spec/requests/projects/ml/experiments_controller_spec.rb @@ -19,6 +19,7 @@ RSpec.describe Projects::Ml::ExperimentsController, feature_category: :mlops do let(:ff_value) { true } let(:project) { project_with_feature } let(:basic_params) { { namespace_id: project.namespace.to_param, project_id: project } } + let(:experiment_iid) { experiment.iid } before do stub_feature_flags(ml_experiment_tracking: false) @@ -27,13 +28,25 @@ RSpec.describe Projects::Ml::ExperimentsController, feature_category: :mlops do sign_in(user) end + shared_examples 'renders 404' do + it 'renders 404' do + expect(response).to have_gitlab_http_status(:not_found) + end + end + + shared_examples '404 if experiment does not exist' do + context 'when experiment does not exist' do + let(:experiment_iid) { non_existing_record_id } + + it_behaves_like 'renders 404' + end + end + shared_examples '404 if feature flag disabled' do context 'when :ml_experiment_tracking disabled' do let(:ff_value) { false } - it 'is 404' do - expect(response).to have_gitlab_http_status(:not_found) - end + it_behaves_like 'renders 404' end end @@ -109,119 +122,184 @@ RSpec.describe Projects::Ml::ExperimentsController, feature_category: :mlops do end describe 'GET show' do - let(:params) { basic_params.merge(id: experiment.iid) } + describe 'html' do + it 'renders the template' do + show_experiment + + expect(response).to render_template('projects/ml/experiments/show') + end - it 'renders the template' do - show_experiment + describe 'pagination' do + let_it_be(:candidates) do + 
create_list(:ml_candidates, 5, experiment: experiment).tap do |c| + c.first.metrics.create!(name: 'metric1', value: 0.3) + c[1].metrics.create!(name: 'metric1', value: 0.2) + c.last.metrics.create!(name: 'metric1', value: 0.6) + end + end - expect(response).to render_template('projects/ml/experiments/show') - end + let(:params) { basic_params.merge(id: experiment.iid) } - describe 'pagination' do - let_it_be(:candidates) do - create_list(:ml_candidates, 5, experiment: experiment).tap do |c| - c.first.metrics.create!(name: 'metric1', value: 0.3) - c[1].metrics.create!(name: 'metric1', value: 0.2) - c.last.metrics.create!(name: 'metric1', value: 0.6) + before do + stub_const("Projects::Ml::ExperimentsController::MAX_CANDIDATES_PER_PAGE", 2) + + show_experiment end - end - let(:params) { basic_params.merge(id: experiment.iid) } + it 'fetches only MAX_CANDIDATES_PER_PAGE candidates' do + expect(assigns(:candidates).size).to eq(2) + end - before do - stub_const("Projects::Ml::ExperimentsController::MAX_CANDIDATES_PER_PAGE", 2) + it 'paginates' do + received = assigns(:page_info) - show_experiment - end + expect(received).to include({ + has_next_page: true, + has_previous_page: false, + start_cursor: nil + }) + end - it 'fetches only MAX_CANDIDATES_PER_PAGE candidates' do - expect(assigns(:candidates).size).to eq(2) - end + context 'when order by metric' do + let(:params) do + { + order_by: "metric1", + order_by_type: "metric", + sort: "desc" + } + end + + it 'paginates', :aggregate_failures do + page = assigns(:candidates) + + expect(page.first).to eq(candidates.last) + expect(page.last).to eq(candidates.first) - it 'paginates' do - received = assigns(:page_info) + new_params = params.merge(cursor: assigns(:page_info)[:end_cursor]) - expect(received).to include({ - has_next_page: true, - has_previous_page: false, - start_cursor: nil - }) + show_experiment(new_params: new_params) + + new_page = assigns(:candidates) + + expect(new_page.first).to eq(candidates[1]) + end + 
end end - context 'when order by metric' do + describe 'search' do let(:params) do - { - order_by: "metric1", - order_by_type: "metric", - sort: "desc" - } + basic_params.merge( + name: 'some_name', + orderBy: 'name', + orderByType: 'metric', + sort: 'asc', + invalid: 'invalid' + ) end - it 'paginates', :aggregate_failures do - page = assigns(:candidates) - - expect(page.first).to eq(candidates.last) - expect(page.last).to eq(candidates.first) + it 'formats and filters the parameters' do + expect(Projects::Ml::CandidateFinder).to receive(:new).and_call_original do |exp, params| + expect(params.to_h).to include({ + name: 'some_name', + order_by: 'name', + order_by_type: 'metric', + sort: 'asc' + }) + end + + show_experiment + end + end - new_params = params.merge(cursor: assigns(:page_info)[:end_cursor]) + it 'does not perform N+1 sql queries' do + control_count = ActiveRecord::QueryRecorder.new(skip_cached: false) { show_experiment } - show_experiment(new_params) + create_list(:ml_candidates, 2, :with_metrics_and_params, experiment: experiment) - new_page = assigns(:candidates) + expect { show_experiment }.not_to exceed_all_query_limit(control_count) + end - expect(new_page.first).to eq(candidates[1]) + describe '404' do + before do + show_experiment end + + it_behaves_like '404 if experiment does not exist' + it_behaves_like '404 if feature flag disabled' end end - describe 'search' do - let(:params) do - basic_params.merge( - id: experiment.iid, - name: 'some_name', - orderBy: 'name', - orderByType: 'metric', - sort: 'asc', - invalid: 'invalid' - ) - end - - it 'formats and filters the parameters' do - expect(Projects::Ml::CandidateFinder).to receive(:new).and_call_original do |exp, params| - expect(params.to_h).to include({ - name: 'some_name', - order_by: 'name', - order_by_type: 'metric', - sort: 'asc' - }) + describe 'csv' do + it 'responds with :ok', :aggregate_failures do + show_experiment_csv + + expect(response).to have_gitlab_http_status(:ok) + 
expect(response.headers['Content-Type']).to eq('text/csv; charset=utf-8') + end + + it 'calls the presenter' do + allow(::Ml::CandidatesCsvPresenter).to receive(:new).and_call_original + + show_experiment_csv + end + + it 'does not perform N+1 sql queries' do + control_count = ActiveRecord::QueryRecorder.new(skip_cached: false) { show_experiment_csv } + + create_list(:ml_candidates, 2, :with_metrics_and_params, experiment: experiment) + + expect { show_experiment_csv }.not_to exceed_all_query_limit(control_count) + end + + describe '404' do + before do + show_experiment_csv end - show_experiment + it_behaves_like '404 if experiment does not exist' + it_behaves_like '404 if feature flag disabled' end end + end - it 'does not perform N+1 sql queries' do - control_count = ActiveRecord::QueryRecorder.new(skip_cached: false) { show_experiment } + describe 'DELETE #destroy' do + let_it_be(:experiment_for_deletion) do + create(:ml_experiments, project: project_with_feature, user: user).tap do |e| + create(:ml_candidates, experiment: e, user: user) + end + end + + let_it_be(:candidate_for_deletion) { experiment_for_deletion.candidates.first } - create_list(:ml_candidates, 2, :with_metrics_and_params, experiment: experiment) + let(:params) { basic_params.merge(id: experiment.iid) } - expect { show_experiment }.not_to exceed_all_query_limit(control_count) + before do + destroy_experiment end - it_behaves_like '404 if feature flag disabled' do - before do - show_experiment - end + it 'deletes the experiment' do + expect { experiment.reload }.to raise_error(ActiveRecord::RecordNotFound) end + + it_behaves_like '404 if experiment does not exist' + it_behaves_like '404 if feature flag disabled' end private - def show_experiment(new_params = nil) - get project_ml_experiment_path(project, experiment.iid), params: new_params || params + def show_experiment(new_params: nil, format: :html) + get project_ml_experiment_path(project, experiment_iid, format: format), params: new_params 
|| params + end + + def show_experiment_csv + show_experiment(format: :csv) end def list_experiments(new_params = nil) get project_ml_experiments_path(project), params: new_params || params end + + def destroy_experiment + delete project_ml_experiment_path(project, experiment_iid), params: params + end end diff --git a/spec/requests/projects/usage_quotas_spec.rb b/spec/requests/projects/usage_quotas_spec.rb index 60ab64c30c3..33b206c8dc0 100644 --- a/spec/requests/projects/usage_quotas_spec.rb +++ b/spec/requests/projects/usage_quotas_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe 'Project Usage Quotas', feature_category: :subscription_cost_management do +RSpec.describe 'Project Usage Quotas', feature_category: :consumables_cost_management do let_it_be(:project) { create(:project) } let_it_be(:role) { :maintainer } let_it_be(:user) { create(:user) } diff --git a/spec/requests/projects/wikis_controller_spec.rb b/spec/requests/projects/wikis_controller_spec.rb index 4768e7134e8..3c434b36b21 100644 --- a/spec/requests/projects/wikis_controller_spec.rb +++ b/spec/requests/projects/wikis_controller_spec.rb @@ -64,7 +64,6 @@ RSpec.describe Projects::WikisController, feature_category: :wiki do before do # Setting an invalid page title to render edit page put wiki_page_path(project_wiki, wiki_page), params: { wiki: { title: '' } } - print(response.body) end it_behaves_like 'embed.diagrams.net frame-src directive' diff --git a/spec/requests/projects/work_items_spec.rb b/spec/requests/projects/work_items_spec.rb index 056416d380d..99337771960 100644 --- a/spec/requests/projects/work_items_spec.rb +++ b/spec/requests/projects/work_items_spec.rb @@ -3,16 +3,41 @@ require 'spec_helper' RSpec.describe 'Work Items', feature_category: :team_planning do + include WorkhorseHelpers + + include_context 'workhorse headers' + let_it_be(:work_item) { create(:work_item) } - let_it_be(:developer) { create(:user) } + let_it_be(:current_user) { create(:user) } + 
let_it_be(:project) { create(:project) } + + let(:file) { fixture_file_upload("spec/fixtures/#{filename}") } before_all do - work_item.project.add_developer(developer) + work_item.project.add_developer(current_user) + end + + shared_examples 'response with 404 status' do + it 'returns 404' do + subject + + expect(response).to have_gitlab_http_status(:not_found) + end + end + + shared_examples 'safely handles uploaded files' do + it 'ensures the upload is handled safely', :aggregate_failures do + allow(Gitlab::Utils).to receive(:check_path_traversal!).and_call_original + expect(Gitlab::Utils).to receive(:check_path_traversal!).with(filename).at_least(:once) + expect(FileUploader).not_to receive(:cache) + + subject + end end describe 'GET /:namespace/:project/work_items/:id' do before do - sign_in(developer) + sign_in(current_user) end it 'renders index' do @@ -21,4 +46,149 @@ RSpec.describe 'Work Items', feature_category: :team_planning do expect(response).to have_gitlab_http_status(:ok) end end + + describe 'POST /:namespace/:project/work_items/import_csv' do + let(:filename) { 'work_items_valid_types.csv' } + let(:params) { { namespace_id: project.namespace.id, path: 'test' } } + + subject { upload_file(file, workhorse_headers, params) } + + shared_examples 'handles authorisation' do + context 'when unauthorized' do + context 'with non-member' do + let_it_be(:current_user) { create(:user) } + + before do + sign_in(current_user) + end + + it 'responds with error' do + subject + + expect(response).to have_gitlab_http_status(:not_found) + end + end + + context 'with anonymous user' do + it 'responds with error' do + subject + + expect(response).to have_gitlab_http_status(:found) + expect(response).to be_redirect + end + end + end + + context 'when authorized' do + before do + sign_in(current_user) + project.add_reporter(current_user) + end + + context 'when import/export work items feature is available and member is a reporter' do + shared_examples 'response with 
success status' do + it 'returns 200 status and success message' do + subject + + expect(response).to have_gitlab_http_status(:success) + expect(json_response).to eq( + 'message' => "Your work items are being imported. Once finished, you'll receive a confirmation email.") + end + end + + it_behaves_like 'response with success status' + it_behaves_like 'safely handles uploaded files' + + it 'shows error when upload fails' do + expect_next_instance_of(UploadService) do |upload_service| + expect(upload_service).to receive(:execute).and_return(nil) + end + + subject + + expect(json_response).to eq('errors' => 'File upload error.') + end + + context 'when file extension is not csv' do + let(:filename) { 'sample_doc.md' } + + it 'returns error message' do + subject + + expect(response).to have_gitlab_http_status(:bad_request) + expect(json_response).to eq( + 'errors' => "The uploaded file was invalid. Supported file extensions are .csv.") + end + end + end + + context 'when work items import/export feature is not available' do + before do + stub_feature_flags(import_export_work_items_csv: false) + end + + it_behaves_like 'response with 404 status' + end + end + end + + context 'with public project' do + let_it_be(:project) { create(:project, :public) } + + it_behaves_like 'handles authorisation' + end + + context 'with private project' do + it_behaves_like 'handles authorisation' + end + + def upload_file(file, headers = {}, params = {}) + workhorse_finalize( + import_csv_project_work_items_path(project), + method: :post, + file_key: :file, + params: params.merge(file: file), + headers: headers, + send_rewritten_field: true + ) + end + end + + describe 'POST #authorize' do + subject do + post import_csv_authorize_project_work_items_path(project), + headers: workhorse_headers + end + + before do + sign_in(current_user) + end + + context 'with authorized user' do + before do + project.add_reporter(current_user) + end + + context 'when work items import/export feature is 
enabled' do + let(:user) { current_user } + + it_behaves_like 'handle uploads authorize request' do + let(:uploader_class) { FileUploader } + let(:maximum_size) { Gitlab::CurrentSettings.max_attachment_size.megabytes } + end + end + + context 'when work items import/export feature is disabled' do + before do + stub_feature_flags(import_export_work_items_csv: false) + end + + it_behaves_like 'response with 404 status' + end + end + + context 'with unauthorized user' do + it_behaves_like 'response with 404 status' + end + end end diff --git a/spec/requests/registrations_controller_spec.rb b/spec/requests/registrations_controller_spec.rb new file mode 100644 index 00000000000..89681485de3 --- /dev/null +++ b/spec/requests/registrations_controller_spec.rb @@ -0,0 +1,25 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe RegistrationsController, type: :request, feature_category: :system_access do + describe 'POST #create' do + let_it_be(:user_attrs) { build_stubbed(:user).slice(:first_name, :last_name, :username, :email, :password) } + + subject(:create_user) { post user_registration_path, params: { user: user_attrs } } + + context 'when email confirmation is required' do + before do + stub_application_setting_enum('email_confirmation_setting', 'hard') + stub_application_setting(require_admin_approval_after_user_signup: false) + stub_feature_flags(soft_email_confirmation: false) + end + + it 'redirects to the `users_almost_there_path`', unless: Gitlab.ee? 
do + create_user + + expect(response).to redirect_to(users_almost_there_path(email: user_attrs[:email])) + end + end + end +end diff --git a/spec/requests/search_controller_spec.rb b/spec/requests/search_controller_spec.rb index 98dda75a2b0..f2d4e288ddc 100644 --- a/spec/requests/search_controller_spec.rb +++ b/spec/requests/search_controller_spec.rb @@ -66,13 +66,9 @@ RSpec.describe SearchController, type: :request, feature_category: :global_searc let(:creation_args) { { name: 'project' } } let(:params) { { search: 'project', scope: 'projects' } } # some N+1 queries still exist - # each project requires 3 extra queries - # - one count for forks - # - one count for open MRs - # - one count for open Issues - # there are 4 additional queries run for the logged in user: - # (1) user preferences, (1) user statuses, (1) user details, (1) users - let(:threshold) { 17 } + # 1 for users + # 1 for root ancestor for each project + let(:threshold) { 7 } it_behaves_like 'an efficient database result' end diff --git a/spec/requests/sessions_spec.rb b/spec/requests/sessions_spec.rb index bc4ac3b7335..3bff9555834 100644 --- a/spec/requests/sessions_spec.rb +++ b/spec/requests/sessions_spec.rb @@ -3,6 +3,8 @@ require 'spec_helper' RSpec.describe 'Sessions', feature_category: :system_access do + include SessionHelpers + context 'authentication', :allow_forgery_protection do let(:user) { create(:user) } @@ -14,4 +16,48 @@ RSpec.describe 'Sessions', feature_category: :system_access do expect(response).to redirect_to(new_user_session_path) end end + + describe 'about_gitlab_active_user' do + before do + allow(::Gitlab).to receive(:com?).and_return(true) + end + + let(:user) { create(:user) } + + context 'when user signs in' do + it 'sets marketing cookie' do + post user_session_path(user: { login: user.username, password: user.password }) + expect(response.cookies['about_gitlab_active_user']).to be_present + end + end + + context 'when user uses remember_me' do + it 'sets marketing 
cookie' do + post user_session_path(user: { login: user.username, password: user.password, remember_me: true }) + expect(response.cookies['about_gitlab_active_user']).to be_present + end + end + + context 'when user signs out' do + before do + post user_session_path(user: { login: user.username, password: user.password }) + end + + it 'deletes marketing cookie' do + post(destroy_user_session_path) + expect(response.cookies['about_gitlab_active_user']).to be_nil + end + end + + context 'when user is not using GitLab SaaS' do + before do + allow(::Gitlab).to receive(:com?).and_return(false) + end + + it 'does not set marketing cookie' do + post user_session_path(user: { login: user.username, password: user.password }) + expect(response.cookies['about_gitlab_active_user']).to be_nil + end + end + end end diff --git a/spec/requests/time_tracking/timelogs_controller_spec.rb b/spec/requests/time_tracking/timelogs_controller_spec.rb new file mode 100644 index 00000000000..68eecf9b137 --- /dev/null +++ b/spec/requests/time_tracking/timelogs_controller_spec.rb @@ -0,0 +1,46 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe TimeTracking::TimelogsController, feature_category: :team_planning do + let_it_be(:user) { create(:user) } + + describe 'GET #index' do + subject { get timelogs_path } + + context 'when user is not logged in' do + it 'responds with a redirect to the login page' do + subject + + expect(response).to have_gitlab_http_status(:redirect) + end + end + + context 'when user is logged in' do + before do + sign_in(user) + end + + context 'when global_time_tracking_report FF is enabled' do + it 'responds with the global time tracking page', :aggregate_failures do + subject + + expect(response).to have_gitlab_http_status(:ok) + expect(response).to render_template(:index) + end + end + + context 'when global_time_tracking_report FF is disable' do + before do + stub_feature_flags(global_time_tracking_report: false) + end + + it 'returns a 404 
page' do + subject + + expect(response).to have_gitlab_http_status(:not_found) + end + end + end + end +end diff --git a/spec/requests/users/pins_spec.rb b/spec/requests/users/pins_spec.rb new file mode 100644 index 00000000000..9a32d7e9d76 --- /dev/null +++ b/spec/requests/users/pins_spec.rb @@ -0,0 +1,67 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe 'Pinning navigation menu items', feature_category: :navigation do + let(:user) { create(:user) } + let(:menu_item_ids) { %w[item4 item7] } + let(:other_panel_data) { { 'group' => ['some_item_id'] } } + + before do + user.update!(pinned_nav_items: other_panel_data) + sign_in(user) + end + + describe 'PUT /-/users/pins' do + before do + put pins_path, params: params, headers: { 'ACCEPT' => 'application/json' } + end + + context 'with valid params' do + let(:panel) { 'project' } + let(:params) { { menu_item_ids: menu_item_ids, panel: panel } } + + it 'saves the menu_item_ids for the correct panel' do + expect(user.pinned_nav_items).to include(panel => menu_item_ids) + end + + it 'does not change menu_item_ids of other panels' do + expect(user.pinned_nav_items).to include(other_panel_data) + end + + it 'responds OK' do + expect(response).to have_gitlab_http_status(:ok) + end + end + + context 'with invalid params' do + shared_examples 'unchanged data and error response' do + it 'does not modify existing panel data' do + expect(user.reload.pinned_nav_items).to eq(other_panel_data) + end + + it 'responds with error' do + expect(response).to have_gitlab_http_status(:bad_request) + end + end + + context 'when panel name is unknown' do + let(:params) { { menu_item_ids: menu_item_ids, panel: 'something_else' } } + + it_behaves_like 'unchanged data and error response' + end + + context 'when menu_item_ids is not array of strings' do + let(:params) { { menu_item_ids: 'not_an_array', panel: 'project' } } + + it_behaves_like 'unchanged data and error response' + end + + context 'when params are not 
permitted' do + let(:params) { { random_param: 'random_value' } } + + it_behaves_like 'unchanged data and error response' + end + end + end +end diff --git a/spec/routing/project_routing_spec.rb b/spec/routing/project_routing_spec.rb index 664fc7dde7a..b1fafffb253 100644 --- a/spec/routing/project_routing_spec.rb +++ b/spec/routing/project_routing_spec.rb @@ -897,7 +897,7 @@ RSpec.describe 'project routing' do end describe Projects::MetricsDashboardController, 'routing' do - it 'routes to #show with no dashboard_path and no page' do + it 'routes to #show with no dashboard_path' do expect(get: "/gitlab/gitlabhq/-/metrics").to route_to( "projects/metrics_dashboard#show", **base_params @@ -912,19 +912,17 @@ RSpec.describe 'project routing' do ) end - it 'routes to #show with only page' do + it 'routes to #show' do expect(get: "/gitlab/gitlabhq/-/metrics/panel/new").to route_to( "projects/metrics_dashboard#show", - page: 'panel/new', **base_params ) end - it 'routes to #show with dashboard_path and page' do + it 'routes to #show with dashboard_path' do expect(get: "/gitlab/gitlabhq/-/metrics/config%2Fprometheus%2Fcommon_metrics.yml/panel/new").to route_to( "projects/metrics_dashboard#show", dashboard_path: 'config/prometheus/common_metrics.yml', - page: 'panel/new', **base_params ) end diff --git a/spec/rubocop/cop/migration/add_limit_to_text_columns_spec.rb b/spec/rubocop/cop/migration/add_limit_to_text_columns_spec.rb index a6a072e2caf..032cc12ab94 100644 --- a/spec/rubocop/cop/migration/add_limit_to_text_columns_spec.rb +++ b/spec/rubocop/cop/migration/add_limit_to_text_columns_spec.rb @@ -78,30 +78,6 @@ RSpec.describe RuboCop::Cop::Migration::AddLimitToTextColumns do end RUBY end - - context 'for migrations before 2021_09_10_00_00_00' do - it 'when limit: attribute is used (which is not supported yet for this version): registers an offense' do - allow(cop).to receive(:version).and_return(described_class::TEXT_LIMIT_ATTRIBUTE_ALLOWED_SINCE - 5) - - 
expect_offense(<<~RUBY) - class TestTextLimits < ActiveRecord::Migration[6.0] - def up - create_table :test_text_limit_attribute do |t| - t.integer :test_id, null: false - t.text :name, limit: 100 - ^^^^ Text columns should always have a limit set (255 is suggested). Using limit: is not supported in this version. You can add a limit to a `text` column by using `add_text_limit` or `.text_limit` inside `create_table` - end - - create_table_with_constraints :test_text_limit_attribute do |t| - t.integer :test_id, null: false - t.text :name, limit: 100 - ^^^^ Text columns should always have a limit set (255 is suggested). Using limit: is not supported in this version. You can add a limit to a `text` column by using `add_text_limit` or `.text_limit` inside `create_table` - end - end - end - RUBY - end - end end context 'when text array columns are defined without a limit' do diff --git a/spec/rubocop/cop/rspec/invalid_feature_category_spec.rb b/spec/rubocop/cop/rspec/invalid_feature_category_spec.rb index 0d2fd029a13..e5287f7105e 100644 --- a/spec/rubocop/cop/rspec/invalid_feature_category_spec.rb +++ b/spec/rubocop/cop/rspec/invalid_feature_category_spec.rb @@ -17,7 +17,7 @@ RSpec.describe RuboCop::Cop::RSpec::InvalidFeatureCategory, feature_category: :t it 'flags invalid feature category in nested context' do expect_offense(<<~RUBY, valid: valid_category, invalid: invalid_category) - RSpec.describe 'foo', feature_category: :%{valid} do + RSpec.describe 'foo', feature_category: :"%{valid}" do context 'bar', foo: :bar, feature_category: :%{invalid} do ^^{invalid} Please use a valid feature category. See https://docs.gitlab.com/ee/development/feature_categorization/#rspec-examples. 
end @@ -27,7 +27,7 @@ RSpec.describe RuboCop::Cop::RSpec::InvalidFeatureCategory, feature_category: :t it 'flags invalid feature category in examples' do expect_offense(<<~RUBY, valid: valid_category, invalid: invalid_category) - RSpec.describe 'foo', feature_category: :%{valid} do + RSpec.describe 'foo', feature_category: :"%{valid}" do it 'bar', feature_category: :%{invalid} do ^^{invalid} Please use a valid feature category. See https://docs.gitlab.com/ee/development/feature_categorization/#rspec-examples. end @@ -37,9 +37,9 @@ RSpec.describe RuboCop::Cop::RSpec::InvalidFeatureCategory, feature_category: :t it 'does not flag if feature category is valid' do expect_no_offenses(<<~RUBY) - RSpec.describe 'foo', feature_category: :#{valid_category} do - context 'bar', feature_category: :#{valid_category} do - it 'baz', feature_category: :#{valid_category} do + RSpec.describe 'foo', feature_category: :"#{valid_category}" do + context 'bar', feature_category: :"#{valid_category}" do + it 'baz', feature_category: :"#{valid_category}" do end end end @@ -50,8 +50,8 @@ RSpec.describe RuboCop::Cop::RSpec::InvalidFeatureCategory, feature_category: :t mistyped = make_typo(valid_category) expect_offense(<<~RUBY, invalid: mistyped, valid: valid_category) - RSpec.describe 'foo', feature_category: :%{invalid} do - ^^{invalid} Please use a valid feature category. Did you mean `:%{valid}`? See [...] + RSpec.describe 'foo', feature_category: :"%{invalid}" do + ^^^^{invalid} Please use a valid feature category. Did you mean `:%{valid}`? See [...] 
end RUBY end diff --git a/spec/rubocop/cop/rspec/misspelled_aggregate_failures_spec.rb b/spec/rubocop/cop/rspec/misspelled_aggregate_failures_spec.rb new file mode 100644 index 00000000000..c551c03b896 --- /dev/null +++ b/spec/rubocop/cop/rspec/misspelled_aggregate_failures_spec.rb @@ -0,0 +1,136 @@ +# frozen_string_literal: true + +require 'rubocop_spec_helper' +require 'rspec-parameterized' + +require_relative '../../../../rubocop/cop/rspec/misspelled_aggregate_failures' + +RSpec.describe RuboCop::Cop::RSpec::MisspelledAggregateFailures, feature_category: :shared do + shared_examples 'misspelled tag' do |misspelled| + it 'flags and auto-corrects misspelled tags in describe' do + expect_offense(<<~'RUBY', misspelled: misspelled) + RSpec.describe 'a feature', :%{misspelled} do + ^^{misspelled} Use `:aggregate_failures` to aggregate failures. + describe 'inner', :%{misspelled} do + ^^{misspelled} Use `:aggregate_failures` to aggregate failures. + end + end + RUBY + + expect_correction(<<~'RUBY') + RSpec.describe 'a feature', :aggregate_failures do + describe 'inner', :aggregate_failures do + end + end + RUBY + end + + it 'flags and auto-corrects misspelled tags in context' do + expect_offense(<<~'RUBY', misspelled: misspelled) + context 'a feature', :%{misspelled} do + ^^{misspelled} Use `:aggregate_failures` to aggregate failures. + end + RUBY + + expect_correction(<<~'RUBY') + context 'a feature', :aggregate_failures do + end + RUBY + end + + it 'flags and auto-corrects misspelled tags in examples' do + expect_offense(<<~'RUBY', misspelled: misspelled) + it 'aggregates', :%{misspelled} do + ^^{misspelled} Use `:aggregate_failures` to aggregate failures. + end + + specify :%{misspelled} do + ^^{misspelled} Use `:aggregate_failures` to aggregate failures. + end + + it :%{misspelled} do + ^^{misspelled} Use `:aggregate_failures` to aggregate failures. 
+ end + RUBY + + expect_correction(<<~'RUBY') + it 'aggregates', :aggregate_failures do + end + + specify :aggregate_failures do + end + + it :aggregate_failures do + end + RUBY + end + + it 'flags and auto-corrects misspelled tags in any order' do + expect_offense(<<~'RUBY', misspelled: misspelled) + it 'aggregates', :foo, :%{misspelled} do + ^^{misspelled} Use `:aggregate_failures` to aggregate failures. + end + + it 'aggregates', :%{misspelled}, :bar do + ^^{misspelled} Use `:aggregate_failures` to aggregate failures. + end + RUBY + + expect_correction(<<~'RUBY') + it 'aggregates', :foo, :aggregate_failures do + end + + it 'aggregates', :aggregate_failures, :bar do + end + RUBY + end + end + + shared_examples 'legit tag' do |legit_tag| + it 'does not flag' do + expect_no_offenses(<<~RUBY) + RSpec.describe 'a feature', :#{legit_tag} do + end + + it 'is ok', :#{legit_tag} do + end + RUBY + end + end + + context 'with misspelled tags' do + where(:tag) do + # From https://gitlab.com/gitlab-org/gitlab/-/issues/396356#list + %w[ + aggregate_errors + aggregate_failure + aggregated_failures + aggregate_results + aggregated_errors + aggregates_failures + aggregate_failues + + aggregate_bar + aggregate_foo + ] + end + + with_them do + it_behaves_like 'misspelled tag', params[:tag] + end + end + + context 'with legit tags' do + where(:tag) do + %w[ + aggregate + aggregations + aggregate_two_underscores + ] + end + + with_them do + it_behaves_like 'legit tag', params[:tag] + end + end +end diff --git a/spec/rubocop/cop/rspec/shared_groups_metadata_spec.rb b/spec/rubocop/cop/rspec/shared_groups_metadata_spec.rb new file mode 100644 index 00000000000..3dd568e7dcd --- /dev/null +++ b/spec/rubocop/cop/rspec/shared_groups_metadata_spec.rb @@ -0,0 +1,70 @@ +# frozen_string_literal: true + +require 'rubocop_spec_helper' +require 'rspec-parameterized' + +require_relative '../../../../rubocop/cop/rspec/shared_groups_metadata' + +RSpec.describe 
RuboCop::Cop::RSpec::SharedGroupsMetadata, feature_category: :tooling do + context 'with hash metadata' do + it 'flags metadata in shared example' do + expect_offense(<<~RUBY) + RSpec.shared_examples 'foo', feature_category: :shared do + ^^^^^^^^^^^^^^^^^^^^^^^^^ Avoid using metadata on shared examples and shared context. They might cause flaky tests. See https://gitlab.com/gitlab-org/gitlab/-/issues/404388 + end + + shared_examples 'foo', feature_category: :shared do + ^^^^^^^^^^^^^^^^^^^^^^^^^ Avoid using metadata on shared examples and shared context. They might cause flaky tests. See https://gitlab.com/gitlab-org/gitlab/-/issues/404388 + end + RUBY + end + + it 'flags metadata in shared context' do + expect_offense(<<~RUBY) + RSpec.shared_context 'foo', feature_category: :shared do + ^^^^^^^^^^^^^^^^^^^^^^^^^ Avoid using metadata on shared examples and shared context. They might cause flaky tests. See https://gitlab.com/gitlab-org/gitlab/-/issues/404388 + end + + shared_context 'foo', feature_category: :shared do + ^^^^^^^^^^^^^^^^^^^^^^^^^ Avoid using metadata on shared examples and shared context. They might cause flaky tests. See https://gitlab.com/gitlab-org/gitlab/-/issues/404388 + end + RUBY + end + end + + context 'with symbol metadata' do + it 'flags metadata in shared example' do + expect_offense(<<~RUBY) + RSpec.shared_examples 'foo', :aggregate_failures do + ^^^^^^^^^^^^^^^^^^^ Avoid using metadata on shared examples and shared context. They might cause flaky tests. See https://gitlab.com/gitlab-org/gitlab/-/issues/404388 + end + + shared_examples 'foo', :aggregate_failures do + ^^^^^^^^^^^^^^^^^^^ Avoid using metadata on shared examples and shared context. They might cause flaky tests. 
See https://gitlab.com/gitlab-org/gitlab/-/issues/404388 + end + RUBY + end + + it 'flags metadata in shared context' do + expect_offense(<<~RUBY) + RSpec.shared_context 'foo', :aggregate_failures do + ^^^^^^^^^^^^^^^^^^^ Avoid using metadata on shared examples and shared context. They might cause flaky tests. See https://gitlab.com/gitlab-org/gitlab/-/issues/404388 + end + + shared_context 'foo', :aggregate_failures do + ^^^^^^^^^^^^^^^^^^^ Avoid using metadata on shared examples and shared context. They might cause flaky tests. See https://gitlab.com/gitlab-org/gitlab/-/issues/404388 + end + RUBY + end + end + + it 'does not flag if feature category is missing' do + expect_no_offenses(<<~RUBY) + RSpec.shared_examples 'foo' do + end + + shared_examples 'foo' do + end + RUBY + end +end diff --git a/spec/rubocop/cop/search/namespaced_class_spec.rb b/spec/rubocop/cop/search/namespaced_class_spec.rb new file mode 100644 index 00000000000..6e10909389e --- /dev/null +++ b/spec/rubocop/cop/search/namespaced_class_spec.rb @@ -0,0 +1,100 @@ +# frozen_string_literal: true + +require 'rubocop_spec_helper' +require_relative '../../../../rubocop/cop/search/namespaced_class' + +RSpec.describe RuboCop::Cop::Search::NamespacedClass, feature_category: :global_search do + %w[Search Zoekt Elastic].each do |keyword| + context 'when Search root namespace is not used' do + it 'flags a class definition without Search namespace' do + expect_offense(<<~'SOURCE', keyword: keyword, msg: described_class::MSG) + class My%{keyword}Class + ^^^{keyword}^^^^^ %{msg} + end + SOURCE + + expect_offense(<<~'SOURCE', keyword: keyword, msg: described_class::MSG) + class %{keyword}::MyClass < ApplicationRecord + ^{keyword}^^^^^^^^^ %{msg} + def some_method + true + end + end + SOURCE + + expect_offense(<<~'SOURCE', keyword: keyword, msg: described_class::MSG) + class MyClass < %{keyword}::Class + ^^^^^^^ %{msg} + def some_method + true + end + end + SOURCE + end + + it "flags a class definition with 
#{keyword} in root namespace module" do + expect_offense(<<~'SOURCE', keyword: keyword, msg: described_class::MSG) + module %{keyword}Module + class MyClass < ApplicationRecord + ^^^^^^^ %{msg} + def some_method + true + end + end + end + SOURCE + end + + it 'flags a module in EE module' do + expect_offense(<<~'SOURCE', keyword: keyword, msg: described_class::MSG) + module EE + module %{keyword}Controller + ^{keyword}^^^^^^^^^^ %{msg} + def some_method + true + end + end + end + SOURCE + end + end + + context 'when Search root namespace is used' do + it 'does not flag a class definition with Search as root namespace module' do + expect_no_offenses(<<~SOURCE, keyword: keyword) + module Search + class %{keyword}::MyClass < ApplicationRecord + def some_method + true + end + end + end + SOURCE + end + + it 'does not a flag a class definition with Search as root namespace inline' do + expect_no_offenses(<<~SOURCE, keyword: keyword) + class Search::%{keyword}::MyClass < ApplicationRecord + def some_method + true + end + end + SOURCE + end + + it 'does not a flag a class definition with Search as root namespace in EE' do + expect_no_offenses(<<~SOURCE, keyword: keyword) + module EE + module Search + class %{keyword}::MyClass < ApplicationRecord + def some_method + true + end + end + end + end + SOURCE + end + end + end +end diff --git a/spec/rubocop/cop/sidekiq_load_balancing/worker_data_consistency_spec.rb b/spec/rubocop/cop/sidekiq_load_balancing/worker_data_consistency_spec.rb index 7b6578a0744..f41a441d6a6 100644 --- a/spec/rubocop/cop/sidekiq_load_balancing/worker_data_consistency_spec.rb +++ b/spec/rubocop/cop/sidekiq_load_balancing/worker_data_consistency_spec.rb @@ -3,46 +3,95 @@ require 'rubocop_spec_helper' require_relative '../../../../rubocop/cop/sidekiq_load_balancing/worker_data_consistency' -RSpec.describe RuboCop::Cop::SidekiqLoadBalancing::WorkerDataConsistency do - before do - allow(cop) - .to receive(:in_worker?) 
- .and_return(true) - end +RSpec.describe RuboCop::Cop::SidekiqLoadBalancing::WorkerDataConsistency, feature_category: :scalability do + context 'when data_consistency is not set' do + it 'adds an offense when not defining data_consistency' do + expect_offense(<<~CODE) + class SomeWorker + ^^^^^^^^^^^^^^^^ Should define data_consistency expectation.[...] + include ApplicationWorker - it 'adds an offense when not defining data_consistency' do - expect_offense(<<~CODE) - class SomeWorker - ^^^^^^^^^^^^^^^^ Should define data_consistency expectation.[...] - include ApplicationWorker - - queue_namespace :pipeline_hooks - feature_category :continuous_integration - urgency :high - end - CODE - end + queue_namespace :pipeline_hooks + feature_category :continuous_integration + urgency :high + end + CODE + end + + it 'adds no offense when defining data_consistency' do + expect_no_offenses(<<~CODE) + class SomeWorker + include ApplicationWorker - it 'adds no offense when defining data_consistency' do - expect_no_offenses(<<~CODE) - class SomeWorker - include ApplicationWorker - - queue_namespace :pipeline_hooks - feature_category :continuous_integration - data_consistency :delayed - urgency :high - end - CODE + queue_namespace :pipeline_hooks + feature_category :continuous_integration + data_consistency :delayed + urgency :high + end + CODE + end + + it 'adds no offense when worker is not an ApplicationWorker' do + expect_no_offenses(<<~CODE) + class SomeWorker + queue_namespace :pipeline_hooks + feature_category :continuous_integration + urgency :high + end + CODE + end end - it 'adds no offense when worker is not an ApplicationWorker' do - expect_no_offenses(<<~CODE) - class SomeWorker - queue_namespace :pipeline_hooks - feature_category :continuous_integration - urgency :high - end - CODE + context 'when data_consistency set to :always' do + it 'adds an offense when using `always` data_consistency' do + expect_offense(<<~CODE) + class SomeWorker + include 
ApplicationWorker + data_consistency :always + ^^^^^^^ Refrain from using `:always` if possible.[...] + + queue_namespace :pipeline_hooks + feature_category :continuous_integration + urgency :high + end + CODE + end + + it 'adds no offense when using `sticky` data_consistency' do + expect_no_offenses(<<~CODE) + class SomeWorker + include ApplicationWorker + + data_consistency :sticky + queue_namespace :pipeline_hooks + feature_category :continuous_integration + urgency :high + end + CODE + end + + it 'adds no offense when using `delayed` data_consistency' do + expect_no_offenses(<<~CODE) + class SomeWorker + include ApplicationWorker + + data_consistency :delayed + queue_namespace :pipeline_hooks + feature_category :continuous_integration + urgency :high + end + CODE + end + + it 'adds no offense when worker is not an ApplicationWorker' do + expect_no_offenses(<<~CODE) + class SomeWorker + data_consistency :always + queue_namespace :pipeline_hooks + feature_category :continuous_integration + urgency :high + end + CODE + end end end diff --git a/spec/scripts/create_pipeline_failure_incident_spec.rb b/spec/scripts/create_pipeline_failure_incident_spec.rb deleted file mode 100644 index 8549cec1b12..00000000000 --- a/spec/scripts/create_pipeline_failure_incident_spec.rb +++ /dev/null @@ -1,120 +0,0 @@ -# frozen_string_literal: true - -require 'fast_spec_helper' -require_relative '../../scripts/create-pipeline-failure-incident' -require_relative '../support/helpers/stub_env' - -RSpec.describe CreatePipelineFailureIncident, feature_category: :tooling do - include StubENV - - describe '#execute' do - let(:create_issue) { instance_double(CreateIssue) } - let(:issue) { double('Issue', iid: 1) } # rubocop:disable RSpec/VerifiedDoubles - let(:create_issue_discussion) { instance_double(CreateIssueDiscussion, execute: true) } - let(:failed_jobs) { instance_double(PipelineFailedJobs, execute: []) } - - let(:options) do - { - project: 1234, - api_token: 'asdf1234' - } - end - - 
let(:issue_params) do - { - issue_type: 'incident', - title: title, - description: description, - labels: incident_labels - } - end - - subject { described_class.new(options).execute } - - before do - stub_env( - 'CI_COMMIT_SHA' => 'bfcd2b9b5cad0b889494ce830697392c8ca11257', - 'CI_PROJECT_PATH' => 'gitlab.com/gitlab-org/gitlab', - 'CI_PROJECT_NAME' => 'gitlab', - 'GITLAB_USER_ID' => '1111', - 'CI_PROJECT_ID' => '13083', - 'CI_PIPELINE_ID' => '1234567', - 'CI_PIPELINE_URL' => 'https://gitlab.com/gitlab-org/gitlab/-/pipelines/1234567', - 'CI_PROJECT_URL' => 'https://gitlab.com/gitlab-org/gitlab', - 'CI_PIPELINE_CREATED_AT' => '2023-01-24 00:00:00', - 'CI_COMMIT_TITLE' => 'Commit title', - 'CI_PIPELINE_SOURCE' => 'push', - 'GITLAB_USER_NAME' => 'Foo User', - 'PROJECT_TOKEN_FOR_CI_SCRIPTS_API_USAGE' => 'asdf1234', - 'CI_SERVER_URL' => 'https://gitlab.com', - 'GITLAB_USER_LOGIN' => 'foo' - ) - end - - shared_examples 'creating an issue' do - it 'successfully creates an issue' do - allow(PipelineFailedJobs).to receive(:new) - .with(API::DEFAULT_OPTIONS.merge(exclude_allowed_to_fail_jobs: true)) - .and_return(failed_jobs) - - expect(CreateIssue).to receive(:new) - .with(project: options[:project], api_token: options[:api_token]) - .and_return(create_issue) - - expect(CreateIssueDiscussion).to receive(:new) - .with(project: options[:project], api_token: options[:api_token]) - .and_return(create_issue_discussion).twice - - expect(create_issue).to receive(:execute) - .with(issue_params).and_return(issue) - - expect(subject).to eq(issue) - end - end - - context 'when stable branch' do - let(:incident_labels) { ['release-blocker'] } - let(:title) { /broken `15-6-stable-ee`/ } - let(:description) { /A broken stable branch prevents patch releases/ } - - let(:commit_merge_request) do - { - 'author' => { - 'id' => '2' - }, - 'title' => 'foo', - 'web_url' => 'https://gitlab.com/test' - } - end - - let(:merge_request) { instance_double(CommitMergeRequests, execute: 
[commit_merge_request]) } - let(:issue_params) { super().merge(assignee_ids: [1111, 2]) } - - before do - stub_env( - 'CI_COMMIT_REF_NAME' => '15-6-stable-ee' - ) - - allow(CommitMergeRequests).to receive(:new) - .with(API::DEFAULT_OPTIONS.merge(sha: ENV['CI_COMMIT_SHA'])) - .and_return(merge_request) - end - - it_behaves_like 'creating an issue' - end - - context 'when other branch' do - let(:incident_labels) { ['Engineering Productivity', 'master-broken::undetermined', 'master:broken'] } - let(:title) { /broken `master`/ } - let(:description) { /Follow the \[Broken `master` handbook guide\]/ } - - before do - stub_env( - 'CI_COMMIT_REF_NAME' => 'master' - ) - end - - it_behaves_like 'creating an issue' - end - end -end diff --git a/spec/scripts/failed_tests_spec.rb b/spec/scripts/failed_tests_spec.rb index ce0ec66cdb6..c9fe6eecd11 100644 --- a/spec/scripts/failed_tests_spec.rb +++ b/spec/scripts/failed_tests_spec.rb @@ -13,7 +13,7 @@ RSpec.describe FailedTests do 'suites' => [ { 'failed_count' => 1, - 'name' => 'rspec unit pg12 10/12', + 'name' => 'rspec unit pg13 10/12', 'test_cases' => [ { 'status' => 'failed', @@ -23,7 +23,7 @@ RSpec.describe FailedTests do }, { 'failed_count' => 1, - 'name' => 'rspec-ee unit pg12', + 'name' => 'rspec-ee unit pg13', 'test_cases' => [ { 'status' => 'failed', @@ -33,7 +33,7 @@ RSpec.describe FailedTests do }, { 'failed_count' => 1, - 'name' => 'rspec unit pg13 10/12', + 'name' => 'rspec unit pg14 10/12', 'test_cases' => [ { 'status' => 'failed', diff --git a/spec/scripts/generate_rspec_pipeline_spec.rb b/spec/scripts/generate_rspec_pipeline_spec.rb index b3eaf9e9127..91b5739cf63 100644 --- a/spec/scripts/generate_rspec_pipeline_spec.rb +++ b/spec/scripts/generate_rspec_pipeline_spec.rb @@ -13,42 +13,49 @@ RSpec.describe GenerateRspecPipeline, :silence_stdout, feature_category: :toolin "spec/lib/gitlab/background_migration/a_spec.rb spec/lib/gitlab/background_migration/b_spec.rb " \ "spec/models/a_spec.rb spec/models/b_spec.rb " 
\ "spec/controllers/a_spec.rb spec/controllers/b_spec.rb " \ - "spec/features/a_spec.rb spec/features/b_spec.rb" + "spec/features/a_spec.rb spec/features/b_spec.rb " \ + "ee/spec/features/a_spec.rb" end let(:pipeline_template) { Tempfile.new(['pipeline_template', '.yml.erb']) } let(:pipeline_template_content) do <<~YAML - <% if rspec_files_per_test_level[:migration][:files].size > 0 %> + <% if test_suite_prefix.nil? && rspec_files_per_test_level[:migration][:files].size > 0 %> rspec migration: <% if rspec_files_per_test_level[:migration][:parallelization] > 1 %> parallel: <%= rspec_files_per_test_level[:migration][:parallelization] %> <% end %> <% end %> - <% if rspec_files_per_test_level[:background_migration][:files].size > 0 %> + <% if test_suite_prefix.nil? && rspec_files_per_test_level[:background_migration][:files].size > 0 %> rspec background_migration: <% if rspec_files_per_test_level[:background_migration][:parallelization] > 1 %> parallel: <%= rspec_files_per_test_level[:background_migration][:parallelization] %> <% end %> <% end %> - <% if rspec_files_per_test_level[:unit][:files].size > 0 %> + <% if test_suite_prefix.nil? && rspec_files_per_test_level[:unit][:files].size > 0 %> rspec unit: <% if rspec_files_per_test_level[:unit][:parallelization] > 1 %> parallel: <%= rspec_files_per_test_level[:unit][:parallelization] %> <% end %> <% end %> - <% if rspec_files_per_test_level[:integration][:files].size > 0 %> + <% if test_suite_prefix.nil? && rspec_files_per_test_level[:integration][:files].size > 0 %> rspec integration: <% if rspec_files_per_test_level[:integration][:parallelization] > 1 %> parallel: <%= rspec_files_per_test_level[:integration][:parallelization] %> <% end %> <% end %> - <% if rspec_files_per_test_level[:system][:files].size > 0 %> + <% if test_suite_prefix.nil? 
&& rspec_files_per_test_level[:system][:files].size > 0 %> rspec system: <% if rspec_files_per_test_level[:system][:parallelization] > 1 %> parallel: <%= rspec_files_per_test_level[:system][:parallelization] %> <% end %> <% end %> + <% if test_suite_prefix == 'ee/' && rspec_files_per_test_level[:system][:files].size > 0 %> + rspec-ee system: + <% if rspec_files_per_test_level[:system][:parallelization] > 1 %> + parallel: <%= rspec_files_per_test_level[:system][:parallelization] %> + <% end %> + <% end %> YAML end @@ -65,7 +72,8 @@ RSpec.describe GenerateRspecPipeline, :silence_stdout, feature_category: :toolin "spec/controllers/a_spec.rb": 60.2, "spec/controllers/ab_spec.rb": 180.4, "spec/features/a_spec.rb": 360.1, - "spec/features/b_spec.rb": 180.5 + "spec/features/b_spec.rb": 180.5, + "ee/spec/features/a_spec.rb": 180.5 } JSON end @@ -177,6 +185,53 @@ RSpec.describe GenerateRspecPipeline, :silence_stdout, feature_category: :toolin end end + context 'when test_suite_prefix is given' do + subject do + described_class.new( + rspec_files_path: rspec_files.path, + pipeline_template_path: pipeline_template.path, + knapsack_report_path: knapsack_report.path, + test_suite_prefix: 'ee/' + ) + end + + it 'generates the pipeline config based on the test_suite_prefix' do + subject.generate! + + expect(File.read("#{pipeline_template.path}.yml")) + .to eq("rspec-ee system:") + end + end + + context 'when generated_pipeline_path is given' do + let(:custom_pipeline_filename) { Tempfile.new(['custom_pipeline_filename', '.yml']) } + + around do |example| + example.run + ensure + custom_pipeline_filename.close + custom_pipeline_filename.unlink + end + + subject do + described_class.new( + rspec_files_path: rspec_files.path, + pipeline_template_path: pipeline_template.path, + generated_pipeline_path: custom_pipeline_filename.path + ) + end + + it 'writes the pipeline config in the given generated_pipeline_path' do + subject.generate! 
+ + expect(File.read(custom_pipeline_filename.path)) + .to eq( + "rspec migration:\nrspec background_migration:\nrspec unit:\n" \ + "rspec integration:\nrspec system:" + ) + end + end + context 'when rspec_files does not exist' do subject { described_class.new(rspec_files_path: nil, pipeline_template_path: pipeline_template.path) } diff --git a/spec/scripts/pipeline/create_test_failure_issues_spec.rb b/spec/scripts/pipeline/create_test_failure_issues_spec.rb index fa27727542e..2a5910f5238 100644 --- a/spec/scripts/pipeline/create_test_failure_issues_spec.rb +++ b/spec/scripts/pipeline/create_test_failure_issues_spec.rb @@ -3,22 +3,28 @@ # rubocop:disable RSpec/VerifiedDoubles require 'fast_spec_helper' +require 'active_support/testing/time_helpers' require 'rspec-parameterized' require_relative '../../../scripts/pipeline/create_test_failure_issues' RSpec.describe CreateTestFailureIssues, feature_category: :tooling do describe CreateTestFailureIssue do + include ActiveSupport::Testing::TimeHelpers + + let(:server_host) { 'example.com' } + let(:project_path) { 'group/project' } + let(:env) do { - 'CI_JOB_URL' => 'ci_job_url', - 'CI_PIPELINE_URL' => 'ci_pipeline_url' + 'CI_SERVER_HOST' => server_host, + 'CI_PROJECT_PATH' => project_path, + 'CI_PIPELINE_URL' => "https://#{server_host}/#{project_path}/-/pipelines/1234" } end - let(:project) { 'group/project' } let(:api_token) { 'api_token' } - let(:creator) { described_class.new(project: project, api_token: api_token) } + let(:creator) { described_class.new(project: project_path, api_token: api_token) } let(:test_name) { 'The test description' } let(:test_file) { 'spec/path/to/file_spec.rb' } let(:test_file_content) do @@ -36,7 +42,7 @@ RSpec.describe CreateTestFailureIssues, feature_category: :tooling do { 'name' => test_name, 'file' => test_file, - 'job_url' => 'job_url' + 'job_url' => "https://#{server_host}/#{project_path}/-/jobs/5678" } end @@ -57,87 +63,124 @@ RSpec.describe CreateTestFailureIssues, 
feature_category: :tooling do } end + let(:test_hash) { Digest::SHA256.hexdigest(failed_test['file'] + failed_test['name'])[0...12] } + let(:latest_format_issue_title) { "#{failed_test['file']} [test-hash:#{test_hash}]" } + let(:latest_format_issue_description) do + <<~DESCRIPTION + ### Test description + + `#{failed_test['name']}` + + ### Test file path + + [`#{failed_test['file']}`](https://#{server_host}/#{project_path}/-/blob/master/#{failed_test['file']}) + + + ### Reports (1) + + #{failed_test_report_line} + DESCRIPTION + end + + around do |example| + freeze_time { example.run } + end + before do stub_env(env) + allow(creator).to receive(:puts) end - describe '#find' do - let(:expected_payload) do + describe '#upsert' do + let(:expected_search_payload) do { - state: 'opened', - search: "#{failed_test['file']} - ID: #{Digest::SHA256.hexdigest(failed_test['name'])[0...12]}" + state: :opened, + search: test_hash, + in: :title, + per_page: 1 } end let(:find_issue_stub) { double('FindIssues') } - let(:issue_stub) { double(title: expected_payload[:title], web_url: 'issue_web_url') } + let(:issue_stub) { double('Issue', title: latest_format_issue_title, web_url: 'issue_web_url') } - before do - allow(creator).to receive(:puts) + let(:failed_test_report_line) do + "1. 
#{Time.new.utc.strftime('%F')}: #{failed_test['job_url']} (#{env['CI_PIPELINE_URL']})" end - it 'calls FindIssues#execute(payload)' do - expect(FindIssues).to receive(:new).with(project: project, api_token: api_token).and_return(find_issue_stub) - expect(find_issue_stub).to receive(:execute).with(expected_payload).and_return([issue_stub]) + before do + allow(File).to receive(:open).and_call_original + allow(File).to receive(:open).with(File.expand_path(File.join('..', '..', '..', test_file), __dir__)) + .and_return(test_file_stub) + + allow(FindIssues).to receive(:new).with(project: project_path, api_token: api_token).and_return(find_issue_stub) - creator.find(failed_test) + allow(creator).to receive(:categories_mapping).and_return(categories_mapping) + allow(creator).to receive(:groups_mapping).and_return(groups_mapping) end context 'when no issues are found' do - it 'calls FindIssues#execute(payload)' do - expect(FindIssues).to receive(:new).with(project: project, api_token: api_token).and_return(find_issue_stub) - expect(find_issue_stub).to receive(:execute).with(expected_payload).and_return([]) - - creator.find(failed_test) + let(:create_issue_stub) { double('CreateIssue') } + let(:expected_create_payload) do + { + title: latest_format_issue_title, + description: latest_format_issue_description, + labels: described_class::DEFAULT_LABELS.map { |label| "wip-#{label}" } + [ + "wip-#{categories_mapping['source_code_management']['label']}", + "wip-#{groups_mapping['source_code']['label']}" + ], + weight: 1 + } end - end - end - - describe '#create' do - let(:expected_description) do - <<~DESCRIPTION - ### Full description - - `#{failed_test['name']}` - ### File path - - `#{failed_test['file']}` + before do + allow(find_issue_stub).to receive(:execute).with(expected_search_payload).and_return([]) + end - - ### Reports + it 'calls CreateIssue#execute(payload)' do + expect(CreateIssue).to receive(:new).with(project: project_path, api_token: api_token) + 
.and_return(create_issue_stub) + expect(create_issue_stub).to receive(:execute).with(expected_create_payload).and_return(issue_stub) - - #{failed_test['job_url']} (#{env['CI_PIPELINE_URL']}) - DESCRIPTION + creator.upsert(failed_test) + end end - let(:expected_payload) do - { - title: "#{failed_test['file']} - ID: #{Digest::SHA256.hexdigest(failed_test['name'])[0...12]}", - description: expected_description, - labels: described_class::DEFAULT_LABELS.map { |label| "wip-#{label}" } + [ - "wip-#{categories_mapping['source_code_management']['label']}", "wip-#{groups_mapping['source_code']['label']}" # rubocop:disable Layout/LineLength - ] - } - end + context 'when issues are found' do + let(:issue_stub) do + double('Issue', iid: 42, title: issue_title, description: issue_description, web_url: 'issue_web_url') + end - let(:create_issue_stub) { double('CreateIssue') } - let(:issue_stub) { double(title: expected_payload[:title], web_url: 'issue_web_url') } + before do + allow(find_issue_stub).to receive(:execute).with(expected_search_payload).and_return([issue_stub]) + end - before do - allow(creator).to receive(:puts) - allow(File).to receive(:open).and_call_original - allow(File).to receive(:open).with(File.expand_path(File.join('..', '..', '..', test_file), __dir__)) - .and_return(test_file_stub) - allow(creator).to receive(:categories_mapping).and_return(categories_mapping) - allow(creator).to receive(:groups_mapping).and_return(groups_mapping) - end + # This shared example can be useful if we want to test migration to a new format in the future + shared_examples 'existing issue update' do + let(:update_issue_stub) { double('UpdateIssue') } + let(:expected_update_payload) do + { + description: latest_format_issue_description.sub(/^### Reports.*$/, '### Reports (2)') + + "\n#{failed_test_report_line}", + weight: 2 + } + end + + it 'calls UpdateIssue#execute(payload)' do + expect(UpdateIssue).to receive(:new).with(project: project_path, api_token: api_token) + 
.and_return(update_issue_stub) + expect(update_issue_stub).to receive(:execute).with(42, **expected_update_payload) + + creator.upsert(failed_test) + end + end - it 'calls CreateIssue#execute(payload)' do - expect(CreateIssue).to receive(:new).with(project: project, api_token: api_token).and_return(create_issue_stub) - expect(create_issue_stub).to receive(:execute).with(expected_payload).and_return(issue_stub) + context 'when issue already has the latest format' do + let(:issue_description) { latest_format_issue_description } + let(:issue_title) { latest_format_issue_title } - creator.create(failed_test) # rubocop:disable Rails/SaveBang + it_behaves_like 'existing issue update' + end end end end diff --git a/spec/scripts/review_apps/automated_cleanup_spec.rb b/spec/scripts/review_apps/automated_cleanup_spec.rb index 546bf55a934..a8b8353d2ef 100644 --- a/spec/scripts/review_apps/automated_cleanup_spec.rb +++ b/spec/scripts/review_apps/automated_cleanup_spec.rb @@ -30,10 +30,7 @@ RSpec.describe ReviewApps::AutomatedCleanup, feature_category: :tooling do allow(Tooling::Helm3Client).to receive(:new).and_return(helm_client) allow(Tooling::KubernetesClient).to receive(:new).and_return(kubernetes_client) - allow(kubernetes_client).to receive(:cleanup_by_created_at) - allow(kubernetes_client).to receive(:cleanup_by_release) - allow(kubernetes_client).to receive(:cleanup_review_app_namespaces) - allow(kubernetes_client).to receive(:delete_namespaces_by_exact_names) + allow(kubernetes_client).to receive(:cleanup_namespaces_by_created_at) end shared_examples 'the days argument is an integer in the correct range' do @@ -78,28 +75,50 @@ RSpec.describe ReviewApps::AutomatedCleanup, feature_category: :tooling do end end - describe '#perform_stale_pvc_cleanup!' 
do - subject { instance.perform_stale_pvc_cleanup!(days: days) } + describe '.parse_args' do + subject { described_class.parse_args(argv) } - let(:days) { 2 } + context 'when no arguments are provided' do + let(:argv) { %w[] } - it_behaves_like 'the days argument is an integer in the correct range' + it 'returns the default options' do + expect(subject).to eq(dry_run: false) + end + end - it 'performs Kubernetes cleanup by created at' do - expect(kubernetes_client).to receive(:cleanup_by_created_at).with( - resource_type: 'pvc', - created_before: two_days_ago, - wait: false - ) + describe '--dry-run' do + context 'when no DRY_RUN variable is provided' do + let(:argv) { ['--dry-run='] } - subject - end + # This is the default behavior of OptionParser. + # We should always pass an environment variable with a value, or not pass the flag at all. + it 'raises an error' do + expect { subject }.to raise_error(OptionParser::InvalidArgument, 'invalid argument: --dry-run=') + end + end - context 'when the dry-run flag is true' do - let(:dry_run) { true } + context 'when the DRY_RUN variable is not set to true' do + let(:argv) { %w[--dry-run=false] } - it 'does not delete anything' do - expect(kubernetes_client).not_to receive(:cleanup_by_created_at) + it 'returns the default options' do + expect(subject).to eq(dry_run: false) + end + end + + context 'when the DRY_RUN variable is set to true' do + let(:argv) { %w[--dry-run=true] } + + it 'returns the correct dry_run value' do + expect(subject).to eq(dry_run: true) + end + end + + context 'when the short version of the flag is used' do + let(:argv) { %w[-d true] } + + it 'returns the correct dry_run value' do + expect(subject).to eq(dry_run: true) + end end end end @@ -112,10 +131,7 @@ RSpec.describe ReviewApps::AutomatedCleanup, feature_category: :tooling do it_behaves_like 'the days argument is an integer in the correct range' it 'performs Kubernetes cleanup for review apps namespaces' do - expect(kubernetes_client).to 
receive(:cleanup_review_app_namespaces).with( - created_before: two_days_ago, - wait: false - ) + expect(kubernetes_client).to receive(:cleanup_namespaces_by_created_at).with(created_before: two_days_ago) subject end @@ -124,7 +140,7 @@ RSpec.describe ReviewApps::AutomatedCleanup, feature_category: :tooling do let(:dry_run) { true } it 'does not delete anything' do - expect(kubernetes_client).not_to receive(:cleanup_review_app_namespaces) + expect(kubernetes_client).not_to receive(:cleanup_namespaces_by_created_at) end end end @@ -147,8 +163,7 @@ RSpec.describe ReviewApps::AutomatedCleanup, feature_category: :tooling do before do allow(helm_client).to receive(:delete) - allow(kubernetes_client).to receive(:cleanup_by_release) - allow(kubernetes_client).to receive(:delete_namespaces_by_exact_names) + allow(kubernetes_client).to receive(:delete_namespaces) end it 'deletes the helm release' do @@ -157,16 +172,8 @@ RSpec.describe ReviewApps::AutomatedCleanup, feature_category: :tooling do subject end - it 'empties the k8s resources in the k8s namespace for the release' do - expect(kubernetes_client).to receive(:cleanup_by_release).with(release_name: releases_names, wait: false) - - subject - end - it 'deletes the associated k8s namespace' do - expect(kubernetes_client).to receive(:delete_namespaces_by_exact_names).with( - resource_names: releases_names, wait: false - ) + expect(kubernetes_client).to receive(:delete_namespaces).with(releases_names) subject end @@ -179,14 +186,8 @@ RSpec.describe ReviewApps::AutomatedCleanup, feature_category: :tooling do subject end - it 'does not empty the k8s resources in the k8s namespace for the release' do - expect(kubernetes_client).not_to receive(:cleanup_by_release) - - subject - end - it 'does not delete the associated k8s namespace' do - expect(kubernetes_client).not_to receive(:delete_namespaces_by_exact_names) + expect(kubernetes_client).not_to receive(:delete_namespaces) subject end diff --git 
a/spec/serializers/admin/abuse_report_entity_spec.rb b/spec/serializers/admin/abuse_report_entity_spec.rb index 7d18310977c..760c12d3cf9 100644 --- a/spec/serializers/admin/abuse_report_entity_spec.rb +++ b/spec/serializers/admin/abuse_report_entity_spec.rb @@ -3,30 +3,90 @@ require "spec_helper" RSpec.describe Admin::AbuseReportEntity, feature_category: :insider_threat do - let_it_be(:abuse_report) { build_stubbed(:abuse_report) } + include Gitlab::Routing + + let(:abuse_report) { build_stubbed(:abuse_report) } let(:entity) do described_class.new(abuse_report) end + before do + allow_next_instance_of(described_class) do |instance| + allow(instance).to receive(:markdown_field).with(abuse_report, :message).and_return(abuse_report.message) + end + end + describe '#as_json' do subject(:entity_hash) { entity.as_json } it 'exposes correct attributes' do expect(entity_hash.keys).to include( :category, + :created_at, :updated_at, :reported_user, - :reporter + :reporter, + :reported_user_path, + :reporter_path, + :user_blocked, + :block_user_path, + :remove_report_path, + :remove_user_and_report_path, + :message ) end it 'correctly exposes `reported user`' do - expect(entity_hash[:reported_user].keys).to match_array([:name]) + expect(entity_hash[:reported_user].keys).to match_array([:name, :created_at]) end it 'correctly exposes `reporter`' do expect(entity_hash[:reporter].keys).to match_array([:name]) end + + it 'correctly exposes :reported_user_path' do + expect(entity_hash[:reported_user_path]).to eq user_path(abuse_report.user) + end + + it 'correctly exposes :reporter_path' do + expect(entity_hash[:reporter_path]).to eq user_path(abuse_report.reporter) + end + + describe 'user_blocked' do + subject(:user_blocked) { entity_hash[:user_blocked] } + + context 'when user is blocked' do + before do + allow(abuse_report.user).to receive(:blocked?).and_return(true) + end + + it { is_expected.to be true } + end + + context 'when user is not blocked' do + before do + 
allow(abuse_report.user).to receive(:blocked?).and_return(false) + end + + it { is_expected.to be false } + end + end + + it 'correctly exposes :block_user_path' do + expect(entity_hash[:block_user_path]).to eq block_admin_user_path(abuse_report.user) + end + + it 'correctly exposes :remove_report_path' do + expect(entity_hash[:remove_report_path]).to eq admin_abuse_report_path(abuse_report) + end + + it 'correctly exposes :remove_user_and_report_path' do + expect(entity_hash[:remove_user_and_report_path]).to eq admin_abuse_report_path(abuse_report, remove_user: true) + end + + it 'correctly exposes :message' do + expect(entity_hash[:message]).to eq(abuse_report.message) + end end end diff --git a/spec/serializers/admin/abuse_report_serializer_spec.rb b/spec/serializers/admin/abuse_report_serializer_spec.rb index 5b9c229584b..a56ef8816b1 100644 --- a/spec/serializers/admin/abuse_report_serializer_spec.rb +++ b/spec/serializers/admin/abuse_report_serializer_spec.rb @@ -3,17 +3,17 @@ require "spec_helper" RSpec.describe Admin::AbuseReportSerializer, feature_category: :insider_threat do - let(:resource) { build(:abuse_report) } + let_it_be(:resource) { build_stubbed(:abuse_report) } subject { described_class.new.represent(resource) } describe '#represent' do it 'serializes an abuse report' do - expect(subject[:id]).to eq resource.id + expect(subject[:updated_at]).to eq resource.updated_at end context 'when multiple objects are being serialized' do - let(:resource) { build_list(:abuse_report, 2) } + let_it_be(:resource) { create_list(:abuse_report, 2) } # rubocop:disable RSpec/FactoryBot/AvoidCreate it 'serializers the array of abuse reports' do expect(subject).not_to be_empty diff --git a/spec/serializers/build_details_entity_spec.rb b/spec/serializers/build_details_entity_spec.rb index ea3826f903a..86eaf160b38 100644 --- a/spec/serializers/build_details_entity_spec.rb +++ b/spec/serializers/build_details_entity_spec.rb @@ -17,9 +17,7 @@ RSpec.describe 
BuildDetailsEntity do let(:request) { double('request', project: project) } let(:entity) do - described_class.new(build, request: request, - current_user: user, - project: project) + described_class.new(build, request: request, current_user: user, project: project) end subject { entity.as_json } @@ -69,9 +67,7 @@ RSpec.describe BuildDetailsEntity do end let(:merge_request) do - create(:merge_request, source_project: forked_project, - target_project: project, - source_branch: build.ref) + create(:merge_request, source_project: forked_project, target_project: project, source_branch: build.ref) end it 'contains the needed key value pairs' do diff --git a/spec/serializers/ci/downloadable_artifact_entity_spec.rb b/spec/serializers/ci/downloadable_artifact_entity_spec.rb index 3142b03581d..66a975e54ab 100644 --- a/spec/serializers/ci/downloadable_artifact_entity_spec.rb +++ b/spec/serializers/ci/downloadable_artifact_entity_spec.rb @@ -18,8 +18,7 @@ RSpec.describe Ci::DownloadableArtifactEntity do context 'when user cannot read job artifact' do let!(:build) do - create(:ci_build, :success, :private_artifacts, - pipeline: pipeline) + create(:ci_build, :success, :private_artifacts, pipeline: pipeline) end it 'returns only artifacts readable by user', :aggregate_failures do diff --git a/spec/serializers/ci/job_entity_spec.rb b/spec/serializers/ci/job_entity_spec.rb index 174d9a0aadb..6dce87a1fc5 100644 --- a/spec/serializers/ci/job_entity_spec.rb +++ b/spec/serializers/ci/job_entity_spec.rb @@ -97,8 +97,7 @@ RSpec.describe Ci::JobEntity do before do project.add_developer(user) - create(:protected_branch, :developers_can_merge, - name: job.ref, project: job.project) + create(:protected_branch, :developers_can_merge, name: job.ref, project: job.project) end it 'contains path to play action' do @@ -114,8 +113,7 @@ RSpec.describe Ci::JobEntity do before do allow(job.project).to receive(:empty_repo?).and_return(false) - create(:protected_branch, :no_one_can_push, - name: 
job.ref, project: job.project) + create(:protected_branch, :no_one_can_push, name: job.ref, project: job.project) end it 'does not contain path to play action' do diff --git a/spec/serializers/ci/pipeline_entity_spec.rb b/spec/serializers/ci/pipeline_entity_spec.rb index 4df542e3c98..7f232a08622 100644 --- a/spec/serializers/ci/pipeline_entity_spec.rb +++ b/spec/serializers/ci/pipeline_entity_spec.rb @@ -43,10 +43,10 @@ RSpec.describe Ci::PipelineEntity do end it 'contains flags' do - expect(subject).to include :flags - expect(subject[:flags]) - .to include :stuck, :auto_devops, :yaml_errors, - :retryable, :cancelable, :merge_request + expect(subject).to include(:flags) + expect(subject[:flags]).to include( + :stuck, :auto_devops, :yaml_errors, :retryable, :cancelable, :merge_request + ) end end diff --git a/spec/serializers/deploy_keys/basic_deploy_key_entity_spec.rb b/spec/serializers/deploy_keys/basic_deploy_key_entity_spec.rb index 7ea72351594..7df6413f416 100644 --- a/spec/serializers/deploy_keys/basic_deploy_key_entity_spec.rb +++ b/spec/serializers/deploy_keys/basic_deploy_key_entity_spec.rb @@ -29,6 +29,7 @@ RSpec.describe DeployKeys::BasicDeployKeyEntity do destroyed_when_orphaned: true, almost_orphaned: false, created_at: deploy_key.created_at, + expires_at: deploy_key.expires_at, updated_at: deploy_key.updated_at, can_edit: false } diff --git a/spec/serializers/deploy_keys/deploy_key_entity_spec.rb b/spec/serializers/deploy_keys/deploy_key_entity_spec.rb index 4302ed3a097..837e30e1343 100644 --- a/spec/serializers/deploy_keys/deploy_key_entity_spec.rb +++ b/spec/serializers/deploy_keys/deploy_key_entity_spec.rb @@ -29,6 +29,7 @@ RSpec.describe DeployKeys::DeployKeyEntity do destroyed_when_orphaned: true, almost_orphaned: false, created_at: deploy_key.created_at, + expires_at: deploy_key.expires_at, updated_at: deploy_key.updated_at, can_edit: false, deploy_keys_projects: [ diff --git a/spec/serializers/diff_file_entity_spec.rb 
b/spec/serializers/diff_file_entity_spec.rb index fbb45162136..5eee9c34e1e 100644 --- a/spec/serializers/diff_file_entity_spec.rb +++ b/spec/serializers/diff_file_entity_spec.rb @@ -84,8 +84,8 @@ RSpec.describe DiffFileEntity do let(:options) { { conflicts: {} } } it 'calls diff_lines_for_serializer on diff_file' do - # #diff_lines_for_serializer gets called in #fully_expanded? as well so we expect twice - expect(diff_file).to receive(:diff_lines_for_serializer).twice.and_return([]) + # #diff_lines_for_serializer gets called in #fully_expanded? and whitespace_only as well so we expect three calls + expect(diff_file).to receive(:diff_lines_for_serializer).exactly(3).times.and_return([]) expect(subject[:highlighted_diff_lines]).to eq([]) end end diff --git a/spec/serializers/diff_viewer_entity_spec.rb b/spec/serializers/diff_viewer_entity_spec.rb index 53601fcff61..84d2bdceb78 100644 --- a/spec/serializers/diff_viewer_entity_spec.rb +++ b/spec/serializers/diff_viewer_entity_spec.rb @@ -12,10 +12,51 @@ RSpec.describe DiffViewerEntity do let(:diff) { commit.raw_diffs.first } let(:diff_file) { Gitlab::Diff::File.new(diff, diff_refs: diff_refs, repository: repository) } let(:viewer) { diff_file.simple_viewer } + let(:options) { {} } - subject { described_class.new(viewer).as_json } + subject { described_class.new(viewer).as_json(options) } - it 'serializes diff file viewer' do - expect(subject.with_indifferent_access).to match_schema('entities/diff_viewer') + context 'when add_ignore_all_white_spaces is enabled' do + before do + stub_feature_flags(add_ignore_all_white_spaces: true) + end + + it 'serializes diff file viewer' do + expect(subject.with_indifferent_access).to match_schema('entities/diff_viewer') + end + + it 'contains whitespace_only attribute' do + expect(subject.with_indifferent_access).to include(:whitespace_only) + end + + context 'when whitespace_only option is true' do + let(:options) { { whitespace_only: true } } + + it 'returns the whitespace_only 
attribute true' do + expect(subject.with_indifferent_access[:whitespace_only]).to eq true + end + end + + context 'when whitespace_only option is false' do + let(:options) { { whitespace_only: false } } + + it 'returns the whitespace_only attribute false' do + expect(subject.with_indifferent_access[:whitespace_only]).to eq false + end + end + end + + context 'when add_ignore_all_white_spaces is disabled ' do + before do + stub_feature_flags(add_ignore_all_white_spaces: false) + end + + it 'serializes diff file viewer' do + expect(subject.with_indifferent_access).to match_schema('entities/diff_viewer') + end + + it 'does not contain whitespace_only attribute' do + expect(subject.with_indifferent_access).not_to include(:whitespace_only) + end end end diff --git a/spec/serializers/discussion_diff_file_entity_spec.rb b/spec/serializers/discussion_diff_file_entity_spec.rb index 05438450d78..33c3ebc506f 100644 --- a/spec/serializers/discussion_diff_file_entity_spec.rb +++ b/spec/serializers/discussion_diff_file_entity_spec.rb @@ -32,8 +32,7 @@ RSpec.describe DiscussionDiffFileEntity do end it 'exposes no diff lines' do - expect(subject).not_to include(:highlighted_diff_lines, - :parallel_diff_lines) + expect(subject).not_to include(:highlighted_diff_lines, :parallel_diff_lines) end end end diff --git a/spec/serializers/environment_entity_spec.rb b/spec/serializers/environment_entity_spec.rb index cbe32600941..d5c3f64f52d 100644 --- a/spec/serializers/environment_entity_spec.rb +++ b/spec/serializers/environment_entity_spec.rb @@ -109,11 +109,13 @@ RSpec.describe EnvironmentEntity do context 'when deployment platform is a cluster' do before do - create(:cluster, - :provided_by_gcp, - :project, - environment_scope: '*', - projects: [project]) + create( + :cluster, + :provided_by_gcp, + :project, + environment_scope: '*', + projects: [project] + ) end it 'includes cluster_type' do diff --git a/spec/serializers/environment_serializer_spec.rb 
b/spec/serializers/environment_serializer_spec.rb index 01d1e47b5bb..c85727a08d8 100644 --- a/spec/serializers/environment_serializer_spec.rb +++ b/spec/serializers/environment_serializer_spec.rb @@ -262,8 +262,9 @@ RSpec.describe EnvironmentSerializer do def create_environment_with_associations(project) create(:environment, project: project).tap do |environment| create(:ci_pipeline, project: project).tap do |pipeline| - create(:ci_build, :manual, project: project, pipeline: pipeline, name: 'stop-action', - environment: environment.name) + create( + :ci_build, :manual, project: project, pipeline: pipeline, name: 'stop-action', environment: environment.name + ) create(:ci_build, :scheduled, project: project, pipeline: pipeline, environment: environment.name).tap do |scheduled_build| diff --git a/spec/serializers/group_child_entity_spec.rb b/spec/serializers/group_child_entity_spec.rb index 469189c0768..5af704a42da 100644 --- a/spec/serializers/group_child_entity_spec.rb +++ b/spec/serializers/group_child_entity_spec.rb @@ -43,8 +43,7 @@ RSpec.describe GroupChildEntity do describe 'for a project' do let(:object) do - create(:project, :with_avatar, - description: 'Awesomeness') + create(:project, :with_avatar, description: 'Awesomeness') end before do @@ -73,8 +72,7 @@ RSpec.describe GroupChildEntity do describe 'for a group' do let(:description) { 'Awesomeness' } let(:object) do - create(:group, :nested, :with_avatar, - description: description) + create(:group, :nested, :with_avatar, description: description) end before do @@ -171,8 +169,7 @@ RSpec.describe GroupChildEntity do describe 'for a project with external authorization enabled' do let(:object) do - create(:project, :with_avatar, - description: 'Awesomeness') + create(:project, :with_avatar, description: 'Awesomeness') end before do diff --git a/spec/serializers/group_deploy_key_entity_spec.rb b/spec/serializers/group_deploy_key_entity_spec.rb index e6cef2f10b3..c502923db6a 100644 --- 
a/spec/serializers/group_deploy_key_entity_spec.rb +++ b/spec/serializers/group_deploy_key_entity_spec.rb @@ -25,6 +25,7 @@ RSpec.describe GroupDeployKeyEntity do fingerprint: group_deploy_key.fingerprint, fingerprint_sha256: group_deploy_key.fingerprint_sha256, created_at: group_deploy_key.created_at, + expires_at: group_deploy_key.expires_at, updated_at: group_deploy_key.updated_at, can_edit: false, group_deploy_keys_groups: [ diff --git a/spec/serializers/import/bulk_import_entity_spec.rb b/spec/serializers/import/bulk_import_entity_spec.rb index 3dfc659daf7..f2f8854174a 100644 --- a/spec/serializers/import/bulk_import_entity_spec.rb +++ b/spec/serializers/import/bulk_import_entity_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe Import::BulkImportEntity do +RSpec.describe Import::BulkImportEntity, feature_category: :importers do let(:importable_data) do { 'id' => 1, diff --git a/spec/serializers/issue_board_entity_spec.rb b/spec/serializers/issue_board_entity_spec.rb index 75aee7f04f0..1a9749ab323 100644 --- a/spec/serializers/issue_board_entity_spec.rb +++ b/spec/serializers/issue_board_entity_spec.rb @@ -16,13 +16,17 @@ RSpec.describe IssueBoardEntity do subject { described_class.new(resource, request: request).as_json } it 'has basic attributes' do - expect(subject).to include(:id, :iid, :title, :confidential, :due_date, :project_id, :relative_position, - :labels, :assignees, project: hash_including(:id, :path, :path_with_namespace)) + expect(subject).to include( + :id, :iid, :title, :confidential, :due_date, :project_id, :relative_position, + :labels, :assignees, project: hash_including(:id, :path, :path_with_namespace) + ) end it 'has path and endpoints' do - expect(subject).to include(:reference_path, :real_path, :issue_sidebar_endpoint, - :toggle_subscription_endpoint, :assignable_labels_endpoint) + expect(subject).to include( + :reference_path, :real_path, :issue_sidebar_endpoint, + :toggle_subscription_endpoint, :assignable_labels_endpoint 
+ ) end it 'has milestone attributes' do diff --git a/spec/serializers/issue_entity_spec.rb b/spec/serializers/issue_entity_spec.rb index 795cc357a67..5b0eeaad84a 100644 --- a/spec/serializers/issue_entity_spec.rb +++ b/spec/serializers/issue_entity_spec.rb @@ -31,8 +31,10 @@ RSpec.describe IssueEntity do end it 'has Issuable attributes' do - expect(subject).to include(:id, :iid, :author_id, :description, :lock_version, :milestone_id, - :title, :updated_by_id, :created_at, :updated_at, :milestone, :labels) + expect(subject).to include( + :id, :iid, :author_id, :description, :lock_version, :milestone_id, + :title, :updated_by_id, :created_at, :updated_at, :milestone, :labels + ) end it 'has time estimation attributes' do @@ -41,8 +43,9 @@ RSpec.describe IssueEntity do describe 'current_user' do it 'has the exprected permissions' do - expect(subject[:current_user]).to include(:can_create_note, :can_update, :can_set_issue_metadata, - :can_award_emoji) + expect(subject[:current_user]).to include( + :can_create_note, :can_update, :can_set_issue_metadata, :can_award_emoji + ) end end diff --git a/spec/serializers/issue_sidebar_basic_entity_spec.rb b/spec/serializers/issue_sidebar_basic_entity_spec.rb index 64a271e359a..f24e379ec67 100644 --- a/spec/serializers/issue_sidebar_basic_entity_spec.rb +++ b/spec/serializers/issue_sidebar_basic_entity_spec.rb @@ -44,7 +44,10 @@ RSpec.describe IssueSidebarBasicEntity do context 'for an incident issue' do before do - issue.update!(issue_type: Issue.issue_types[:incident]) + issue.update!( + issue_type: Issue.issue_types[:incident], + work_item_type: WorkItems::Type.default_by_type(:incident) + ) end it 'is present and true' do diff --git a/spec/serializers/merge_request_metrics_helper_spec.rb b/spec/serializers/merge_request_metrics_helper_spec.rb index ec764bf7853..4aba7ff5e9c 100644 --- a/spec/serializers/merge_request_metrics_helper_spec.rb +++ b/spec/serializers/merge_request_metrics_helper_spec.rb @@ -55,12 +55,12 @@ 
RSpec.describe MergeRequestMetricsHelper do closed_event = merge_request.closed_event merge_event = merge_request.merge_event - expect(MergeRequest::Metrics).to receive(:new) - .with(latest_closed_at: closed_event&.updated_at, - latest_closed_by: closed_event&.author, - merged_at: merge_event&.updated_at, - merged_by: merge_event&.author) - .and_call_original + expect(MergeRequest::Metrics).to receive(:new).with( + latest_closed_at: closed_event&.updated_at, + latest_closed_by: closed_event&.author, + merged_at: merge_event&.updated_at, + merged_by: merge_event&.author + ).and_call_original subject end diff --git a/spec/serializers/merge_request_poll_cached_widget_entity_spec.rb b/spec/serializers/merge_request_poll_cached_widget_entity_spec.rb index f883156628a..458d9ecd916 100644 --- a/spec/serializers/merge_request_poll_cached_widget_entity_spec.rb +++ b/spec/serializers/merge_request_poll_cached_widget_entity_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe MergeRequestPollCachedWidgetEntity do +RSpec.describe MergeRequestPollCachedWidgetEntity, feature_category: :code_review_workflow do using RSpec::Parameterized::TableSyntax let_it_be(:project, refind: true) { create :project, :repository } @@ -49,8 +49,9 @@ RSpec.describe MergeRequestPollCachedWidgetEntity do describe 'diverged_commits_count' do context 'when MR open and its diverging' do it 'returns diverged commits count' do - allow(resource).to receive_messages(open?: true, diverged_from_target_branch?: true, - diverged_commits_count: 10) + allow(resource).to receive_messages( + open?: true, diverged_from_target_branch?: true, diverged_commits_count: 10 + ) expect(subject[:diverged_commits_count]).to eq(10) end @@ -330,4 +331,39 @@ RSpec.describe MergeRequestPollCachedWidgetEntity do end end end + + describe 'favicon overlay path' do + context 'when merged' do + before do + resource.mark_as_merged! 
+ resource.metrics.update!(merged_by: user) + end + + it 'returns merged favicon overlay' do + expect(subject[:favicon_overlay_path]).to match_asset_path('/assets/mr_favicons/favicon_status_merged.png') + end + + context 'with pipeline' do + let_it_be(:pipeline) { create(:ci_empty_pipeline, project: project, ref: resource.source_branch, sha: resource.source_branch_sha, head_pipeline_of: resource) } + + it 'returns merged favicon overlay' do + expect(subject[:favicon_overlay_path]).to match_asset_path('/assets/mr_favicons/favicon_status_merged.png') + end + end + end + + context 'when not merged' do + it 'returns no favicon overlay' do + expect(subject[:favicon_overlay_path]).to be_nil + end + + context 'with pipeline' do + let_it_be(:pipeline) { create(:ci_empty_pipeline, project: project, ref: resource.source_branch, sha: resource.source_branch_sha, head_pipeline_of: resource) } + + it 'returns pipeline favicon overlay' do + expect(subject[:favicon_overlay_path]).to match_asset_path('/assets/ci_favicons/favicon_status_pending.png') + end + end + end + end end diff --git a/spec/serializers/merge_request_poll_widget_entity_spec.rb b/spec/serializers/merge_request_poll_widget_entity_spec.rb index 418f629a301..726f35418a1 100644 --- a/spec/serializers/merge_request_poll_widget_entity_spec.rb +++ b/spec/serializers/merge_request_poll_widget_entity_spec.rb @@ -62,9 +62,7 @@ RSpec.describe MergeRequestPollWidgetEntity do context 'when head pipeline is running' do before do - create(:ci_pipeline, :running, project: project, - ref: resource.source_branch, - sha: resource.diff_head_sha) + create(:ci_pipeline, :running, project: project, ref: resource.source_branch, sha: resource.diff_head_sha) resource.update_head_pipeline end @@ -96,9 +94,7 @@ RSpec.describe MergeRequestPollWidgetEntity do context 'when head pipeline is finished' do before do - create(:ci_pipeline, :success, project: project, - ref: resource.source_branch, - sha: resource.diff_head_sha) + 
create(:ci_pipeline, :success, project: project, ref: resource.source_branch, sha: resource.diff_head_sha) resource.update_head_pipeline end diff --git a/spec/serializers/pipeline_details_entity_spec.rb b/spec/serializers/pipeline_details_entity_spec.rb index b4cc0b4db36..71b088e4e0d 100644 --- a/spec/serializers/pipeline_details_entity_spec.rb +++ b/spec/serializers/pipeline_details_entity_spec.rb @@ -37,10 +37,8 @@ RSpec.describe PipelineDetailsEntity, feature_category: :continuous_integration end it 'contains flags' do - expect(subject).to include :flags - expect(subject[:flags]) - .to include :latest, :stuck, - :yaml_errors, :retryable, :cancelable + expect(subject).to include(:flags) + expect(subject[:flags]).to include(:latest, :stuck, :yaml_errors, :retryable, :cancelable) end end diff --git a/spec/serializers/pipeline_serializer_spec.rb b/spec/serializers/pipeline_serializer_spec.rb index 33fee68a2f2..d1c74bd5ec0 100644 --- a/spec/serializers/pipeline_serializer_spec.rb +++ b/spec/serializers/pipeline_serializer_spec.rb @@ -99,21 +99,25 @@ RSpec.describe PipelineSerializer do let(:resource) { Ci::Pipeline.all } let!(:merge_request_1) do - create(:merge_request, - :with_detached_merge_request_pipeline, - target_project: project, - target_branch: 'master', - source_project: project, - source_branch: 'feature') + create( + :merge_request, + :with_detached_merge_request_pipeline, + target_project: project, + target_branch: 'master', + source_project: project, + source_branch: 'feature' + ) end let!(:merge_request_2) do - create(:merge_request, - :with_detached_merge_request_pipeline, - target_project: project, - target_branch: 'master', - source_project: project, - source_branch: '2-mb-file') + create( + :merge_request, + :with_detached_merge_request_pipeline, + target_project: project, + target_branch: 'master', + source_project: project, + source_branch: '2-mb-file' + ) end before_all do @@ -235,11 +239,13 @@ RSpec.describe PipelineSerializer do end def 
create_pipeline(status) - create(:ci_empty_pipeline, - project: project, - status: status, - name: 'Build pipeline', - ref: 'feature').tap do |pipeline| + create( + :ci_empty_pipeline, + project: project, + status: status, + name: 'Build pipeline', + ref: 'feature' + ).tap do |pipeline| Ci::Build::AVAILABLE_STATUSES.each do |build_status| create_build(pipeline, status, build_status) end @@ -247,9 +253,11 @@ RSpec.describe PipelineSerializer do end def create_build(pipeline, stage, status) - create(:ci_build, :tags, :triggered, :artifacts, - pipeline: pipeline, stage: stage, - name: stage, status: status, ref: pipeline.ref) + create( + :ci_build, :tags, :triggered, :artifacts, + pipeline: pipeline, stage: stage, + name: stage, status: status, ref: pipeline.ref + ) end end end diff --git a/spec/services/achievements/award_service_spec.rb b/spec/services/achievements/award_service_spec.rb index fb45a634ddd..c70c1d5c22d 100644 --- a/spec/services/achievements/award_service_spec.rb +++ b/spec/services/achievements/award_service_spec.rb @@ -32,8 +32,15 @@ RSpec.describe Achievements::AwardService, feature_category: :user_profile do context 'when user has permission' do let(:current_user) { maintainer } + let(:notification_service) { instance_double(NotificationService) } + let(:mail_message) { instance_double(ActionMailer::MessageDelivery) } + + it 'creates an achievement and sends an e-mail' do + allow(NotificationService).to receive(:new).and_return(notification_service) + expect(notification_service).to receive(:new_achievement_email).with(recipient, achievement) + .and_return(mail_message) + expect(mail_message).to receive(:deliver_later) - it 'creates an achievement' do expect(response).to be_success end diff --git a/spec/services/achievements/destroy_service_spec.rb b/spec/services/achievements/destroy_service_spec.rb new file mode 100644 index 00000000000..7af10ceec6a --- /dev/null +++ b/spec/services/achievements/destroy_service_spec.rb @@ -0,0 +1,39 @@ +# 
frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe Achievements::DestroyService, feature_category: :user_profile do + describe '#execute' do + let_it_be(:developer) { create(:user) } + let_it_be(:maintainer) { create(:user) } + let_it_be(:group) { create(:group) } + + let(:achievement) { create(:achievement, namespace: group) } + + subject(:response) { described_class.new(current_user, achievement).execute } + + before_all do + group.add_developer(developer) + group.add_maintainer(maintainer) + end + + context 'when user does not have permission' do + let(:current_user) { developer } + + it 'returns an error' do + expect(response).to be_error + expect(response.message).to match_array( + ['You have insufficient permissions to delete this achievement']) + end + end + + context 'when user has permission' do + let(:current_user) { maintainer } + + it 'deletes the achievement' do + expect(response).to be_success + expect(Achievements::Achievement.find_by(id: achievement.id)).to be_nil + end + end + end +end diff --git a/spec/services/achievements/update_service_spec.rb b/spec/services/achievements/update_service_spec.rb new file mode 100644 index 00000000000..6168d60450b --- /dev/null +++ b/spec/services/achievements/update_service_spec.rb @@ -0,0 +1,48 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe Achievements::UpdateService, feature_category: :user_profile do + describe '#execute' do + let_it_be(:user) { create(:user) } + + let(:params) { attributes_for(:achievement, namespace: group) } + + subject(:response) { described_class.new(user, group, params).execute } + + context 'when user does not have permission' do + let_it_be(:group) { create(:group) } + let_it_be(:achievement) { create(:achievement, namespace: group) } + + before_all do + group.add_developer(user) + end + + it 'returns an error' do + expect(response).to be_error + expect(response.message).to match_array( + ['You have insufficient permission to update 
this achievement']) + end + end + + context 'when user has permission' do + let_it_be(:group) { create(:group) } + let_it_be(:achievement) { create(:achievement, namespace: group) } + + before_all do + group.add_maintainer(user) + end + + it 'updates an achievement' do + expect(response).to be_success + end + + it 'returns an error when the achievement cannot be updated' do + params[:name] = nil + + expect(response).to be_error + expect(response.message).to include("Name can't be blank") + end + end + end +end diff --git a/spec/services/boards/issues/list_service_spec.rb b/spec/services/boards/issues/list_service_spec.rb index 5e10d1d216c..4b31a041342 100644 --- a/spec/services/boards/issues/list_service_spec.rb +++ b/spec/services/boards/issues/list_service_spec.rb @@ -57,7 +57,15 @@ RSpec.describe Boards::Issues::ListService, feature_category: :team_planning do end context 'when filtering' do - let_it_be(:incident) { create(:labeled_issue, project: project, milestone: m1, labels: [development, p1], issue_type: 'incident') } + let_it_be(:incident) do + create( + :labeled_issue, + :incident, + project: project, + milestone: m1, + labels: [development, p1] + ) + end context 'when filtering by type' do it 'only returns the specified type' do diff --git a/spec/services/bulk_imports/create_service_spec.rb b/spec/services/bulk_imports/create_service_spec.rb index 7f892cfe722..ff4afd6abd0 100644 --- a/spec/services/bulk_imports/create_service_spec.rb +++ b/spec/services/bulk_imports/create_service_spec.rb @@ -35,6 +35,9 @@ RSpec.describe BulkImports::CreateService, feature_category: :importers do ] end + let(:source_entity_identifier) { ERB::Util.url_encode(params[0][:source_full_path]) } + let(:source_entity_type) { BulkImports::CreateService::ENTITY_TYPES_MAPPING.fetch(params[0][:source_type]) } + subject { described_class.new(user, params, credentials) } describe '#execute' do @@ -59,6 +62,34 @@ RSpec.describe BulkImports::CreateService, feature_category: :importers 
do end end + context 'when direct transfer setting query returns a 404' do + it 'raises a ServiceResponse::Error' do + stub_request(:get, 'http://gitlab.example/api/v4/version?private_token=token').to_return(status: 404) + stub_request(:get, 'http://gitlab.example/api/v4/metadata?private_token=token') + .to_return( + status: 200, + body: source_version.to_json, + headers: { 'Content-Type' => 'application/json' } + ) + stub_request(:get, "http://gitlab.example/api/v4/#{source_entity_type}/#{source_entity_identifier}/export_relations/status?page=1&per_page=30&private_token=token") + .to_return(status: 404) + + expect_next_instance_of(BulkImports::Clients::HTTP) do |client| + expect(client).to receive(:get).and_raise(BulkImports::Error.setting_not_enabled) + end + + result = subject.execute + + expect(result).to be_a(ServiceResponse) + expect(result).to be_error + expect(result.message) + .to eq( + "Group import disabled on source or destination instance. " \ + "Ask an administrator to enable it on both instances and try again." 
+ ) + end + end + context 'when required scopes are not present' do it 'returns ServiceResponse with error if token does not have api scope' do stub_request(:get, 'http://gitlab.example/api/v4/version?private_token=token').to_return(status: 404) @@ -68,9 +99,13 @@ RSpec.describe BulkImports::CreateService, feature_category: :importers do body: source_version.to_json, headers: { 'Content-Type' => 'application/json' } ) + stub_request(:get, "http://gitlab.example/api/v4/#{source_entity_type}/#{source_entity_identifier}/export_relations/status?page=1&per_page=30&private_token=token") + .to_return( + status: 200 + ) allow_next_instance_of(BulkImports::Clients::HTTP) do |client| - allow(client).to receive(:validate_instance_version!).and_raise(BulkImports::Error.scope_validation_failure) + allow(client).to receive(:validate_import_scopes!).and_raise(BulkImports::Error.scope_validation_failure) end result = subject.execute @@ -79,8 +114,8 @@ RSpec.describe BulkImports::CreateService, feature_category: :importers do expect(result).to be_error expect(result.message) .to eq( - "Import aborted as the provided personal access token does not have the required 'api' scope or is " \ - "no longer valid." + "Personal access token does not " \ + "have the required 'api' scope or is no longer valid." 
) end end @@ -90,16 +125,21 @@ RSpec.describe BulkImports::CreateService, feature_category: :importers do stub_request(:get, 'http://gitlab.example/api/v4/version?private_token=token').to_return(status: 404) stub_request(:get, 'http://gitlab.example/api/v4/metadata?private_token=token') .to_return(status: 200, body: source_version.to_json, headers: { 'Content-Type' => 'application/json' }) + stub_request(:get, "http://gitlab.example/api/v4/#{source_entity_type}/#{source_entity_identifier}/export_relations/status?page=1&per_page=30&private_token=token") + .to_return( + status: 200 + ) stub_request(:get, 'http://gitlab.example/api/v4/personal_access_tokens/self?private_token=token') .to_return( status: 200, body: { 'scopes' => ['api'] }.to_json, headers: { 'Content-Type' => 'application/json' } ) + + parent_group.add_owner(user) end it 'creates bulk import' do - parent_group.add_owner(user) expect { subject.execute }.to change { BulkImport.count }.by(1) last_bulk_import = BulkImport.last @@ -111,7 +151,8 @@ RSpec.describe BulkImports::CreateService, feature_category: :importers do expect_snowplow_event( category: 'BulkImports::CreateService', action: 'create', - label: 'bulk_import_group' + label: 'bulk_import_group', + extra: { source_equals_destination: false } ) expect_snowplow_event( @@ -123,6 +164,23 @@ RSpec.describe BulkImports::CreateService, feature_category: :importers do ) end + context 'on the same instance' do + before do + allow(Settings.gitlab).to receive(:base_url).and_return('http://gitlab.example') + end + + it 'tracks the same instance migration' do + expect { subject.execute }.to change { BulkImport.count }.by(1) + + expect_snowplow_event( + category: 'BulkImports::CreateService', + action: 'create', + label: 'bulk_import_group', + extra: { source_equals_destination: true } + ) + end + end + describe 'projects migration flag' do let(:import) { BulkImport.last } @@ -169,11 +227,16 @@ RSpec.describe BulkImports::CreateService, feature_category: 
:importers do allow_next_instance_of(BulkImports::Clients::HTTP) do |instance| allow(instance).to receive(:instance_version).and_return(source_version) allow(instance).to receive(:instance_enterprise).and_return(false) + stub_request(:get, "http://gitlab.example/api/v4/#{source_entity_type}/#{source_entity_identifier}/export_relations/status?page=1&per_page=30&private_token=token") + .to_return( + status: 200 + ) end + + parent_group.add_owner(user) end it 'creates bulk import' do - parent_group.add_owner(user) expect { subject.execute }.to change { BulkImport.count }.by(1) last_bulk_import = BulkImport.last @@ -186,7 +249,8 @@ RSpec.describe BulkImports::CreateService, feature_category: :importers do expect_snowplow_event( category: 'BulkImports::CreateService', action: 'create', - label: 'bulk_import_group' + label: 'bulk_import_group', + extra: { source_equals_destination: false } ) expect_snowplow_event( @@ -198,6 +262,23 @@ RSpec.describe BulkImports::CreateService, feature_category: :importers do ) end + context 'on the same instance' do + before do + allow(Settings.gitlab).to receive(:base_url).and_return('http://gitlab.example') + end + + it 'tracks the same instance migration' do + expect { subject.execute }.to change { BulkImport.count }.by(1) + + expect_snowplow_event( + category: 'BulkImports::CreateService', + action: 'create', + label: 'bulk_import_group', + extra: { source_equals_destination: true } + ) + end + end + it 'creates bulk import entities' do expect { subject.execute }.to change { BulkImports::Entity.count }.by(3) end @@ -227,11 +308,10 @@ RSpec.describe BulkImports::CreateService, feature_category: :importers do expect(result).to be_a(ServiceResponse) expect(result).to be_error expect(result.message).to eq("Validation failed: Source full path can't be blank, " \ - "Source full path cannot start with a non-alphanumeric character except " \ - "for periods or underscores, can contain only alphanumeric characters, " \ - "forward slashes, 
periods, and underscores, cannot end with " \ - "a period or forward slash, and has a relative path structure " \ - "with no http protocol chars or leading or trailing forward slashes") + "Source full path must have a relative path structure with " \ + "no HTTP protocol characters, or leading or trailing forward slashes. " \ + "Path segments must not start or end with a special character, and " \ + "must not contain consecutive special characters.") end describe '#user-role' do @@ -263,6 +343,8 @@ RSpec.describe BulkImports::CreateService, feature_category: :importers do end it 'defines access_level as not a member' do + parent_group.members.delete_all + subject.execute expect_snowplow_event( category: 'BulkImports::CreateService', @@ -325,7 +407,210 @@ RSpec.describe BulkImports::CreateService, feature_category: :importers do end end - describe '.validate_destination_full_path' do + describe '#validate_setting_enabled!' do + let(:entity_source_id) { 'gid://gitlab/Model/12345' } + let(:graphql_client) { instance_double(BulkImports::Clients::Graphql) } + let(:http_client) { instance_double(BulkImports::Clients::HTTP) } + let(:http_response) { double(code: 200, success?: true) } # rubocop:disable RSpec/VerifiedDoubles + + before do + allow(BulkImports::Clients::HTTP).to receive(:new).and_return(http_client) + allow(BulkImports::Clients::Graphql).to receive(:new).and_return(graphql_client) + + allow(http_client).to receive(:instance_version).and_return(status: 200) + allow(http_client).to receive(:instance_enterprise).and_return(false) + allow(http_client).to receive(:validate_instance_version!).and_return(source_version) + allow(http_client).to receive(:validate_import_scopes!).and_return(true) + end + + context 'when the source_type is a group' do + context 'when the source_full_path contains only integer characters' do + let(:query_string) { BulkImports::Groups::Graphql::GetGroupQuery.new(context: nil).to_s } + let(:graphql_response) do + double(original_hash: { 
'data' => { 'group' => { 'id' => entity_source_id } } }) # rubocop:disable RSpec/VerifiedDoubles + end + + let(:params) do + [ + { + source_type: 'group_entity', + source_full_path: '67890', + destination_slug: 'destination-group-1', + destination_namespace: 'destination1' + } + ] + end + + before do + allow(graphql_client).to receive(:parse).with(query_string) + allow(graphql_client).to receive(:execute).and_return(graphql_response) + + allow(http_client).to receive(:get) + .with("/groups/12345/export_relations/status") + .and_return(http_response) + + stub_request(:get, "http://gitlab.example/api/v4/groups/12345/export_relations/status?page=1&per_page=30&private_token=token") + .to_return(status: 200, body: "", headers: {}) + end + + it 'makes a graphql request using the group full path and an http request with the correct id' do + expect(graphql_client).to receive(:parse).with(query_string) + expect(graphql_client).to receive(:execute).and_return(graphql_response) + + expect(http_client).to receive(:get).with("/groups/12345/export_relations/status") + + subject.execute + end + end + end + + context 'when the source_type is a project' do + context 'when the source_full_path contains only integer characters' do + let(:query_string) { BulkImports::Projects::Graphql::GetProjectQuery.new(context: nil).to_s } + let(:graphql_response) do + double(original_hash: { 'data' => { 'project' => { 'id' => entity_source_id } } }) # rubocop:disable RSpec/VerifiedDoubles + end + + let(:params) do + [ + { + source_type: 'project_entity', + source_full_path: '67890', + destination_slug: 'destination-group-1', + destination_namespace: 'destination1' + } + ] + end + + before do + allow(graphql_client).to receive(:parse).with(query_string) + allow(graphql_client).to receive(:execute).and_return(graphql_response) + + allow(http_client).to receive(:get) + .with("/projects/12345/export_relations/status") + .and_return(http_response) + + stub_request(:get, 
"http://gitlab.example/api/v4/projects/12345/export_relations/status?page=1&per_page=30&private_token=token") + .to_return(status: 200, body: "", headers: {}) + end + + it 'makes a graphql request using the group full path and an http request with the correct id' do + expect(graphql_client).to receive(:parse).with(query_string) + expect(graphql_client).to receive(:execute).and_return(graphql_response) + + expect(http_client).to receive(:get).with("/projects/12345/export_relations/status") + + subject.execute + end + end + end + end + + describe '#validate_destination_namespace' do + context 'when the destination_namespace does not exist' do + let(:params) do + [ + { + source_type: 'group_entity', + source_full_path: 'full/path/to/source', + destination_slug: 'destination-slug', + destination_namespace: 'destination-namespace', + migrate_projects: migrate_projects + } + ] + end + + it 'returns ServiceResponse with an error message' do + result = subject.execute + + expect(result).to be_a(ServiceResponse) + expect(result).to be_error + expect(result.message) + .to eq("Import failed. Destination 'destination-namespace' is invalid, or you don't have permission.") + end + end + + context 'when the user does not have permission to create subgroups' do + let(:params) do + [ + { + source_type: 'group_entity', + source_full_path: 'full/path/to/source', + destination_slug: 'destination-slug', + destination_namespace: parent_group.path, + migrate_projects: migrate_projects + } + ] + end + + it 'returns ServiceResponse with an error message' do + parent_group.members.delete_all + + result = subject.execute + + expect(result).to be_a(ServiceResponse) + expect(result).to be_error + expect(result.message) + .to eq("Import failed. 
Destination '#{parent_group.path}' is invalid, or you don't have permission.") + end + end + + context 'when the user does not have permission to create projects' do + let(:params) do + [ + { + source_type: 'project_entity', + source_full_path: 'full/path/to/source', + destination_slug: 'destination-slug', + destination_namespace: parent_group.path, + migrate_projects: migrate_projects + } + ] + end + + it 'returns ServiceResponse with an error message' do + parent_group.members.delete_all + + result = subject.execute + + expect(result).to be_a(ServiceResponse) + expect(result).to be_error + expect(result.message) + .to eq("Import failed. Destination '#{parent_group.path}' is invalid, or you don't have permission.") + end + end + end + + describe '#validate_destination_slug' do + context 'when the destination_slug is invalid' do + let(:params) do + [ + { + source_type: 'group_entity', + source_full_path: 'full/path/to/source', + destination_slug: 'destin-*-ation-slug', + destination_namespace: parent_group.path, + migrate_projects: migrate_projects + } + ] + end + + it 'returns ServiceResponse with an error message' do + result = subject.execute + + expect(result).to be_a(ServiceResponse) + expect(result).to be_error + expect(result.message) + .to eq( + "Import failed. Destination URL " \ + "must not start or end with a special character and must " \ + "not contain consecutive special characters." + ) + end + end + end + + describe '#validate_destination_full_path' do context 'when the source_type is a group' do context 'when the provided destination_slug already exists in the destination_namespace' do let_it_be(:existing_subgroup) { create(:group, path: 'existing-subgroup', parent_id: parent_group.id ) } @@ -349,7 +634,7 @@ RSpec.describe BulkImports::CreateService, feature_category: :importers do expect(result).to be_error expect(result.message) .to eq( - "Import aborted as 'parent-group/existing-subgroup' already exists. " \ + "Import failed. 
'parent-group/existing-subgroup' already exists. " \ "Change the destination and try again." ) end @@ -376,7 +661,7 @@ RSpec.describe BulkImports::CreateService, feature_category: :importers do expect(result).to be_error expect(result.message) .to eq( - "Import aborted as 'top-level-group' already exists. " \ + "Import failed. 'top-level-group' already exists. " \ "Change the destination and try again." ) end @@ -421,13 +706,15 @@ RSpec.describe BulkImports::CreateService, feature_category: :importers do end it 'returns ServiceResponse with an error message' do + existing_group.add_owner(user) + result = subject.execute expect(result).to be_a(ServiceResponse) expect(result).to be_error expect(result.message) .to eq( - "Import aborted as 'existing-group/existing-project' already exists. " \ + "Import failed. 'existing-group/existing-project' already exists. " \ "Change the destination and try again." ) end @@ -448,6 +735,8 @@ RSpec.describe BulkImports::CreateService, feature_category: :importers do end it 'returns success ServiceResponse' do + existing_group.add_owner(user) + result = subject.execute expect(result).to be_a(ServiceResponse) diff --git a/spec/services/bulk_update_integration_service_spec.rb b/spec/services/bulk_update_integration_service_spec.rb index 260eed3c734..9095fa9a0fa 100644 --- a/spec/services/bulk_update_integration_service_spec.rb +++ b/spec/services/bulk_update_integration_service_spec.rb @@ -56,14 +56,14 @@ RSpec.describe BulkUpdateIntegrationService, feature_category: :integrations do end it 'does not change the created_at timestamp' do - subgroup_integration.update_column(:created_at, Time.utc('2022-01-01')) + subgroup_integration.update_column(:created_at, Time.utc(2022, 1, 1)) expect do described_class.new(subgroup_integration, batch).execute end.not_to change { integration.reload.created_at } end - it 'sets the updated_at timestamp to the current time', time_travel_to: Time.utc('2022-01-01') do + it 'sets the updated_at timestamp to 
the current time', time_travel_to: Time.utc(2022, 1, 1) do expect do described_class.new(subgroup_integration, batch).execute end.to change { integration.reload.updated_at }.to(Time.current) @@ -85,14 +85,14 @@ RSpec.describe BulkUpdateIntegrationService, feature_category: :integrations do end it 'does not change the created_at timestamp' do - subgroup_integration.data_fields.update_column(:created_at, Time.utc('2022-01-02')) + subgroup_integration.data_fields.update_column(:created_at, Time.utc(2022, 1, 2)) expect do described_class.new(subgroup_integration, batch).execute end.not_to change { integration.data_fields.reload.created_at } end - it 'sets the updated_at timestamp to the current time', time_travel_to: Time.utc('2022-01-01') do + it 'sets the updated_at timestamp to the current time', time_travel_to: Time.utc(2022, 1, 1) do expect do described_class.new(subgroup_integration, batch).execute end.to change { integration.data_fields.reload.updated_at }.to(Time.current) diff --git a/spec/services/ci/archive_trace_service_spec.rb b/spec/services/ci/archive_trace_service_spec.rb index 3fb9d092ae7..e6e589c174b 100644 --- a/spec/services/ci/archive_trace_service_spec.rb +++ b/spec/services/ci/archive_trace_service_spec.rb @@ -63,19 +63,6 @@ RSpec.describe Ci::ArchiveTraceService, '#execute', feature_category: :continuou end end - context 'when job does not have trace' do - let(:job) { create(:ci_build, :success) } - - it 'leaves a warning message in sidekiq log' do - expect(Sidekiq.logger).to receive(:warn).with( - class: Ci::ArchiveTraceWorker.name, - message: 'The job does not have live trace but going to be archived.', - job_id: job.id) - - subject - end - end - context 'when the job is out of archival attempts' do before do create(:ci_build_trace_metadata, @@ -149,23 +136,6 @@ RSpec.describe Ci::ArchiveTraceService, '#execute', feature_category: :continuou subject end end - - context 'when job failed to archive trace but did not raise an exception' do - 
before do - allow_next_instance_of(Gitlab::Ci::Trace) do |instance| - allow(instance).to receive(:archive!) {} - end - end - - it 'leaves a warning message in sidekiq log' do - expect(Sidekiq.logger).to receive(:warn).with( - class: Ci::ArchiveTraceWorker.name, - message: 'The job does not have archived trace after archiving.', - job_id: job.id) - - subject - end - end end context 'when job is running' do diff --git a/spec/services/ci/catalog/add_resource_service_spec.rb b/spec/services/ci/catalog/add_resource_service_spec.rb deleted file mode 100644 index ecb939e3c2d..00000000000 --- a/spec/services/ci/catalog/add_resource_service_spec.rb +++ /dev/null @@ -1,55 +0,0 @@ -# frozen_string_literal: true - -require 'spec_helper' - -RSpec.describe Ci::Catalog::AddResourceService, feature_category: :pipeline_composition do - let_it_be(:project) { create(:project, :repository, description: 'Our components') } - let_it_be(:user) { create(:user) } - - let(:service) { described_class.new(project, user) } - - describe '#execute' do - context 'with an unauthorized user' do - it 'raises an AccessDeniedError' do - expect { service.execute }.to raise_error(Gitlab::Access::AccessDeniedError) - end - end - - context 'with an authorized user' do - before do - project.add_owner(user) - end - - context 'and a valid project' do - it 'creates a catalog resource' do - response = service.execute - - expect(response.payload.project).to eq(project) - end - end - - context 'with an invalid project' do - let_it_be(:project) { create(:project, :repository) } - - it 'does not create a catalog resource' do - response = service.execute - - expect(response.message).to eq('Project must have a description') - end - end - - context 'with an invalid catalog resource' do - it 'does not save the catalog resource' do - catalog_resource = instance_double(::Ci::Catalog::Resource, - valid?: false, - errors: instance_double(ActiveModel::Errors, full_messages: ['not valid'])) - 
allow(::Ci::Catalog::Resource).to receive(:new).and_return(catalog_resource) - - response = service.execute - - expect(response.message).to eq('not valid') - end - end - end - end -end diff --git a/spec/services/ci/change_variable_service_spec.rb b/spec/services/ci/change_variable_service_spec.rb index a9f9e4233d7..fd2ddded375 100644 --- a/spec/services/ci/change_variable_service_spec.rb +++ b/spec/services/ci/change_variable_service_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe Ci::ChangeVariableService, feature_category: :pipeline_composition do +RSpec.describe Ci::ChangeVariableService, feature_category: :secrets_management do let(:service) { described_class.new(container: group, current_user: user, params: params) } let_it_be(:user) { create(:user) } diff --git a/spec/services/ci/change_variables_service_spec.rb b/spec/services/ci/change_variables_service_spec.rb index 1bc36a78762..e22aebb8f5d 100644 --- a/spec/services/ci/change_variables_service_spec.rb +++ b/spec/services/ci/change_variables_service_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe Ci::ChangeVariablesService, feature_category: :pipeline_composition do +RSpec.describe Ci::ChangeVariablesService, feature_category: :secrets_management do let(:service) { described_class.new(container: group, current_user: user, params: params) } let_it_be(:user) { create(:user) } diff --git a/spec/services/ci/create_pipeline_service/variables_spec.rb b/spec/services/ci/create_pipeline_service/variables_spec.rb index 64f8b90f2f2..aac9a0c9c2d 100644 --- a/spec/services/ci/create_pipeline_service/variables_spec.rb +++ b/spec/services/ci/create_pipeline_service/variables_spec.rb @@ -3,7 +3,7 @@ require 'spec_helper' RSpec.describe Ci::CreatePipelineService, :yaml_processor_feature_flag_corectness, - feature_category: :pipeline_composition do + feature_category: :secrets_management do let_it_be(:project) { create(:project, :repository) } let_it_be(:user) { project.first_owner } diff --git 
a/spec/services/ci/create_pipeline_service_spec.rb b/spec/services/ci/create_pipeline_service_spec.rb index b0ba07ea295..9e1a1a9e445 100644 --- a/spec/services/ci/create_pipeline_service_spec.rb +++ b/spec/services/ci/create_pipeline_service_spec.rb @@ -794,7 +794,7 @@ RSpec.describe Ci::CreatePipelineService, :yaml_processor_feature_flag_corectnes before do config = YAML.dump( deploy: { - environment: { name: "review/id1$CI_PIPELINE_ID/id2$CI_BUILD_ID" }, + environment: { name: "review/id1$CI_PIPELINE_ID/id2$CI_JOB_ID" }, script: 'ls' } ) @@ -802,7 +802,7 @@ RSpec.describe Ci::CreatePipelineService, :yaml_processor_feature_flag_corectnes stub_ci_pipeline_yaml_file(config) end - it 'skipps persisted variables in environment name' do + it 'skips persisted variables in environment name' do result = execute_service.payload expect(result).to be_persisted @@ -810,6 +810,32 @@ RSpec.describe Ci::CreatePipelineService, :yaml_processor_feature_flag_corectnes end end + context 'when FF `ci_remove_legacy_predefined_variables` is disabled' do + before do + stub_feature_flags(ci_remove_legacy_predefined_variables: false) + end + + context 'with environment name including persisted variables' do + before do + config = YAML.dump( + deploy: { + environment: { name: "review/id1$CI_PIPELINE_ID/id2$CI_BUILD_ID" }, + script: 'ls' + } + ) + + stub_ci_pipeline_yaml_file(config) + end + + it 'skips persisted variables in environment name' do + result = execute_service.payload + + expect(result).to be_persisted + expect(Environment.find_by(name: "review/id1/id2")).to be_present + end + end + end + context 'environment with Kubernetes configuration' do let(:kubernetes_namespace) { 'custom-namespace' } @@ -1898,5 +1924,141 @@ RSpec.describe Ci::CreatePipelineService, :yaml_processor_feature_flag_corectnes end end end + + describe 'pipeline components' do + let(:components_project) do + create(:project, :repository, creator: user, namespace: user.namespace) + end + + let(:component_path) do 
+ "#{Gitlab.config.gitlab.host}/#{components_project.full_path}/my-component@v0.1" + end + + let(:template) do + <<~YAML + spec: + inputs: + stage: + suffix: + default: my-job + --- + test-$[[ inputs.suffix ]]: + stage: $[[ inputs.stage ]] + script: run tests + YAML + end + + let(:sha) do + components_project.repository.create_file( + user, + 'my-component/template.yml', + template, + message: 'Add my first CI component', + branch_name: 'master' + ) + end + + let(:config) do + <<~YAML + include: + - component: #{component_path} + with: + stage: my-stage + + stages: + - my-stage + + test-1: + stage: my-stage + script: run test-1 + YAML + end + + before do + stub_ci_pipeline_yaml_file(config) + end + + context 'when there is no version with specified tag' do + before do + components_project.repository.add_tag(user, 'v0.01', sha) + end + + it 'does not create a pipeline' do + response = execute_service(save_on_errors: true) + + pipeline = response.payload + + expect(pipeline).to be_persisted + expect(pipeline.yaml_errors) + .to include "my-component@v0.1' - content not found" + end + end + + context 'when there is a proper revision available' do + before do + components_project.repository.add_tag(user, 'v0.1', sha) + end + + context 'when component is valid' do + it 'creates a pipeline using a pipeline component' do + response = execute_service(save_on_errors: true) + + pipeline = response.payload + + expect(pipeline).to be_persisted + expect(pipeline.yaml_errors).to be_blank + expect(pipeline.statuses.count).to eq 2 + expect(pipeline.statuses.map(&:name)).to match_array %w[test-1 test-my-job] + end + end + + context 'when interpolation is invalid' do + let(:template) do + <<~YAML + spec: + inputs: + stage: + --- + test: + stage: $[[ inputs.stage ]] + script: rspec --suite $[[ inputs.suite ]] + YAML + end + + it 'does not create a pipeline' do + response = execute_service(save_on_errors: true) + + pipeline = response.payload + + expect(pipeline).to be_persisted + 
expect(pipeline.yaml_errors) + .to include 'interpolation interrupted by errors, unknown interpolation key: `suite`' + end + end + + context 'when there is a syntax error in the template' do + let(:template) do + <<~YAML + spec: + inputs: + stage: + --- + :test + stage: $[[ inputs.stage ]] + YAML + end + + it 'does not create a pipeline' do + response = execute_service(save_on_errors: true) + + pipeline = response.payload + + expect(pipeline).to be_persisted + expect(pipeline.yaml_errors) + .to include 'content does not have a valid YAML syntax' + end + end + end + end end end diff --git a/spec/services/ci/delete_objects_service_spec.rb b/spec/services/ci/delete_objects_service_spec.rb index d84ee596721..939b72cef3b 100644 --- a/spec/services/ci/delete_objects_service_spec.rb +++ b/spec/services/ci/delete_objects_service_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe Ci::DeleteObjectsService, :aggregate_failure, feature_category: :continuous_integration do +RSpec.describe Ci::DeleteObjectsService, :aggregate_failures, feature_category: :continuous_integration do let(:service) { described_class.new } let(:artifact) { create(:ci_job_artifact, :archive) } let(:data) { [artifact] } diff --git a/spec/services/ci/generate_kubeconfig_service_spec.rb b/spec/services/ci/generate_kubeconfig_service_spec.rb index da18dfe04c3..a03c6ef0c9d 100644 --- a/spec/services/ci/generate_kubeconfig_service_spec.rb +++ b/spec/services/ci/generate_kubeconfig_service_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe Ci::GenerateKubeconfigService, feature_category: :kubernetes_management do +RSpec.describe Ci::GenerateKubeconfigService, feature_category: :deployment_management do describe '#execute' do let_it_be(:group) { create(:group) } let_it_be(:project) { create(:project, group: group) } @@ -13,12 +13,12 @@ RSpec.describe Ci::GenerateKubeconfigService, feature_category: :kubernetes_mana let_it_be(:project_agent_authorization) do agent = create(:cluster_agent, 
project: agent_project) - create(:agent_project_authorization, agent: agent, project: project) + create(:agent_ci_access_project_authorization, agent: agent, project: project) end let_it_be(:group_agent_authorization) do agent = create(:cluster_agent, project: agent_project) - create(:agent_group_authorization, agent: agent, group: group) + create(:agent_ci_access_group_authorization, agent: agent, group: group) end let(:template) do @@ -33,7 +33,7 @@ RSpec.describe Ci::GenerateKubeconfigService, feature_category: :kubernetes_mana let(:agent_authorizations) { [project_agent_authorization, group_agent_authorization] } let(:filter_service) do instance_double( - ::Clusters::Agents::FilterAuthorizationsService, + ::Clusters::Agents::Authorizations::CiAccess::FilterService, execute: agent_authorizations ) end @@ -42,7 +42,7 @@ RSpec.describe Ci::GenerateKubeconfigService, feature_category: :kubernetes_mana before do allow(Gitlab::Kubernetes::Kubeconfig::Template).to receive(:new).and_return(template) - allow(::Clusters::Agents::FilterAuthorizationsService).to receive(:new).and_return(filter_service) + allow(::Clusters::Agents::Authorizations::CiAccess::FilterService).to receive(:new).and_return(filter_service) end it 'returns a Kubeconfig Template' do @@ -59,7 +59,7 @@ RSpec.describe Ci::GenerateKubeconfigService, feature_category: :kubernetes_mana end it "filters the pipeline's agents by `nil` environment" do - expect(::Clusters::Agents::FilterAuthorizationsService).to receive(:new).with( + expect(::Clusters::Agents::Authorizations::CiAccess::FilterService).to receive(:new).with( pipeline.cluster_agent_authorizations, environment: nil ) @@ -89,7 +89,7 @@ RSpec.describe Ci::GenerateKubeconfigService, feature_category: :kubernetes_mana subject(:execute) { described_class.new(pipeline, token: build.token, environment: 'production').execute } it "filters the pipeline's agents by the specified environment" do - expect(::Clusters::Agents::FilterAuthorizationsService).to 
receive(:new).with( + expect(::Clusters::Agents::Authorizations::CiAccess::FilterService).to receive(:new).with( pipeline.cluster_agent_authorizations, environment: 'production' ) diff --git a/spec/services/ci/job_artifacts/create_service_spec.rb b/spec/services/ci/job_artifacts/create_service_spec.rb index 69f760e28ca..5d9f30c11eb 100644 --- a/spec/services/ci/job_artifacts/create_service_spec.rb +++ b/spec/services/ci/job_artifacts/create_service_spec.rb @@ -2,160 +2,187 @@ require 'spec_helper' -RSpec.describe Ci::JobArtifacts::CreateService, feature_category: :build_artifacts do +RSpec.describe Ci::JobArtifacts::CreateService, :clean_gitlab_redis_shared_state, feature_category: :build_artifacts do + include WorkhorseHelpers + include Gitlab::Utils::Gzip + let_it_be(:project) { create(:project) } let(:service) { described_class.new(job) } let(:job) { create(:ci_build, project: project) } - let(:artifacts_sha256) { '0' * 64 } - let(:metadata_file) { nil } - - let(:artifacts_file) do - file_to_upload('spec/fixtures/ci_build_artifacts.zip', sha256: artifacts_sha256) - end - - let(:params) do - { - 'artifact_type' => 'archive', - 'artifact_format' => 'zip' - }.with_indifferent_access - end - - def file_to_upload(path, params = {}) - upload = Tempfile.new('upload') - FileUtils.copy(path, upload.path) - # This is a workaround for https://github.com/docker/for-linux/issues/1015 - FileUtils.touch(upload.path) - UploadedFile.new(upload.path, **params) - end + describe '#authorize', :aggregate_failures do + let(:artifact_type) { 'archive' } + let(:filesize) { nil } - describe '#execute' do - subject { service.execute(artifacts_file, params, metadata_file: metadata_file) } - - def expect_accessibility_be(accessibility) - if accessibility == :public - expect(job.job_artifacts).to all be_public_accessibility - else - expect(job.job_artifacts).to all be_private_accessibility - end - end + subject(:authorize) { service.authorize(artifact_type: artifact_type, filesize: 
filesize) } - shared_examples 'job does not have public artifacts in the CI config' do |expected_artifacts_count, accessibility| - it "sets accessibility by default to #{accessibility}" do - expect { subject }.to change { Ci::JobArtifact.count }.by(expected_artifacts_count) + shared_examples_for 'handling lsif artifact' do + context 'when artifact is lsif' do + let(:artifact_type) { 'lsif' } - expect_accessibility_be(accessibility) + it 'includes ProcessLsif in the headers' do + expect(authorize[:headers][:ProcessLsif]).to eq(true) + end end end - shared_examples 'job artifact set as private in the CI config' do |expected_artifacts_count, accessibility| - let!(:job) { create(:ci_build, :with_private_artifacts_config, project: project) } + shared_examples_for 'validating requirements' do + context 'when filesize is specified' do + let(:max_artifact_size) { 10 } - it "sets accessibility to #{accessibility}" do - expect { subject }.to change { Ci::JobArtifact.count }.by(expected_artifacts_count) + before do + allow(Ci::JobArtifact) + .to receive(:max_artifact_size) + .with(type: artifact_type, project: project) + .and_return(max_artifact_size) + end - expect_accessibility_be(accessibility) - end - end + context 'and filesize exceeds the limit' do + let(:filesize) { max_artifact_size + 1 } - shared_examples 'job artifact set as public in the CI config' do |expected_artifacts_count, accessibility| - let!(:job) { create(:ci_build, :with_public_artifacts_config, project: project) } + it 'returns error' do + expect(authorize[:status]).to eq(:error) + end + end - it "sets accessibility to #{accessibility}" do - expect { subject }.to change { Ci::JobArtifact.count }.by(expected_artifacts_count) + context 'and filesize does not exceed the limit' do + let(:filesize) { max_artifact_size - 1 } - expect_accessibility_be(accessibility) + it 'returns success' do + expect(authorize[:status]).to eq(:success) + end + end end end - shared_examples 'when accessibility level passed as 
private' do |expected_artifacts_count, accessibility| - before do - params.merge!('accessibility' => 'private') + shared_examples_for 'uploading to temp location' do |store_type| + # We are not testing the entire headers here because this is fully tested + # in workhorse_authorize's spec. We just want to confirm that it indeed used the temp path + # by checking some indicators in the headers returned. + if store_type == :object_storage + it 'includes the authorize headers' do + expect(authorize[:status]).to eq(:success) + expect(authorize[:headers][:RemoteObject][:StoreURL]).to include(ObjectStorage::TMP_UPLOAD_PATH) + end + else + it 'includes the authorize headers' do + expect(authorize[:status]).to eq(:success) + expect(authorize[:headers][:TempPath]).to include(ObjectStorage::TMP_UPLOAD_PATH) + end end - it 'sets accessibility to private level' do - expect { subject }.to change { Ci::JobArtifact.count }.by(expected_artifacts_count) - - expect_accessibility_be(accessibility) - end + it_behaves_like 'handling lsif artifact' + it_behaves_like 'validating requirements' end - shared_examples 'when accessibility passed as public' do |expected_artifacts_count| - before do - params.merge!('accessibility' => 'public') + context 'when object storage is enabled' do + context 'and direct upload is enabled' do + before do + stub_artifacts_object_storage(JobArtifactUploader, direct_upload: true) + end + + it_behaves_like 'uploading to temp location', :object_storage end - it 'sets accessibility level to public' do - expect { subject }.to change { Ci::JobArtifact.count }.by(expected_artifacts_count) + context 'and direct upload is disabled' do + before do + stub_artifacts_object_storage(JobArtifactUploader, direct_upload: false) + end - expect(job.job_artifacts).to all be_public_accessibility + it_behaves_like 'uploading to temp location', :local_storage end end - context 'when artifacts file is uploaded' do - it 'logs the created artifact' do - 
expect(Gitlab::Ci::Artifacts::Logger) - .to receive(:log_created) - .with(an_instance_of(Ci::JobArtifact)) + context 'when object storage is disabled' do + it_behaves_like 'uploading to temp location', :local_storage + end + end - subject - end + describe '#execute' do + let(:artifacts_sha256) { '0' * 64 } + let(:metadata_file) { nil } + + let(:params) do + { + 'artifact_type' => 'archive', + 'artifact_format' => 'zip' + }.with_indifferent_access + end - it 'returns artifact in the response' do - response = subject - new_artifact = job.job_artifacts.last + subject(:execute) { service.execute(artifacts_file, params, metadata_file: metadata_file) } - expect(response[:artifact]).to eq(new_artifact) + shared_examples_for 'handling accessibility' do + shared_examples 'public accessibility' do + it 'sets accessibility to public level' do + expect(job.job_artifacts).to all be_public_accessibility + end end - it 'saves artifact for the given type' do - expect { subject }.to change { Ci::JobArtifact.count }.by(1) - - new_artifact = job.job_artifacts.last - expect(new_artifact.project).to eq(job.project) - expect(new_artifact.file).to be_present - expect(new_artifact.file_type).to eq(params['artifact_type']) - expect(new_artifact.file_format).to eq(params['artifact_format']) - expect(new_artifact.file_sha256).to eq(artifacts_sha256) - expect(new_artifact.locked).to eq(job.pipeline.locked) + shared_examples 'private accessibility' do + it 'sets accessibility to private level' do + expect(job.job_artifacts).to all be_private_accessibility + end end - context 'when non_public_artifacts feature flag is disabled' do + context 'when non_public_artifacts flag is disabled' do before do stub_feature_flags(non_public_artifacts: false) end - context 'when accessibility level not passed to the service' do - it_behaves_like 'job does not have public artifacts in the CI config', 1, :public - it_behaves_like 'job artifact set as private in the CI config', 1, :public - it_behaves_like 'job 
artifact set as public in the CI config', 1, :public + it_behaves_like 'public accessibility' + end + + context 'when non_public_artifacts flag is enabled' do + context 'and accessibility is defined in the params' do + context 'and is passed as private' do + before do + params.merge!('accessibility' => 'private') + end + + it_behaves_like 'private accessibility' + end + + context 'and is passed as public' do + before do + params.merge!('accessibility' => 'public') + end + + it_behaves_like 'public accessibility' + end end - it_behaves_like 'when accessibility level passed as private', 1, :public - it_behaves_like 'when accessibility passed as public', 1 + context 'and accessibility is not defined in the params' do + context 'and job has no public artifacts defined in its CI config' do + it_behaves_like 'public accessibility' + end + + context 'and job artifacts defined as private in the CI config' do + let(:job) { create(:ci_build, :with_private_artifacts_config, project: project) } + + it_behaves_like 'private accessibility' + end + + context 'and job artifacts defined as public in the CI config' do + let(:job) { create(:ci_build, :with_public_artifacts_config, project: project) } + + it_behaves_like 'public accessibility' + end + end end context 'when accessibility passed as invalid value' do before do - params.merge!('accessibility' => 'invalid_value') + params.merge!('accessibility' => 'foo') end it 'fails with argument error' do - expect { subject }.to raise_error(ArgumentError) + expect { execute }.to raise_error(ArgumentError, "'foo' is not a valid accessibility") end end + end - context 'when accessibility level not passed to the service' do - it_behaves_like 'job does not have public artifacts in the CI config', 1, :public - it_behaves_like 'job artifact set as private in the CI config', 1, :private - it_behaves_like 'job artifact set as public in the CI config', 1, :public - end - - it_behaves_like 'when accessibility level passed as private', 1, :private 
- - it_behaves_like 'when accessibility passed as public', 1 - + shared_examples_for 'handling metadata file' do context 'when metadata file is also uploaded' do let(:metadata_file) do file_to_upload('spec/fixtures/ci_build_artifacts_metadata.gz', sha256: artifacts_sha256) @@ -165,8 +192,8 @@ RSpec.describe Ci::JobArtifacts::CreateService, feature_category: :build_artifac stub_application_setting(default_artifacts_expire_in: '1 day') end - it 'saves metadata artifact' do - expect { subject }.to change { Ci::JobArtifact.count }.by(2) + it 'creates a new metadata job artifact' do + expect { execute }.to change { Ci::JobArtifact.where(file_type: :metadata).count }.by(1) new_artifact = job.job_artifacts.last expect(new_artifact.project).to eq(job.project) @@ -177,16 +204,6 @@ RSpec.describe Ci::JobArtifacts::CreateService, feature_category: :build_artifac expect(new_artifact.locked).to eq(job.pipeline.locked) end - context 'when accessibility level not passed to the service' do - it_behaves_like 'job does not have public artifacts in the CI config', 2, :public - it_behaves_like 'job artifact set as private in the CI config', 2, :private - it_behaves_like 'job artifact set as public in the CI config', 2, :public - end - - it_behaves_like 'when accessibility level passed as private', 2, :privatge - - it_behaves_like 'when accessibility passed as public', 2 - it 'logs the created artifact and metadata' do expect(Gitlab::Ci::Artifacts::Logger) .to receive(:log_created) @@ -195,10 +212,12 @@ RSpec.describe Ci::JobArtifacts::CreateService, feature_category: :build_artifac subject end + it_behaves_like 'handling accessibility' + it 'sets expiration date according to application settings' do expected_expire_at = 1.day.from_now - expect(subject).to match(a_hash_including(status: :success, artifact: anything)) + expect(execute).to match(a_hash_including(status: :success, artifact: anything)) archive_artifact, metadata_artifact = job.job_artifacts.last(2) 
expect(job.artifacts_expire_at).to be_within(1.minute).of(expected_expire_at) @@ -214,7 +233,7 @@ RSpec.describe Ci::JobArtifacts::CreateService, feature_category: :build_artifac it 'sets expiration date according to the parameter' do expected_expire_at = 2.hours.from_now - expect(subject).to match(a_hash_including(status: :success, artifact: anything)) + expect(execute).to match(a_hash_including(status: :success, artifact: anything)) archive_artifact, metadata_artifact = job.job_artifacts.last(2) expect(job.artifacts_expire_at).to be_within(1.minute).of(expected_expire_at) @@ -231,7 +250,7 @@ RSpec.describe Ci::JobArtifacts::CreateService, feature_category: :build_artifac it 'sets expiration date according to the parameter' do expected_expire_at = nil - expect(subject).to be_truthy + expect(execute).to be_truthy archive_artifact, metadata_artifact = job.job_artifacts.last(2) expect(job.artifacts_expire_at).to eq(expected_expire_at) @@ -242,96 +261,237 @@ RSpec.describe Ci::JobArtifacts::CreateService, feature_category: :build_artifac end end - context 'when artifacts file already exists' do - let!(:existing_artifact) do - create(:ci_job_artifact, :archive, file_sha256: existing_sha256, job: job) - end + shared_examples_for 'handling dotenv' do |storage_type| + context 'when artifact type is dotenv' do + let(:params) do + { + 'artifact_type' => 'dotenv', + 'artifact_format' => 'gzip' + }.with_indifferent_access + end + + if storage_type == :object_storage + let(:object_body) { File.read('spec/fixtures/build.env.gz') } + let(:upload_filename) { 'build.env.gz' } + + before do + stub_request(:get, %r{s3.amazonaws.com/#{remote_path}}) + .to_return(status: 200, body: File.read('spec/fixtures/build.env.gz')) + end + else + let(:artifacts_file) do + file_to_upload('spec/fixtures/build.env.gz', sha256: artifacts_sha256) + end + end - context 'when sha256 of uploading artifact is the same of the existing one' do - let(:existing_sha256) { artifacts_sha256 } + it 'calls parse 
service' do + expect_any_instance_of(Ci::ParseDotenvArtifactService) do |service| + expect(service).to receive(:execute).once.and_call_original + end - it 'ignores the changes' do - expect { subject }.not_to change { Ci::JobArtifact.count } - expect(subject).to match(a_hash_including(status: :success)) + expect(execute[:status]).to eq(:success) + expect(job.job_variables.as_json(only: [:key, :value, :source])).to contain_exactly( + hash_including('key' => 'KEY1', 'value' => 'VAR1', 'source' => 'dotenv'), + hash_including('key' => 'KEY2', 'value' => 'VAR2', 'source' => 'dotenv')) end end + end - context 'when sha256 of uploading artifact is different than the existing one' do - let(:existing_sha256) { '1' * 64 } + shared_examples_for 'handling object storage errors' do + shared_examples 'rescues object storage error' do |klass, message, expected_message| + it "handles #{klass}" do + allow_next_instance_of(JobArtifactUploader) do |uploader| + allow(uploader).to receive(:store!).and_raise(klass, message) + end - it 'returns error status' do - expect(Gitlab::ErrorTracking).to receive(:track_exception).and_call_original + expect(Gitlab::ErrorTracking) + .to receive(:track_exception) + .and_call_original - expect { subject }.not_to change { Ci::JobArtifact.count } - expect(subject).to match( + expect(execute).to match( a_hash_including( - http_status: :bad_request, message: 'another artifact of the same type already exists', status: :error)) + http_status: :service_unavailable, + message: expected_message || message, + status: :error)) end end + + it_behaves_like 'rescues object storage error', + Errno::EIO, 'some/path', 'Input/output error - some/path' + + it_behaves_like 'rescues object storage error', + Google::Apis::ServerError, 'Server error' + + it_behaves_like 'rescues object storage error', + Signet::RemoteServerError, 'The service is currently unavailable' end - context 'when artifact type is dotenv' do - let(:artifacts_file) do - 
file_to_upload('spec/fixtures/build.env.gz', sha256: artifacts_sha256) - end + shared_examples_for 'validating requirements' do + context 'when filesize is specified' do + let(:max_artifact_size) { 10 } + + before do + allow(Ci::JobArtifact) + .to receive(:max_artifact_size) + .with(type: 'archive', project: project) + .and_return(max_artifact_size) + + allow(artifacts_file).to receive(:size).and_return(filesize) + end + + context 'and filesize exceeds the limit' do + let(:filesize) { max_artifact_size + 1 } + + it 'returns error' do + expect(execute[:status]).to eq(:error) + end + end - let(:params) do - { - 'artifact_type' => 'dotenv', - 'artifact_format' => 'gzip' - }.with_indifferent_access + context 'and filesize does not exceed the limit' do + let(:filesize) { max_artifact_size - 1 } + + it 'returns success' do + expect(execute[:status]).to eq(:success) + end + end end + end - it 'calls parse service' do - expect_any_instance_of(Ci::ParseDotenvArtifactService) do |service| - expect(service).to receive(:execute).once.and_call_original + shared_examples_for 'handling existing artifact' do + context 'when job already has an artifact of the same file type' do + let!(:existing_artifact) do + create(:ci_job_artifact, params[:artifact_type], file_sha256: existing_sha256, job: job) end - expect(subject[:status]).to eq(:success) - expect(job.job_variables.as_json(only: [:key, :value, :source])).to contain_exactly( - hash_including('key' => 'KEY1', 'value' => 'VAR1', 'source' => 'dotenv'), - hash_including('key' => 'KEY2', 'value' => 'VAR2', 'source' => 'dotenv')) + context 'when sha256 of uploading artifact is the same of the existing one' do + let(:existing_sha256) { artifacts_sha256 } + + it 'ignores the changes' do + expect { execute }.not_to change { Ci::JobArtifact.count } + expect(execute).to match(a_hash_including(status: :success)) + end + end + + context 'when sha256 of uploading artifact is different than the existing one' do + let(:existing_sha256) { '1' * 
64 } + + it 'returns error status' do + expect(Gitlab::ErrorTracking).to receive(:track_exception).and_call_original + + expect { execute }.not_to change { Ci::JobArtifact.count } + expect(execute).to match( + a_hash_including( + http_status: :bad_request, + message: 'another artifact of the same type already exists', + status: :error + ) + ) + end + end + end + end + + shared_examples_for 'logging artifact' do + it 'logs the created artifact' do + expect(Gitlab::Ci::Artifacts::Logger) + .to receive(:log_created) + .with(an_instance_of(Ci::JobArtifact)) + + execute end end - context 'with job partitioning', :ci_partitionable do - let(:pipeline) { create(:ci_pipeline, project: project, partition_id: ci_testing_partition_id) } - let(:job) { create(:ci_build, pipeline: pipeline) } + shared_examples_for 'handling remote uploads to temporary location' do + context 'when artifacts file is uploaded' do + it 'creates a new job artifact' do + expect { execute }.to change { Ci::JobArtifact.count }.by(1) - it 'sets partition_id on artifacts' do - expect { subject }.to change { Ci::JobArtifact.count } + new_artifact = execute[:artifact] + expect(new_artifact).to eq(job.job_artifacts.last) + expect(new_artifact.project).to eq(job.project) + expect(new_artifact.file.filename).to eq(artifacts_file.original_filename) + expect(new_artifact.file_identifier).to eq(artifacts_file.original_filename) + expect(new_artifact.file_type).to eq(params['artifact_type']) + expect(new_artifact.file_format).to eq(params['artifact_format']) + expect(new_artifact.file_sha256).to eq(artifacts_sha256) + expect(new_artifact.locked).to eq(job.pipeline.locked) + expect(new_artifact.size).to eq(artifacts_file.size) - artifacts_partitions = job.job_artifacts.map(&:partition_id).uniq + expect(execute[:status]).to eq(:success) + end - expect(artifacts_partitions).to eq([ci_testing_partition_id]) + it_behaves_like 'handling accessibility' + it_behaves_like 'handling metadata file' + it_behaves_like 'handling 
partitioning' + it_behaves_like 'logging artifact' end end - shared_examples 'rescues object storage error' do |klass, message, expected_message| - it "handles #{klass}" do - allow_next_instance_of(JobArtifactUploader) do |uploader| - allow(uploader).to receive(:store!).and_raise(klass, message) + shared_examples_for 'handling partitioning' do + context 'with job partitioned', :ci_partitionable do + let(:pipeline) { create(:ci_pipeline, project: project, partition_id: ci_testing_partition_id) } + let(:job) { create(:ci_build, pipeline: pipeline) } + + it 'sets partition_id on artifacts' do + expect { execute }.to change { Ci::JobArtifact.count } + + artifacts_partitions = job.job_artifacts.map(&:partition_id).uniq + + expect(artifacts_partitions).to eq([ci_testing_partition_id]) end + end + end - expect(Gitlab::ErrorTracking) - .to receive(:track_exception) - .and_call_original + context 'when object storage and direct upload is enabled' do + let(:fog_connection) { stub_artifacts_object_storage(JobArtifactUploader, direct_upload: true) } + let(:remote_path) { File.join(remote_store_path, remote_id) } + let(:object_body) { File.open('spec/fixtures/ci_build_artifacts.zip') } + let(:upload_filename) { 'artifacts.zip' } + let(:object) do + fog_connection.directories + .new(key: 'artifacts') + .files + .create( # rubocop:disable Rails/SaveBang + key: remote_path, + body: object_body + ) + end - expect(subject).to match( - a_hash_including( - http_status: :service_unavailable, - message: expected_message || message, - status: :error)) + let(:artifacts_file) do + fog_to_uploaded_file( + object, + filename: upload_filename, + sha256: artifacts_sha256, + remote_id: remote_id + ) end + + let(:remote_id) { 'generated-remote-id-12345' } + let(:remote_store_path) { ObjectStorage::TMP_UPLOAD_PATH } + + it_behaves_like 'handling remote uploads to temporary location' + it_behaves_like 'handling dotenv', :object_storage + it_behaves_like 'handling object storage errors' + 
it_behaves_like 'validating requirements' end - it_behaves_like 'rescues object storage error', - Errno::EIO, 'some/path', 'Input/output error - some/path' + context 'when using local storage' do + let(:artifacts_file) do + file_to_upload('spec/fixtures/ci_build_artifacts.zip', sha256: artifacts_sha256) + end - it_behaves_like 'rescues object storage error', - Google::Apis::ServerError, 'Server error' + it_behaves_like 'handling remote uploads to temporary location' + it_behaves_like 'handling dotenv', :local_storage + it_behaves_like 'validating requirements' + end + end + + def file_to_upload(path, params = {}) + upload = Tempfile.new('upload') + FileUtils.copy(path, upload.path) + # This is a workaround for https://github.com/docker/for-linux/issues/1015 + FileUtils.touch(upload.path) - it_behaves_like 'rescues object storage error', - Signet::RemoteServerError, 'The service is currently unavailable' + UploadedFile.new(upload.path, **params) end end diff --git a/spec/services/ci/list_config_variables_service_spec.rb b/spec/services/ci/list_config_variables_service_spec.rb index 56a392221be..febb1533b0f 100644 --- a/spec/services/ci/list_config_variables_service_spec.rb +++ b/spec/services/ci/list_config_variables_service_spec.rb @@ -3,7 +3,7 @@ require 'spec_helper' RSpec.describe Ci::ListConfigVariablesService, -:use_clean_rails_memory_store_caching, feature_category: :pipeline_composition do +:use_clean_rails_memory_store_caching, feature_category: :secrets_management do include ReactiveCachingHelpers let(:ci_config) { {} } diff --git a/spec/services/ci/pipeline_processing/atomic_processing_service/status_collection_spec.rb b/spec/services/ci/pipeline_processing/atomic_processing_service/status_collection_spec.rb index 46ea0036e49..89b3c45485b 100644 --- a/spec/services/ci/pipeline_processing/atomic_processing_service/status_collection_spec.rb +++ b/spec/services/ci/pipeline_processing/atomic_processing_service/status_collection_spec.rb @@ -32,15 +32,15 @@ 
RSpec.describe Ci::PipelineProcessing::AtomicProcessingService::StatusCollection let(:collection) { described_class.new(pipeline) } - describe '#set_processable_status' do - it 'does update existing status of processable' do - collection.set_processable_status(test_a.id, 'success', 100) + describe '#set_job_status' do + it 'does update existing status of job' do + collection.set_job_status(test_a.id, 'success', 100) - expect(collection.status_of_processables(['test-a'], dag: false)).to eq('success') + expect(collection.status_of_jobs(['test-a'])).to eq('success') end - it 'ignores a missing processable' do - collection.set_processable_status(-1, 'failed', 100) + it 'ignores a missing job' do + collection.set_job_status(-1, 'failed', 100) end end @@ -50,24 +50,21 @@ RSpec.describe Ci::PipelineProcessing::AtomicProcessingService::StatusCollection end end - describe '#status_of_processables' do - where(:names, :status, :dag) do - %w[build-a] | 'success' | false - %w[build-a build-b] | 'failed' | false - %w[build-a test-a] | 'running' | false - %w[build-a] | 'success' | true - %w[build-a build-b] | 'failed' | true - %w[build-a test-a] | 'pending' | true + describe '#status_of_jobs' do + where(:names, :status) do + %w[build-a] | 'success' + %w[build-a build-b] | 'failed' + %w[build-a test-a] | 'running' end with_them do it 'returns composite status of given names' do - expect(collection.status_of_processables(names, dag: dag)).to eq(status) + expect(collection.status_of_jobs(names)).to eq(status) end end end - describe '#status_of_processables_prior_to_stage' do + describe '#status_of_jobs_prior_to_stage' do where(:stage, :status) do 0 | 'success' 1 | 'failed' @@ -75,8 +72,8 @@ RSpec.describe Ci::PipelineProcessing::AtomicProcessingService::StatusCollection end with_them do - it 'returns composite status for processables in prior stages' do - expect(collection.status_of_processables_prior_to_stage(stage)).to eq(status) + it 'returns composite status for jobs in prior 
stages' do + expect(collection.status_of_jobs_prior_to_stage(stage)).to eq(status) end end end @@ -89,23 +86,23 @@ RSpec.describe Ci::PipelineProcessing::AtomicProcessingService::StatusCollection end with_them do - it 'returns composite status for processables at a given stages' do + it 'returns composite status for jobs at a given stages' do expect(collection.status_of_stage(stage)).to eq(status) end end end - describe '#created_processable_ids_in_stage' do - it 'returns IDs of processables at a given stage position' do - expect(collection.created_processable_ids_in_stage(0)).to be_empty - expect(collection.created_processable_ids_in_stage(1)).to be_empty - expect(collection.created_processable_ids_in_stage(2)).to contain_exactly(deploy.id) + describe '#created_job_ids_in_stage' do + it 'returns IDs of jobs at a given stage position' do + expect(collection.created_job_ids_in_stage(0)).to be_empty + expect(collection.created_job_ids_in_stage(1)).to be_empty + expect(collection.created_job_ids_in_stage(2)).to contain_exactly(deploy.id) end end - describe '#processing_processables' do - it 'returns processables marked as processing' do - expect(collection.processing_processables.map { |processable| processable[:id] }) + describe '#processing_jobs' do + it 'returns jobs marked as processing' do + expect(collection.processing_jobs.map { |job| job[:id] }) .to contain_exactly(build_a.id, build_b.id, test_a.id, test_b.id, deploy.id) end end diff --git a/spec/services/ci/pipeline_processing/atomic_processing_service_spec.rb b/spec/services/ci/pipeline_processing/atomic_processing_service_spec.rb index c1669e0424a..d0496acc6fe 100644 --- a/spec/services/ci/pipeline_processing/atomic_processing_service_spec.rb +++ b/spec/services/ci/pipeline_processing/atomic_processing_service_spec.rb @@ -59,17 +59,17 @@ RSpec.describe Ci::PipelineProcessing::AtomicProcessingService, feature_category end def event_on_jobs(event, job_names) - statuses = 
pipeline.latest_statuses.by_name(job_names).to_a - expect(statuses.count).to eq(job_names.count) # ensure that we have the same counts + jobs = pipeline.latest_statuses.by_name(job_names).to_a + expect(jobs.count).to eq(job_names.count) # ensure that we have the same counts - statuses.each do |status| + jobs.each do |job| case event when 'play' - status.play(user) + job.play(user) when 'retry' - ::Ci::RetryJobService.new(project, user).execute(status) + ::Ci::RetryJobService.new(project, user).execute(job) else - status.public_send("#{event}!") + job.public_send("#{event}!") end end end @@ -983,8 +983,8 @@ RSpec.describe Ci::PipelineProcessing::AtomicProcessingService, feature_category bridge1 = all_builds.find_by(name: 'deploy: [ovh, monitoring]') bridge2 = all_builds.find_by(name: 'deploy: [ovh, app]') - downstream_job1 = bridge1.downstream_pipeline.processables.first - downstream_job2 = bridge2.downstream_pipeline.processables.first + downstream_job1 = bridge1.downstream_pipeline.all_jobs.first + downstream_job2 = bridge2.downstream_pipeline.all_jobs.first expect(downstream_job1.scoped_variables.to_hash).to include('PROVIDER' => 'ovh', 'STACK' => 'monitoring') expect(downstream_job2.scoped_variables.to_hash).to include('PROVIDER' => 'ovh', 'STACK' => 'app') @@ -1068,7 +1068,7 @@ RSpec.describe Ci::PipelineProcessing::AtomicProcessingService, feature_category private def all_builds - pipeline.processables.order(:stage_idx, :id) + pipeline.all_jobs.order(:stage_idx, :id) end def builds diff --git a/spec/services/ci/pipeline_processing/test_cases/dag_test_on_failure_no_needs.yml b/spec/services/ci/pipeline_processing/test_cases/dag_test_on_failure_no_needs.yml new file mode 100644 index 00000000000..12c51828628 --- /dev/null +++ b/spec/services/ci/pipeline_processing/test_cases/dag_test_on_failure_no_needs.yml @@ -0,0 +1,31 @@ +config: + test1: + stage: test + script: exit 0 + needs: [] + + test2: + stage: test + when: on_failure + script: exit 0 + needs: [] + 
+init: + expect: + pipeline: pending + stages: + test: pending + jobs: + test1: pending + test2: skipped + +transitions: + - event: success + jobs: [test1] + expect: + pipeline: success + stages: + test: success + jobs: + test1: success + test2: skipped diff --git a/spec/services/ci/pipeline_processing/test_cases/stage_test_on_failure_no_prev_stage.yml b/spec/services/ci/pipeline_processing/test_cases/stage_test_on_failure_no_prev_stage.yml new file mode 100644 index 00000000000..57b3aa9ae80 --- /dev/null +++ b/spec/services/ci/pipeline_processing/test_cases/stage_test_on_failure_no_prev_stage.yml @@ -0,0 +1,29 @@ +config: + test1: + stage: test + script: exit 0 + + test2: + stage: test + when: on_failure + script: exit 0 + +init: + expect: + pipeline: pending + stages: + test: pending + jobs: + test1: pending + test2: skipped + +transitions: + - event: success + jobs: [test1] + expect: + pipeline: success + stages: + test: success + jobs: + test1: success + test2: skipped diff --git a/spec/services/ci/register_job_service_spec.rb b/spec/services/ci/register_job_service_spec.rb index 18cb016f94a..6fb61bb3ec5 100644 --- a/spec/services/ci/register_job_service_spec.rb +++ b/spec/services/ci/register_job_service_spec.rb @@ -14,9 +14,9 @@ module Ci let!(:pending_job) { create(:ci_build, :pending, :queued, pipeline: pipeline) } describe '#execute' do - subject(:execute) { described_class.new(runner, runner_machine).execute } + subject(:execute) { described_class.new(runner, runner_manager).execute } - let(:runner_machine) { nil } + let(:runner_manager) { nil } context 'checks database loadbalancing stickiness' do let(:runner) { shared_runner } @@ -28,7 +28,7 @@ module Ci it 'result is valid if replica did caught-up', :aggregate_failures do expect(ApplicationRecord.sticking).to receive(:all_caught_up?).with(:runner, runner.id) { true } - expect { execute }.not_to change { Ci::RunnerMachineBuild.count }.from(0) + expect { execute }.not_to change { 
Ci::RunnerManagerBuild.count }.from(0) expect(execute).to be_valid expect(execute.build).to be_nil expect(execute.build_json).to be_nil @@ -46,9 +46,9 @@ module Ci shared_examples 'handles runner assignment' do context 'runner follows tag list' do - subject(:build) { build_on(project_runner, runner_machine: project_runner_machine) } + subject(:build) { build_on(project_runner, runner_manager: project_runner_manager) } - let(:project_runner_machine) { nil } + let(:project_runner_manager) { nil } context 'when job has tag' do before do @@ -62,19 +62,19 @@ module Ci project_runner.update!(tag_list: ["linux"]) end - context 'with no runner machine specified' do + context 'with no runner manager specified' do it 'picks build' do expect(build).to eq(pending_job) - expect(pending_job.runner_machine).to be_nil + expect(pending_job.runner_manager).to be_nil end end - context 'with runner machine specified' do - let(:project_runner_machine) { create(:ci_runner_machine, runner: project_runner) } + context 'with runner manager specified' do + let(:project_runner_manager) { create(:ci_runner_machine, runner: project_runner) } - it 'picks build and assigns runner machine' do + it 'picks build and assigns runner manager' do expect(build).to eq(pending_job) - expect(pending_job.runner_machine).to eq(project_runner_machine) + expect(pending_job.runner_manager).to eq(project_runner_manager) end end end @@ -123,27 +123,27 @@ module Ci end context 'for project runner' do - subject(:build) { build_on(project_runner, runner_machine: project_runner_machine) } + subject(:build) { build_on(project_runner, runner_manager: project_runner_manager) } - let(:project_runner_machine) { nil } + let(:project_runner_manager) { nil } - context 'with no runner machine specified' do + context 'with no runner manager specified' do it 'does not pick a build' do expect(build).to be_nil expect(pending_job.reload).to be_failed expect(pending_job.queuing_entry).to be_nil - 
expect(Ci::RunnerMachineBuild.all).to be_empty + expect(Ci::RunnerManagerBuild.all).to be_empty end end - context 'with runner machine specified' do - let(:project_runner_machine) { create(:ci_runner_machine, runner: project_runner) } + context 'with runner manager specified' do + let(:project_runner_manager) { create(:ci_runner_machine, runner: project_runner) } it 'does not pick a build' do expect(build).to be_nil expect(pending_job.reload).to be_failed expect(pending_job.queuing_entry).to be_nil - expect(Ci::RunnerMachineBuild.all).to be_empty + expect(Ci::RunnerManagerBuild.all).to be_empty end end end @@ -164,7 +164,7 @@ module Ci pending_job.update!(user: user) end - context 'with no runner machine specified' do + context 'with no runner manager specified' do it 'does not pick the build and drops the build' do expect(build_on(shared_runner)).to be_falsey @@ -172,13 +172,13 @@ module Ci end end - context 'with runner machine specified' do - let(:runner_machine) { create(:ci_runner_machine, runner: runner) } + context 'with runner manager specified' do + let(:runner_manager) { create(:ci_runner_machine, runner: runner) } it 'does not pick the build and does not create join record' do - expect(build_on(shared_runner, runner_machine: runner_machine)).to be_falsey + expect(build_on(shared_runner, runner_manager: runner_manager)).to be_falsey - expect(Ci::RunnerMachineBuild.all).to be_empty + expect(Ci::RunnerManagerBuild.all).to be_empty end end end @@ -1037,8 +1037,8 @@ module Ci end end - def build_on(runner, runner_machine: nil, params: {}) - described_class.new(runner, runner_machine).execute(params).build + def build_on(runner, runner_manager: nil, params: {}) + described_class.new(runner, runner_manager).execute(params).build end end end diff --git a/spec/services/ci/runners/create_runner_service_spec.rb b/spec/services/ci/runners/create_runner_service_spec.rb index 52acfcbb7af..db337b0b005 100644 --- a/spec/services/ci/runners/create_runner_service_spec.rb 
+++ b/spec/services/ci/runners/create_runner_service_spec.rb @@ -3,24 +3,20 @@ require 'spec_helper' RSpec.describe ::Ci::Runners::CreateRunnerService, "#execute", feature_category: :runner_fleet do - subject(:execute) { described_class.new(user: current_user, type: type, params: params).execute } + subject(:execute) { described_class.new(user: current_user, params: params).execute } let(:runner) { execute.payload[:runner] } let_it_be(:admin) { create(:admin) } let_it_be(:non_admin_user) { create(:user) } let_it_be(:anonymous) { nil } + let_it_be(:group_owner) { create(:user) } - shared_context 'when admin user' do - let(:current_user) { admin } - - before do - allow(current_user).to receive(:can?).with(:create_instance_runners).and_return true - end - end + let_it_be(:group) { create(:group) } shared_examples 'it can create a runner' do - it 'creates a runner of the specified type' do + it 'creates a runner of the specified type', :aggregate_failures do + is_expected.to be_success expect(runner.runner_type).to eq expected_type end @@ -42,7 +38,7 @@ RSpec.describe ::Ci::Runners::CreateRunnerService, "#execute", feature_category: expect(runner.active).to be true expect(runner.creator).to be current_user expect(runner.authenticated_user_registration_type?).to be_truthy - expect(runner.runner_type).to eq 'instance_type' + expect(runner.runner_type).to eq expected_type end end @@ -81,7 +77,7 @@ RSpec.describe ::Ci::Runners::CreateRunnerService, "#execute", feature_category: expect(runner.maximum_timeout).to eq args[:maximum_timeout] expect(runner.authenticated_user_registration_type?).to be_truthy - expect(runner.runner_type).to eq 'instance_type' + expect(runner.runner_type).to eq expected_type end context 'with a nil paused value' do @@ -138,7 +134,6 @@ RSpec.describe ::Ci::Runners::CreateRunnerService, "#execute", feature_category: end shared_examples 'it can return an error' do - let(:group) { create(:group) } let(:runner_double) { Ci::Runner.new } context 'when 
the runner fails to save' do @@ -154,25 +149,148 @@ RSpec.describe ::Ci::Runners::CreateRunnerService, "#execute", feature_category: end end - context 'with type param set to nil' do + context 'with :runner_type param set to instance_type' do let(:expected_type) { 'instance_type' } - let(:type) { nil } - let(:params) { {} } + let(:params) { { runner_type: 'instance_type' } } + + context 'when anonymous user' do + let(:current_user) { anonymous } + + it_behaves_like 'it cannot create a runner' + end + + context 'when non-admin user' do + let(:current_user) { non_admin_user } + + it_behaves_like 'it cannot create a runner' + end + + context 'when admin user' do + let(:current_user) { admin } + + it_behaves_like 'it cannot create a runner' + + context 'when admin mode is enabled', :enable_admin_mode do + it_behaves_like 'it can create a runner' + it_behaves_like 'it can return an error' + + context 'with unexpected scope param specified' do + let(:params) { { runner_type: 'instance_type', scope: group } } - it_behaves_like 'it cannot create a runner' do + it_behaves_like 'it cannot create a runner' + end + + context 'when model validation fails' do + let(:params) { { runner_type: 'instance_type', run_untagged: false, tag_list: [] } } + + it_behaves_like 'it cannot create a runner' + + it 'returns error message and reason', :aggregate_failures do + expect(execute.reason).to eq(:save_error) + expect(execute.message).to contain_exactly(a_string_including('Tags list can not be empty')) + end + end + end + end + end + + context 'with :runner_type param set to group_type' do + let(:expected_type) { 'group_type' } + let(:params) { { runner_type: 'group_type', scope: group } } + + before do + group.add_developer(non_admin_user) + group.add_owner(group_owner) + end + + context 'when anonymous user' do let(:current_user) { anonymous } + + it_behaves_like 'it cannot create a runner' end - it_behaves_like 'it cannot create a runner' do + context 'when non-admin user' do 
let(:current_user) { non_admin_user } + + it_behaves_like 'it cannot create a runner' + end + + context 'when group owner' do + let(:current_user) { group_owner } + + it_behaves_like 'it can create a runner' + + context 'with missing scope param' do + let(:params) { { runner_type: 'group_type' } } + + it_behaves_like 'it cannot create a runner' + end + end + + context 'when admin user' do + let(:current_user) { admin } + + it_behaves_like 'it cannot create a runner' + + context 'when admin mode is enabled', :enable_admin_mode do + it_behaves_like 'it can create a runner' + it_behaves_like 'it can return an error' + end + end + end + + context 'with :runner_type param set to project_type' do + let_it_be(:project) { create(:project, namespace: group) } + + let(:expected_type) { 'project_type' } + let(:params) { { runner_type: 'project_type', scope: project } } + + before do + group.add_developer(non_admin_user) + group.add_owner(group_owner) + end + + context 'when anonymous user' do + let(:current_user) { anonymous } + + it_behaves_like 'it cannot create a runner' end - it_behaves_like 'it can create a runner' do - include_context 'when admin user' + context 'when group owner' do + let(:current_user) { group_owner } + + it_behaves_like 'it can create a runner' + + context 'with missing scope param' do + let(:params) { { runner_type: 'project_type' } } + + it_behaves_like 'it cannot create a runner' + end end - it_behaves_like 'it can return an error' do - include_context 'when admin user' + context 'when non-admin user' do + let(:current_user) { non_admin_user } + + it_behaves_like 'it cannot create a runner' + + context 'with project permissions to create runner' do + before do + project.add_maintainer(current_user) + end + + it_behaves_like 'it can create a runner' + end + end + + context 'when admin user' do + let(:current_user) { admin } + + it_behaves_like 'it cannot create a runner' + + context 'when admin mode is enabled', :enable_admin_mode do + 
it_behaves_like 'it can create a runner' + it_behaves_like 'it can return an error' + end end end end diff --git a/spec/services/ci/runners/stale_machines_cleanup_service_spec.rb b/spec/services/ci/runners/stale_machines_cleanup_service_spec.rb deleted file mode 100644 index 456dbcebb84..00000000000 --- a/spec/services/ci/runners/stale_machines_cleanup_service_spec.rb +++ /dev/null @@ -1,45 +0,0 @@ -# frozen_string_literal: true - -require 'spec_helper' - -RSpec.describe Ci::Runners::StaleMachinesCleanupService, feature_category: :runner_fleet do - let(:service) { described_class.new } - let!(:runner_machine3) { create(:ci_runner_machine, created_at: 6.months.ago, contacted_at: Time.current) } - - subject(:response) { service.execute } - - context 'with no stale runner machines' do - it 'does not clean any runner machines and returns :success status' do - expect do - expect(response).to be_success - expect(response.payload).to match({ deleted_machines: false }) - end.not_to change { Ci::RunnerMachine.count }.from(1) - end - end - - context 'with some stale runner machines' do - before do - create(:ci_runner_machine, :stale) - create(:ci_runner_machine, :stale, contacted_at: nil) - end - - it 'only leaves non-stale runners' do - expect(response).to be_success - expect(response.payload).to match({ deleted_machines: true }) - expect(Ci::RunnerMachine.all).to contain_exactly(runner_machine3) - end - - context 'with more stale runners than MAX_DELETIONS' do - before do - stub_const("#{described_class}::MAX_DELETIONS", 1) - end - - it 'only leaves non-stale runners' do - expect do - expect(response).to be_success - expect(response.payload).to match({ deleted_machines: true }) - end.to change { Ci::RunnerMachine.count }.by(-Ci::Runners::StaleMachinesCleanupService::MAX_DELETIONS) - end - end - end -end diff --git a/spec/services/ci/runners/stale_managers_cleanup_service_spec.rb b/spec/services/ci/runners/stale_managers_cleanup_service_spec.rb new file mode 100644 index 
00000000000..a78506ca5f7 --- /dev/null +++ b/spec/services/ci/runners/stale_managers_cleanup_service_spec.rb @@ -0,0 +1,45 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe Ci::Runners::StaleManagersCleanupService, feature_category: :runner_fleet do + let(:service) { described_class.new } + let!(:runner_manager3) { create(:ci_runner_machine, created_at: 6.months.ago, contacted_at: Time.current) } + + subject(:response) { service.execute } + + context 'with no stale runner managers' do + it 'does not clean any runner managers and returns :success status' do + expect do + expect(response).to be_success + expect(response.payload).to match({ deleted_managers: false }) + end.not_to change { Ci::RunnerManager.count }.from(1) + end + end + + context 'with some stale runner managers' do + before do + create(:ci_runner_machine, :stale) + create(:ci_runner_machine, :stale, contacted_at: nil) + end + + it 'only leaves non-stale runners' do + expect(response).to be_success + expect(response.payload).to match({ deleted_managers: true }) + expect(Ci::RunnerManager.all).to contain_exactly(runner_manager3) + end + + context 'with more stale runners than MAX_DELETIONS' do + before do + stub_const("#{described_class}::MAX_DELETIONS", 1) + end + + it 'only leaves non-stale runners' do + expect do + expect(response).to be_success + expect(response.payload).to match({ deleted_managers: true }) + end.to change { Ci::RunnerManager.count }.by(-Ci::Runners::StaleManagersCleanupService::MAX_DELETIONS) + end + end + end +end diff --git a/spec/services/ci/update_instance_variables_service_spec.rb b/spec/services/ci/update_instance_variables_service_spec.rb index 19f28793f90..889f49eca5a 100644 --- a/spec/services/ci/update_instance_variables_service_spec.rb +++ b/spec/services/ci/update_instance_variables_service_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe Ci::UpdateInstanceVariablesService, feature_category: :pipeline_composition do 
+RSpec.describe Ci::UpdateInstanceVariablesService, feature_category: :secrets_management do let(:params) { { variables_attributes: variables_attributes } } subject { described_class.new(params) } diff --git a/spec/services/clusters/agent_tokens/create_service_spec.rb b/spec/services/clusters/agent_tokens/create_service_spec.rb index 519a3ba7ce5..803bd947629 100644 --- a/spec/services/clusters/agent_tokens/create_service_spec.rb +++ b/spec/services/clusters/agent_tokens/create_service_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe Clusters::AgentTokens::CreateService, feature_category: :kubernetes_management do +RSpec.describe Clusters::AgentTokens::CreateService, feature_category: :deployment_management do subject(:service) { described_class.new(agent: cluster_agent, current_user: user, params: params) } let_it_be(:user) { create(:user) } diff --git a/spec/services/clusters/agent_tokens/revoke_service_spec.rb b/spec/services/clusters/agent_tokens/revoke_service_spec.rb index 9e511de0a13..a1537658723 100644 --- a/spec/services/clusters/agent_tokens/revoke_service_spec.rb +++ b/spec/services/clusters/agent_tokens/revoke_service_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe Clusters::AgentTokens::RevokeService, feature_category: :kubernetes_management do +RSpec.describe Clusters::AgentTokens::RevokeService, feature_category: :deployment_management do describe '#execute' do subject { described_class.new(token: agent_token, current_user: user).execute } diff --git a/spec/services/clusters/agent_tokens/track_usage_service_spec.rb b/spec/services/clusters/agent_tokens/track_usage_service_spec.rb index e9e1a5f7ad9..6bea8afcc80 100644 --- a/spec/services/clusters/agent_tokens/track_usage_service_spec.rb +++ b/spec/services/clusters/agent_tokens/track_usage_service_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe Clusters::AgentTokens::TrackUsageService, feature_category: :kubernetes_management do +RSpec.describe 
Clusters::AgentTokens::TrackUsageService, feature_category: :deployment_management do let_it_be(:agent) { create(:cluster_agent) } describe '#execute', :clean_gitlab_redis_cache do diff --git a/spec/services/clusters/agents/authorizations/ci_access/filter_service_spec.rb b/spec/services/clusters/agents/authorizations/ci_access/filter_service_spec.rb new file mode 100644 index 00000000000..45443cfd887 --- /dev/null +++ b/spec/services/clusters/agents/authorizations/ci_access/filter_service_spec.rb @@ -0,0 +1,100 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe Clusters::Agents::Authorizations::CiAccess::FilterService, feature_category: :continuous_integration do + describe '#execute' do + let_it_be(:group) { create(:group) } + let_it_be(:project) { create(:project, group: group) } + + let(:agent_authorizations_without_env) do + [ + build(:agent_ci_access_project_authorization, project: project, agent: build(:cluster_agent, project: project)), + build(:agent_ci_access_group_authorization, group: group, agent: build(:cluster_agent, project: project)), + ::Clusters::Agents::Authorizations::CiAccess::ImplicitAuthorization.new(agent: build(:cluster_agent, project: project)) + ] + end + + let(:filter_params) { {} } + + subject(:execute_filter) { described_class.new(agent_authorizations, filter_params).execute } + + context 'when there are no filters' do + let(:agent_authorizations) { agent_authorizations_without_env } + + it 'returns the authorizations as is' do + expect(execute_filter).to eq agent_authorizations + end + end + + context 'when filtering by environment' do + let(:agent_authorizations_with_env) do + [ + build( + :agent_ci_access_project_authorization, + project: project, + agent: build(:cluster_agent, project: project), + environments: ['staging', 'review/*', 'production'] + ), + build( + :agent_ci_access_group_authorization, + group: group, + agent: build(:cluster_agent, project: project), + environments: ['staging', 'review/*', 
'production'] + ) + ] + end + + let(:agent_authorizations_with_different_env) do + [ + build( + :agent_ci_access_project_authorization, + project: project, + agent: build(:cluster_agent, project: project), + environments: ['staging'] + ), + build( + :agent_ci_access_group_authorization, + group: group, + agent: build(:cluster_agent, project: project), + environments: ['staging'] + ) + ] + end + + let(:agent_authorizations) do + ( + agent_authorizations_without_env + + agent_authorizations_with_env + + agent_authorizations_with_different_env + ) + end + + let(:filter_params) { { environment: 'production' } } + + it 'returns the authorizations with the given environment AND authorizations without any environment' do + expected_authorizations = agent_authorizations_with_env + agent_authorizations_without_env + + expect(execute_filter).to match_array expected_authorizations + end + + context 'when environment filter has a wildcard' do + let(:filter_params) { { environment: 'review/123' } } + + it 'returns the authorizations with matching environments AND authorizations without any environment' do + expected_authorizations = agent_authorizations_with_env + agent_authorizations_without_env + + expect(execute_filter).to match_array expected_authorizations + end + end + + context 'when environment filter is nil' do + let(:filter_params) { { environment: nil } } + + it 'returns the authorizations without any environment' do + expect(execute_filter).to match_array agent_authorizations_without_env + end + end + end + end +end diff --git a/spec/services/clusters/agents/authorizations/ci_access/refresh_service_spec.rb b/spec/services/clusters/agents/authorizations/ci_access/refresh_service_spec.rb new file mode 100644 index 00000000000..c12592cc071 --- /dev/null +++ b/spec/services/clusters/agents/authorizations/ci_access/refresh_service_spec.rb @@ -0,0 +1,154 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe 
Clusters::Agents::Authorizations::CiAccess::RefreshService, feature_category: :deployment_management do + describe '#execute' do + let_it_be(:root_ancestor) { create(:group) } + + let_it_be(:removed_group) { create(:group, parent: root_ancestor) } + let_it_be(:modified_group) { create(:group, parent: root_ancestor) } + let_it_be(:added_group) { create(:group, path: 'group-path-with-UPPERCASE', parent: root_ancestor) } + + let_it_be(:removed_project) { create(:project, namespace: root_ancestor) } + let_it_be(:modified_project) { create(:project, namespace: root_ancestor) } + let_it_be(:added_project) { create(:project, path: 'project-path-with-UPPERCASE', namespace: root_ancestor) } + + let(:project) { create(:project, namespace: root_ancestor) } + let(:agent) { create(:cluster_agent, project: project) } + + let(:config) do + { + ci_access: { + groups: [ + { id: added_group.full_path, default_namespace: 'default' }, + # Uppercase path verifies case-insensitive matching. + { id: modified_group.full_path.upcase, default_namespace: 'new-namespace' } + ], + projects: [ + { id: added_project.full_path, default_namespace: 'default' }, + # Uppercase path verifies case-insensitive matching. 
+ { id: modified_project.full_path.upcase, default_namespace: 'new-namespace' } + ] + } + }.deep_stringify_keys + end + + subject { described_class.new(agent, config: config).execute } + + before do + default_config = { default_namespace: 'default' } + + agent.ci_access_group_authorizations.create!(group: removed_group, config: default_config) + agent.ci_access_group_authorizations.create!(group: modified_group, config: default_config) + + agent.ci_access_project_authorizations.create!(project: removed_project, config: default_config) + agent.ci_access_project_authorizations.create!(project: modified_project, config: default_config) + end + + shared_examples 'removing authorization' do + context 'config contains no groups' do + let(:config) { {} } + + it 'removes all authorizations' do + expect(subject).to be_truthy + expect(authorizations).to be_empty + end + end + + context 'config contains groups outside of the configuration project hierarchy' do + let(:project) { create(:project, namespace: create(:group)) } + + it 'removes all authorizations' do + expect(subject).to be_truthy + expect(authorizations).to be_empty + end + end + + context 'configuration project does not belong to a group' do + let(:project) { create(:project) } + + it 'removes all authorizations' do + expect(subject).to be_truthy + expect(authorizations).to be_empty + end + end + end + + describe 'group authorization' do + it 'refreshes authorizations for the agent' do + expect(subject).to be_truthy + expect(agent.ci_access_authorized_groups).to contain_exactly(added_group, modified_group) + + added_authorization = agent.ci_access_group_authorizations.find_by(group: added_group) + expect(added_authorization.config).to eq({ 'default_namespace' => 'default' }) + + modified_authorization = agent.ci_access_group_authorizations.find_by(group: modified_group) + expect(modified_authorization.config).to eq({ 'default_namespace' => 'new-namespace' }) + end + + context 'config contains too many groups' do 
+ before do + stub_const("#{described_class}::AUTHORIZED_ENTITY_LIMIT", 1) + end + + it 'authorizes groups up to the limit' do + expect(subject).to be_truthy + expect(agent.ci_access_authorized_groups).to contain_exactly(added_group) + end + end + + include_examples 'removing authorization' do + let(:authorizations) { agent.ci_access_authorized_groups } + end + end + + describe 'project authorization' do + it 'refreshes authorizations for the agent' do + expect(subject).to be_truthy + expect(agent.ci_access_authorized_projects).to contain_exactly(added_project, modified_project) + + added_authorization = agent.ci_access_project_authorizations.find_by(project: added_project) + expect(added_authorization.config).to eq({ 'default_namespace' => 'default' }) + + modified_authorization = agent.ci_access_project_authorizations.find_by(project: modified_project) + expect(modified_authorization.config).to eq({ 'default_namespace' => 'new-namespace' }) + end + + context 'project does not belong to a group, and is in the same namespace as the agent' do + let(:root_ancestor) { create(:namespace) } + let(:added_project) { create(:project, namespace: root_ancestor) } + + it 'creates an authorization record for the project' do + expect(subject).to be_truthy + expect(agent.ci_access_authorized_projects).to contain_exactly(added_project) + end + end + + context 'project does not belong to a group, and is authorizing itself' do + let(:root_ancestor) { create(:namespace) } + let(:added_project) { project } + + it 'creates an authorization record for the project' do + expect(subject).to be_truthy + expect(agent.ci_access_authorized_projects).to contain_exactly(added_project) + end + end + + context 'config contains too many projects' do + before do + stub_const("#{described_class}::AUTHORIZED_ENTITY_LIMIT", 1) + end + + it 'authorizes projects up to the limit' do + expect(subject).to be_truthy + expect(agent.ci_access_authorized_projects).to contain_exactly(added_project) + end + end 
+ + include_examples 'removing authorization' do + let(:authorizations) { agent.ci_access_authorized_projects } + end + end + end +end diff --git a/spec/services/clusters/agents/authorizations/user_access/refresh_service_spec.rb b/spec/services/clusters/agents/authorizations/user_access/refresh_service_spec.rb new file mode 100644 index 00000000000..da546ca44a9 --- /dev/null +++ b/spec/services/clusters/agents/authorizations/user_access/refresh_service_spec.rb @@ -0,0 +1,181 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe Clusters::Agents::Authorizations::UserAccess::RefreshService, feature_category: :deployment_management do + describe '#execute' do + let_it_be(:root_ancestor) { create(:group) } + let_it_be(:agent_management_project) { create(:project, namespace: root_ancestor) } + let_it_be(:group_1) { create(:group, path: 'group-path-with-UPPERCASE', parent: root_ancestor) } + let_it_be(:group_2) { create(:group, parent: root_ancestor) } + let_it_be(:project_1) { create(:project, path: 'project-path-with-UPPERCASE', namespace: root_ancestor) } + let_it_be(:project_2) { create(:project, namespace: root_ancestor) } + + let(:agent) { create(:cluster_agent, project: agent_management_project) } + + let(:config) do + { + user_access: { + groups: [ + { id: group_2.full_path } + ], + projects: [ + { id: project_2.full_path } + ] + } + }.deep_merge(extra_config).deep_stringify_keys + end + + let(:extra_config) { {} } + + subject { described_class.new(agent, config: config).execute } + + before do + agent.user_access_group_authorizations.create!(group: group_1, config: {}) + agent.user_access_project_authorizations.create!(project: project_1, config: {}) + end + + shared_examples 'removing authorization' do + context 'when config contains no groups or projects' do + let(:config) { {} } + + it 'removes all authorizations' do + expect(subject).to be_truthy + expect(authorizations).to be_empty + end + end + + context 'when config contains groups 
or projects outside of the configuration project hierarchy' do + let_it_be(:agent_management_project) { create(:project, namespace: create(:group)) } + + it 'removes all authorizations' do + expect(subject).to be_truthy + expect(authorizations).to be_empty + end + end + + context 'when configuration project does not belong to a group' do + let_it_be(:agent_management_project) { create(:project) } + + it 'removes all authorizations' do + expect(subject).to be_truthy + expect(authorizations).to be_empty + end + end + end + + describe 'group authorization' do + it 'refreshes authorizations for the agent' do + expect(subject).to be_truthy + expect(agent.user_access_authorized_groups).to contain_exactly(group_2) + + added_authorization = agent.user_access_group_authorizations.find_by(group: group_2) + expect(added_authorization.config).to eq({}) + end + + context 'when config contains "access_as" keyword' do + let(:extra_config) do + { + user_access: { + access_as: { + agent: {} + } + } + } + end + + it 'refreshes authorizations for the agent' do + expect(subject).to be_truthy + expect(agent.user_access_authorized_groups).to contain_exactly(group_2) + + added_authorization = agent.user_access_group_authorizations.find_by(group: group_2) + expect(added_authorization.config).to eq({ 'access_as' => { 'agent' => {} } }) + end + end + + context 'when config contains too many groups' do + before do + stub_const("#{described_class}::AUTHORIZED_ENTITY_LIMIT", 0) + end + + it 'authorizes groups up to the limit' do + expect(subject).to be_truthy + expect(agent.user_access_authorized_groups).to be_empty + end + end + + include_examples 'removing authorization' do + let(:authorizations) { agent.user_access_authorized_groups } + end + end + + describe 'project authorization' do + it 'refreshes authorizations for the agent' do + expect(subject).to be_truthy + expect(agent.user_access_authorized_projects).to contain_exactly(project_2) + + added_authorization = 
agent.user_access_project_authorizations.find_by(project: project_2) + expect(added_authorization.config).to eq({}) + end + + context 'when config contains "access_as" keyword' do + let(:extra_config) do + { + user_access: { + access_as: { + agent: {} + } + } + } + end + + it 'refreshes authorizations for the agent' do + expect(subject).to be_truthy + expect(agent.user_access_authorized_projects).to contain_exactly(project_2) + + added_authorization = agent.user_access_project_authorizations.find_by(project: project_2) + expect(added_authorization.config).to eq({ 'access_as' => { 'agent' => {} } }) + end + end + + context 'when project belongs to a user namespace, and is in the same namespace as the agent' do + let_it_be(:root_ancestor) { create(:namespace) } + let_it_be(:agent_management_project) { create(:project, namespace: root_ancestor) } + let_it_be(:project_1) { create(:project, path: 'project-path-with-UPPERCASE', namespace: root_ancestor) } + let_it_be(:project_2) { create(:project, namespace: root_ancestor) } + + it 'creates an authorization record for the project' do + expect(subject).to be_truthy + expect(agent.user_access_authorized_projects).to contain_exactly(project_2) + end + end + + context 'when project belongs to a user namespace, and is authorizing itself' do + let_it_be(:root_ancestor) { create(:namespace) } + let_it_be(:agent_management_project) { create(:project, namespace: root_ancestor) } + let_it_be(:project_1) { create(:project, path: 'project-path-with-UPPERCASE', namespace: root_ancestor) } + let_it_be(:project_2) { agent_management_project } + + it 'creates an authorization record for the project' do + expect(subject).to be_truthy + expect(agent.user_access_authorized_projects).to contain_exactly(project_2) + end + end + + context 'when config contains too many projects' do + before do + stub_const("#{described_class}::AUTHORIZED_ENTITY_LIMIT", 0) + end + + it 'authorizes projects up to the limit' do + expect(subject).to be_truthy + 
expect(agent.user_access_authorized_projects).to be_empty + end + end + + include_examples 'removing authorization' do + let(:authorizations) { agent.user_access_authorized_projects } + end + end + end +end diff --git a/spec/services/clusters/agents/authorize_proxy_user_service_spec.rb b/spec/services/clusters/agents/authorize_proxy_user_service_spec.rb index c099d87f6eb..28c36e3aa36 100644 --- a/spec/services/clusters/agents/authorize_proxy_user_service_spec.rb +++ b/spec/services/clusters/agents/authorize_proxy_user_service_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe Clusters::Agents::AuthorizeProxyUserService, feature_category: :kubernetes_management do +RSpec.describe Clusters::Agents::AuthorizeProxyUserService, feature_category: :deployment_management do subject(:service_response) { service.execute } let(:service) { described_class.new(user, agent) } diff --git a/spec/services/clusters/agents/create_activity_event_service_spec.rb b/spec/services/clusters/agents/create_activity_event_service_spec.rb index 3da8ecddb8d..0d784bb69c7 100644 --- a/spec/services/clusters/agents/create_activity_event_service_spec.rb +++ b/spec/services/clusters/agents/create_activity_event_service_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe Clusters::Agents::CreateActivityEventService, feature_category: :kubernetes_management do +RSpec.describe Clusters::Agents::CreateActivityEventService, feature_category: :deployment_management do let_it_be(:agent) { create(:cluster_agent) } let_it_be(:token) { create(:cluster_agent_token, agent: agent) } let_it_be(:user) { create(:user) } diff --git a/spec/services/clusters/agents/create_service_spec.rb b/spec/services/clusters/agents/create_service_spec.rb index dc69dfb5e27..85607fcdf3a 100644 --- a/spec/services/clusters/agents/create_service_spec.rb +++ b/spec/services/clusters/agents/create_service_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe Clusters::Agents::CreateService, feature_category: 
:kubernetes_management do +RSpec.describe Clusters::Agents::CreateService, feature_category: :deployment_management do subject(:service) { described_class.new(project, user) } let(:project) { create(:project, :public, :repository) } diff --git a/spec/services/clusters/agents/delete_expired_events_service_spec.rb b/spec/services/clusters/agents/delete_expired_events_service_spec.rb index 892cd5a70ea..7dc9c280ab4 100644 --- a/spec/services/clusters/agents/delete_expired_events_service_spec.rb +++ b/spec/services/clusters/agents/delete_expired_events_service_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe Clusters::Agents::DeleteExpiredEventsService, feature_category: :kubernetes_management do +RSpec.describe Clusters::Agents::DeleteExpiredEventsService, feature_category: :deployment_management do let_it_be(:agent) { create(:cluster_agent) } describe '#execute' do diff --git a/spec/services/clusters/agents/delete_service_spec.rb b/spec/services/clusters/agents/delete_service_spec.rb index da97cdee4ca..febbb7ba5c8 100644 --- a/spec/services/clusters/agents/delete_service_spec.rb +++ b/spec/services/clusters/agents/delete_service_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe Clusters::Agents::DeleteService, feature_category: :kubernetes_management do +RSpec.describe Clusters::Agents::DeleteService, feature_category: :deployment_management do subject(:service) { described_class.new(container: project, current_user: user) } let(:cluster_agent) { create(:cluster_agent) } diff --git a/spec/services/clusters/agents/filter_authorizations_service_spec.rb b/spec/services/clusters/agents/filter_authorizations_service_spec.rb deleted file mode 100644 index 62cff405d0c..00000000000 --- a/spec/services/clusters/agents/filter_authorizations_service_spec.rb +++ /dev/null @@ -1,100 +0,0 @@ -# frozen_string_literal: true - -require 'spec_helper' - -RSpec.describe Clusters::Agents::FilterAuthorizationsService, feature_category: :continuous_integration do - 
describe '#execute' do - let_it_be(:group) { create(:group) } - let_it_be(:project) { create(:project, group: group) } - - let(:agent_authorizations_without_env) do - [ - build(:agent_project_authorization, project: project, agent: build(:cluster_agent, project: project)), - build(:agent_group_authorization, group: group, agent: build(:cluster_agent, project: project)), - ::Clusters::Agents::ImplicitAuthorization.new(agent: build(:cluster_agent, project: project)) - ] - end - - let(:filter_params) { {} } - - subject(:execute_filter) { described_class.new(agent_authorizations, filter_params).execute } - - context 'when there are no filters' do - let(:agent_authorizations) { agent_authorizations_without_env } - - it 'returns the authorizations as is' do - expect(execute_filter).to eq agent_authorizations - end - end - - context 'when filtering by environment' do - let(:agent_authorizations_with_env) do - [ - build( - :agent_project_authorization, - project: project, - agent: build(:cluster_agent, project: project), - environments: ['staging', 'review/*', 'production'] - ), - build( - :agent_group_authorization, - group: group, - agent: build(:cluster_agent, project: project), - environments: ['staging', 'review/*', 'production'] - ) - ] - end - - let(:agent_authorizations_with_different_env) do - [ - build( - :agent_project_authorization, - project: project, - agent: build(:cluster_agent, project: project), - environments: ['staging'] - ), - build( - :agent_group_authorization, - group: group, - agent: build(:cluster_agent, project: project), - environments: ['staging'] - ) - ] - end - - let(:agent_authorizations) do - ( - agent_authorizations_without_env + - agent_authorizations_with_env + - agent_authorizations_with_different_env - ) - end - - let(:filter_params) { { environment: 'production' } } - - it 'returns the authorizations with the given environment AND authorizations without any environment' do - expected_authorizations = agent_authorizations_with_env + 
agent_authorizations_without_env - - expect(execute_filter).to match_array expected_authorizations - end - - context 'when environment filter has a wildcard' do - let(:filter_params) { { environment: 'review/123' } } - - it 'returns the authorizations with matching environments AND authorizations without any environment' do - expected_authorizations = agent_authorizations_with_env + agent_authorizations_without_env - - expect(execute_filter).to match_array expected_authorizations - end - end - - context 'when environment filter is nil' do - let(:filter_params) { { environment: nil } } - - it 'returns the authorizations without any environment' do - expect(execute_filter).to match_array agent_authorizations_without_env - end - end - end - end -end diff --git a/spec/services/clusters/agents/refresh_authorization_service_spec.rb b/spec/services/clusters/agents/refresh_authorization_service_spec.rb deleted file mode 100644 index 51c054ddc98..00000000000 --- a/spec/services/clusters/agents/refresh_authorization_service_spec.rb +++ /dev/null @@ -1,154 +0,0 @@ -# frozen_string_literal: true - -require 'spec_helper' - -RSpec.describe Clusters::Agents::RefreshAuthorizationService, feature_category: :kubernetes_management do - describe '#execute' do - let_it_be(:root_ancestor) { create(:group) } - - let_it_be(:removed_group) { create(:group, parent: root_ancestor) } - let_it_be(:modified_group) { create(:group, parent: root_ancestor) } - let_it_be(:added_group) { create(:group, path: 'group-path-with-UPPERCASE', parent: root_ancestor) } - - let_it_be(:removed_project) { create(:project, namespace: root_ancestor) } - let_it_be(:modified_project) { create(:project, namespace: root_ancestor) } - let_it_be(:added_project) { create(:project, path: 'project-path-with-UPPERCASE', namespace: root_ancestor) } - - let(:project) { create(:project, namespace: root_ancestor) } - let(:agent) { create(:cluster_agent, project: project) } - - let(:config) do - { - ci_access: { - groups: [ - 
{ id: added_group.full_path, default_namespace: 'default' }, - # Uppercase path verifies case-insensitive matching. - { id: modified_group.full_path.upcase, default_namespace: 'new-namespace' } - ], - projects: [ - { id: added_project.full_path, default_namespace: 'default' }, - # Uppercase path verifies case-insensitive matching. - { id: modified_project.full_path.upcase, default_namespace: 'new-namespace' } - ] - } - }.deep_stringify_keys - end - - subject { described_class.new(agent, config: config).execute } - - before do - default_config = { default_namespace: 'default' } - - agent.group_authorizations.create!(group: removed_group, config: default_config) - agent.group_authorizations.create!(group: modified_group, config: default_config) - - agent.project_authorizations.create!(project: removed_project, config: default_config) - agent.project_authorizations.create!(project: modified_project, config: default_config) - end - - shared_examples 'removing authorization' do - context 'config contains no groups' do - let(:config) { {} } - - it 'removes all authorizations' do - expect(subject).to be_truthy - expect(authorizations).to be_empty - end - end - - context 'config contains groups outside of the configuration project hierarchy' do - let(:project) { create(:project, namespace: create(:group)) } - - it 'removes all authorizations' do - expect(subject).to be_truthy - expect(authorizations).to be_empty - end - end - - context 'configuration project does not belong to a group' do - let(:project) { create(:project) } - - it 'removes all authorizations' do - expect(subject).to be_truthy - expect(authorizations).to be_empty - end - end - end - - describe 'group authorization' do - it 'refreshes authorizations for the agent' do - expect(subject).to be_truthy - expect(agent.authorized_groups).to contain_exactly(added_group, modified_group) - - added_authorization = agent.group_authorizations.find_by(group: added_group) - expect(added_authorization.config).to eq({ 
'default_namespace' => 'default' }) - - modified_authorization = agent.group_authorizations.find_by(group: modified_group) - expect(modified_authorization.config).to eq({ 'default_namespace' => 'new-namespace' }) - end - - context 'config contains too many groups' do - before do - stub_const("#{described_class}::AUTHORIZED_ENTITY_LIMIT", 1) - end - - it 'authorizes groups up to the limit' do - expect(subject).to be_truthy - expect(agent.authorized_groups).to contain_exactly(added_group) - end - end - - include_examples 'removing authorization' do - let(:authorizations) { agent.authorized_groups } - end - end - - describe 'project authorization' do - it 'refreshes authorizations for the agent' do - expect(subject).to be_truthy - expect(agent.authorized_projects).to contain_exactly(added_project, modified_project) - - added_authorization = agent.project_authorizations.find_by(project: added_project) - expect(added_authorization.config).to eq({ 'default_namespace' => 'default' }) - - modified_authorization = agent.project_authorizations.find_by(project: modified_project) - expect(modified_authorization.config).to eq({ 'default_namespace' => 'new-namespace' }) - end - - context 'project does not belong to a group, and is in the same namespace as the agent' do - let(:root_ancestor) { create(:namespace) } - let(:added_project) { create(:project, namespace: root_ancestor) } - - it 'creates an authorization record for the project' do - expect(subject).to be_truthy - expect(agent.authorized_projects).to contain_exactly(added_project) - end - end - - context 'project does not belong to a group, and is authorizing itself' do - let(:root_ancestor) { create(:namespace) } - let(:added_project) { project } - - it 'creates an authorization record for the project' do - expect(subject).to be_truthy - expect(agent.authorized_projects).to contain_exactly(added_project) - end - end - - context 'config contains too many projects' do - before do - 
stub_const("#{described_class}::AUTHORIZED_ENTITY_LIMIT", 1) - end - - it 'authorizes projects up to the limit' do - expect(subject).to be_truthy - expect(agent.authorized_projects).to contain_exactly(added_project) - end - end - - include_examples 'removing authorization' do - let(:authorizations) { agent.authorized_projects } - end - end - end -end diff --git a/spec/services/clusters/build_kubernetes_namespace_service_spec.rb b/spec/services/clusters/build_kubernetes_namespace_service_spec.rb index b1be3eb4199..fea17495914 100644 --- a/spec/services/clusters/build_kubernetes_namespace_service_spec.rb +++ b/spec/services/clusters/build_kubernetes_namespace_service_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe Clusters::BuildKubernetesNamespaceService, feature_category: :kubernetes_management do +RSpec.describe Clusters::BuildKubernetesNamespaceService, feature_category: :deployment_management do let(:cluster) { create(:cluster, :project, :provided_by_gcp) } let(:environment) { create(:environment) } let(:project) { environment.project } diff --git a/spec/services/clusters/build_service_spec.rb b/spec/services/clusters/build_service_spec.rb index 9e71b7a8115..909d3f58c48 100644 --- a/spec/services/clusters/build_service_spec.rb +++ b/spec/services/clusters/build_service_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe Clusters::BuildService, feature_category: :kubernetes_management do +RSpec.describe Clusters::BuildService, feature_category: :deployment_management do describe '#execute' do subject { described_class.new(cluster_subject).execute } diff --git a/spec/services/clusters/cleanup/project_namespace_service_spec.rb b/spec/services/clusters/cleanup/project_namespace_service_spec.rb index 366e4fa9c03..34311d6e830 100644 --- a/spec/services/clusters/cleanup/project_namespace_service_spec.rb +++ b/spec/services/clusters/cleanup/project_namespace_service_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe 
Clusters::Cleanup::ProjectNamespaceService, feature_category: :kubernetes_management do +RSpec.describe Clusters::Cleanup::ProjectNamespaceService, feature_category: :deployment_management do describe '#execute' do subject { service.execute } diff --git a/spec/services/clusters/cleanup/service_account_service_spec.rb b/spec/services/clusters/cleanup/service_account_service_spec.rb index 881ec85b3d5..d6b5d1a5d5c 100644 --- a/spec/services/clusters/cleanup/service_account_service_spec.rb +++ b/spec/services/clusters/cleanup/service_account_service_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe Clusters::Cleanup::ServiceAccountService, feature_category: :kubernetes_management do +RSpec.describe Clusters::Cleanup::ServiceAccountService, feature_category: :deployment_management do describe '#execute' do subject { service.execute } diff --git a/spec/services/clusters/create_service_spec.rb b/spec/services/clusters/create_service_spec.rb index 0d170f66f4a..e130f713cb2 100644 --- a/spec/services/clusters/create_service_spec.rb +++ b/spec/services/clusters/create_service_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe Clusters::CreateService, feature_category: :kubernetes_management do +RSpec.describe Clusters::CreateService, feature_category: :deployment_management do let(:access_token) { 'xxx' } let(:project) { create(:project) } let(:user) { create(:user) } @@ -50,7 +50,7 @@ RSpec.describe Clusters::CreateService, feature_category: :kubernetes_management end context 'when project has a cluster' do - include_context 'valid cluster create params' + include_context 'with valid cluster create params' let!(:cluster) { create(:cluster, :provided_by_gcp, :production_environment, projects: [project]) } it 'creates another cluster' do diff --git a/spec/services/clusters/destroy_service_spec.rb b/spec/services/clusters/destroy_service_spec.rb index 2bc0099ff04..dd3e24d0e12 100644 --- a/spec/services/clusters/destroy_service_spec.rb +++ 
b/spec/services/clusters/destroy_service_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe Clusters::DestroyService, feature_category: :kubernetes_management do +RSpec.describe Clusters::DestroyService, feature_category: :deployment_management do describe '#execute' do subject { described_class.new(cluster.user, params).execute(cluster) } diff --git a/spec/services/clusters/integrations/create_service_spec.rb b/spec/services/clusters/integrations/create_service_spec.rb index fa47811dc6b..b716e4f4651 100644 --- a/spec/services/clusters/integrations/create_service_spec.rb +++ b/spec/services/clusters/integrations/create_service_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe Clusters::Integrations::CreateService, '#execute', feature_category: :kubernetes_management do +RSpec.describe Clusters::Integrations::CreateService, '#execute', feature_category: :deployment_management do let_it_be(:project) { create(:project) } let_it_be_with_reload(:cluster) { create(:cluster, :provided_by_gcp, projects: [project]) } diff --git a/spec/services/clusters/integrations/prometheus_health_check_service_spec.rb b/spec/services/clusters/integrations/prometheus_health_check_service_spec.rb index 2d527bb0872..9390d4b368b 100644 --- a/spec/services/clusters/integrations/prometheus_health_check_service_spec.rb +++ b/spec/services/clusters/integrations/prometheus_health_check_service_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe Clusters::Integrations::PrometheusHealthCheckService, '#execute', feature_category: :kubernetes_management do +RSpec.describe Clusters::Integrations::PrometheusHealthCheckService, '#execute', feature_category: :deployment_management do let(:service) { described_class.new(cluster) } subject { service.execute } diff --git a/spec/services/clusters/kubernetes/create_or_update_namespace_service_spec.rb b/spec/services/clusters/kubernetes/create_or_update_namespace_service_spec.rb index 8ae34e4f9ab..7e61d690ddd 100644 --- 
a/spec/services/clusters/kubernetes/create_or_update_namespace_service_spec.rb +++ b/spec/services/clusters/kubernetes/create_or_update_namespace_service_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe Clusters::Kubernetes::CreateOrUpdateNamespaceService, '#execute', feature_category: :kubernetes_management do +RSpec.describe Clusters::Kubernetes::CreateOrUpdateNamespaceService, '#execute', feature_category: :deployment_management do include KubernetesHelpers let(:cluster) { create(:cluster, :project, :provided_by_gcp) } diff --git a/spec/services/clusters/kubernetes/create_or_update_service_account_service_spec.rb b/spec/services/clusters/kubernetes/create_or_update_service_account_service_spec.rb index bdf46c19e36..ab0c5691b06 100644 --- a/spec/services/clusters/kubernetes/create_or_update_service_account_service_spec.rb +++ b/spec/services/clusters/kubernetes/create_or_update_service_account_service_spec.rb @@ -1,7 +1,7 @@ # frozen_string_literal: true require 'spec_helper' -RSpec.describe Clusters::Kubernetes::CreateOrUpdateServiceAccountService, feature_category: :kubernetes_management do +RSpec.describe Clusters::Kubernetes::CreateOrUpdateServiceAccountService, feature_category: :deployment_management do include KubernetesHelpers let(:api_url) { 'http://111.111.111.111' } diff --git a/spec/services/clusters/kubernetes/fetch_kubernetes_token_service_spec.rb b/spec/services/clusters/kubernetes/fetch_kubernetes_token_service_spec.rb index 2b77df1eb6d..439dc37e684 100644 --- a/spec/services/clusters/kubernetes/fetch_kubernetes_token_service_spec.rb +++ b/spec/services/clusters/kubernetes/fetch_kubernetes_token_service_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe Clusters::Kubernetes::FetchKubernetesTokenService, feature_category: :kubernetes_management do +RSpec.describe Clusters::Kubernetes::FetchKubernetesTokenService, feature_category: :deployment_management do include KubernetesHelpers describe '#execute' do diff --git 
a/spec/services/clusters/kubernetes_spec.rb b/spec/services/clusters/kubernetes_spec.rb index 7e22c2f95df..cd430f81a65 100644 --- a/spec/services/clusters/kubernetes_spec.rb +++ b/spec/services/clusters/kubernetes_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe Clusters::Kubernetes, feature_category: :kubernetes_management do +RSpec.describe Clusters::Kubernetes, feature_category: :deployment_management do it { is_expected.to be_const_defined(:GITLAB_SERVICE_ACCOUNT_NAME) } it { is_expected.to be_const_defined(:GITLAB_SERVICE_ACCOUNT_NAMESPACE) } it { is_expected.to be_const_defined(:GITLAB_ADMIN_TOKEN_NAME) } diff --git a/spec/services/clusters/management/validate_management_project_permissions_service_spec.rb b/spec/services/clusters/management/validate_management_project_permissions_service_spec.rb index 8a49d90aa48..46032de600d 100644 --- a/spec/services/clusters/management/validate_management_project_permissions_service_spec.rb +++ b/spec/services/clusters/management/validate_management_project_permissions_service_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe Clusters::Management::ValidateManagementProjectPermissionsService, feature_category: :kubernetes_management do +RSpec.describe Clusters::Management::ValidateManagementProjectPermissionsService, feature_category: :deployment_management do describe '#execute' do subject { described_class.new(user).execute(cluster, management_project_id) } diff --git a/spec/services/clusters/update_service_spec.rb b/spec/services/clusters/update_service_spec.rb index 31661d30f41..cc759407376 100644 --- a/spec/services/clusters/update_service_spec.rb +++ b/spec/services/clusters/update_service_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe Clusters::UpdateService, feature_category: :kubernetes_management do +RSpec.describe Clusters::UpdateService, feature_category: :deployment_management do include KubernetesHelpers describe '#execute' do diff --git 
a/spec/services/database/consistency_check_service_spec.rb b/spec/services/database/consistency_check_service_spec.rb index 6288fedfb59..8b7560f80ad 100644 --- a/spec/services/database/consistency_check_service_spec.rb +++ b/spec/services/database/consistency_check_service_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe Database::ConsistencyCheckService, feature_category: :pods do +RSpec.describe Database::ConsistencyCheckService, feature_category: :cell do let(:batch_size) { 5 } let(:max_batches) { 2 } diff --git a/spec/services/database/consistency_fix_service_spec.rb b/spec/services/database/consistency_fix_service_spec.rb index fcc776cbc2a..ea0916e8d2b 100644 --- a/spec/services/database/consistency_fix_service_spec.rb +++ b/spec/services/database/consistency_fix_service_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe Database::ConsistencyFixService, feature_category: :pods do +RSpec.describe Database::ConsistencyFixService, feature_category: :cell do describe '#execute' do context 'fixing namespaces inconsistencies' do subject(:consistency_fix_service) do diff --git a/spec/services/git/wiki_push_service/change_spec.rb b/spec/services/git/wiki_push_service/change_spec.rb index ad3c4ae68c0..719e67666ce 100644 --- a/spec/services/git/wiki_push_service/change_spec.rb +++ b/spec/services/git/wiki_push_service/change_spec.rb @@ -60,11 +60,13 @@ RSpec.describe Git::WikiPushService::Change, feature_category: :source_code_mana end %i[added renamed modified].each do |op| - let(:operation) { op } - let(:slug) { new_path.chomp('.md') } - let(:revision) { change[:newrev] } + context "the operation is #{op}" do + let(:operation) { op } + let(:slug) { new_path.chomp('.md') } + let(:revision) { change[:newrev] } - it { is_expected.to have_attributes(page: wiki_page) } + it { is_expected.to have_attributes(page: wiki_page) } + end end end end diff --git a/spec/services/issuable/callbacks/milestone_spec.rb 
b/spec/services/issuable/callbacks/milestone_spec.rb new file mode 100644 index 00000000000..085ed029a6c --- /dev/null +++ b/spec/services/issuable/callbacks/milestone_spec.rb @@ -0,0 +1,101 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe Issuable::Callbacks::Milestone, feature_category: :team_planning do + let_it_be(:group) { create(:group) } + let_it_be(:project) { create(:project, :private, group: group) } + let_it_be(:project_milestone) { create(:milestone, project: project) } + let_it_be(:group_milestone) { create(:milestone, group: group) } + let_it_be(:reporter) do + create(:user).tap { |u| project.add_reporter(u) } + end + + let(:issuable) { build(:issue, project: project) } + let(:current_user) { reporter } + let(:params) { { milestone_id: project_milestone.id } } + let(:callback) { described_class.new(issuable: issuable, current_user: current_user, params: params) } + + describe '#after_initialize' do + it "sets the issuable's milestone" do + expect { callback.after_initialize }.to change { issuable.milestone }.from(nil).to(project_milestone) + end + + context 'when assigning a group milestone' do + let(:params) { { milestone_id: group_milestone.id } } + + it "sets the issuable's milestone" do + expect { callback.after_initialize }.to change { issuable.milestone }.from(nil).to(group_milestone) + end + end + + context 'when assigning a group milestone outside the project ancestors' do + let(:another_group_milestone) { create(:milestone, group: create(:group)) } + let(:params) { { milestone_id: another_group_milestone.id } } + + it "does not change the issuable's milestone" do + expect { callback.after_initialize }.not_to change { issuable.milestone } + end + end + + context 'when user is not allowed to set issuable metadata' do + let(:current_user) { create(:user) } + + it "does not change the issuable's milestone" do + expect { callback.after_initialize }.not_to change { issuable.milestone } + end + end + + context 'when 
unsetting a milestone' do + let(:issuable) { create(:issue, project: project, milestone: project_milestone) } + + context 'when milestone_id is nil' do + let(:params) { { milestone_id: nil } } + + it "unsets the issuable's milestone" do + expect { callback.after_initialize }.to change { issuable.milestone }.from(project_milestone).to(nil) + end + end + + context 'when milestone_id is an empty string' do + let(:params) { { milestone_id: '' } } + + it "unsets the issuable's milestone" do + expect { callback.after_initialize }.to change { issuable.milestone }.from(project_milestone).to(nil) + end + end + + context "when milestone_id is '0'" do + let(:params) { { milestone_id: '0' } } + + it "unsets the issuable's milestone" do + expect { callback.after_initialize }.to change { issuable.milestone }.from(project_milestone).to(nil) + end + end + + context 'when milestone_id is 0' do + let(:params) { { milestone_id: 0 } } + + it "unsets the issuable's milestone" do + expect { callback.after_initialize }.to change { issuable.milestone }.from(project_milestone).to(nil) + end + end + + context 'when milestone_id is not given' do + let(:params) { {} } + + it "does not unset the issuable's milestone" do + expect { callback.after_initialize }.not_to change { issuable.milestone } + end + end + + context 'when new type does not support milestones' do + let(:params) { { excluded_in_new_type: true } } + + it "unsets the issuable's milestone" do + expect { callback.after_initialize }.to change { issuable.milestone }.from(project_milestone).to(nil) + end + end + end + end +end diff --git a/spec/services/issues/after_create_service_spec.rb b/spec/services/issues/after_create_service_spec.rb index 594caed23d7..b59578b14a0 100644 --- a/spec/services/issues/after_create_service_spec.rb +++ b/spec/services/issues/after_create_service_spec.rb @@ -28,13 +28,6 @@ RSpec.describe Issues::AfterCreateService, feature_category: :team_planning do expect { after_create_service.execute(issue) }.to 
change { Todo.where(attributes).count }.by(1) end - it 'deletes milestone issues count cache' do - expect_next(Milestones::IssuesCountService, milestone) - .to receive(:delete_cache).and_call_original - - after_create_service.execute(issue) - end - context 'with a regular issue' do it_behaves_like 'does not track incident management event', :incident_management_incident_created do subject { after_create_service.execute(issue) } diff --git a/spec/services/issues/build_service_spec.rb b/spec/services/issues/build_service_spec.rb index 0f89a746520..bca6a3cd4f9 100644 --- a/spec/services/issues/build_service_spec.rb +++ b/spec/services/issues/build_service_spec.rb @@ -161,8 +161,8 @@ RSpec.describe Issues::BuildService, feature_category: :team_planning do end end - context 'when guest' do - let(:user) { guest } + context 'when user is not a project member' do + let(:user) { create(:user) } it 'cannot set milestone' do milestone = create(:milestone, project: project) @@ -172,37 +172,5 @@ RSpec.describe Issues::BuildService, feature_category: :team_planning do end end end - - describe 'setting issue type' do - context 'with a corresponding WorkItems::Type' do - let_it_be(:type_issue_id) { WorkItems::Type.default_issue_type.id } - let_it_be(:type_incident_id) { WorkItems::Type.default_by_type(:incident).id } - - where(:issue_type, :current_user, :work_item_type_id, :resulting_issue_type) do - nil | ref(:guest) | ref(:type_issue_id) | 'issue' - 'issue' | ref(:guest) | ref(:type_issue_id) | 'issue' - 'incident' | ref(:guest) | ref(:type_issue_id) | 'issue' - 'incident' | ref(:reporter) | ref(:type_incident_id) | 'incident' - # update once support for test_case is enabled - 'test_case' | ref(:guest) | ref(:type_issue_id) | 'issue' - # update once support for requirement is enabled - 'requirement' | ref(:guest) | ref(:type_issue_id) | 'issue' - 'invalid' | ref(:guest) | ref(:type_issue_id) | 'issue' - # ensure that we don't set a value which has a permission check but is an 
invalid issue type - 'project' | ref(:guest) | ref(:type_issue_id) | 'issue' - end - - with_them do - let(:user) { current_user } - - it 'builds an issue' do - issue = build_issue(issue_type: issue_type) - - expect(issue.issue_type).to eq(resulting_issue_type) - expect(issue.work_item_type_id).to eq(work_item_type_id) - end - end - end - end end end diff --git a/spec/services/issues/close_service_spec.rb b/spec/services/issues/close_service_spec.rb index 0d9b3306540..47925236a74 100644 --- a/spec/services/issues/close_service_spec.rb +++ b/spec/services/issues/close_service_spec.rb @@ -4,6 +4,7 @@ require 'spec_helper' RSpec.describe Issues::CloseService, feature_category: :team_planning do let(:project) { create(:project, :repository) } + let(:delegated_project) { project.project_namespace.project } let(:user) { create(:user, email: "user@example.com") } let(:user2) { create(:user, email: "user2@example.com") } let(:guest) { create(:user) } @@ -201,34 +202,17 @@ RSpec.describe Issues::CloseService, feature_category: :team_planning do end it 'mentions closure via a merge request' do - close_issue - - email = ActionMailer::Base.deliveries.last + expect_next_instance_of(NotificationService::Async) do |service| + expect(service).to receive(:close_issue).with(issue, user, { closed_via: closing_merge_request }) + end - expect(email.to.first).to eq(user2.email) - expect(email.subject).to include(issue.title) - expect(email.body.parts.map(&:body)).to all(include(closing_merge_request.to_reference)) + close_issue end it_behaves_like 'records an onboarding progress action', :issue_auto_closed do let(:namespace) { project.namespace } end - context 'when user cannot read merge request' do - it 'does not mention merge request' do - project.project_feature.update_attribute(:repository_access_level, ProjectFeature::DISABLED) - - close_issue - - email = ActionMailer::Base.deliveries.last - body_text = email.body.parts.map(&:body).join(" ") - - expect(email.to.first).to 
eq(user2.email) - expect(email.subject).to include(issue.title) - expect(body_text).not_to include(closing_merge_request.to_reference) - end - end - context 'updating `metrics.first_mentioned_in_commit_at`' do context 'when `metrics.first_mentioned_in_commit_at` is not set' do it 'uses the first commit authored timestamp' do @@ -264,31 +248,11 @@ RSpec.describe Issues::CloseService, feature_category: :team_planning do context "closed by a commit", :sidekiq_might_not_need_inline do it 'mentions closure via a commit' do - perform_enqueued_jobs do - described_class.new(container: project, current_user: user).close_issue(issue, closed_via: closing_commit) + expect_next_instance_of(NotificationService::Async) do |service| + expect(service).to receive(:close_issue).with(issue, user, { closed_via: "commit #{closing_commit.id}" }) end - email = ActionMailer::Base.deliveries.last - - expect(email.to.first).to eq(user2.email) - expect(email.subject).to include(issue.title) - expect(email.body.parts.map(&:body)).to all(include(closing_commit.id)) - end - - context 'when user cannot read the commit' do - it 'does not mention the commit id' do - project.project_feature.update_attribute(:repository_access_level, ProjectFeature::DISABLED) - perform_enqueued_jobs do - described_class.new(container: project, current_user: user).close_issue(issue, closed_via: closing_commit) - end - - email = ActionMailer::Base.deliveries.last - body_text = email.body.parts.map(&:body).join(" ") - - expect(email.to.first).to eq(user2.email) - expect(email.subject).to include(issue.title) - expect(body_text).not_to include(closing_commit.id) - end + described_class.new(container: project, current_user: user).close_issue(issue, closed_via: closing_commit) end end @@ -320,12 +284,12 @@ RSpec.describe Issues::CloseService, feature_category: :team_planning do expect(issue.reload.closed_by_id).to be(user.id) end - it 'sends email to user2 about assign of new issue', :sidekiq_might_not_need_inline do - 
close_issue + it 'sends notification', :sidekiq_might_not_need_inline do + expect_next_instance_of(NotificationService::Async) do |service| + expect(service).to receive(:close_issue).with(issue, user, { closed_via: nil }) + end - email = ActionMailer::Base.deliveries.last - expect(email.to.first).to eq(user2.email) - expect(email.subject).to include(issue.title) + close_issue end it 'creates resource state event about the issue being closed' do @@ -434,10 +398,10 @@ RSpec.describe Issues::CloseService, feature_category: :team_planning do end it 'executes issue hooks' do - expect(project).to receive(:execute_hooks).with(expected_payload, :issue_hooks) - expect(project).to receive(:execute_integrations).with(expected_payload, :issue_hooks) + expect(delegated_project).to receive(:execute_hooks).with(expected_payload, :issue_hooks) + expect(delegated_project).to receive(:execute_integrations).with(expected_payload, :issue_hooks) - described_class.new(container: project, current_user: user).close_issue(issue) + described_class.new(container: delegated_project, current_user: user).close_issue(issue) end end @@ -445,8 +409,8 @@ RSpec.describe Issues::CloseService, feature_category: :team_planning do it 'executes confidential issue hooks' do issue = create(:issue, :confidential, project: project) - expect(project).to receive(:execute_hooks).with(an_instance_of(Hash), :confidential_issue_hooks) - expect(project).to receive(:execute_integrations).with(an_instance_of(Hash), :confidential_issue_hooks) + expect(delegated_project).to receive(:execute_hooks).with(an_instance_of(Hash), :confidential_issue_hooks) + expect(delegated_project).to receive(:execute_integrations).with(an_instance_of(Hash), :confidential_issue_hooks) described_class.new(container: project, current_user: user).close_issue(issue) end diff --git a/spec/services/issues/create_service_spec.rb b/spec/services/issues/create_service_spec.rb index d5d88baca1f..46c2f03dadc 100644 --- 
a/spec/services/issues/create_service_spec.rb +++ b/spec/services/issues/create_service_spec.rb @@ -124,6 +124,15 @@ RSpec.describe Issues::CreateService, feature_category: :team_planning do expect(issue.issue_customer_relations_contacts).to be_empty end + context 'with milestone' do + it 'deletes milestone issues count cache' do + expect_next(Milestones::IssuesCountService, milestone) + .to receive(:delete_cache).and_call_original + + expect(result).to be_success + end + end + context 'when the work item type is not allowed to create' do before do allow_next_instance_of(::Issues::BuildService) do |instance| @@ -372,6 +381,13 @@ RSpec.describe Issues::CreateService, feature_category: :team_planning do expect(assignee.assigned_open_issues_count).to eq 1 end + + it 'records the assignee assignment event' do + result = described_class.new(container: project, current_user: user, params: opts, spam_params: spam_params).execute + + issue = result.payload[:issue] + expect(issue.assignment_events).to match([have_attributes(user_id: assignee.id, action: 'add')]) + end end context 'when duplicate label titles are given' do @@ -436,8 +452,8 @@ RSpec.describe Issues::CreateService, feature_category: :team_planning do end it 'executes issue hooks' do - expect(project).to receive(:execute_hooks).with(expected_payload, :issue_hooks) - expect(project).to receive(:execute_integrations).with(expected_payload, :issue_hooks) + expect(project.project_namespace).to receive(:execute_hooks).with(expected_payload, :issue_hooks) + expect(project.project_namespace).to receive(:execute_integrations).with(expected_payload, :issue_hooks) described_class.new(container: project, current_user: user, params: opts, spam_params: spam_params).execute end @@ -459,8 +475,8 @@ RSpec.describe Issues::CreateService, feature_category: :team_planning do end it 'executes confidential issue hooks' do - expect(project).to receive(:execute_hooks).with(expected_payload, :confidential_issue_hooks) - 
expect(project).to receive(:execute_integrations).with(expected_payload, :confidential_issue_hooks) + expect(project.project_namespace).to receive(:execute_hooks).with(expected_payload, :confidential_issue_hooks) + expect(project.project_namespace).to receive(:execute_integrations).with(expected_payload, :confidential_issue_hooks) described_class.new(container: project, current_user: user, params: opts, spam_params: spam_params).execute end @@ -493,7 +509,7 @@ RSpec.describe Issues::CreateService, feature_category: :team_planning do end it 'schedules a namespace onboarding create action worker' do - expect(Onboarding::IssueCreatedWorker).to receive(:perform_async).with(project.namespace.id) + expect(Onboarding::IssueCreatedWorker).to receive(:perform_async).with(project.project_namespace_id) issue end @@ -565,36 +581,6 @@ RSpec.describe Issues::CreateService, feature_category: :team_planning do end context 'Quick actions' do - context 'as work item' do - let(:opts) do - { - title: "My work item", - work_item_type: work_item_type, - description: "/shrug" - } - end - - context 'when work item type is not the default Issue' do - let(:work_item_type) { create(:work_item_type, namespace: project.namespace) } - - it 'saves the work item without applying the quick action' do - expect(result).to be_success - expect(issue).to be_persisted - expect(issue.description).to eq("/shrug") - end - end - - context 'when work item type is the default Issue' do - let(:work_item_type) { WorkItems::Type.default_by_type(:issue) } - - it 'saves the work item and applies the quick action' do - expect(result).to be_success - expect(issue).to be_persisted - expect(issue.description).to eq(" ¯\\_(ツ)_/¯") - end - end - end - context 'with assignee, milestone, and contact in params and command' do let_it_be(:contact) { create(:contact, group: group) } @@ -687,6 +673,23 @@ RSpec.describe Issues::CreateService, feature_category: :team_planning do expect(issue.labels).to eq([label]) end end + + 
context 'when using promote_to_incident' do + let(:opts) { { title: 'Title', description: '/promote_to_incident' } } + + before do + project.add_developer(user) + end + + it 'creates an issue with the correct issue type' do + expect { result }.to change(Issue, :count).by(1) + + created_issue = Issue.last + + expect(created_issue.issue_type).to eq('incident') + expect(created_issue.work_item_type).to eq(WorkItems::Type.default_by_type('incident')) + end + end end context 'resolving discussions' do @@ -855,5 +858,49 @@ RSpec.describe Issues::CreateService, feature_category: :team_planning do subject.execute end end + + describe 'setting issue type' do + using RSpec::Parameterized::TableSyntax + + let_it_be(:guest) { user.tap { |u| project.add_guest(u) } } + let_it_be(:reporter) { assignee.tap { |u| project.add_reporter(u) } } + + context 'with a corresponding WorkItems::Type' do + let_it_be(:type_issue_id) { WorkItems::Type.default_issue_type.id } + let_it_be(:type_incident_id) { WorkItems::Type.default_by_type(:incident).id } + + where(:issue_type, :current_user, :work_item_type_id, :resulting_issue_type) do + nil | ref(:guest) | ref(:type_issue_id) | 'issue' + 'issue' | ref(:guest) | ref(:type_issue_id) | 'issue' + 'incident' | ref(:guest) | ref(:type_issue_id) | 'issue' + 'incident' | ref(:reporter) | ref(:type_incident_id) | 'incident' + # update once support for test_case is enabled + 'test_case' | ref(:guest) | ref(:type_issue_id) | 'issue' + # update once support for requirement is enabled + 'requirement' | ref(:guest) | ref(:type_issue_id) | 'issue' + 'invalid' | ref(:guest) | ref(:type_issue_id) | 'issue' + # ensure that we don't set a value which has a permission check but is an invalid issue type + 'project' | ref(:guest) | ref(:type_issue_id) | 'issue' + end + + with_them do + let(:user) { current_user } + let(:params) { { title: 'title', issue_type: issue_type } } + let(:issue) do + described_class.new( + container: project, + current_user: user, + 
params: params, + spam_params: spam_params + ).execute[:issue] + end + + it 'creates an issue' do + expect(issue.issue_type).to eq(resulting_issue_type) + expect(issue.work_item_type_id).to eq(work_item_type_id) + end + end + end + end end end diff --git a/spec/services/issues/reopen_service_spec.rb b/spec/services/issues/reopen_service_spec.rb index 0f89844a2c1..bb1151dfac7 100644 --- a/spec/services/issues/reopen_service_spec.rb +++ b/spec/services/issues/reopen_service_spec.rb @@ -109,8 +109,8 @@ RSpec.describe Issues::ReopenService, feature_category: :team_planning do end it 'executes issue hooks' do - expect(project).to receive(:execute_hooks).with(expected_payload, :issue_hooks) - expect(project).to receive(:execute_integrations).with(expected_payload, :issue_hooks) + expect(project.project_namespace).to receive(:execute_hooks).with(expected_payload, :issue_hooks) + expect(project.project_namespace).to receive(:execute_integrations).with(expected_payload, :issue_hooks) execute end @@ -120,8 +120,9 @@ RSpec.describe Issues::ReopenService, feature_category: :team_planning do let(:issue) { create(:issue, :confidential, :closed, project: project) } it 'executes confidential issue hooks' do - expect(project).to receive(:execute_hooks).with(an_instance_of(Hash), :confidential_issue_hooks) - expect(project).to receive(:execute_integrations).with(an_instance_of(Hash), :confidential_issue_hooks) + issue_hooks = :confidential_issue_hooks + expect(project.project_namespace).to receive(:execute_hooks).with(an_instance_of(Hash), issue_hooks) + expect(project.project_namespace).to receive(:execute_integrations).with(an_instance_of(Hash), issue_hooks) execute end diff --git a/spec/services/issues/update_service_spec.rb b/spec/services/issues/update_service_spec.rb index 9c81015e05e..f96fbf54f08 100644 --- a/spec/services/issues/update_service_spec.rb +++ b/spec/services/issues/update_service_spec.rb @@ -259,7 +259,7 @@ RSpec.describe Issues::UpdateService, :mailer, 
feature_category: :team_planning it 'creates system note about issue type' do update_issue(issue_type: 'incident') - note = find_note('changed issue type to incident') + note = find_note('changed type from issue to incident') expect(note).not_to eq(nil) end @@ -592,8 +592,8 @@ RSpec.describe Issues::UpdateService, :mailer, feature_category: :team_planning end it 'executes confidential issue hooks' do - expect(project).to receive(:execute_hooks).with(an_instance_of(Hash), :confidential_issue_hooks) - expect(project).to receive(:execute_integrations).with(an_instance_of(Hash), :confidential_issue_hooks) + expect(project.project_namespace).to receive(:execute_hooks).with(an_instance_of(Hash), :confidential_issue_hooks) + expect(project.project_namespace).to receive(:execute_integrations).with(an_instance_of(Hash), :confidential_issue_hooks) update_issue(confidential: true) end @@ -1107,19 +1107,37 @@ RSpec.describe Issues::UpdateService, :mailer, feature_category: :team_planning end context 'updating asssignee_id' do + it 'changes assignee' do + expect_next_instance_of(NotificationService::Async) do |service| + expect(service).to receive(:reassigned_issue).with(issue, user, [user3]) + end + + update_issue(assignee_ids: [user2.id]) + + expect(issue.reload.assignees).to eq([user2]) + end + it 'does not update assignee when assignee_id is invalid' do + expect(NotificationService).not_to receive(:new) + update_issue(assignee_ids: [-1]) expect(issue.reload.assignees).to eq([user3]) end it 'unassigns assignee when user id is 0' do + expect_next_instance_of(NotificationService::Async) do |service| + expect(service).to receive(:reassigned_issue).with(issue, user, [user3]) + end + update_issue(assignee_ids: [0]) expect(issue.reload.assignees).to be_empty end it 'does not update assignee_id when user cannot read issue' do + expect(NotificationService).not_to receive(:new) + update_issue(assignee_ids: [create(:user).id]) expect(issue.reload.assignees).to eq([user3]) @@ -1130,6 
+1148,8 @@ RSpec.describe Issues::UpdateService, :mailer, feature_category: :team_planning levels.each do |level| it "does not update with unauthorized assignee when project is #{Gitlab::VisibilityLevel.level_name(level)}" do + expect(NotificationService).not_to receive(:new) + assignee = create(:user) project.update!(visibility_level: level) feature_visibility_attr = :"#{issue.model_name.plural}_access_level" @@ -1139,6 +1159,39 @@ RSpec.describe Issues::UpdateService, :mailer, feature_category: :team_planning end end end + + it 'tracks the assignment events' do + original_assignee = issue.assignees.first! + + update_issue(assignee_ids: [user2.id]) + update_issue(assignee_ids: []) + update_issue(assignee_ids: [user3.id]) + + expected_events = [ + have_attributes({ + issue_id: issue.id, + user_id: original_assignee.id, + action: 'remove' + }), + have_attributes({ + issue_id: issue.id, + user_id: user2.id, + action: 'add' + }), + have_attributes({ + issue_id: issue.id, + user_id: user2.id, + action: 'remove' + }), + have_attributes({ + issue_id: issue.id, + user_id: user3.id, + action: 'add' + }) + ] + + expect(issue.assignment_events).to match_array(expected_events) + end end context 'updating mentions' do @@ -1164,9 +1217,9 @@ RSpec.describe Issues::UpdateService, :mailer, feature_category: :team_planning end it 'triggers webhooks' do - expect(project).to receive(:execute_hooks).with(an_instance_of(Hash), :issue_hooks) - expect(project).to receive(:execute_integrations).with(an_instance_of(Hash), :issue_hooks) - expect(project).to receive(:execute_integrations).with(an_instance_of(Hash), :incident_hooks) + expect(project.project_namespace).to receive(:execute_hooks).with(an_instance_of(Hash), :issue_hooks) + expect(project.project_namespace).to receive(:execute_integrations).with(an_instance_of(Hash), :issue_hooks) + expect(project.project_namespace).to receive(:execute_integrations).with(an_instance_of(Hash), :incident_hooks) update_issue(opts) end @@ -1278,9 
+1331,9 @@ RSpec.describe Issues::UpdateService, :mailer, feature_category: :team_planning end it 'triggers webhooks' do - expect(project).to receive(:execute_hooks).with(an_instance_of(Hash), :issue_hooks) - expect(project).to receive(:execute_integrations).with(an_instance_of(Hash), :issue_hooks) - expect(project).to receive(:execute_integrations).with(an_instance_of(Hash), :incident_hooks) + expect(project.project_namespace).to receive(:execute_hooks).with(an_instance_of(Hash), :issue_hooks) + expect(project.project_namespace).to receive(:execute_integrations).with(an_instance_of(Hash), :issue_hooks) + expect(project.project_namespace).to receive(:execute_integrations).with(an_instance_of(Hash), :incident_hooks) update_issue(opts) end @@ -1473,31 +1526,5 @@ RSpec.describe Issues::UpdateService, :mailer, feature_category: :team_planning let(:existing_issue) { create(:issue, project: project) } let(:issuable) { described_class.new(container: project, current_user: user, params: params).execute(existing_issue) } end - - context 'with quick actions' do - context 'as work item' do - let(:opts) { { description: "/shrug" } } - - context 'when work item type is not the default Issue' do - let(:issue) { create(:work_item, :task, description: "") } - - it 'does not apply the quick action' do - expect do - update_issue(opts) - end.to change(issue, :description).to("/shrug") - end - end - - context 'when work item type is the default Issue' do - let(:issue) { create(:work_item, :issue, description: "") } - - it 'does not apply the quick action' do - expect do - update_issue(opts) - end.to change(issue, :description).to(" ¯\\_(ツ)_/¯") - end - end - end - end end end diff --git a/spec/services/members/groups/creator_service_spec.rb b/spec/services/members/groups/creator_service_spec.rb index 48c971297c1..4c13106145e 100644 --- a/spec/services/members/groups/creator_service_spec.rb +++ b/spec/services/members/groups/creator_service_spec.rb @@ -4,6 +4,7 @@ require 'spec_helper' 
RSpec.describe Members::Groups::CreatorService, feature_category: :subgroups do let_it_be(:source, reload: true) { create(:group, :public) } + let_it_be(:source2, reload: true) { create(:group, :public) } let_it_be(:user) { create(:user) } describe '.access_levels' do @@ -16,6 +17,7 @@ RSpec.describe Members::Groups::CreatorService, feature_category: :subgroups do describe '.add_members' do it_behaves_like 'bulk member creation' do + let_it_be(:source_type) { Group } let_it_be(:member_type) { GroupMember } end end diff --git a/spec/services/members/projects/creator_service_spec.rb b/spec/services/members/projects/creator_service_spec.rb index f09682347ef..7ec7361a285 100644 --- a/spec/services/members/projects/creator_service_spec.rb +++ b/spec/services/members/projects/creator_service_spec.rb @@ -4,6 +4,7 @@ require 'spec_helper' RSpec.describe Members::Projects::CreatorService, feature_category: :projects do let_it_be(:source, reload: true) { create(:project, :public) } + let_it_be(:source2, reload: true) { create(:project, :public) } let_it_be(:user) { create(:user) } describe '.access_levels' do @@ -16,6 +17,7 @@ RSpec.describe Members::Projects::CreatorService, feature_category: :projects do describe '.add_members' do it_behaves_like 'bulk member creation' do + let_it_be(:source_type) { Project } let_it_be(:member_type) { ProjectMember } end end diff --git a/spec/services/merge_requests/after_create_service_spec.rb b/spec/services/merge_requests/after_create_service_spec.rb index f2823b1f0c7..9361ec44e30 100644 --- a/spec/services/merge_requests/after_create_service_spec.rb +++ b/spec/services/merge_requests/after_create_service_spec.rb @@ -143,22 +143,6 @@ RSpec.describe MergeRequests::AfterCreateService, feature_category: :code_review expect { execute_service }.to change { counter.read(:create) }.by(1) end - context 'with a milestone' do - let(:milestone) { create(:milestone, project: merge_request.target_project) } - - before do - 
merge_request.update!(milestone_id: milestone.id) - end - - it 'deletes the cache key for milestone merge request counter', :use_clean_rails_memory_store_caching do - expect_next_instance_of(Milestones::MergeRequestsCountService, milestone) do |service| - expect(service).to receive(:delete_cache).and_call_original - end - - execute_service - end - end - context 'todos' do it 'does not creates todos' do attributes = { diff --git a/spec/services/merge_requests/create_service_spec.rb b/spec/services/merge_requests/create_service_spec.rb index 7e20af32985..efbd693fc82 100644 --- a/spec/services/merge_requests/create_service_spec.rb +++ b/spec/services/merge_requests/create_service_spec.rb @@ -4,6 +4,7 @@ require 'spec_helper' RSpec.describe MergeRequests::CreateService, :clean_gitlab_redis_shared_state, feature_category: :code_review_workflow do include ProjectForksHelper + include AfterNextHelpers let(:project) { create(:project, :repository) } let(:user) { create(:user) } @@ -336,6 +337,19 @@ RSpec.describe MergeRequests::CreateService, :clean_gitlab_redis_shared_state, f end end + context 'with a milestone' do + let(:milestone) { create(:milestone, project: project) } + + let(:opts) { { title: 'Awesome merge_request', source_branch: 'feature', target_branch: 'master', milestone_id: milestone.id } } + + it 'deletes the cache key for milestone merge request counter' do + expect_next(Milestones::MergeRequestsCountService, milestone) + .to receive(:delete_cache).and_call_original + + expect(merge_request).to be_persisted + end + end + it_behaves_like 'reviewer_ids filter' do let(:execute) { service.execute } end @@ -431,13 +445,21 @@ RSpec.describe MergeRequests::CreateService, :clean_gitlab_redis_shared_state, f } end - it 'invalidates open merge request counter for assignees when merge request is assigned' do + before do project.add_maintainer(user2) + end + it 'invalidates open merge request counter for assignees when merge request is assigned' do 
described_class.new(project: project, current_user: user, params: opts).execute expect(user2.assigned_open_merge_requests_count).to eq 1 end + + it 'records the assignee assignment event', :sidekiq_inline do + mr = described_class.new(project: project, current_user: user, params: opts).execute.reload + + expect(mr.assignment_events).to match([have_attributes(user_id: user2.id, action: 'add')]) + end end context "when issuable feature is private" do diff --git a/spec/services/merge_requests/update_service_spec.rb b/spec/services/merge_requests/update_service_spec.rb index e20ebf18e7c..000c85fd1f8 100644 --- a/spec/services/merge_requests/update_service_spec.rb +++ b/spec/services/merge_requests/update_service_spec.rb @@ -782,6 +782,27 @@ RSpec.describe MergeRequests::UpdateService, :mailer, feature_category: :code_re expect(user3.assigned_open_merge_requests_count).to eq 0 expect(user2.assigned_open_merge_requests_count).to eq 1 end + + it 'records the assignment history', :sidekiq_inline do + original_assignee = merge_request.assignees.first! 
+ + update_merge_request(assignee_ids: [user2.id]) + + expected_events = [ + have_attributes({ + merge_request_id: merge_request.id, + user_id: original_assignee.id, + action: 'remove' + }), + have_attributes({ + merge_request_id: merge_request.id, + user_id: user2.id, + action: 'add' + }) + ] + + expect(merge_request.assignment_events).to match_array(expected_events) + end end context 'when the target branch changes' do diff --git a/spec/services/metrics/dashboard/pod_dashboard_service_spec.rb b/spec/services/metrics/dashboard/pod_dashboard_service_spec.rb index d26b27d7a18..a6fcb6b4842 100644 --- a/spec/services/metrics/dashboard/pod_dashboard_service_spec.rb +++ b/spec/services/metrics/dashboard/pod_dashboard_service_spec.rb @@ -3,7 +3,7 @@ require 'spec_helper' RSpec.describe Metrics::Dashboard::PodDashboardService, :use_clean_rails_memory_store_caching, - feature_category: :pods do + feature_category: :cell do include MetricsDashboardHelpers let_it_be(:user) { create(:user) } diff --git a/spec/services/metrics/global_metrics_update_service_spec.rb b/spec/services/metrics/global_metrics_update_service_spec.rb new file mode 100644 index 00000000000..38c7f9282d9 --- /dev/null +++ b/spec/services/metrics/global_metrics_update_service_spec.rb @@ -0,0 +1,14 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe Metrics::GlobalMetricsUpdateService, :prometheus, feature_category: :metrics do + describe '#execute' do + it 'sets gitlab_maintenance_mode gauge metric' do + metric = subject.maintenance_mode_metric + expect(Gitlab).to receive(:maintenance_mode?).and_return(true) + + expect { subject.execute }.to change { metric.get }.from(0).to(1) + end + end +end diff --git a/spec/services/ml/experiment_tracking/candidate_repository_spec.rb b/spec/services/ml/experiment_tracking/candidate_repository_spec.rb index 45a4792426c..50f9e4d9a90 100644 --- a/spec/services/ml/experiment_tracking/candidate_repository_spec.rb +++ 
b/spec/services/ml/experiment_tracking/candidate_repository_spec.rb @@ -6,19 +6,19 @@ RSpec.describe ::Ml::ExperimentTracking::CandidateRepository, feature_category: let_it_be(:project) { create(:project) } let_it_be(:user) { create(:user) } let_it_be(:experiment) { create(:ml_experiments, user: user, project: project) } - let_it_be(:candidate) { create(:ml_candidates, user: user, experiment: experiment) } + let_it_be(:candidate) { create(:ml_candidates, user: user, experiment: experiment, project: project) } let(:repository) { described_class.new(project, user) } - describe '#by_iid' do - let(:iid) { candidate.iid } + describe '#by_eid' do + let(:eid) { candidate.eid } - subject { repository.by_iid(iid) } + subject { repository.by_eid(eid) } it { is_expected.to eq(candidate) } context 'when iid does not exist' do - let(:iid) { non_existing_record_iid.to_s } + let(:eid) { non_existing_record_iid.to_s } it { is_expected.to be_nil } end @@ -38,7 +38,7 @@ RSpec.describe ::Ml::ExperimentTracking::CandidateRepository, feature_category: it 'creates the candidate' do expect(subject.start_time).to eq(1234) - expect(subject.iid).not_to be_nil + expect(subject.eid).not_to be_nil expect(subject.end_time).to be_nil expect(subject.name).to eq('some_candidate') end diff --git a/spec/services/notes/create_service_spec.rb b/spec/services/notes/create_service_spec.rb index 05a41ddc6c5..7a31fd6a77d 100644 --- a/spec/services/notes/create_service_spec.rb +++ b/spec/services/notes/create_service_spec.rb @@ -136,7 +136,6 @@ RSpec.describe Notes::CreateService, feature_category: :team_planning do let(:action) { 'create_commit_comment' } let(:label) { 'counts.commit_comment' } let(:namespace) { project.namespace } - let(:feature_flag_name) { :route_hll_to_snowplow_phase4 } end end diff --git a/spec/services/notes/quick_actions_service_spec.rb b/spec/services/notes/quick_actions_service_spec.rb index b474285e67e..78a17aed707 100644 --- a/spec/services/notes/quick_actions_service_spec.rb 
+++ b/spec/services/notes/quick_actions_service_spec.rb @@ -182,7 +182,7 @@ RSpec.describe Notes::QuickActionsService, feature_category: :team_planning do context 'on an incident' do before do - issue.update!(issue_type: :incident) + issue.update!(issue_type: :incident, work_item_type: WorkItems::Type.default_by_type(:incident)) end it 'leaves the note empty' do @@ -224,7 +224,7 @@ RSpec.describe Notes::QuickActionsService, feature_category: :team_planning do context 'on an incident' do before do - issue.update!(issue_type: :incident) + issue.update!(issue_type: :incident, work_item_type: WorkItems::Type.default_by_type(:incident)) end it 'leaves the note empty' do diff --git a/spec/services/notification_service_spec.rb b/spec/services/notification_service_spec.rb index 4161f93cdac..f63f982708d 100644 --- a/spec/services/notification_service_spec.rb +++ b/spec/services/notification_service_spec.rb @@ -253,6 +253,16 @@ RSpec.describe NotificationService, :mailer, feature_category: :team_planning do it_behaves_like 'participating by assignee notification', check_delivery_jobs_queue: check_delivery_jobs_queue end + shared_examples 'declines the invite' do + specify do + member = source.members.last + + expect do + notification.decline_invite(member) + end.to change { ActionMailer::Base.deliveries.size }.by(1) + end + end + describe '.permitted_actions' do it 'includes public methods' do expect(described_class.permitted_actions).to include(:access_token_created) @@ -518,8 +528,8 @@ RSpec.describe NotificationService, :mailer, feature_category: :team_planning do allow(Notify).to receive(:service_desk_new_note_email) .with(Integer, Integer, String).and_return(mailer) - allow(::Gitlab::IncomingEmail).to receive(:enabled?) { true } - allow(::Gitlab::IncomingEmail).to receive(:supports_wildcard?) { true } + allow(::Gitlab::Email::IncomingEmail).to receive(:enabled?) { true } + allow(::Gitlab::Email::IncomingEmail).to receive(:supports_wildcard?) 
{ true } end let(:subject) { NotificationService.new } @@ -3029,7 +3039,7 @@ RSpec.describe NotificationService, :mailer, feature_category: :team_planning do end end - describe '#decline_group_invite' do + describe '#decline_invite' do let(:creator) { create(:user) } let(:group) { create(:group) } let(:member) { create(:user) } @@ -3039,12 +3049,8 @@ RSpec.describe NotificationService, :mailer, feature_category: :team_planning do group.add_developer(member, creator) end - it do - group_member = group.members.last - - expect do - notification.decline_group_invite(group_member) - end.to change { ActionMailer::Base.deliveries.size }.by(1) + it_behaves_like 'declines the invite' do + let(:source) { group } end end @@ -3201,19 +3207,15 @@ RSpec.describe NotificationService, :mailer, feature_category: :team_planning do end end - describe '#decline_project_invite' do + describe '#decline_invite' do let(:member) { create(:user) } before do project.add_developer(member, current_user: project.first_owner) end - it do - project_member = project.members.last - - expect do - notification.decline_project_invite(project_member) - end.to change { ActionMailer::Base.deliveries.size }.by(1) + it_behaves_like 'declines the invite' do + let(:source) { project } end end diff --git a/spec/services/packages/create_event_service_spec.rb b/spec/services/packages/create_event_service_spec.rb index 44ad3f29c58..45c758ec866 100644 --- a/spec/services/packages/create_event_service_spec.rb +++ b/spec/services/packages/create_event_service_spec.rb @@ -15,47 +15,6 @@ RSpec.describe Packages::CreateEventService, feature_category: :package_registry subject { described_class.new(nil, user, params).execute } describe '#execute' do - shared_examples 'db package event creation' do |originator_type, expected_scope| - before do - allow(::Gitlab::UsageDataCounters::HLLRedisCounter).to receive(:track_event) - end - - context 'with feature flag disable' do - before do - 
stub_feature_flags(collect_package_events: false) - end - - it 'does not create an event' do - expect { subject }.not_to change { Packages::Event.count } - end - end - - context 'with feature flag enabled' do - before do - stub_feature_flags(collect_package_events: true) - end - - it 'creates the event' do - expect { subject }.to change { Packages::Event.count }.by(1) - - expect(subject.originator_type).to eq(originator_type) - expect(subject.originator).to eq(user&.id) - expect(subject.event_scope).to eq(expected_scope) - expect(subject.event_type).to eq(event_name) - end - - context 'on a read-only instance' do - before do - allow(Gitlab::Database).to receive(:read_only?).and_return(true) - end - - it 'does not create an event' do - expect { subject }.not_to change { Packages::Event.count } - end - end - end - end - shared_examples 'redis package unique event creation' do |originator_type, expected_scope| it 'tracks the event' do expect(::Gitlab::UsageDataCounters::HLLRedisCounter).to receive(:track_event).with(/package/, values: user.id) @@ -75,7 +34,6 @@ RSpec.describe Packages::CreateEventService, feature_category: :package_registry context 'with a user' do let(:user) { create(:user) } - it_behaves_like 'db package event creation', 'user', 'generic' it_behaves_like 'redis package unique event creation', 'user', 'generic' it_behaves_like 'redis package count event creation', 'user', 'generic' end @@ -83,7 +41,6 @@ RSpec.describe Packages::CreateEventService, feature_category: :package_registry context 'with a deploy token' do let(:user) { create(:deploy_token) } - it_behaves_like 'db package event creation', 'deploy_token', 'generic' it_behaves_like 'redis package unique event creation', 'deploy_token', 'generic' it_behaves_like 'redis package count event creation', 'deploy_token', 'generic' end @@ -91,7 +48,6 @@ RSpec.describe Packages::CreateEventService, feature_category: :package_registry context 'with no user' do let(:user) { nil } - it_behaves_like 'db 
package event creation', 'guest', 'generic' it_behaves_like 'redis package count event creation', 'guest', 'generic' end @@ -101,14 +57,12 @@ RSpec.describe Packages::CreateEventService, feature_category: :package_registry context 'as guest' do let(:user) { nil } - it_behaves_like 'db package event creation', 'guest', 'npm' it_behaves_like 'redis package count event creation', 'guest', 'npm' end context 'with user' do let(:user) { create(:user) } - it_behaves_like 'db package event creation', 'user', 'npm' it_behaves_like 'redis package unique event creation', 'user', 'npm' it_behaves_like 'redis package count event creation', 'user', 'npm' end diff --git a/spec/services/packages/debian/find_or_create_package_service_spec.rb b/spec/services/packages/debian/find_or_create_package_service_spec.rb index 36f96008582..c2ae3d56864 100644 --- a/spec/services/packages/debian/find_or_create_package_service_spec.rb +++ b/spec/services/packages/debian/find_or_create_package_service_spec.rb @@ -4,13 +4,17 @@ require 'spec_helper' RSpec.describe Packages::Debian::FindOrCreatePackageService, feature_category: :package_registry do let_it_be(:distribution) { create(:debian_project_distribution, :with_suite) } + let_it_be(:distribution2) { create(:debian_project_distribution, :with_suite) } + let_it_be(:project) { distribution.project } let_it_be(:user) { create(:user) } let(:service) { described_class.new(project, user, params) } + let(:params2) { params } + let(:service2) { described_class.new(project, user, params2) } let(:package) { subject.payload[:package] } - let(:package2) { service.execute.payload[:package] } + let(:package2) { service2.execute.payload[:package] } shared_examples 'find or create Debian package' do it 'returns the same object' do @@ -55,11 +59,24 @@ RSpec.describe Packages::Debian::FindOrCreatePackageService, feature_category: : it_behaves_like 'find or create Debian package' end + context 'with existing package in another distribution' do + let(:params) { 
{ name: 'foo', version: '1.0+debian', distribution_name: distribution.codename } } + let(:params2) { { name: 'foo', version: '1.0+debian', distribution_name: distribution2.codename } } + + it 'raises ArgumentError' do + expect { subject }.to change { ::Packages::Package.count }.by(1) + + expect { package2 }.to raise_error(ArgumentError, "Debian package #{package.name} #{package.version} exists " \ + "in distribution #{distribution.codename}") + end + end + context 'with non-existing distribution' do let(:params) { { name: 'foo', version: '1.0+debian', distribution_name: 'not-existing' } } it 'raises ActiveRecord::RecordNotFound' do - expect { package }.to raise_error(ActiveRecord::RecordNotFound) + expect { package }.to raise_error(ActiveRecord::RecordNotFound, + /^Couldn't find Packages::Debian::ProjectDistribution/) end end end diff --git a/spec/services/packages/debian/process_changes_service_spec.rb b/spec/services/packages/debian/process_changes_service_spec.rb index d2c05b678ea..dbfcc359f9c 100644 --- a/spec/services/packages/debian/process_changes_service_spec.rb +++ b/spec/services/packages/debian/process_changes_service_spec.rb @@ -1,4 +1,5 @@ # frozen_string_literal: true + require 'spec_helper' RSpec.describe Packages::Debian::ProcessChangesService, feature_category: :package_registry do @@ -55,7 +56,7 @@ RSpec.describe Packages::Debian::ProcessChangesService, feature_category: :packa it_behaves_like 'raises error with missing field', 'Distribution' end - context 'with existing package' do + context 'with existing package in the same distribution' do let_it_be_with_reload(:existing_package) do create(:debian_package, name: 'sample', version: '1.2.3~alpha2', project: distribution.project, published_in: distribution) end @@ -64,10 +65,37 @@ RSpec.describe Packages::Debian::ProcessChangesService, feature_category: :packa expect { subject.execute } .to not_change { Packages::Package.count } .and not_change { Packages::PackageFile.count } - .and 
change(package_file, :package).to(existing_package) + .and change { package_file.package }.to(existing_package) + end + + context 'and marked as pending_destruction' do + it 'does not re-use the existing package' do + existing_package.pending_destruction! + + expect { subject.execute } + .to change { Packages::Package.count }.by(1) + .and not_change { Packages::PackageFile.count } + end + end + end + + context 'with existing package in another distribution' do + let_it_be_with_reload(:existing_package) do + create(:debian_package, name: 'sample', version: '1.2.3~alpha2', project: distribution.project) + end + + it 'raise ExtractionError' do + expect(::Packages::Debian::GenerateDistributionWorker).not_to receive(:perform_async) + expect { subject.execute } + .to not_change { Packages::Package.count } + .and not_change { Packages::PackageFile.count } + .and not_change { incoming.package_files.count } + .and raise_error(ArgumentError, + "Debian package #{existing_package.name} #{existing_package.version} exists " \ + "in distribution #{existing_package.debian_distribution.codename}") end - context 'marked as pending_destruction' do + context 'and marked as pending_destruction' do it 'does not re-use the existing package' do existing_package.pending_destruction! 
diff --git a/spec/services/packages/debian/process_package_file_service_spec.rb b/spec/services/packages/debian/process_package_file_service_spec.rb index 2684b69785a..7782b5fc1a6 100644 --- a/spec/services/packages/debian/process_package_file_service_spec.rb +++ b/spec/services/packages/debian/process_package_file_service_spec.rb @@ -1,4 +1,5 @@ # frozen_string_literal: true + require 'spec_helper' RSpec.describe Packages::Debian::ProcessPackageFileService, feature_category: :package_registry do @@ -19,14 +20,14 @@ RSpec.describe Packages::Debian::ProcessPackageFileService, feature_category: :p expect { subject.execute } .to not_change(Packages::Package, :count) .and not_change(Packages::PackageFile, :count) - .and change(Packages::Debian::Publication, :count).by(1) + .and change { Packages::Debian::Publication.count }.by(1) .and not_change(package.package_files, :count) .and change { package.reload.name }.to('sample') .and change { package.reload.version }.to('1.2.3~alpha2') .and change { package.reload.status }.from('processing').to('default') .and change { package.reload.debian_publication }.from(nil) - .and change(debian_file_metadatum, :file_type).from('unknown').to(expected_file_type) - .and change(debian_file_metadatum, :component).from(nil).to(component_name) + .and change { debian_file_metadatum.file_type }.from('unknown').to(expected_file_type) + .and change { debian_file_metadatum.component }.from(nil).to(component_name) end end @@ -67,21 +68,42 @@ RSpec.describe Packages::Debian::ProcessPackageFileService, feature_category: :p expect(::Packages::Debian::GenerateDistributionWorker) .to receive(:perform_async).with(:project, distribution.id) expect { subject.execute } - .to change(Packages::Package, :count).from(2).to(1) - .and change(Packages::PackageFile, :count).from(16).to(9) + .to change { Packages::Package.count }.from(2).to(1) + .and change { Packages::PackageFile.count }.from(16).to(9) .and not_change(Packages::Debian::Publication, :count) - .and 
change(package.package_files, :count).from(8).to(0) - .and change(package_file, :package).from(package).to(matching_package) + .and change { package.package_files.count }.from(8).to(0) + .and change { package_file.package }.from(package).to(matching_package) .and not_change(matching_package, :name) .and not_change(matching_package, :version) - .and change(debian_file_metadatum, :file_type).from('unknown').to(expected_file_type) - .and change(debian_file_metadatum, :component).from(nil).to(component_name) + .and change { debian_file_metadatum.file_type }.from('unknown').to(expected_file_type) + .and change { debian_file_metadatum.component }.from(nil).to(component_name) expect { package.reload } .to raise_error(ActiveRecord::RecordNotFound) end end + context 'when there is a matching published package in another distribution' do + let!(:matching_package) do + create( + :debian_package, + project: distribution.project, + name: 'sample', + version: '1.2.3~alpha2' + ) + end + + it 'raise ArgumentError', :aggregate_failures do + expect(::Packages::Debian::GenerateDistributionWorker).not_to receive(:perform_async) + expect { subject.execute } + .to not_change(Packages::Package, :count) + .and not_change(Packages::PackageFile, :count) + .and not_change(package.package_files, :count) + .and raise_error(ArgumentError, "Debian package sample 1.2.3~alpha2 exists " \ + "in distribution #{matching_package.debian_distribution.codename}") + end + end + context 'when there is a matching published package pending destruction' do let!(:matching_package) do create( diff --git a/spec/services/packages/npm/create_package_service_spec.rb b/spec/services/packages/npm/create_package_service_spec.rb index 70c79dae437..d21b11f8ecb 100644 --- a/spec/services/packages/npm/create_package_service_spec.rb +++ b/spec/services/packages/npm/create_package_service_spec.rb @@ -2,6 +2,8 @@ require 'spec_helper' RSpec.describe Packages::Npm::CreatePackageService, feature_category: :package_registry do 
+ include ExclusiveLeaseHelpers + let(:namespace) { create(:namespace) } let(:project) { create(:project, namespace: namespace) } let(:user) { create(:user) } @@ -14,9 +16,11 @@ RSpec.describe Packages::Npm::CreatePackageService, feature_category: :package_r end let(:package_name) { "@#{namespace.path}/my-app" } - let(:version_data) { params.dig('versions', '1.0.1') } + let(:version_data) { params.dig('versions', version) } + let(:lease_key) { "packages:npm:create_package_service:packages:#{project.id}_#{package_name}_#{version}" } + let(:service) { described_class.new(project, user, params) } - subject { described_class.new(project, user, params).execute } + subject { service.execute } shared_examples 'valid package' do it 'creates a package' do @@ -216,5 +220,89 @@ RSpec.describe Packages::Npm::CreatePackageService, feature_category: :package_r it { expect { subject }.to raise_error(ActiveRecord::RecordInvalid, 'Validation failed: Version is invalid') } end end + + it 'obtains a lease to create a new package' do + expect_to_obtain_exclusive_lease(lease_key, timeout: described_class::DEFAULT_LEASE_TIMEOUT) + + subject + end + + context 'with npm_obtain_lease_to_create_package disabled' do + before do + stub_feature_flags(npm_obtain_lease_to_create_package: false) + end + + it 'does not obtain a lease' do + lease = stub_exclusive_lease(lease_key, 'uuid', timeout: described_class::DEFAULT_LEASE_TIMEOUT) + + expect(lease).not_to receive(:try_obtain) + + subject + end + end + + context 'when the lease is already taken' do + before do + stub_exclusive_lease_taken(lease_key, timeout: described_class::DEFAULT_LEASE_TIMEOUT) + end + + it { expect(subject[:http_status]).to eq 400 } + it { expect(subject[:message]).to eq 'Could not obtain package lease.' 
} + end + + context 'when many of the same packages are created at the same time', :delete do + it 'only creates one package' do + expect { create_packages(project, user, params) }.to change { Packages::Package.count }.by(1) + end + end + + context 'when many packages with different versions are created at the same time', :delete do + it 'creates all packages' do + expect { create_packages_with_versions(project, user, params) }.to change { Packages::Package.count }.by(5) + end + end + + def create_packages(project, user, params) + with_threads do + described_class.new(project, user, params).execute + end + end + + def create_packages_with_versions(project, user, params) + with_threads do |i| + # Modify the package's version + modified_params = Gitlab::Json.parse(params.to_json + .gsub(version, "1.0.#{i}")).with_indifferent_access + + described_class.new(project, user, modified_params).execute + end + end + + def with_threads(count: 5, &block) + return unless block + + # create a race condition - structure from https://blog.arkency.com/2015/09/testing-race-conditions/ + wait_for_it = true + + threads = Array.new(count) do |i| + Thread.new do + # A loop to make threads busy until we `join` them + true while wait_for_it + + yield(i) + end + end + + wait_for_it = false + threads.each(&:join) + end + end + + describe '#lease_key' do + subject { service.send(:lease_key) } + + it 'returns an unique key' do + is_expected.to eq lease_key + end end end diff --git a/spec/services/packages/npm/deprecate_package_service_spec.rb b/spec/services/packages/npm/deprecate_package_service_spec.rb new file mode 100644 index 00000000000..a3686e3a8b5 --- /dev/null +++ b/spec/services/packages/npm/deprecate_package_service_spec.rb @@ -0,0 +1,115 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe Packages::Npm::DeprecatePackageService, feature_category: :package_registry do + let_it_be(:namespace) { create(:namespace) } + let_it_be(:project) { create(:project, 
namespace: namespace) } + + let_it_be(:package_name) { "@#{namespace.path}/my-app" } + let_it_be_with_reload(:package_1) do + create(:npm_package, project: project, name: package_name, version: '1.0.1').tap do |package| + create(:npm_metadatum, package: package) + end + end + + let_it_be(:package_2) do + create(:npm_package, project: project, name: package_name, version: '1.0.2').tap do |package| + create(:npm_metadatum, package: package) + end + end + + let(:service) { described_class.new(project, params) } + + subject(:execute) { service.execute } + + describe '#execute' do + context 'when passing deprecatation message' do + let(:params) do + { + 'package_name' => package_name, + 'versions' => { + '1.0.1' => { + 'name' => package_name, + 'deprecated' => 'This version is deprecated' + }, + '1.0.2' => { + 'name' => package_name, + 'deprecated' => 'This version is deprecated' + } + } + } + end + + before do + package_json = package_2.npm_metadatum.package_json + package_2.npm_metadatum.update!(package_json: package_json.merge('deprecated' => 'old deprecation message')) + end + + it 'adds or updates the deprecated field' do + expect { execute } + .to change { package_1.reload.npm_metadatum.package_json['deprecated'] }.to('This version is deprecated') + .and change { package_2.reload.npm_metadatum.package_json['deprecated'] } + .from('old deprecation message').to('This version is deprecated') + end + + it 'executes 5 queries' do + queries = ActiveRecord::QueryRecorder.new do + execute + end + + # 1. each_batch lower bound + # 2. each_batch upper bound + # 3. SELECT packages_packages.id, packages_packages.version FROM packages_packages + # 4. SELECT packages_npm_metadata.* FROM packages_npm_metadata + # 5. 
UPDATE packages_npm_metadata SET package_json = + expect(queries.count).to eq(5) + end + end + + context 'when passing deprecated as empty string' do + let(:params) do + { + 'package_name' => package_name, + 'versions' => { + '1.0.1' => { + 'name' => package_name, + 'deprecated' => '' + } + } + } + end + + before do + package_json = package_1.npm_metadatum.package_json + package_1.npm_metadatum.update!(package_json: package_json.merge('deprecated' => 'This version is deprecated')) + end + + it 'removes the deprecation warning' do + expect { execute } + .to change { package_1.reload.npm_metadatum.package_json['deprecated'] } + .from('This version is deprecated').to(nil) + end + end + + context 'when passing async: true to execute' do + let(:params) do + { + package_name: package_name, + versions: { + '1.0.1': { + deprecated: 'This version is deprecated' + } + } + } + end + + it 'calls the worker and return' do + expect(::Packages::Npm::DeprecatePackageWorker).to receive(:perform_async).with(project.id, params) + expect(service).not_to receive(:packages) + + service.execute(async: true) + end + end + end +end diff --git a/spec/services/packages/npm/generate_metadata_service_spec.rb b/spec/services/packages/npm/generate_metadata_service_spec.rb new file mode 100644 index 00000000000..c22a9ef1428 --- /dev/null +++ b/spec/services/packages/npm/generate_metadata_service_spec.rb @@ -0,0 +1,173 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe ::Packages::Npm::GenerateMetadataService, feature_category: :package_registry do + using RSpec::Parameterized::TableSyntax + + let_it_be(:project) { create(:project) } + let_it_be(:package_name) { "@#{project.root_namespace.path}/test" } + let_it_be(:package1) { create(:npm_package, version: '2.0.4', project: project, name: package_name) } + let_it_be(:package2) { create(:npm_package, version: '2.0.6', project: project, name: package_name) } + let_it_be(:latest_package) { create(:npm_package, version: 
'2.0.11', project: project, name: package_name) } + + let(:packages) { project.packages.npm.with_name(package_name).last_of_each_version } + let(:metadata) { described_class.new(package_name, packages).execute } + + describe '#versions' do + let_it_be(:version_schema) { 'public_api/v4/packages/npm_package_version' } + let_it_be(:package_json) do + { + name: package_name, + version: '2.0.4', + deprecated: 'warning!', + bin: './cli.js', + directories: ['lib'], + engines: { npm: '^7.5.6' }, + _hasShrinkwrap: false, + dist: { + tarball: 'http://localhost/tarball.tgz', + shasum: '1234567890' + }, + custom_field: 'foo_bar' + } + end + + subject { metadata[:versions] } + + where(:has_dependencies, :has_metadatum) do + true | true + false | true + true | false + false | false + end + + with_them do + if params[:has_dependencies] + ::Packages::DependencyLink.dependency_types.each_key do |dependency_type| + let_it_be("package_dependency_link_for_#{dependency_type}") do + create(:packages_dependency_link, package: package1, dependency_type: dependency_type) + end + end + end + + if params[:has_metadatum] + let_it_be(:package_metadatadum) { create(:npm_metadatum, package: package1, package_json: package_json) } + end + + it { is_expected.to be_a(Hash) } + it { expect(subject[package1.version].with_indifferent_access).to match_schema(version_schema) } + it { expect(subject[package2.version].with_indifferent_access).to match_schema(version_schema) } + it { expect(subject[package1.version]['custom_field']).to be_blank } + + context 'for dependencies' do + ::Packages::DependencyLink.dependency_types.each_key do |dependency_type| + if params[:has_dependencies] + it { expect(subject.dig(package1.version, dependency_type.to_s)).to be_any } + else + it { expect(subject.dig(package1.version, dependency_type)).to be nil } + end + + it { expect(subject.dig(package2.version, dependency_type)).to be nil } + end + end + + context 'for metadatum' do + 
::Packages::Npm::GenerateMetadataService::PACKAGE_JSON_ALLOWED_FIELDS.each do |metadata_field| + if params[:has_metadatum] + it { expect(subject.dig(package1.version, metadata_field)).not_to be nil } + else + it { expect(subject.dig(package1.version, metadata_field)).to be nil } + end + + it { expect(subject.dig(package2.version, metadata_field)).to be nil } + end + end + + it 'avoids N+1 database queries' do + check_n_plus_one do + create_list(:npm_package, 5, project: project, name: package_name).each do |npm_package| + next unless has_dependencies + + ::Packages::DependencyLink.dependency_types.each_key do |dependency_type| + create(:packages_dependency_link, package: npm_package, dependency_type: dependency_type) + end + end + end + end + end + + context 'with package files pending destruction' do + let_it_be(:package_file_pending_destruction) do + create(:package_file, :pending_destruction, package: package2, file_sha1: 'pending_destruction_sha1') + end + + let(:shasums) { subject.values.map { |v| v.dig(:dist, :shasum) } } + + it 'does not return them' do + expect(shasums).not_to include(package_file_pending_destruction.file_sha1) + end + end + end + + describe '#dist_tags' do + subject { metadata[:dist_tags] } + + context 'for packages without tags' do + it { is_expected.to be_a(Hash) } + it { expect(subject['latest']).to eq(latest_package.version) } + + it 'avoids N+1 database queries' do + check_n_plus_one(only_dist_tags: true) do + create_list(:npm_package, 5, project: project, name: package_name) + end + end + end + + context 'for packages with tags' do + let_it_be(:package_tag1) { create(:packages_tag, package: package1, name: 'release_a') } + let_it_be(:package_tag2) { create(:packages_tag, package: package1, name: 'test_release') } + let_it_be(:package_tag3) { create(:packages_tag, package: package2, name: 'release_b') } + let_it_be(:package_tag4) { create(:packages_tag, package: latest_package, name: 'release_c') } + let_it_be(:package_tag5) { 
create(:packages_tag, package: latest_package, name: 'latest') } + + it { is_expected.to be_a(Hash) } + it { expect(subject[package_tag1.name]).to eq(package1.version) } + it { expect(subject[package_tag2.name]).to eq(package1.version) } + it { expect(subject[package_tag3.name]).to eq(package2.version) } + it { expect(subject[package_tag4.name]).to eq(latest_package.version) } + it { expect(subject[package_tag5.name]).to eq(latest_package.version) } + + it 'avoids N+1 database queries' do + check_n_plus_one(only_dist_tags: true) do + create_list(:npm_package, 5, project: project, name: package_name).each_with_index do |npm_package, index| + create(:packages_tag, package: npm_package, name: "tag_#{index}") + end + end + end + end + end + + context 'when passing only_dist_tags: true' do + subject { described_class.new(package_name, packages).execute(only_dist_tags: true) } + + it 'returns only dist tags' do + expect(subject.payload.keys).to contain_exactly(:dist_tags) + end + end + + def check_n_plus_one(only_dist_tags: false) + pkgs = project.packages.npm.with_name(package_name).last_of_each_version.preload_files + control = ActiveRecord::QueryRecorder.new do + described_class.new(package_name, pkgs).execute(only_dist_tags: only_dist_tags) + end + + yield + + pkgs = project.packages.npm.with_name(package_name).last_of_each_version.preload_files + + expect do + described_class.new(package_name, pkgs).execute(only_dist_tags: only_dist_tags) + end.not_to exceed_query_limit(control) + end +end diff --git a/spec/services/projects/all_merge_requests_count_service_spec.rb b/spec/services/projects/all_merge_requests_count_service_spec.rb index dc7038611ed..ca10fbc00ad 100644 --- a/spec/services/projects/all_merge_requests_count_service_spec.rb +++ b/spec/services/projects/all_merge_requests_count_service_spec.rb @@ -11,18 +11,9 @@ RSpec.describe Projects::AllMergeRequestsCountService, :use_clean_rails_memory_s describe '#count' do it 'returns the number of all merge 
requests' do - create(:merge_request, - :opened, - source_project: project, - target_project: project) - create(:merge_request, - :closed, - source_project: project, - target_project: project) - create(:merge_request, - :merged, - source_project: project, - target_project: project) + create(:merge_request, :opened, source_project: project, target_project: project) + create(:merge_request, :closed, source_project: project, target_project: project) + create(:merge_request, :merged, source_project: project, target_project: project) expect(subject.count).to eq(3) end diff --git a/spec/services/projects/blame_service_spec.rb b/spec/services/projects/blame_service_spec.rb deleted file mode 100644 index e3df69b3b7b..00000000000 --- a/spec/services/projects/blame_service_spec.rb +++ /dev/null @@ -1,131 +0,0 @@ -# frozen_string_literal: true - -require 'spec_helper' - -RSpec.describe Projects::BlameService, :aggregate_failures, feature_category: :source_code_management do - subject(:service) { described_class.new(blob, commit, params) } - - let_it_be(:project) { create(:project, :repository) } - let_it_be(:commit) { project.repository.commit } - let_it_be(:blob) { project.repository.blob_at('HEAD', 'README.md') } - - let(:params) { { page: page } } - let(:page) { nil } - - before do - stub_const("#{described_class.name}::PER_PAGE", 2) - end - - describe '#blame' do - subject { service.blame } - - it 'returns a correct Gitlab::Blame object' do - is_expected.to be_kind_of(Gitlab::Blame) - - expect(subject.blob).to eq(blob) - expect(subject.commit).to eq(commit) - expect(subject.range).to eq(1..2) - end - - describe 'Pagination range calculation' do - subject { service.blame.range } - - context 'with page = 1' do - let(:page) { 1 } - - it { is_expected.to eq(1..2) } - end - - context 'with page = 2' do - let(:page) { 2 } - - it { is_expected.to eq(3..4) } - end - - context 'with page = 3 (overlimit)' do - let(:page) { 3 } - - it { is_expected.to eq(1..2) } - end - - context 
'with page = 0 (incorrect)' do - let(:page) { 0 } - - it { is_expected.to eq(1..2) } - end - - context 'when user disabled the pagination' do - let(:params) { super().merge(no_pagination: 1) } - - it { is_expected.to be_nil } - end - - context 'when feature flag disabled' do - before do - stub_feature_flags(blame_page_pagination: false) - end - - it { is_expected.to be_nil } - end - end - end - - describe '#pagination' do - subject { service.pagination } - - it 'returns a pagination object' do - is_expected.to be_kind_of(Kaminari::PaginatableArray) - - expect(subject.current_page).to eq(1) - expect(subject.total_pages).to eq(2) - expect(subject.total_count).to eq(4) - end - - context 'when user disabled the pagination' do - let(:params) { super().merge(no_pagination: 1) } - - it { is_expected.to be_nil } - end - - context 'when feature flag disabled' do - before do - stub_feature_flags(blame_page_pagination: false) - end - - it { is_expected.to be_nil } - end - - context 'when per_page is above the global max per page limit' do - before do - stub_const("#{described_class.name}::PER_PAGE", 1000) - allow(blob).to receive_message_chain(:data, :lines, :count) { 500 } - end - - it 'returns a correct pagination object' do - is_expected.to be_kind_of(Kaminari::PaginatableArray) - - expect(subject.current_page).to eq(1) - expect(subject.total_pages).to eq(1) - expect(subject.total_count).to eq(500) - end - end - - describe 'Pagination attributes' do - using RSpec::Parameterized::TableSyntax - - where(:page, :current_page, :total_pages) do - 1 | 1 | 2 - 2 | 2 | 2 - 3 | 1 | 2 # Overlimit - 0 | 1 | 2 # Incorrect - end - - with_them do - it 'returns the correct pagination attributes' do - expect(subject.current_page).to eq(current_page) - expect(subject.total_pages).to eq(total_pages) - end - end - end - end -end diff --git a/spec/services/projects/container_repository/gitlab/cleanup_tags_service_spec.rb 
b/spec/services/projects/container_repository/gitlab/cleanup_tags_service_spec.rb index 416a3ed9782..f662d8bfc0c 100644 --- a/spec/services/projects/container_repository/gitlab/cleanup_tags_service_spec.rb +++ b/spec/services/projects/container_repository/gitlab/cleanup_tags_service_spec.rb @@ -49,23 +49,23 @@ RSpec.describe Projects::ContainerRepository::Gitlab::CleanupTagsService, featur let(:tags_page_size) { 2 } it_behaves_like 'when regex matching everything is specified', - delete_expectations: [%w[A], %w[Ba Bb], %w[C D], %w[E]] + delete_expectations: [%w[A], %w[Ba Bb], %w[C D], %w[E]] it_behaves_like 'when regex matching everything is specified and latest is not kept', - delete_expectations: [%w[latest A], %w[Ba Bb], %w[C D], %w[E]] + delete_expectations: [%w[latest A], %w[Ba Bb], %w[C D], %w[E]] it_behaves_like 'when delete regex matching specific tags is used' it_behaves_like 'when delete regex matching specific tags is used with overriding allow regex' it_behaves_like 'with allow regex value', - delete_expectations: [%w[A], %w[C D], %w[E]] + delete_expectations: [%w[A], %w[C D], %w[E]] it_behaves_like 'when keeping only N tags', - delete_expectations: [%w[Bb]] + delete_expectations: [%w[Bb]] it_behaves_like 'when not keeping N tags', - delete_expectations: [%w[A], %w[Ba Bb], %w[C]] + delete_expectations: [%w[A], %w[Ba Bb], %w[C]] context 'when removing keeping only 3' do let(:params) do @@ -79,13 +79,13 @@ RSpec.describe Projects::ContainerRepository::Gitlab::CleanupTagsService, featur end it_behaves_like 'when removing older than 1 day', - delete_expectations: [%w[Ba Bb], %w[C]] + delete_expectations: [%w[Ba Bb], %w[C]] it_behaves_like 'when combining all parameters', - delete_expectations: [%w[Bb], %w[C]] + delete_expectations: [%w[Bb], %w[C]] it_behaves_like 'when running a container_expiration_policy', - delete_expectations: [%w[Bb], %w[C]] + delete_expectations: [%w[Bb], %w[C]] context 'with a timeout' do let(:params) do @@ -113,7 +113,7 @@ 
RSpec.describe Projects::ContainerRepository::Gitlab::CleanupTagsService, featur end it_behaves_like 'when regex matching everything is specified', - delete_expectations: [%w[A], %w[Ba Bb], %w[C D], %w[E]] + delete_expectations: [%w[A], %w[Ba Bb], %w[C D], %w[E]] end end end @@ -122,32 +122,32 @@ RSpec.describe Projects::ContainerRepository::Gitlab::CleanupTagsService, featur let(:tags_page_size) { 1000 } it_behaves_like 'when regex matching everything is specified', - delete_expectations: [%w[A Ba Bb C D E]] + delete_expectations: [%w[A Ba Bb C D E]] it_behaves_like 'when delete regex matching specific tags is used' it_behaves_like 'when delete regex matching specific tags is used with overriding allow regex' it_behaves_like 'with allow regex value', - delete_expectations: [%w[A C D E]] + delete_expectations: [%w[A C D E]] it_behaves_like 'when keeping only N tags', - delete_expectations: [%w[Ba Bb C]] + delete_expectations: [%w[Ba Bb C]] it_behaves_like 'when not keeping N tags', - delete_expectations: [%w[A Ba Bb C]] + delete_expectations: [%w[A Ba Bb C]] it_behaves_like 'when removing keeping only 3', - delete_expectations: [%w[Ba Bb C]] + delete_expectations: [%w[Ba Bb C]] it_behaves_like 'when removing older than 1 day', - delete_expectations: [%w[Ba Bb C]] + delete_expectations: [%w[Ba Bb C]] it_behaves_like 'when combining all parameters', - delete_expectations: [%w[Ba Bb C]] + delete_expectations: [%w[Ba Bb C]] it_behaves_like 'when running a container_expiration_policy', - delete_expectations: [%w[Ba Bb C]] + delete_expectations: [%w[Ba Bb C]] end context 'with no tags page' do diff --git a/spec/services/projects/container_repository/third_party/cleanup_tags_service_spec.rb b/spec/services/projects/container_repository/third_party/cleanup_tags_service_spec.rb index d9b30428fb5..836e722eb99 100644 --- a/spec/services/projects/container_repository/third_party/cleanup_tags_service_spec.rb +++ 
b/spec/services/projects/container_repository/third_party/cleanup_tags_service_spec.rb @@ -42,112 +42,112 @@ RSpec.describe Projects::ContainerRepository::ThirdParty::CleanupTagsService, :c subject { service.execute } it_behaves_like 'when regex matching everything is specified', - delete_expectations: [%w[A Ba Bb C D E]], - service_response_extra: { - before_truncate_size: 6, - after_truncate_size: 6, - before_delete_size: 6, - cached_tags_count: 0 - }, - supports_caching: true + delete_expectations: [%w[A Ba Bb C D E]], + service_response_extra: { + before_truncate_size: 6, + after_truncate_size: 6, + before_delete_size: 6, + cached_tags_count: 0 + }, + supports_caching: true it_behaves_like 'when regex matching everything is specified and latest is not kept', - delete_expectations: [%w[A Ba Bb C D E latest]], - service_response_extra: { - before_truncate_size: 7, - after_truncate_size: 7, - before_delete_size: 7, - cached_tags_count: 0 - }, - supports_caching: true + delete_expectations: [%w[A Ba Bb C D E latest]], + service_response_extra: { + before_truncate_size: 7, + after_truncate_size: 7, + before_delete_size: 7, + cached_tags_count: 0 + }, + supports_caching: true it_behaves_like 'when delete regex matching specific tags is used', - service_response_extra: { - before_truncate_size: 2, - after_truncate_size: 2, - before_delete_size: 2, - cached_tags_count: 0 - }, - supports_caching: true + service_response_extra: { + before_truncate_size: 2, + after_truncate_size: 2, + before_delete_size: 2, + cached_tags_count: 0 + }, + supports_caching: true it_behaves_like 'when delete regex matching specific tags is used with overriding allow regex', - service_response_extra: { - before_truncate_size: 1, - after_truncate_size: 1, - before_delete_size: 1, - cached_tags_count: 0 - }, - supports_caching: true + service_response_extra: { + before_truncate_size: 1, + after_truncate_size: 1, + before_delete_size: 1, + cached_tags_count: 0 + }, + supports_caching: true 
it_behaves_like 'with allow regex value', - delete_expectations: [%w[A C D E]], - service_response_extra: { - before_truncate_size: 4, - after_truncate_size: 4, - before_delete_size: 4, - cached_tags_count: 0 - }, - supports_caching: true + delete_expectations: [%w[A C D E]], + service_response_extra: { + before_truncate_size: 4, + after_truncate_size: 4, + before_delete_size: 4, + cached_tags_count: 0 + }, + supports_caching: true it_behaves_like 'when keeping only N tags', - delete_expectations: [%w[Bb Ba C]], - service_response_extra: { - before_truncate_size: 4, - after_truncate_size: 4, - before_delete_size: 3, - cached_tags_count: 0 - }, - supports_caching: true + delete_expectations: [%w[Bb Ba C]], + service_response_extra: { + before_truncate_size: 4, + after_truncate_size: 4, + before_delete_size: 3, + cached_tags_count: 0 + }, + supports_caching: true it_behaves_like 'when not keeping N tags', - delete_expectations: [%w[A Ba Bb C]], - service_response_extra: { - before_truncate_size: 4, - after_truncate_size: 4, - before_delete_size: 4, - cached_tags_count: 0 - }, - supports_caching: true + delete_expectations: [%w[A Ba Bb C]], + service_response_extra: { + before_truncate_size: 4, + after_truncate_size: 4, + before_delete_size: 4, + cached_tags_count: 0 + }, + supports_caching: true it_behaves_like 'when removing keeping only 3', - delete_expectations: [%w[Bb Ba C]], - service_response_extra: { - before_truncate_size: 6, - after_truncate_size: 6, - before_delete_size: 3, - cached_tags_count: 0 - }, - supports_caching: true + delete_expectations: [%w[Bb Ba C]], + service_response_extra: { + before_truncate_size: 6, + after_truncate_size: 6, + before_delete_size: 3, + cached_tags_count: 0 + }, + supports_caching: true it_behaves_like 'when removing older than 1 day', - delete_expectations: [%w[Ba Bb C]], - service_response_extra: { - before_truncate_size: 6, - after_truncate_size: 6, - before_delete_size: 3, - cached_tags_count: 0 - }, - supports_caching: 
true + delete_expectations: [%w[Ba Bb C]], + service_response_extra: { + before_truncate_size: 6, + after_truncate_size: 6, + before_delete_size: 3, + cached_tags_count: 0 + }, + supports_caching: true it_behaves_like 'when combining all parameters', - delete_expectations: [%w[Bb Ba C]], - service_response_extra: { - before_truncate_size: 6, - after_truncate_size: 6, - before_delete_size: 3, - cached_tags_count: 0 - }, - supports_caching: true + delete_expectations: [%w[Bb Ba C]], + service_response_extra: { + before_truncate_size: 6, + after_truncate_size: 6, + before_delete_size: 3, + cached_tags_count: 0 + }, + supports_caching: true it_behaves_like 'when running a container_expiration_policy', - delete_expectations: [%w[Bb Ba C]], - service_response_extra: { - before_truncate_size: 6, - after_truncate_size: 6, - before_delete_size: 3, - cached_tags_count: 0 - }, - supports_caching: true + delete_expectations: [%w[Bb Ba C]], + service_response_extra: { + before_truncate_size: 6, + after_truncate_size: 6, + before_delete_size: 3, + cached_tags_count: 0 + }, + supports_caching: true context 'when running a container_expiration_policy with caching' do let(:user) { nil } diff --git a/spec/services/projects/create_service_spec.rb b/spec/services/projects/create_service_spec.rb index e435db4efa6..495e2277d43 100644 --- a/spec/services/projects/create_service_spec.rb +++ b/spec/services/projects/create_service_spec.rb @@ -339,9 +339,12 @@ RSpec.describe Projects::CreateService, '#execute', feature_category: :projects before do group.add_maintainer(group_maintainer) - create(:group_group_link, shared_group: subgroup_for_projects, - shared_with_group: subgroup_for_access, - group_access: share_max_access_level) + create( + :group_group_link, + shared_group: subgroup_for_projects, + shared_with_group: subgroup_for_access, + group_access: share_max_access_level + ) end context 'membership is higher from group hierarchy' do @@ -956,11 +959,11 @@ RSpec.describe 
Projects::CreateService, '#execute', feature_category: :projects receive(:perform_async).and_call_original ) expect(AuthorizedProjectUpdate::UserRefreshFromReplicaWorker).to( - receive(:bulk_perform_in) - .with(1.hour, - array_including([user.id], [other_user.id]), - batch_delay: 30.seconds, batch_size: 100) - .and_call_original + receive(:bulk_perform_in).with( + 1.hour, + array_including([user.id], [other_user.id]), + batch_delay: 30.seconds, batch_size: 100 + ).and_call_original ) project = create_project(user, opts) diff --git a/spec/services/projects/destroy_service_spec.rb b/spec/services/projects/destroy_service_spec.rb index 0689a65c2f4..665f930a0a8 100644 --- a/spec/services/projects/destroy_service_spec.rb +++ b/spec/services/projects/destroy_service_spec.rb @@ -207,9 +207,11 @@ RSpec.describe Projects::DestroyService, :aggregate_failures, :event_store_publi context 'when project has exports' do let!(:project_with_export) do create(:project, :repository, namespace: user.namespace).tap do |project| - create(:import_export_upload, - project: project, - export_file: fixture_file_upload('spec/fixtures/project_export.tar.gz')) + create( + :import_export_upload, + project: project, + export_file: fixture_file_upload('spec/fixtures/project_export.tar.gz') + ) end end @@ -337,8 +339,7 @@ RSpec.describe Projects::DestroyService, :aggregate_failures, :event_store_publi let(:container_repository) { create(:container_repository) } before do - stub_container_registry_tags(repository: project.full_path + '/image', - tags: ['tag']) + stub_container_registry_tags(repository: project.full_path + '/image', tags: ['tag']) project.container_repositories << container_repository end @@ -387,8 +388,7 @@ RSpec.describe Projects::DestroyService, :aggregate_failures, :event_store_publi context 'when there are tags for legacy root repository' do before do - stub_container_registry_tags(repository: project.full_path, - tags: ['tag']) + stub_container_registry_tags(repository: 
project.full_path, tags: ['tag']) end context 'when image repository tags deletion succeeds' do @@ -414,8 +414,7 @@ RSpec.describe Projects::DestroyService, :aggregate_failures, :event_store_publi context 'when there are no tags for legacy root repository' do before do - stub_container_registry_tags(repository: project.full_path, - tags: []) + stub_container_registry_tags(repository: project.full_path, tags: []) end it 'does not try to destroy the repository' do diff --git a/spec/services/projects/fork_service_spec.rb b/spec/services/projects/fork_service_spec.rb index 8f42f5c8f87..2aba2303dd1 100644 --- a/spec/services/projects/fork_service_spec.rb +++ b/spec/services/projects/fork_service_spec.rb @@ -22,14 +22,16 @@ RSpec.describe Projects::ForkService, feature_category: :source_code_management @from_user = create(:user) @from_namespace = @from_user.namespace avatar = fixture_file_upload("spec/fixtures/dk.png", "image/png") - @from_project = create(:project, - :repository, - creator_id: @from_user.id, - namespace: @from_namespace, - star_count: 107, - avatar: avatar, - description: 'wow such project', - external_authorization_classification_label: 'classification-label') + @from_project = create( + :project, + :repository, + creator_id: @from_user.id, + namespace: @from_namespace, + star_count: 107, + avatar: avatar, + description: 'wow such project', + external_authorization_classification_label: 'classification-label' + ) @to_user = create(:user) @to_namespace = @to_user.namespace @from_project.add_member(@to_user, :developer) @@ -258,11 +260,13 @@ RSpec.describe Projects::ForkService, feature_category: :source_code_management before do @group_owner = create(:user) @developer = create(:user) - @project = create(:project, :repository, - creator_id: @group_owner.id, - star_count: 777, - description: 'Wow, such a cool project!', - ci_config_path: 'debian/salsa-ci.yml') + @project = create( + :project, :repository, + creator_id: @group_owner.id, + star_count: 777, 
+ description: 'Wow, such a cool project!', + ci_config_path: 'debian/salsa-ci.yml' + ) @group = create(:group) @group.add_member(@group_owner, GroupMember::OWNER) @group.add_member(@developer, GroupMember::DEVELOPER) @@ -297,9 +301,7 @@ RSpec.describe Projects::ForkService, feature_category: :source_code_management context 'project already exists in group' do it 'fails due to validation, not transaction failure' do - existing_project = create(:project, :repository, - name: @project.name, - namespace: @group) + existing_project = create(:project, :repository, name: @project.name, namespace: @group) to_project = fork_project(@project, @group_owner, @opts) expect(existing_project.persisted?).to be_truthy expect(to_project.errors[:name]).to eq(['has already been taken']) diff --git a/spec/services/projects/group_links/create_service_spec.rb b/spec/services/projects/group_links/create_service_spec.rb index b62fd0ecb68..4f2f480cf1c 100644 --- a/spec/services/projects/group_links/create_service_spec.rb +++ b/spec/services/projects/group_links/create_service_spec.rb @@ -69,11 +69,11 @@ RSpec.describe Projects::GroupLinks::CreateService, '#execute', feature_category .and_call_original ) expect(AuthorizedProjectUpdate::UserRefreshFromReplicaWorker).to( - receive(:bulk_perform_in) - .with(1.hour, - array_including([user.id], [other_user.id]), - batch_delay: 30.seconds, batch_size: 100) - .and_call_original + receive(:bulk_perform_in).with( + 1.hour, + array_including([user.id], [other_user.id]), + batch_delay: 30.seconds, batch_size: 100 + ).and_call_original ) subject.execute @@ -82,8 +82,7 @@ RSpec.describe Projects::GroupLinks::CreateService, '#execute', feature_category context 'when sharing outside the hierarchy is disabled' do let_it_be(:shared_group_parent) do - create(:group, - namespace_settings: create(:namespace_settings, prevent_sharing_groups_outside_hierarchy: true)) + create(:group, namespace_settings: create(:namespace_settings, 
prevent_sharing_groups_outside_hierarchy: true)) end let_it_be(:project, reload: true) { create(:project, group: shared_group_parent) } diff --git a/spec/services/projects/group_links/destroy_service_spec.rb b/spec/services/projects/group_links/destroy_service_spec.rb index e1f915e18bd..76bdd536a0d 100644 --- a/spec/services/projects/group_links/destroy_service_spec.rb +++ b/spec/services/projects/group_links/destroy_service_spec.rb @@ -31,10 +31,11 @@ RSpec.describe Projects::GroupLinks::DestroyService, '#execute', feature_categor stub_feature_flags(do_not_run_safety_net_auth_refresh_jobs: false) expect(AuthorizedProjectUpdate::UserRefreshFromReplicaWorker).to( - receive(:bulk_perform_in) - .with(1.hour, - [[user.id]], - batch_delay: 30.seconds, batch_size: 100) + receive(:bulk_perform_in).with( + 1.hour, + [[user.id]], + batch_delay: 30.seconds, batch_size: 100 + ) ) subject.execute(group_link) diff --git a/spec/services/projects/group_links/update_service_spec.rb b/spec/services/projects/group_links/update_service_spec.rb index b3336cb91fd..4232412cf54 100644 --- a/spec/services/projects/group_links/update_service_spec.rb +++ b/spec/services/projects/group_links/update_service_spec.rb @@ -45,10 +45,11 @@ RSpec.describe Projects::GroupLinks::UpdateService, '#execute', feature_category stub_feature_flags(do_not_run_safety_net_auth_refresh_jobs: false) expect(AuthorizedProjectUpdate::UserRefreshFromReplicaWorker).to( - receive(:bulk_perform_in) - .with(1.hour, - [[user.id]], - batch_delay: 30.seconds, batch_size: 100) + receive(:bulk_perform_in).with( + 1.hour, + [[user.id]], + batch_delay: 30.seconds, batch_size: 100 + ) ) subject diff --git a/spec/services/projects/hashed_storage/migration_service_spec.rb b/spec/services/projects/hashed_storage/migration_service_spec.rb index 14bfa645be2..89bc55dbaf6 100644 --- a/spec/services/projects/hashed_storage/migration_service_spec.rb +++ b/spec/services/projects/hashed_storage/migration_service_spec.rb @@ -16,9 +16,11 @@ 
RSpec.describe Projects::HashedStorage::MigrationService, feature_category: :pro describe '#execute' do context 'repository migration' do let(:repository_service) do - Projects::HashedStorage::MigrateRepositoryService.new(project: project, - old_disk_path: project.full_path, - logger: logger) + Projects::HashedStorage::MigrateRepositoryService.new( + project: project, + old_disk_path: project.full_path, + logger: logger + ) end it 'delegates migration to Projects::HashedStorage::MigrateRepositoryService' do @@ -53,9 +55,11 @@ RSpec.describe Projects::HashedStorage::MigrationService, feature_category: :pro let(:project) { create(:project, :empty_repo, :wiki_repo, storage_version: ::Project::HASHED_STORAGE_FEATURES[:repository]) } let(:attachments_service) do - Projects::HashedStorage::MigrateAttachmentsService.new(project: project, - old_disk_path: project.full_path, - logger: logger) + Projects::HashedStorage::MigrateAttachmentsService.new( + project: project, + old_disk_path: project.full_path, + logger: logger + ) end it 'delegates migration to Projects::HashedStorage::MigrateRepositoryService' do diff --git a/spec/services/projects/lfs_pointers/lfs_link_service_spec.rb b/spec/services/projects/lfs_pointers/lfs_link_service_spec.rb index e8a08d95bba..fb3cc9bdac9 100644 --- a/spec/services/projects/lfs_pointers/lfs_link_service_spec.rb +++ b/spec/services/projects/lfs_pointers/lfs_link_service_spec.rb @@ -56,13 +56,13 @@ RSpec.describe Projects::LfsPointers::LfsLinkService, feature_category: :source_ it 'links in batches' do stub_const("#{described_class}::BATCH_SIZE", 3) - expect(Gitlab::Import::Logger) - .to receive(:info) - .with(class: described_class.name, - project_id: project.id, - project_path: project.full_path, - lfs_objects_linked_count: 7, - iterations: 3) + expect(Gitlab::Import::Logger).to receive(:info).with( + class: described_class.name, + project_id: project.id, + project_path: project.full_path, + lfs_objects_linked_count: 7, + iterations: 3 + ) 
lfs_objects = create_list(:lfs_object, 7) linked = subject.execute(lfs_objects.pluck(:oid)) diff --git a/spec/services/projects/open_merge_requests_count_service_spec.rb b/spec/services/projects/open_merge_requests_count_service_spec.rb index 74eead39ec4..9d94fff2d20 100644 --- a/spec/services/projects/open_merge_requests_count_service_spec.rb +++ b/spec/services/projects/open_merge_requests_count_service_spec.rb @@ -11,10 +11,7 @@ RSpec.describe Projects::OpenMergeRequestsCountService, :use_clean_rails_memory_ describe '#count' do it 'returns the number of open merge requests' do - create(:merge_request, - :opened, - source_project: project, - target_project: project) + create(:merge_request, :opened, source_project: project, target_project: project) expect(subject.count).to eq(1) end diff --git a/spec/services/projects/prometheus/alerts/notify_service_spec.rb b/spec/services/projects/prometheus/alerts/notify_service_spec.rb index d747cc4b424..0feac6c3e72 100644 --- a/spec/services/projects/prometheus/alerts/notify_service_spec.rb +++ b/spec/services/projects/prometheus/alerts/notify_service_spec.rb @@ -45,10 +45,8 @@ RSpec.describe Projects::Prometheus::Alerts::NotifyService, feature_category: :m end before do - create(:clusters_integrations_prometheus, - cluster: prd_cluster, alert_manager_token: token) - create(:clusters_integrations_prometheus, - cluster: stg_cluster, alert_manager_token: nil) + create(:clusters_integrations_prometheus, cluster: prd_cluster, alert_manager_token: token) + create(:clusters_integrations_prometheus, cluster: stg_cluster, alert_manager_token: nil) end context 'without token' do @@ -78,10 +76,12 @@ RSpec.describe Projects::Prometheus::Alerts::NotifyService, feature_category: :m cluster.update!(enabled: cluster_enabled) unless integration_enabled.nil? 
- create(:clusters_integrations_prometheus, - cluster: cluster, - enabled: integration_enabled, - alert_manager_token: configured_token) + create( + :clusters_integrations_prometheus, + cluster: cluster, + enabled: integration_enabled, + alert_manager_token: configured_token + ) end end @@ -118,9 +118,11 @@ RSpec.describe Projects::Prometheus::Alerts::NotifyService, feature_category: :m create(:prometheus_integration, project: project) if alerting_setting - create(:project_alerting_setting, - project: project, - token: configured_token) + create( + :project_alerting_setting, + project: project, + token: configured_token + ) end end diff --git a/spec/services/projects/protect_default_branch_service_spec.rb b/spec/services/projects/protect_default_branch_service_spec.rb index 21743e2a656..a4fdd9983b8 100644 --- a/spec/services/projects/protect_default_branch_service_spec.rb +++ b/spec/services/projects/protect_default_branch_service_spec.rb @@ -247,6 +247,7 @@ RSpec.describe Projects::ProtectDefaultBranchService, feature_category: :source_ context 'when feature flag `group_protected_branches` disabled' do before do stub_feature_flags(group_protected_branches: false) + stub_feature_flags(allow_protected_branches_for_group: false) end it 'return false' do @@ -257,6 +258,7 @@ RSpec.describe Projects::ProtectDefaultBranchService, feature_category: :source_ context 'when feature flag `group_protected_branches` enabled' do before do stub_feature_flags(group_protected_branches: true) + stub_feature_flags(allow_protected_branches_for_group: true) end it 'return true' do diff --git a/spec/services/projects/transfer_service_spec.rb b/spec/services/projects/transfer_service_spec.rb index 755ee795ebe..92ed5ef3f0a 100644 --- a/spec/services/projects/transfer_service_spec.rb +++ b/spec/services/projects/transfer_service_spec.rb @@ -667,10 +667,11 @@ RSpec.describe Projects::TransferService, feature_category: :projects do user_ids = [user.id, member_of_old_group.id, 
member_of_new_group.id].map { |id| [id] } expect(AuthorizedProjectUpdate::UserRefreshFromReplicaWorker).to( - receive(:bulk_perform_in) - .with(1.hour, - user_ids, - batch_delay: 30.seconds, batch_size: 100) + receive(:bulk_perform_in).with( + 1.hour, + user_ids, + batch_delay: 30.seconds, batch_size: 100 + ) ) subject diff --git a/spec/services/projects/unlink_fork_service_spec.rb b/spec/services/projects/unlink_fork_service_spec.rb index f9fb1f65550..872e38aba1d 100644 --- a/spec/services/projects/unlink_fork_service_spec.rb +++ b/spec/services/projects/unlink_fork_service_spec.rb @@ -116,8 +116,10 @@ RSpec.describe Projects::UnlinkForkService, :use_clean_rails_memory_store_cachin expect(project.fork_network_member).to be_nil expect(project.fork_network).to be_nil - expect(forked_project.fork_network).to have_attributes(root_project_id: nil, - deleted_root_project_name: project.full_name) + expect(forked_project.fork_network).to have_attributes( + root_project_id: nil, + deleted_root_project_name: project.full_name + ) expect(project.forked_to_members.count).to eq(0) expect(forked_project.forked_to_members.count).to eq(1) expect(fork_of_fork.forked_to_members.count).to eq(0) diff --git a/spec/services/projects/update_pages_service_spec.rb b/spec/services/projects/update_pages_service_spec.rb index 8157dce4ce8..a97369c4b08 100644 --- a/spec/services/projects/update_pages_service_spec.rb +++ b/spec/services/projects/update_pages_service_spec.rb @@ -8,13 +8,16 @@ RSpec.describe Projects::UpdatePagesService, feature_category: :pages do let_it_be(:old_pipeline) { create(:ci_pipeline, project: project, sha: project.commit('HEAD').sha) } let_it_be(:pipeline) { create(:ci_pipeline, project: project, sha: project.commit('HEAD').sha) } - let(:build) { create(:ci_build, pipeline: pipeline, ref: 'HEAD') } + let(:options) { {} } + let(:build) { create(:ci_build, pipeline: pipeline, ref: 'HEAD', options: options) } let(:invalid_file) { 
fixture_file_upload('spec/fixtures/dk.png') } let(:file) { fixture_file_upload("spec/fixtures/pages.zip") } + let(:custom_root_file) { fixture_file_upload("spec/fixtures/pages_with_custom_root.zip") } let(:empty_file) { fixture_file_upload("spec/fixtures/pages_empty.zip") } let(:empty_metadata_filename) { "spec/fixtures/pages_empty.zip.meta" } let(:metadata_filename) { "spec/fixtures/pages.zip.meta" } + let(:custom_root_file_metadata) { "spec/fixtures/pages_with_custom_root.zip.meta" } let(:metadata) { fixture_file_upload(metadata_filename) if File.exist?(metadata_filename) } subject { described_class.new(project, build) } @@ -97,6 +100,7 @@ RSpec.describe Projects::UpdatePagesService, feature_category: :pages do expect(deployment.file_sha256).to eq(artifacts_archive.file_sha256) expect(project.pages_metadatum.reload.pages_deployment_id).to eq(deployment.id) expect(deployment.ci_build_id).to eq(build.id) + expect(deployment.root_directory).to be_nil end it 'does not fail if pages_metadata is absent' do @@ -116,9 +120,11 @@ RSpec.describe Projects::UpdatePagesService, feature_category: :pages do it 'schedules a destruction of older deployments' do expect(DestroyPagesDeploymentsWorker).to( - receive(:perform_in).with(described_class::OLD_DEPLOYMENTS_DESTRUCTION_DELAY, - project.id, - instance_of(Integer)) + receive(:perform_in).with( + described_class::OLD_DEPLOYMENTS_DESTRUCTION_DELAY, + project.id, + instance_of(Integer) + ) ) execute @@ -140,7 +146,45 @@ RSpec.describe Projects::UpdatePagesService, feature_category: :pages do it 'returns an error' do expect(execute).not_to eq(:success) - expect(GenericCommitStatus.last.description).to eq("Error: The `public/` folder is missing, or not declared in `.gitlab-ci.yml`.") + expect(GenericCommitStatus.last.description).to eq("Error: You need to either include a `public/` folder in your artifacts, or specify which one to use for Pages using `publish` in `.gitlab-ci.yml`") + end + end + + context 'when there is a custom 
root config' do + let(:file) { custom_root_file } + let(:metadata_filename) { custom_root_file_metadata } + + context 'when the directory specified with `publish` is included in the artifacts' do + let(:options) { { publish: 'foo' } } + + it 'creates pages_deployment and saves it in the metadata' do + expect(execute).to eq(:success) + + deployment = project.pages_deployments.last + expect(deployment.root_directory).to eq(options[:publish]) + end + end + + context 'when the directory specified with `publish` is not included in the artifacts' do + let(:options) { { publish: 'bar' } } + + it 'returns an error' do + expect(execute).not_to eq(:success) + + expect(GenericCommitStatus.last.description).to eq("Error: You need to either include a `public/` folder in your artifacts, or specify which one to use for Pages using `publish` in `.gitlab-ci.yml`") + end + end + + context 'when there is a folder named `public`, but `publish` specifies a different one' do + let(:options) { { publish: 'foo' } } + let(:file) { fixture_file_upload("spec/fixtures/pages.zip") } + let(:metadata_filename) { "spec/fixtures/pages.zip.meta" } + + it 'returns an error' do + expect(execute).not_to eq(:success) + + expect(GenericCommitStatus.last.description).to eq("Error: You need to either include a `public/` folder in your artifacts, or specify which one to use for Pages using `publish` in `.gitlab-ci.yml`") + end end end @@ -322,10 +366,14 @@ RSpec.describe Projects::UpdatePagesService, feature_category: :pages do context 'when retrying the job' do let(:stage) { create(:ci_stage, position: 1_000_000, name: 'deploy', pipeline: pipeline) } let!(:older_deploy_job) do - create(:generic_commit_status, :failed, pipeline: pipeline, - ref: build.ref, - ci_stage: stage, - name: 'pages:deploy') + create( + :generic_commit_status, + :failed, + pipeline: pipeline, + ref: build.ref, + ci_stage: stage, + name: 'pages:deploy' + ) end before do diff --git a/spec/services/projects/update_service_spec.rb 
b/spec/services/projects/update_service_spec.rb index bdb2e6e09e5..65b1eb9dfa4 100644 --- a/spec/services/projects/update_service_spec.rb +++ b/spec/services/projects/update_service_spec.rb @@ -326,7 +326,9 @@ RSpec.describe Projects::UpdateService, feature_category: :projects do it 'logs an error and creates a metric when wiki can not be created' do project.project_feature.update!(wiki_access_level: ProjectFeature::DISABLED) - expect_any_instance_of(ProjectWiki).to receive(:create_wiki_repository).and_raise(Wiki::CouldNotCreateWikiError) + expect_next_instance_of(ProjectWiki) do |project_wiki| + expect(project_wiki).to receive(:create_wiki_repository).and_raise(Wiki::CouldNotCreateWikiError) + end expect_any_instance_of(described_class).to receive(:log_error).with("Could not create wiki for #{project.full_name}") counter = double(:counter) @@ -516,6 +518,25 @@ RSpec.describe Projects::UpdateService, feature_category: :projects do end end + context 'when updating #runner_registration_enabled' do + it 'updates the attribute' do + expect { update_project(project, user, runner_registration_enabled: false) } + .to change { project.runner_registration_enabled } + .to(false) + end + + context 'when runner registration is disabled for all projects' do + before do + stub_application_setting(valid_runner_registrars: []) + end + + it 'restricts updating the attribute' do + expect { update_project(project, user, runner_registration_enabled: false) } + .not_to change { project.runner_registration_enabled } + end + end + end + context 'when updating runners settings' do let(:settings) do { instance_runners_enabled: true, namespace_traversal_ids: [123] } @@ -621,17 +642,19 @@ RSpec.describe Projects::UpdateService, feature_category: :projects do context 'when updating nested attributes for prometheus integration' do context 'prometheus integration exists' do let(:prometheus_integration_attributes) do - attributes_for(:prometheus_integration, - project: project, - properties: { 
api_url: "http://new.prometheus.com", manual_configuration: "0" } - ) + attributes_for( + :prometheus_integration, + project: project, + properties: { api_url: "http://new.prometheus.com", manual_configuration: "0" } + ) end let!(:prometheus_integration) do - create(:prometheus_integration, - project: project, - properties: { api_url: "http://old.prometheus.com", manual_configuration: "0" } - ) + create( + :prometheus_integration, + project: project, + properties: { api_url: "http://old.prometheus.com", manual_configuration: "0" } + ) end it 'updates existing record' do @@ -645,10 +668,11 @@ RSpec.describe Projects::UpdateService, feature_category: :projects do context 'prometheus integration does not exist' do context 'valid parameters' do let(:prometheus_integration_attributes) do - attributes_for(:prometheus_integration, - project: project, - properties: { api_url: "http://example.prometheus.com", manual_configuration: "0" } - ) + attributes_for( + :prometheus_integration, + project: project, + properties: { api_url: "http://example.prometheus.com", manual_configuration: "0" } + ) end it 'creates new record' do @@ -661,10 +685,11 @@ RSpec.describe Projects::UpdateService, feature_category: :projects do context 'invalid parameters' do let(:prometheus_integration_attributes) do - attributes_for(:prometheus_integration, - project: project, - properties: { api_url: nil, manual_configuration: "1" } - ) + attributes_for( + :prometheus_integration, + project: project, + properties: { api_url: nil, manual_configuration: "1" } + ) end it 'does not create new record' do diff --git a/spec/services/protected_branches/cache_service_spec.rb b/spec/services/protected_branches/cache_service_spec.rb index 3aa3b56640b..0abf8a673f9 100644 --- a/spec/services/protected_branches/cache_service_spec.rb +++ b/spec/services/protected_branches/cache_service_spec.rb @@ -145,6 +145,7 @@ RSpec.describe ProtectedBranches::CacheService, :clean_gitlab_redis_cache, featu context 'when feature 
flag disabled' do before do stub_feature_flags(group_protected_branches: false) + stub_feature_flags(allow_protected_branches_for_group: false) end it_behaves_like 'execute with entity' diff --git a/spec/services/releases/create_service_spec.rb b/spec/services/releases/create_service_spec.rb index 9768ceb12e8..ca5dd912e77 100644 --- a/spec/services/releases/create_service_spec.rb +++ b/spec/services/releases/create_service_spec.rb @@ -55,6 +55,26 @@ RSpec.describe Releases::CreateService, feature_category: :continuous_integratio end end + context 'when project is a catalog resource' do + let(:ref) { 'master' } + let!(:catalog_resource) { create(:catalog_resource, project: project) } + + context 'and it is valid' do + let_it_be(:project) { create(:project, :repository, description: 'our components') } + + it_behaves_like 'a successful release creation' + end + + context 'and it is invalid' do + it 'raises an error and does not update the release' do + result = service.execute + + expect(result[:status]).to eq(:error) + expect(result[:message]).to eq('Project must have a description') + end + end + end + context 'when ref is provided' do let(:ref) { 'master' } let(:tag_name) { 'foobar' } diff --git a/spec/services/resource_events/synthetic_milestone_notes_builder_service_spec.rb b/spec/services/resource_events/synthetic_milestone_notes_builder_service_spec.rb index ca61afc7914..20537aa3685 100644 --- a/spec/services/resource_events/synthetic_milestone_notes_builder_service_spec.rb +++ b/spec/services/resource_events/synthetic_milestone_notes_builder_service_spec.rb @@ -11,7 +11,8 @@ RSpec.describe ResourceEvents::SyntheticMilestoneNotesBuilderService, feature_ca let_it_be(:events) do [ create(:resource_milestone_event, issue: issue, milestone: milestone, action: :add, created_at: '2020-01-01 04:00'), - create(:resource_milestone_event, issue: issue, milestone: milestone, action: :remove, created_at: '2020-01-02 08:00') + create(:resource_milestone_event, issue: 
issue, milestone: milestone, action: :remove, created_at: '2020-01-02 08:00'), + create(:resource_milestone_event, issue: issue, milestone: nil, action: :remove, created_at: '2020-01-02 08:00') ] end @@ -22,7 +23,8 @@ RSpec.describe ResourceEvents::SyntheticMilestoneNotesBuilderService, feature_ca expect(notes.map(&:note)).to eq( [ "changed milestone to %#{milestone.iid}", - 'removed milestone' + "removed milestone %#{milestone.iid}", + "removed milestone " ]) end diff --git a/spec/services/security/ci_configuration/dependency_scanning_create_service_spec.rb b/spec/services/security/ci_configuration/dependency_scanning_create_service_spec.rb index 719a2cf24e9..7ac2249642a 100644 --- a/spec/services/security/ci_configuration/dependency_scanning_create_service_spec.rb +++ b/spec/services/security/ci_configuration/dependency_scanning_create_service_spec.rb @@ -3,7 +3,7 @@ require 'spec_helper' RSpec.describe Security::CiConfiguration::DependencyScanningCreateService, :snowplow, - feature_category: :dependency_scanning do + feature_category: :software_composition_analysis do subject(:result) { described_class.new(project, user).execute } let(:branch_name) { 'set-dependency-scanning-config-1' } diff --git a/spec/services/snippets/destroy_service_spec.rb b/spec/services/snippets/destroy_service_spec.rb index d78b5429189..ace9847185e 100644 --- a/spec/services/snippets/destroy_service_spec.rb +++ b/spec/services/snippets/destroy_service_spec.rb @@ -144,7 +144,7 @@ RSpec.describe Snippets::DestroyService, feature_category: :source_code_manageme end end - context 'when the repository does not exists' do + context 'when the repository does not exist' do let(:snippet) { create(:personal_snippet, author: user) } it 'does not schedule anything and return success' do diff --git a/spec/services/spam/spam_verdict_service_spec.rb b/spec/services/spam/spam_verdict_service_spec.rb index 42993491459..e6845517aa7 100644 --- a/spec/services/spam/spam_verdict_service_spec.rb +++ 
b/spec/services/spam/spam_verdict_service_spec.rb @@ -318,11 +318,13 @@ RSpec.describe Spam::SpamVerdictService, feature_category: :instance_resiliency ::Spam::SpamConstants::CONDITIONAL_ALLOW, ::Spam::SpamConstants::DISALLOW, ::Spam::SpamConstants::BLOCK_USER].each do |verdict_value| - let(:verdict) { verdict_value } - let(:expected) { [verdict_value, attribs] } + context "with verdict_value:#{verdict_value}" do + let(:verdict) { verdict_value } + let(:expected) { [verdict_value, attribs] } - it "returns expected spam constant" do - expect(subject).to eq(expected) + it "returns expected spam constant" do + expect(subject).to eq(expected) + end end end end diff --git a/spec/services/system_note_service_spec.rb b/spec/services/system_note_service_spec.rb index 38b6943b12a..1eb11c80264 100644 --- a/spec/services/system_note_service_spec.rb +++ b/spec/services/system_note_service_spec.rb @@ -692,10 +692,10 @@ RSpec.describe SystemNoteService, feature_category: :shared do it 'calls IssuableService' do expect_next_instance_of(::SystemNotes::IssuablesService) do |service| - expect(service).to receive(:change_issue_type) + expect(service).to receive(:change_issue_type).with('issue') end - described_class.change_issue_type(incident, author) + described_class.change_issue_type(incident, author, 'issue') end end diff --git a/spec/services/system_notes/issuables_service_spec.rb b/spec/services/system_notes/issuables_service_spec.rb index 08a91234174..af660a9b72e 100644 --- a/spec/services/system_notes/issuables_service_spec.rb +++ b/spec/services/system_notes/issuables_service_spec.rb @@ -861,15 +861,29 @@ RSpec.describe ::SystemNotes::IssuablesService, feature_category: :team_planning end describe '#change_issue_type' do - let(:noteable) { create(:incident, project: project) } + context 'with issue' do + let_it_be_with_reload(:noteable) { create(:issue, project: project) } - subject { service.change_issue_type } + subject { service.change_issue_type('incident') } - 
it_behaves_like 'a system note' do - let(:action) { 'issue_type' } + it_behaves_like 'a system note' do + let(:action) { 'issue_type' } + end + + it { expect(subject.note).to eq "changed type from incident to issue" } end - it { expect(subject.note).to eq "changed issue type to incident" } + context 'with work item' do + let_it_be_with_reload(:noteable) { create(:work_item, project: project) } + + subject { service.change_issue_type('task') } + + it_behaves_like 'a system note' do + let(:action) { 'issue_type' } + end + + it { expect(subject.note).to eq "changed type from task to issue" } + end end describe '#hierarchy_changed' do diff --git a/spec/services/tasks_to_be_done/base_service_spec.rb b/spec/services/tasks_to_be_done/base_service_spec.rb index ff4eefdfb3a..3ca9d140197 100644 --- a/spec/services/tasks_to_be_done/base_service_spec.rb +++ b/spec/services/tasks_to_be_done/base_service_spec.rb @@ -33,9 +33,9 @@ RSpec.describe TasksToBeDone::BaseService, feature_category: :team_planning do add_labels: label.title } - expect(Issues::BuildService) + expect(Issues::CreateService) .to receive(:new) - .with(container: project, current_user: current_user, params: params) + .with(container: project, current_user: current_user, params: params, spam_params: nil) .and_call_original expect { service.execute }.to change(Issue, :count).by(1) diff --git a/spec/services/terraform/remote_state_handler_spec.rb b/spec/services/terraform/remote_state_handler_spec.rb index f4f7a8a0985..4590a9ad0e9 100644 --- a/spec/services/terraform/remote_state_handler_spec.rb +++ b/spec/services/terraform/remote_state_handler_spec.rb @@ -85,6 +85,7 @@ RSpec.describe Terraform::RemoteStateHandler, feature_category: :infrastructure_ end expect(record.reload.name).to eq 'new-name' + expect(record.reload.project).to eq project end it 'raises exception if lock has not been acquired before' do diff --git a/spec/services/users/approve_service_spec.rb b/spec/services/users/approve_service_spec.rb index 
1b063a9ad1c..09379857c38 100644 --- a/spec/services/users/approve_service_spec.rb +++ b/spec/services/users/approve_service_spec.rb @@ -75,6 +75,24 @@ RSpec.describe Users::ApproveService, feature_category: :user_management do expect { subject }.to have_enqueued_mail(DeviseMailer, :user_admin_approval) end + context 'when the user was created via sign up' do + it 'does not send a password reset email' do + expect { subject }.not_to have_enqueued_mail(Notify, :new_user_email) + end + end + + context 'when the user was created by an admin' do + let(:user) { create(:user, :blocked_pending_approval, created_by_id: current_user.id) } + + it 'sends a password reset email' do + allow(user).to receive(:generate_reset_token).and_return(:reset_token) + + expect(Notify).to receive(:new_user_email).with(user.id, :reset_token).and_call_original + + expect { subject }.to have_enqueued_mail(Notify, :new_user_email) + end + end + context 'email confirmation status' do context 'user is unconfirmed' do let(:user) { create(:user, :blocked_pending_approval, :unconfirmed) } diff --git a/spec/services/users/update_canonical_email_service_spec.rb b/spec/services/users/update_canonical_email_service_spec.rb index 559b759a400..d3c414f6db4 100644 --- a/spec/services/users/update_canonical_email_service_spec.rb +++ b/spec/services/users/update_canonical_email_service_spec.rb @@ -92,23 +92,25 @@ RSpec.describe Users::UpdateCanonicalEmailService, feature_category: :user_profi context 'when the user email is not processable' do [nil, 'nonsense'].each do |invalid_address| - before do - user.email = invalid_address - end + context "with #{invalid_address}" do + before do + user.email = invalid_address + end - specify do - subject.execute + specify do + subject.execute - expect(user.user_canonical_email).to be_nil - end + expect(user.user_canonical_email).to be_nil + end - it 'preserves any existing record' do - user.email = nil - user.user_canonical_email = build(:user_canonical_email, 
canonical_email: other_email) + it 'preserves any existing record' do + user.email = nil + user.user_canonical_email = build(:user_canonical_email, canonical_email: other_email) - subject.execute + subject.execute - expect(user.user_canonical_email.canonical_email).to eq other_email + expect(user.user_canonical_email.canonical_email).to eq other_email + end end end end diff --git a/spec/services/work_items/create_service_spec.rb b/spec/services/work_items/create_service_spec.rb index ecd7937f933..46e598c3f11 100644 --- a/spec/services/work_items/create_service_spec.rb +++ b/spec/services/work_items/create_service_spec.rb @@ -5,197 +5,252 @@ require 'spec_helper' RSpec.describe WorkItems::CreateService, feature_category: :team_planning do include AfterNextHelpers - let_it_be_with_reload(:project) { create(:project) } - let_it_be(:parent) { create(:work_item, project: project) } - let_it_be(:guest) { create(:user) } - let_it_be(:reporter) { create(:user) } - let_it_be(:user_with_no_access) { create(:user) } - - let(:widget_params) { {} } - let(:spam_params) { double } - let(:current_user) { guest } - let(:opts) do - { - title: 'Awesome work_item', - description: 'please fix' - } - end - - before_all do - project.add_guest(guest) - project.add_reporter(reporter) - end - - describe '#execute' do - let(:service) do - described_class.new( - container: project, - current_user: current_user, - params: opts, - spam_params: spam_params, - widget_params: widget_params - ) + RSpec.shared_examples 'creates work item in container' do |container_type| + let_it_be_with_reload(:project) { create(:project) } + let_it_be_with_reload(:group) { create(:group) } + + let_it_be(:container) do + case container_type + when :project then project + when :project_namespace then project.project_namespace + when :group then group + end end - subject(:service_result) { service.execute } + let_it_be(:container_args) do + case container_type + when :project, :project_namespace then { project: 
project } + when :group then { namespace: group } + end + end - before do - stub_spam_services + let_it_be(:parent) { create(:work_item, **container_args) } + let_it_be(:guest) { create(:user) } + let_it_be(:reporter) { create(:user) } + let_it_be(:user_with_no_access) { create(:user) } + + let(:widget_params) { {} } + let(:spam_params) { double } + let(:current_user) { guest } + let(:opts) do + { + title: 'Awesome work_item', + description: 'please fix' + } end - context 'when user is not allowed to create a work item in the project' do - let(:current_user) { user_with_no_access } + before_all do + memberships_container = container.is_a?(Namespaces::ProjectNamespace) ? container.reload.project : container + memberships_container.add_guest(guest) + memberships_container.add_reporter(reporter) + end - it { is_expected.to be_error } + describe '#execute' do + shared_examples 'fails creating work item and returns errors' do + it 'does not create new work item if parent can not be set' do + expect { service_result }.not_to change(WorkItem, :count) - it 'returns an access error' do - expect(service_result.errors).to contain_exactly('Operation not allowed') + expect(service_result[:status]).to be(:error) + expect(service_result[:message]).to match(error_message) + end end - end - context 'when params are valid' do - it 'created instance is a WorkItem' do - expect(Issuable::CommonSystemNotesService).to receive_message_chain(:new, :execute) + let(:service) do + described_class.new( + container: container, + current_user: current_user, + params: opts, + spam_params: spam_params, + widget_params: widget_params + ) + end - work_item = service_result[:work_item] + subject(:service_result) { service.execute } - expect(work_item).to be_persisted - expect(work_item).to be_a(::WorkItem) - expect(work_item.title).to eq('Awesome work_item') - expect(work_item.description).to eq('please fix') - expect(work_item.work_item_type.base_type).to eq('issue') + before do + stub_spam_services 
end - it 'calls NewIssueWorker with correct arguments' do - expect(NewIssueWorker).to receive(:perform_async).with(Integer, current_user.id, 'WorkItem') + context 'when user is not allowed to create a work item in the container' do + let(:current_user) { user_with_no_access } + + it { is_expected.to be_error } - service_result + it 'returns an access error' do + expect(service_result.errors).to contain_exactly('Operation not allowed') + end end - end - context 'when params are invalid' do - let(:opts) { { title: '' } } + context 'when applying quick actions' do + let(:work_item) { service_result[:work_item] } + let(:opts) do + { + title: 'My work item', + work_item_type: work_item_type, + description: '/shrug' + } + end - it { is_expected.to be_error } + context 'when work item type is not the default Issue' do + let(:work_item_type) { create(:work_item_type, :task, namespace: group) } - it 'returns validation errors' do - expect(service_result.errors).to contain_exactly("Title can't be blank") - end + it 'saves the work item without applying the quick action' do + expect(service_result).to be_success + expect(work_item).to be_persisted + expect(work_item.description).to eq('/shrug') + end + end - it 'does not execute after-create transaction widgets' do - expect(service).to receive(:create).and_call_original - expect(service).not_to receive(:execute_widgets) - .with(callback: :after_create_in_transaction, widget_params: widget_params) + context 'when work item type is the default Issue' do + let(:work_item_type) { WorkItems::Type.default_by_type(:issue) } - service_result + it 'saves the work item and applies the quick action' do + expect(service_result).to be_success + expect(work_item).to be_persisted + expect(work_item.description).to eq(' ¯\_(ツ)_/¯') + end + end end - end - context 'checking spam' do - it 'executes SpamActionService' do - expect_next_instance_of( - Spam::SpamActionService, - { - spammable: kind_of(WorkItem), - spam_params: spam_params, - user: 
an_instance_of(User), - action: :create - } - ) do |instance| - expect(instance).to receive(:execute) + context 'when params are valid' do + it 'created instance is a WorkItem' do + expect(Issuable::CommonSystemNotesService).to receive_message_chain(:new, :execute) + + work_item = service_result[:work_item] + + expect(work_item).to be_persisted + expect(work_item).to be_a(::WorkItem) + expect(work_item.title).to eq('Awesome work_item') + expect(work_item.description).to eq('please fix') + expect(work_item.work_item_type.base_type).to eq('issue') end - service_result - end - end + it 'calls NewIssueWorker with correct arguments' do + expect(NewIssueWorker).to receive(:perform_async).with(Integer, current_user.id, 'WorkItem') - it_behaves_like 'work item widgetable service' do - let(:widget_params) do - { - hierarchy_widget: { parent: parent } - } + service_result + end end - let(:service) do - described_class.new( - container: project, - current_user: current_user, - params: opts, - spam_params: spam_params, - widget_params: widget_params - ) + context 'when params are invalid' do + let(:opts) { { title: '' } } + + it { is_expected.to be_error } + + it 'returns validation errors' do + expect(service_result.errors).to contain_exactly("Title can't be blank") + end + + it 'does not execute after-create transaction widgets' do + expect(service).to receive(:create).and_call_original + expect(service).not_to receive(:execute_widgets) + .with(callback: :after_create_in_transaction, widget_params: widget_params) + + service_result + end end - let(:service_execute) { service.execute } + context 'checking spam' do + it 'executes SpamActionService' do + expect_next_instance_of( + Spam::SpamActionService, + { + spammable: kind_of(WorkItem), + spam_params: spam_params, + user: an_instance_of(User), + action: :create + } + ) do |instance| + expect(instance).to receive(:execute) + end + + service_result + end + end - let(:supported_widgets) do - [ + it_behaves_like 'work item 
widgetable service' do + let(:widget_params) do { - klass: WorkItems::Widgets::HierarchyService::CreateService, - callback: :after_create_in_transaction, - params: { parent: parent } + hierarchy_widget: { parent: parent } } - ] - end - end + end - describe 'hierarchy widget' do - let(:widget_params) { { hierarchy_widget: { parent: parent } } } + let(:service) do + described_class.new( + container: container, + current_user: current_user, + params: opts, + spam_params: spam_params, + widget_params: widget_params + ) + end - shared_examples 'fails creating work item and returns errors' do - it 'does not create new work item if parent can not be set' do - expect { service_result }.not_to change(WorkItem, :count) + let(:service_execute) { service.execute } - expect(service_result[:status]).to be(:error) - expect(service_result[:message]).to match(error_message) + let(:supported_widgets) do + [ + { + klass: WorkItems::Widgets::HierarchyService::CreateService, + callback: :after_create_in_transaction, + params: { parent: parent } + } + ] end end - context 'when user can admin parent link' do - let(:current_user) { reporter } + describe 'hierarchy widget' do + let(:widget_params) { { hierarchy_widget: { parent: parent } } } - context 'when parent is valid work item' do - let(:opts) do - { - title: 'Awesome work_item', - description: 'please fix', - work_item_type: WorkItems::Type.default_by_type(:task) - } - end + context 'when user can admin parent link' do + let(:current_user) { reporter } - it 'creates new work item and sets parent reference' do - expect { service_result }.to change( - WorkItem, :count).by(1).and(change( - WorkItems::ParentLink, :count).by(1)) + context 'when parent is valid work item' do + let(:opts) do + { + title: 'Awesome work_item', + description: 'please fix', + work_item_type: WorkItems::Type.default_by_type(:task) + } + end - expect(service_result[:status]).to be(:success) + it 'creates new work item and sets parent reference' do + expect { 
service_result }.to change(WorkItem, :count).by(1).and( + change(WorkItems::ParentLink, :count).by(1) + ) + + expect(service_result[:status]).to be(:success) + end end - end - context 'when parent type is invalid' do - let_it_be(:parent) { create(:work_item, :task, project: project) } + context 'when parent type is invalid' do + let_it_be(:parent) { create(:work_item, :task, **container_args) } - it_behaves_like 'fails creating work item and returns errors' do - let(:error_message) { 'is not allowed to add this type of parent' } + it_behaves_like 'fails creating work item and returns errors' do + let(:error_message) { 'is not allowed to add this type of parent' } + end end end - end - context 'when user cannot admin parent link' do - let(:current_user) { guest } + context 'when user cannot admin parent link' do + let(:current_user) { guest } - let(:opts) do - { - title: 'Awesome work_item', - description: 'please fix', - work_item_type: WorkItems::Type.default_by_type(:task) - } - end + let(:opts) do + { + title: 'Awesome work_item', + description: 'please fix', + work_item_type: WorkItems::Type.default_by_type(:task) + } + end - it_behaves_like 'fails creating work item and returns errors' do - let(:error_message) { 'No matching work item found. Make sure that you are adding a valid work item ID.' } + it_behaves_like 'fails creating work item and returns errors' do + let(:error_message) { 'No matching work item found. Make sure that you are adding a valid work item ID.' 
} + end end end end end + + it_behaves_like 'creates work item in container', :project + it_behaves_like 'creates work item in container', :project_namespace + it_behaves_like 'creates work item in container', :group end diff --git a/spec/services/work_items/export_csv_service_spec.rb b/spec/services/work_items/export_csv_service_spec.rb index 7c22312ce1f..948ff89245e 100644 --- a/spec/services/work_items/export_csv_service_spec.rb +++ b/spec/services/work_items/export_csv_service_spec.rb @@ -6,7 +6,7 @@ RSpec.describe WorkItems::ExportCsvService, :with_license, feature_category: :te let_it_be(:user) { create(:user) } let_it_be(:group) { create(:group) } let_it_be(:project) { create(:project, :public, group: group) } - let_it_be(:work_item_1) { create(:work_item, project: project) } + let_it_be(:work_item_1) { create(:work_item, description: 'test', project: project) } let_it_be(:work_item_2) { create(:work_item, :incident, project: project) } subject { described_class.new(WorkItem.all, project) } @@ -64,6 +64,11 @@ RSpec.describe WorkItems::ExportCsvService, :with_license, feature_category: :te expect(csv[0]['Created At (UTC)']).to eq(work_item_1.created_at.to_s(:csv)) end + specify 'description' do + expect(csv[0]['Description']).to be_present + expect(csv[0]['Description']).to eq(work_item_1.description) + end + it 'preloads fields to avoid N+1 queries' do control = ActiveRecord::QueryRecorder.new { subject.csv_data } @@ -73,4 +78,20 @@ RSpec.describe WorkItems::ExportCsvService, :with_license, feature_category: :te end it_behaves_like 'a service that returns invalid fields from selection' + + # TODO - once we have a UI for this feature + # we can turn these into feature specs. 
+ # more info at: https://gitlab.com/gitlab-org/gitlab/-/issues/396943 + context 'when importing an exported file' do + context 'for work item of type issue' do + it_behaves_like 'a exported file that can be imported' do + let_it_be(:user) { create(:user) } + let_it_be(:origin_project) { create(:project) } + let_it_be(:target_project) { create(:project) } + let_it_be(:work_item) { create(:work_item, project: origin_project) } + + let(:expected_matching_fields) { %w[title work_item_type] } + end + end + end end diff --git a/spec/services/work_items/parent_links/base_service_spec.rb b/spec/services/work_items/parent_links/base_service_spec.rb new file mode 100644 index 00000000000..dbdbc774d3c --- /dev/null +++ b/spec/services/work_items/parent_links/base_service_spec.rb @@ -0,0 +1,31 @@ +# frozen_string_literal: true + +require 'spec_helper' + +module WorkItems + class ParentLinksService < WorkItems::ParentLinks::BaseService; end +end + +RSpec.describe WorkItems::ParentLinks::BaseService, feature_category: :portfolio_management do + let_it_be(:user) { create(:user) } + let_it_be(:project) { create(:project) } + let_it_be(:work_item) { create(:work_item, :objective, project: project) } + let_it_be(:target_work_item) { create(:work_item, :objective, project: project) } + + let(:params) { { target_issuable: target_work_item } } + let(:described_class_descendant) { WorkItems::ParentLinksService } + + before do + project.add_reporter(user) + end + + describe '#execute' do + subject { described_class_descendant.new(work_item, user, params).execute } + + context 'when user has sufficient permissions' do + it 'raises NotImplementedError' do + expect { subject }.to raise_error(NotImplementedError) + end + end + end +end diff --git a/spec/services/work_items/parent_links/create_service_spec.rb b/spec/services/work_items/parent_links/create_service_spec.rb index a989ecf9c07..41ae6398614 100644 --- a/spec/services/work_items/parent_links/create_service_spec.rb +++ 
b/spec/services/work_items/parent_links/create_service_spec.rb @@ -9,8 +9,8 @@ RSpec.describe WorkItems::ParentLinks::CreateService, feature_category: :portfol let_it_be(:project) { create(:project) } let_it_be(:work_item) { create(:work_item, project: project) } let_it_be(:task) { create(:work_item, :task, project: project) } - let_it_be(:task1) { create(:work_item, :task, project: project) } - let_it_be(:task2) { create(:work_item, :task, project: project) } + let_it_be_with_reload(:task1) { create(:work_item, :task, project: project) } + let_it_be_with_reload(:task2) { create(:work_item, :task, project: project) } let_it_be(:guest_task) { create(:work_item, :task) } let_it_be(:invalid_task) { build_stubbed(:work_item, :task, id: non_existing_record_id) } let_it_be(:another_project) { (create :project) } @@ -118,26 +118,74 @@ RSpec.describe WorkItems::ParentLinks::CreateService, feature_category: :portfol expect(subject[:created_references].map(&:work_item_id)).to match_array([task1.id, task2.id]) end - it 'creates notes', :aggregate_failures do - subject + it 'creates notes and records the events', :aggregate_failures do + expect { subject }.to change(WorkItems::ResourceLinkEvent, :count).by(2) work_item_notes = work_item.notes.last(2) + resource_link_events = WorkItems::ResourceLinkEvent.last(2) expect(work_item_notes.first.note).to eq("added #{task1.to_reference} as child task") expect(work_item_notes.last.note).to eq("added #{task2.to_reference} as child task") expect(task1.notes.last.note).to eq("added #{work_item.to_reference} as parent issue") expect(task2.notes.last.note).to eq("added #{work_item.to_reference} as parent issue") + expect(resource_link_events.first).to have_attributes( + user_id: user.id, + issue_id: work_item.id, + child_work_item_id: task1.id, + action: "add", + system_note_metadata_id: task1.notes.last.system_note_metadata.id + ) + expect(resource_link_events.last).to have_attributes( + user_id: user.id, + issue_id: work_item.id, + 
child_work_item_id: task2.id, + action: "add", + system_note_metadata_id: task2.notes.last.system_note_metadata.id + ) + end + + context 'when note creation fails for some reason' do + let(:params) { { issuable_references: [task1] } } + + [Note.new, nil].each do |relate_child_note| + it 'still records the link event', :aggregate_failures do + allow_next_instance_of(WorkItems::ParentLinks::CreateService) do |instance| + allow(instance).to receive(:create_notes).and_return(relate_child_note) + end + + expect { subject } + .to change(WorkItems::ResourceLinkEvent, :count).by(1) + .and not_change(Note, :count) + + expect(WorkItems::ResourceLinkEvent.last).to have_attributes( + user_id: user.id, + issue_id: work_item.id, + child_work_item_id: task1.id, + action: "add", + system_note_metadata_id: nil + ) + end + end end context 'when task is already assigned' do let(:params) { { issuable_references: [task, task2] } } it 'creates links only for non related tasks', :aggregate_failures do - expect { subject }.to change(parent_link_class, :count).by(1) + expect { subject } + .to change(parent_link_class, :count).by(1) + .and change(WorkItems::ResourceLinkEvent, :count).by(1) expect(subject[:created_references].map(&:work_item_id)).to match_array([task2.id]) expect(work_item.notes.last.note).to eq("added #{task2.to_reference} as child task") expect(task2.notes.last.note).to eq("added #{work_item.to_reference} as parent issue") expect(task.notes).to be_empty + expect(WorkItems::ResourceLinkEvent.last).to have_attributes( + user_id: user.id, + issue_id: work_item.id, + child_work_item_id: task2.id, + action: "add", + system_note_metadata_id: task2.notes.last.system_note_metadata.id + ) end end @@ -194,7 +242,7 @@ RSpec.describe WorkItems::ParentLinks::CreateService, feature_category: :portfol end context 'when params include invalid ids' do - let(:params) { { issuable_references: [task1, invalid_task] } } + let(:params) { { issuable_references: [task1, guest_task] } } it 
'creates links only for valid IDs' do expect { subject }.to change(parent_link_class, :count).by(1) diff --git a/spec/services/work_items/parent_links/destroy_service_spec.rb b/spec/services/work_items/parent_links/destroy_service_spec.rb index c77546f6ca1..7e2e3949b73 100644 --- a/spec/services/work_items/parent_links/destroy_service_spec.rb +++ b/spec/services/work_items/parent_links/destroy_service_spec.rb @@ -24,23 +24,53 @@ RSpec.describe WorkItems::ParentLinks::DestroyService, feature_category: :team_p let(:user) { reporter } it 'removes relation and creates notes', :aggregate_failures do - expect { subject }.to change(parent_link_class, :count).by(-1) + expect { subject } + .to change(parent_link_class, :count).by(-1) + .and change(WorkItems::ResourceLinkEvent, :count).by(1) expect(work_item.notes.last.note).to eq("removed child task #{task.to_reference}") expect(task.notes.last.note).to eq("removed parent issue #{work_item.to_reference}") + expect(WorkItems::ResourceLinkEvent.last).to have_attributes( + user_id: user.id, + issue_id: work_item.id, + child_work_item_id: task.id, + action: "remove", + system_note_metadata_id: task.notes.last.system_note_metadata.id + ) end it 'returns success message' do is_expected.to eq(message: 'Relation was removed', status: :success) end + + context 'when note creation fails for some reason' do + [Note.new, nil].each do |unrelate_child_note| + it 'still records the link event', :aggregate_failures do + allow(SystemNoteService).to receive(:unrelate_work_item).and_return(unrelate_child_note) + + expect { subject } + .to change(WorkItems::ResourceLinkEvent, :count).by(1) + .and not_change(Note, :count) + + expect(WorkItems::ResourceLinkEvent.last).to have_attributes( + user_id: user.id, + issue_id: work_item.id, + child_work_item_id: task.id, + action: "remove", + system_note_metadata_id: nil + ) + end + end + end end context 'when user has insufficient permissions' do let(:user) { guest } it 'does not remove relation', 
:aggregate_failures do - expect { subject }.not_to change(parent_link_class, :count).from(1) - + expect { subject } + .to not_change(parent_link_class, :count).from(1) + .and not_change(WorkItems::ResourceLinkEvent, :count) expect(SystemNoteService).not_to receive(:unrelate_work_item) end diff --git a/spec/services/work_items/parent_links/reorder_service_spec.rb b/spec/services/work_items/parent_links/reorder_service_spec.rb new file mode 100644 index 00000000000..0448429d2bb --- /dev/null +++ b/spec/services/work_items/parent_links/reorder_service_spec.rb @@ -0,0 +1,176 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe WorkItems::ParentLinks::ReorderService, feature_category: :portfolio_management do + describe '#execute' do + let_it_be(:reporter) { create(:user) } + let_it_be(:guest) { create(:user) } + let_it_be(:project) { create(:project) } + let_it_be_with_reload(:parent) { create(:work_item, :objective, project: project) } + let_it_be_with_reload(:work_item) { create(:work_item, :objective, project: project) } + let_it_be_with_reload(:top_adjacent) { create(:work_item, :objective, project: project) } + let_it_be_with_reload(:last_adjacent) { create(:work_item, :objective, project: project) } + + let(:parent_link_class) { WorkItems::ParentLink } + let(:user) { reporter } + let(:params) { { target_issuable: work_item } } + let(:relative_range) { [top_adjacent, last_adjacent].map(&:parent_link).map(&:relative_position) } + + subject { described_class.new(parent, user, params).execute } + + before do + project.add_reporter(reporter) + project.add_guest(guest) + + create(:parent_link, work_item: top_adjacent, work_item_parent: parent) + create(:parent_link, work_item: last_adjacent, work_item_parent: parent) + end + + shared_examples 'raises a service error' do |message, status = 409| + it { is_expected.to eq(service_error(message, http_status: status)) } + end + + shared_examples 'returns not found error' do + it 'returns error' do + 
error = "No matching work item found. Make sure that you are adding a valid work item ID." + + is_expected.to eq(service_error(error)) + end + + it 'creates no relationship' do + expect { subject }.not_to change { parent_link_class.count } + end + end + + shared_examples 'returns conflict error' do + it_behaves_like 'raises a service error', 'Work item(s) already assigned' + + it 'creates no relationship' do + expect { subject }.to not_change { parent_link_class.count } + end + end + + shared_examples 'processes ordered hierarchy' do + it 'returns success status and processed links', :aggregate_failures do + expect(subject.keys).to match_array([:status, :created_references]) + expect(subject[:status]).to eq(:success) + expect(subject[:created_references].map(&:work_item_id)).to match_array([work_item.id]) + end + + it 'orders hierarchy' do + subject + + expect(last_adjacent.parent_link.relative_position).to be_between(*relative_range) + end + end + + context 'when user has insufficient permissions' do + let(:user) { guest } + + it_behaves_like 'returns not found error' + + context 'when user is a guest assigned to the work item' do + before do + work_item.assignees = [guest] + end + + it_behaves_like 'returns not found error' + end + end + + context 'when child and parent are already linked' do + before do + create(:parent_link, work_item: work_item, work_item_parent: parent) + end + + it_behaves_like 'returns conflict error' + + context 'when adjacents are already in place and the user has sufficient permissions' do + let(:base_param) { { target_issuable: work_item } } + + shared_examples 'updates hierarchy order without notes' do + it_behaves_like 'processes ordered hierarchy' + + it 'keeps relationships', :aggregate_failures do + expect { subject }.to not_change { parent_link_class.count } + + expect(parent_link_class.where(work_item: work_item).last.work_item_parent).to eq(parent) + end + + it 'does not create notes', :aggregate_failures do + expect { subject 
}.to not_change { work_item.notes.count }.and(not_change { work_item.notes.count }) + end + end + + context 'when moving before adjacent work item' do + let(:params) { base_param.merge({ adjacent_work_item: last_adjacent, relative_position: 'BEFORE' }) } + + it_behaves_like 'updates hierarchy order without notes' + end + + context 'when moving after adjacent work item' do + let(:params) { base_param.merge({ adjacent_work_item: top_adjacent, relative_position: 'AFTER' }) } + + it_behaves_like 'updates hierarchy order without notes' + end + end + end + + context 'when new parent is assigned' do + shared_examples 'updates hierarchy order and creates notes' do + it_behaves_like 'processes ordered hierarchy' + + it 'creates notes', :aggregate_failures do + subject + + expect(parent.notes.last.note).to eq("added #{work_item.to_reference} as child objective") + expect(work_item.notes.last.note).to eq("added #{parent.to_reference} as parent objective") + end + end + + context 'when adjacents are already in place and the user has sufficient permissions' do + let(:base_param) { { target_issuable: work_item } } + + context 'when moving before adjacent work item' do + let(:params) { base_param.merge({ adjacent_work_item: last_adjacent, relative_position: 'BEFORE' }) } + + it_behaves_like 'updates hierarchy order and creates notes' + end + + context 'when moving after adjacent work item' do + let(:params) { base_param.merge({ adjacent_work_item: top_adjacent, relative_position: 'AFTER' }) } + + it_behaves_like 'updates hierarchy order and creates notes' + end + + context 'when previous parent was in place' do + before do + create(:parent_link, work_item: work_item, + work_item_parent: create(:work_item, :objective, project: project)) + end + + context 'when moving before adjacent work item' do + let(:params) { base_param.merge({ adjacent_work_item: last_adjacent, relative_position: 'BEFORE' }) } + + it_behaves_like 'updates hierarchy order and creates notes' + end + + context 
'when moving after adjacent work item' do + let(:params) { base_param.merge({ adjacent_work_item: top_adjacent, relative_position: 'AFTER' }) } + + it_behaves_like 'updates hierarchy order and creates notes' + end + end + end + end + end + + def service_error(message, http_status: 404) + { + message: message, + status: :error, + http_status: http_status + } + end +end diff --git a/spec/services/work_items/prepare_import_csv_service_spec.rb b/spec/services/work_items/prepare_import_csv_service_spec.rb new file mode 100644 index 00000000000..6a657120690 --- /dev/null +++ b/spec/services/work_items/prepare_import_csv_service_spec.rb @@ -0,0 +1,52 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe WorkItems::PrepareImportCsvService, feature_category: :team_planning do + let_it_be(:project) { create(:project) } + let_it_be(:user) { create(:user) } + + let(:file) { double } + let(:upload_service) { double } + let(:uploader) { double } + let(:upload) { double } + + let(:subject) do + described_class.new(project, user, file: file).execute + end + + context 'when file is uploaded correctly' do + let(:upload_id) { 99 } + + before do + mock_upload + end + + it 'returns a success message' do + result = subject + + expect(result[:status]).to eq(:success) + expect(result[:message]).to eq( + "Your work items are being imported. 
Once finished, you'll receive a confirmation email.") + end + + it 'enqueues the ImportWorkItemsCsvWorker' do + expect(WorkItems::ImportWorkItemsCsvWorker).to receive(:perform_async).with(user.id, project.id, upload_id) + + subject + end + end + + context 'when file upload fails' do + before do + mock_upload(false) + end + + it 'returns an error message' do + result = subject + + expect(result[:status]).to eq(:error) + expect(result[:message]).to eq('File upload error.') + end + end +end diff --git a/spec/services/work_items/update_service_spec.rb b/spec/services/work_items/update_service_spec.rb index 5647f8c085c..2cf52ee853a 100644 --- a/spec/services/work_items/update_service_spec.rb +++ b/spec/services/work_items/update_service_spec.rb @@ -44,6 +44,33 @@ RSpec.describe WorkItems::UpdateService, feature_category: :team_planning do end end + context 'when applying quick actions' do + let(:opts) { { description: "/shrug" } } + + context 'when work item type is not the default Issue' do + before do + task_type = WorkItems::Type.default_by_type(:task) + work_item.update_columns(issue_type: task_type.base_type, work_item_type_id: task_type.id) + end + + it 'does not apply the quick action' do + expect do + update_work_item + end.to change(work_item, :description).to('/shrug') + end + end + + context 'when work item type is the default Issue' do + let(:issue) { create(:work_item, :issue, description: '') } + + it 'applies the quick action' do + expect do + update_work_item + end.to change(work_item, :description).to(' ¯\_(ツ)_/¯') + end + end + end + context 'when title is changed' do let(:opts) { { title: 'changed' } } diff --git a/spec/services/work_items/widgets/assignees_service/update_service_spec.rb b/spec/services/work_items/widgets/assignees_service/update_service_spec.rb index 67736592876..66e30e2f882 100644 --- a/spec/services/work_items/widgets/assignees_service/update_service_spec.rb +++ 
b/spec/services/work_items/widgets/assignees_service/update_service_spec.rb @@ -21,10 +21,9 @@ RSpec.describe WorkItems::Widgets::AssigneesService::UpdateService, :freeze_time end describe '#before_update_in_transaction' do - subject do - described_class.new(widget: widget, current_user: current_user) - .before_update_in_transaction(params: params) - end + let(:service) { described_class.new(widget: widget, current_user: current_user) } + + subject { service.before_update_in_transaction(params: params) } it 'updates the assignees and sets updated_at to the current time' do subject @@ -112,5 +111,20 @@ RSpec.describe WorkItems::Widgets::AssigneesService::UpdateService, :freeze_time expect(work_item.updated_at).to be_like_time(1.day.ago) end end + + context 'when widget does not exist in new type' do + let(:params) { {} } + + before do + allow(service).to receive(:new_type_excludes_widget?).and_return(true) + work_item.assignee_ids = [new_assignee.id] + end + + it "resets the work item's assignees" do + subject + + expect(work_item.assignee_ids).to be_empty + end + end end end diff --git a/spec/services/work_items/widgets/award_emoji_service/update_service_spec.rb b/spec/services/work_items/widgets/award_emoji_service/update_service_spec.rb new file mode 100644 index 00000000000..186e4d56cc4 --- /dev/null +++ b/spec/services/work_items/widgets/award_emoji_service/update_service_spec.rb @@ -0,0 +1,96 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe WorkItems::Widgets::AwardEmojiService::UpdateService, feature_category: :team_planning do + let_it_be(:reporter) { create(:user) } + let_it_be(:unauthorized_user) { create(:user) } + let_it_be(:project) { create(:project, :private) } + let_it_be(:work_item) { create(:work_item, project: project) } + + let(:current_user) { reporter } + let(:widget) { work_item.widgets.find { |widget| widget.is_a?(WorkItems::Widgets::AwardEmoji) } } + + before_all do + project.add_reporter(reporter) + end + + 
describe '#before_update_in_transaction' do + subject do + described_class.new(widget: widget, current_user: current_user) + .before_update_in_transaction(params: params) + end + + shared_examples 'raises a WidgetError' do + it { expect { subject }.to raise_error(described_class::WidgetError, message) } + end + + context 'when awarding an emoji' do + let(:params) { { action: :add, name: 'star' } } + + context 'when user has no access' do + let(:current_user) { unauthorized_user } + + it 'does not award the emoji' do + expect { subject }.not_to change { AwardEmoji.count } + end + end + + context 'when user has access' do + it 'awards the emoji to the work item' do + expect { subject }.to change { AwardEmoji.count }.by(1) + + emoji = AwardEmoji.last + + expect(emoji.name).to eq('star') + expect(emoji.awardable_id).to eq(work_item.id) + expect(emoji.user).to eq(current_user) + end + + context 'when the name is incorrect' do + let(:params) { { action: :add, name: 'foo' } } + + it_behaves_like 'raises a WidgetError' do + let(:message) { 'Name is not a valid emoji name' } + end + end + + context 'when the action is incorrect' do + let(:params) { { action: :foo, name: 'star' } } + + it_behaves_like 'raises a WidgetError' do + let(:message) { 'foo is not a valid action.' 
} + end + end + end + end + + context 'when removing emoji' do + let(:params) { { action: :remove, name: 'thumbsup' } } + + context 'when user has no access' do + let(:current_user) { unauthorized_user } + + it 'does not remove the emoji' do + expect { subject }.not_to change { AwardEmoji.count } + end + end + + context 'when user has access' do + it 'removes existing emoji' do + create(:award_emoji, :upvote, awardable: work_item, user: current_user) + + expect { subject }.to change { AwardEmoji.count }.by(-1) + end + + context 'when work item does not have the emoji' do + let(:params) { { action: :remove, name: 'star' } } + + it_behaves_like 'raises a WidgetError' do + let(:message) { 'User has not awarded emoji of type star on the awardable' } + end + end + end + end + end +end diff --git a/spec/services/work_items/widgets/current_user_todos_service/update_service_spec.rb b/spec/services/work_items/widgets/current_user_todos_service/update_service_spec.rb new file mode 100644 index 00000000000..85b7e7a70df --- /dev/null +++ b/spec/services/work_items/widgets/current_user_todos_service/update_service_spec.rb @@ -0,0 +1,106 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe WorkItems::Widgets::CurrentUserTodosService::UpdateService, feature_category: :team_planning do + let_it_be(:reporter) { create(:user) } + let_it_be(:project) { create(:project, :private) } + let_it_be(:current_user) { reporter } + let_it_be(:work_item) { create(:work_item, project: project) } + + let_it_be(:pending_todo1) do + create(:todo, state: :pending, target: work_item, target_type: work_item.class.name, user: current_user) + end + + let_it_be(:pending_todo2) do + create(:todo, state: :pending, target: work_item, target_type: work_item.class.name, user: current_user) + end + + let_it_be(:done_todo) do + create(:todo, state: :done, target: work_item, target_type: work_item.class.name, user: current_user) + end + + let_it_be(:other_work_item_todo) { create(:todo, 
state: :pending, target: create(:work_item), user: current_user) } + let_it_be(:other_user_todo) do + create(:todo, state: :pending, target: work_item, target_type: work_item.class.name, user: create(:user)) + end + + let(:widget) { work_item.widgets.find { |widget| widget.is_a?(WorkItems::Widgets::CurrentUserTodos) } } + + before_all do + project.add_reporter(reporter) + end + + describe '#before_update_in_transaction' do + subject do + described_class.new(widget: widget, current_user: current_user) + .before_update_in_transaction(params: params) + end + + context 'when adding a todo' do + let(:params) { { action: "add" } } + + context 'when user has no access' do + let(:current_user) { create(:user) } + + it 'does add a todo' do + expect { subject }.not_to change { Todo.count } + end + end + + context 'when user has access' do + let(:params) { { action: "add" } } + + it 'creates a new todo for the user and the work item' do + expect { subject }.to change { current_user.todos.count }.by(1) + + todo = current_user.todos.last + + expect(todo.target).to eq(work_item) + expect(todo).to be_pending + end + end + end + + context 'when marking as done' do + let(:params) { { action: "mark_as_done" } } + + context 'when user has no access' do + let(:current_user) { create(:user) } + + it 'does not change todo status' do + subject + + expect(pending_todo1.reload).to be_pending + expect(pending_todo2.reload).to be_pending + expect(other_work_item_todo.reload).to be_pending + expect(other_user_todo.reload).to be_pending + end + end + + context 'when resolving all todos of the work item', :aggregate_failures do + it 'resolves todos of the user for the work item' do + subject + + expect(pending_todo1.reload).to be_done + expect(pending_todo2.reload).to be_done + expect(other_work_item_todo.reload).to be_pending + expect(other_user_todo.reload).to be_pending + end + end + + context 'when resolving a specific todo', :aggregate_failures do + let(:params) { { action: "mark_as_done", 
todo_id: pending_todo1.id } } + + it 'resolves todos of the user for the work item' do + subject + + expect(pending_todo1.reload).to be_done + expect(pending_todo2.reload).to be_pending + expect(other_work_item_todo.reload).to be_pending + expect(other_user_todo.reload).to be_pending + end + end + end + end +end diff --git a/spec/services/work_items/widgets/description_service/update_service_spec.rb b/spec/services/work_items/widgets/description_service/update_service_spec.rb index 20b5758dde9..7da5b24a3b7 100644 --- a/spec/services/work_items/widgets/description_service/update_service_spec.rb +++ b/spec/services/work_items/widgets/description_service/update_service_spec.rb @@ -20,7 +20,9 @@ RSpec.describe WorkItems::Widgets::DescriptionService::UpdateService, feature_ca let(:widget) { work_item.widgets.find { |widget| widget.is_a?(WorkItems::Widgets::Description) } } describe '#update' do - subject { described_class.new(widget: widget, current_user: current_user).before_update_callback(params: params) } + let(:service) { described_class.new(widget: widget, current_user: current_user) } + + subject(:before_update_callback) { service.before_update_callback(params: params) } shared_examples 'sets work item description' do it 'correctly sets work item description value' do @@ -78,6 +80,23 @@ RSpec.describe WorkItems::Widgets::DescriptionService::UpdateService, feature_ca it_behaves_like 'does not set work item description' end + + context 'when widget does not exist in new type' do + let(:current_user) { author } + let(:params) { {} } + + before do + allow(service).to receive(:new_type_excludes_widget?).and_return(true) + work_item.update!(description: 'test') + end + + it "resets the work item's description" do + expect { before_update_callback } + .to change { work_item.description } + .from('test') + .to(nil) + end + end end context 'when user does not have permission to update description' do diff --git 
a/spec/services/work_items/widgets/hierarchy_service/update_service_spec.rb b/spec/services/work_items/widgets/hierarchy_service/update_service_spec.rb index 6285b43311d..229ba81d676 100644 --- a/spec/services/work_items/widgets/hierarchy_service/update_service_spec.rb +++ b/spec/services/work_items/widgets/hierarchy_service/update_service_spec.rb @@ -14,7 +14,13 @@ RSpec.describe WorkItems::Widgets::HierarchyService::UpdateService, feature_cate let(:widget) { work_item.widgets.find { |widget| widget.is_a?(WorkItems::Widgets::Hierarchy) } } let(:not_found_error) { 'No matching work item found. Make sure that you are adding a valid work item ID.' } - shared_examples 'raises a WidgetError' do + shared_examples 'raises a WidgetError' do |message| + it { expect { subject }.to raise_error(described_class::WidgetError, message) } + end + + shared_examples 'raises a WidgetError with message' do + let(:message) { not_found_error } + it { expect { subject }.to raise_error(described_class::WidgetError, message) } end @@ -24,16 +30,30 @@ RSpec.describe WorkItems::Widgets::HierarchyService::UpdateService, feature_cate context 'when parent and children params are present' do let(:params) { { parent: parent_work_item, children: [child_work_item] } } - it_behaves_like 'raises a WidgetError' do - let(:message) { 'A Work Item can be a parent or a child, but not both.' } - end + it_behaves_like 'raises a WidgetError', 'A Work Item can be a parent or a child, but not both.' end context 'when invalid params are present' do let(:params) { { other_parent: parent_work_item } } - it_behaves_like 'raises a WidgetError' do - let(:message) { 'One or more arguments are invalid: other_parent.' } + it_behaves_like 'raises a WidgetError', 'One or more arguments are invalid: other_parent.' 
+ end + + context 'when relative position params are incomplete' do + context 'when only adjacent_work_item is present' do + let(:params) do + { parent: parent_work_item, adjacent_work_item: child_work_item } + end + + it_behaves_like 'raises a WidgetError', described_class::INVALID_RELATIVE_POSITION_ERROR + end + + context 'when only relative_position is present' do + let(:params) do + { parent: parent_work_item, relative_position: 'AFTER' } + end + + it_behaves_like 'raises a WidgetError', described_class::INVALID_RELATIVE_POSITION_ERROR end end @@ -45,7 +65,7 @@ RSpec.describe WorkItems::Widgets::HierarchyService::UpdateService, feature_cate context 'when user has insufficient permissions to link work items' do let(:params) { { children: [child_work_item4] } } - it_behaves_like 'raises a WidgetError' do + it_behaves_like 'raises a WidgetError with message' do let(:message) { not_found_error } end end @@ -55,7 +75,7 @@ RSpec.describe WorkItems::Widgets::HierarchyService::UpdateService, feature_cate project.add_developer(user) end - context 'with valid params' do + context 'with valid children params' do let(:params) { { children: [child_work_item2, child_work_item3] } } it 'correctly sets work item parent' do @@ -64,14 +84,30 @@ RSpec.describe WorkItems::Widgets::HierarchyService::UpdateService, feature_cate expect(work_item.reload.work_item_children) .to contain_exactly(child_work_item, child_work_item2, child_work_item3) end + + context 'when relative_position and adjacent_work_item are given' do + context 'with BEFORE value' do + let(:params) do + { children: [child_work_item3], relative_position: 'BEFORE', adjacent_work_item: child_work_item } + end + + it_behaves_like 'raises a WidgetError', described_class::CHILDREN_REORDERING_ERROR + end + + context 'with AFTER value' do + let(:params) do + { children: [child_work_item2], relative_position: 'AFTER', adjacent_work_item: child_work_item } + end + + it_behaves_like 'raises a WidgetError', 
described_class::CHILDREN_REORDERING_ERROR + end + end end context 'when child is already assigned' do let(:params) { { children: [child_work_item] } } - it_behaves_like 'raises a WidgetError' do - let(:message) { 'Work item(s) already assigned' } - end + it_behaves_like 'raises a WidgetError', 'Work item(s) already assigned' end context 'when child type is invalid' do @@ -79,10 +115,8 @@ RSpec.describe WorkItems::Widgets::HierarchyService::UpdateService, feature_cate let(:params) { { children: [child_issue] } } - it_behaves_like 'raises a WidgetError' do - let(:message) do - "#{child_issue.to_reference} cannot be added: is not allowed to add this type of parent" - end + it_behaves_like 'raises a WidgetError with message' do + let(:message) { "#{child_issue.to_reference} cannot be added: is not allowed to add this type of parent" } end end end @@ -94,7 +128,7 @@ RSpec.describe WorkItems::Widgets::HierarchyService::UpdateService, feature_cate let(:params) { { parent: parent_work_item } } context 'when user has insufficient permissions to link work items' do - it_behaves_like 'raises a WidgetError' do + it_behaves_like 'raises a WidgetError with message' do let(:message) { not_found_error } end end @@ -121,7 +155,7 @@ RSpec.describe WorkItems::Widgets::HierarchyService::UpdateService, feature_cate end.to change(work_item, :work_item_parent).from(parent_work_item).to(nil) end - it 'returns success status if parent not present', :aggregate_failure do + it 'returns success status if parent not present', :aggregate_failures do work_item.update!(work_item_parent: nil) expect(subject[:status]).to eq(:success) @@ -134,10 +168,34 @@ RSpec.describe WorkItems::Widgets::HierarchyService::UpdateService, feature_cate let(:params) { { parent: parent_task } } - it_behaves_like 'raises a WidgetError' do - let(:message) do - "#{work_item.to_reference} cannot be added: is not allowed to add this type of parent" + it_behaves_like 'raises a WidgetError with message' do + let(:message) { 
"#{work_item.to_reference} cannot be added: is not allowed to add this type of parent" } + end + end + + context 'with positioning arguments' do + let_it_be_with_reload(:adjacent) { create(:work_item, :task, project: project) } + + let_it_be_with_reload(:adjacent_link) do + create(:parent_link, work_item: adjacent, work_item_parent: parent_work_item) + end + + let(:params) { { parent: parent_work_item, adjacent_work_item: adjacent, relative_position: 'AFTER' } } + + it 'correctly sets new parent and position' do + expect(subject[:status]).to eq(:success) + expect(work_item.work_item_parent).to eq(parent_work_item) + expect(work_item.parent_link.relative_position).to be > adjacent_link.relative_position + end + + context 'when other hierarchy adjacent is provided' do + let_it_be(:other_hierarchy_adjacent) { create(:parent_link).work_item } + + let(:params) do + { parent: parent_work_item, adjacent_work_item: other_hierarchy_adjacent, relative_position: 'AFTER' } end + + it_behaves_like 'raises a WidgetError', described_class::UNRELATED_ADJACENT_HIERARCHY_ERROR end end end diff --git a/spec/services/work_items/widgets/labels_service/update_service_spec.rb b/spec/services/work_items/widgets/labels_service/update_service_spec.rb new file mode 100644 index 00000000000..17daec2b1ea --- /dev/null +++ b/spec/services/work_items/widgets/labels_service/update_service_spec.rb @@ -0,0 +1,48 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe WorkItems::Widgets::LabelsService::UpdateService, feature_category: :team_planning do + let_it_be(:group) { create(:group) } + let_it_be(:project) { create(:project, group: group) } + let_it_be(:label1) { create(:label, project: project) } + let_it_be(:label2) { create(:label, project: project) } + let_it_be(:label3) { create(:label, project: project) } + let_it_be(:current_user) { create(:user) } + + let(:work_item) { create(:work_item, project: project, labels: [label1, label2]) } + let(:widget) { 
work_item.widgets.find { |widget| widget.is_a?(WorkItems::Widgets::Labels) } } + let(:service) { described_class.new(widget: widget, current_user: current_user) } + + describe '#prepare_update_params' do + context 'when params are set' do + let(:params) { { add_label_ids: [label1.id], remove_label_ids: [label2.id] } } + + it "sets params correctly" do + expect(service.prepare_update_params(params: params)).to include( + { + add_label_ids: match_array([label1.id]), + remove_label_ids: match_array([label2.id]) + } + ) + end + end + + context 'when widget does not exist in new type' do + let(:params) { {} } + + before do + allow(service).to receive(:new_type_excludes_widget?).and_return(true) + end + + it "sets correct params to remove work item labels" do + expect(service.prepare_update_params(params: params)).to include( + { + remove_label_ids: match_array([label1.id, label2.id]), + add_label_ids: [] + } + ) + end + end + end +end diff --git a/spec/services/work_items/widgets/milestone_service/create_service_spec.rb b/spec/services/work_items/widgets/milestone_service/create_service_spec.rb deleted file mode 100644 index 64ab2421c74..00000000000 --- a/spec/services/work_items/widgets/milestone_service/create_service_spec.rb +++ /dev/null @@ -1,28 +0,0 @@ -# frozen_string_literal: true - -require 'spec_helper' - -RSpec.describe WorkItems::Widgets::MilestoneService::CreateService, feature_category: :portfolio_management do - let_it_be(:group) { create(:group) } - let_it_be(:project) { create(:project, :private, group: group) } - let_it_be(:project_milestone) { create(:milestone, project: project) } - let_it_be(:group_milestone) { create(:milestone, group: group) } - let_it_be(:guest) { create(:user) } - - let(:current_user) { guest } - let(:work_item) { build(:work_item, project: project, updated_at: 1.day.ago) } - let(:widget) { work_item.widgets.find { |widget| widget.is_a?(WorkItems::Widgets::Milestone) } } - let(:service) { described_class.new(widget: widget, 
current_user: current_user) } - - before do - project.add_guest(guest) - end - - describe '#before_create_callback' do - it_behaves_like "setting work item's milestone" do - subject(:execute_callback) do - service.before_create_callback(params: params) - end - end - end -end diff --git a/spec/services/work_items/widgets/milestone_service/update_service_spec.rb b/spec/services/work_items/widgets/milestone_service/update_service_spec.rb deleted file mode 100644 index c5bc2b12fc5..00000000000 --- a/spec/services/work_items/widgets/milestone_service/update_service_spec.rb +++ /dev/null @@ -1,58 +0,0 @@ -# frozen_string_literal: true - -require 'spec_helper' - -RSpec.describe WorkItems::Widgets::MilestoneService::UpdateService, feature_category: :portfolio_management do - let_it_be(:group) { create(:group) } - let_it_be(:project) { create(:project, :private, group: group) } - let_it_be(:project_milestone) { create(:milestone, project: project) } - let_it_be(:group_milestone) { create(:milestone, group: group) } - let_it_be(:reporter) { create(:user) } - let_it_be(:guest) { create(:user) } - - let(:work_item) { create(:work_item, project: project, updated_at: 1.day.ago) } - let(:widget) { work_item.widgets.find { |widget| widget.is_a?(WorkItems::Widgets::Milestone) } } - let(:service) { described_class.new(widget: widget, current_user: current_user) } - - before do - project.add_reporter(reporter) - project.add_guest(guest) - end - - describe '#before_update_callback' do - context 'when current user is not allowed to set work item metadata' do - let(:current_user) { guest } - let(:params) { { milestone_id: group_milestone.id } } - - it "does not set the work item's milestone" do - expect { service.before_update_callback(params: params) } - .to not_change(work_item, :milestone) - end - end - - context "when current user is allowed to set work item metadata" do - let(:current_user) { reporter } - - it_behaves_like "setting work item's milestone" do - 
subject(:execute_callback) do - service.before_update_callback(params: params) - end - end - - context 'when unsetting a milestone' do - let(:params) { { milestone_id: nil } } - - before do - work_item.update!(milestone: project_milestone) - end - - it "sets the work item's milestone" do - expect { service.before_update_callback(params: params) } - .to change(work_item, :milestone) - .from(project_milestone) - .to(nil) - end - end - end - end -end diff --git a/spec/services/work_items/widgets/start_and_due_date_service/update_service_spec.rb b/spec/services/work_items/widgets/start_and_due_date_service/update_service_spec.rb index a46e9ac9f7a..0196e7c2b02 100644 --- a/spec/services/work_items/widgets/start_and_due_date_service/update_service_spec.rb +++ b/spec/services/work_items/widgets/start_and_due_date_service/update_service_spec.rb @@ -12,10 +12,9 @@ RSpec.describe WorkItems::Widgets::StartAndDueDateService::UpdateService, featur describe '#before_update_callback' do let(:start_date) { Date.today } let(:due_date) { 1.week.from_now.to_date } + let(:service) { described_class.new(widget: widget, current_user: user) } - subject(:update_params) do - described_class.new(widget: widget, current_user: user).before_update_callback(params: params) - end + subject(:update_params) { service.before_update_callback(params: params) } context 'when start and due date params are present' do let(:params) { { start_date: Date.today, due_date: 1.week.from_now.to_date } } @@ -58,5 +57,22 @@ RSpec.describe WorkItems::Widgets::StartAndDueDateService::UpdateService, featur end end end + + context 'when widget does not exist in new type' do + let(:params) { {} } + + before do + allow(service).to receive(:new_type_excludes_widget?).and_return(true) + work_item.update!(start_date: start_date, due_date: due_date) + end + + it 'sets both dates to null' do + expect do + update_params + end.to change(work_item, :start_date).from(start_date).to(nil).and( + change(work_item, 
:due_date).from(due_date).to(nil) + ) + end + end end end diff --git a/spec/spec_helper.rb b/spec/spec_helper.rb index 3b50d821b4c..c3bddf1a6ae 100644 --- a/spec/spec_helper.rb +++ b/spec/spec_helper.rb @@ -172,6 +172,7 @@ RSpec.configure do |config| config.include RailsHelpers config.include SidekiqMiddleware config.include StubActionCableConnection, type: :channel + config.include StubMemberAccessLevel config.include StubSpamServices config.include SnowplowHelpers config.include RenderedHelpers @@ -179,6 +180,7 @@ RSpec.configure do |config| config.include DetailedErrorHelpers config.include RequestUrgencyMatcher, type: :controller config.include RequestUrgencyMatcher, type: :request + config.include Capybara::RSpecMatchers, type: :request config.include_context 'when rendered has no HTML escapes', type: :view @@ -356,71 +358,7 @@ RSpec.configure do |config| # The ongoing implementation of Admin Mode for API is behind the :admin_mode_for_api feature flag. # All API specs will be adapted continuously. The following list contains the specs that have not yet been adapted. # The feature flag is disabled for these specs as long as they are not yet adapted. 
- admin_mode_for_api_feature_flag_paths = %w[ - ./spec/requests/api/broadcast_messages_spec.rb - ./spec/requests/api/deploy_keys_spec.rb - ./spec/requests/api/deploy_tokens_spec.rb - ./spec/requests/api/groups_spec.rb - ./spec/requests/api/keys_spec.rb - ./spec/requests/api/merge_requests_spec.rb - ./spec/requests/api/namespaces_spec.rb - ./spec/requests/api/notes_spec.rb - ./spec/requests/api/personal_access_tokens/self_information_spec.rb - ./spec/requests/api/personal_access_tokens_spec.rb - ./spec/requests/api/project_export_spec.rb - ./spec/requests/api/project_repository_storage_moves_spec.rb - ./spec/requests/api/project_snapshots_spec.rb - ./spec/requests/api/project_snippets_spec.rb - ./spec/requests/api/projects_spec.rb - ./spec/requests/api/releases_spec.rb - ./spec/requests/api/sidekiq_metrics_spec.rb - ./spec/requests/api/snippet_repository_storage_moves_spec.rb - ./spec/requests/api/snippets_spec.rb - ./spec/requests/api/statistics_spec.rb - ./spec/requests/api/system_hooks_spec.rb - ./spec/requests/api/topics_spec.rb - ./spec/requests/api/usage_data_non_sql_metrics_spec.rb - ./spec/requests/api/usage_data_queries_spec.rb - ./spec/requests/api/users_spec.rb - ./spec/requests/api/v3/github_spec.rb - ./spec/support/shared_examples/requests/api/custom_attributes_shared_examples.rb - ./spec/support/shared_examples/requests/api/hooks_shared_examples.rb - ./spec/support/shared_examples/requests/api/notes_shared_examples.rb - ./spec/support/shared_examples/requests/api/pipelines/visibility_table_shared_examples.rb - ./spec/support/shared_examples/requests/api/repository_storage_moves_shared_examples.rb - ./spec/support/shared_examples/requests/api/snippets_shared_examples.rb - ./spec/support/shared_examples/requests/api/status_shared_examples.rb - ./spec/support/shared_examples/requests/clusters/certificate_based_clusters_feature_flag_shared_examples.rb - ./spec/support/shared_examples/requests/snippet_shared_examples.rb - 
./ee/spec/requests/api/audit_events_spec.rb - ./ee/spec/requests/api/ci/minutes_spec.rb - ./ee/spec/requests/api/elasticsearch_indexed_namespaces_spec.rb - ./ee/spec/requests/api/epics_spec.rb - ./ee/spec/requests/api/geo_nodes_spec.rb - ./ee/spec/requests/api/geo_replication_spec.rb - ./ee/spec/requests/api/geo_spec.rb - ./ee/spec/requests/api/group_push_rule_spec.rb - ./ee/spec/requests/api/group_repository_storage_moves_spec.rb - ./ee/spec/requests/api/groups_spec.rb - ./ee/spec/requests/api/internal/upcoming_reconciliations_spec.rb - ./ee/spec/requests/api/invitations_spec.rb - ./ee/spec/requests/api/license_spec.rb - ./ee/spec/requests/api/merge_request_approvals_spec.rb - ./ee/spec/requests/api/namespaces_spec.rb - ./ee/spec/requests/api/notes_spec.rb - ./ee/spec/requests/api/project_aliases_spec.rb - ./ee/spec/requests/api/project_approval_rules_spec.rb - ./ee/spec/requests/api/project_approval_settings_spec.rb - ./ee/spec/requests/api/project_approvals_spec.rb - ./ee/spec/requests/api/projects_spec.rb - ./ee/spec/requests/api/settings_spec.rb - ./ee/spec/requests/api/users_spec.rb - ./ee/spec/requests/api/vulnerabilities_spec.rb - ./ee/spec/requests/api/vulnerability_exports_spec.rb - ./ee/spec/requests/api/vulnerability_findings_spec.rb - ./ee/spec/requests/api/vulnerability_issue_links_spec.rb - ./ee/spec/support/shared_examples/requests/api/project_approval_rules_api_shared_examples.rb - ] + admin_mode_for_api_feature_flag_paths = %w[] if example.metadata[:file_path].start_with?(*admin_mode_for_api_feature_flag_paths) stub_feature_flags(admin_mode_for_api: false) diff --git a/spec/support/banzai/filter_timeout_shared_examples.rb b/spec/support/banzai/filter_timeout_shared_examples.rb deleted file mode 100644 index 1f2ebe6fef6..00000000000 --- a/spec/support/banzai/filter_timeout_shared_examples.rb +++ /dev/null @@ -1,37 +0,0 @@ -# frozen_string_literal: true - -# This shared_example requires the following variables: -# - text: The text to be run through 
the filter -# -# Usage: -# -# it_behaves_like 'filter timeout' do -# let(:text) { 'some text' } -# end -RSpec.shared_examples 'filter timeout' do - context 'when rendering takes too long' do - let_it_be(:project) { create(:project) } - let_it_be(:context) { { project: project } } - - it 'times out' do - stub_const("Banzai::Filter::TimeoutHtmlPipelineFilter::RENDER_TIMEOUT", 0.1) - allow_next_instance_of(described_class) do |instance| - allow(instance).to receive(:call_with_timeout) do - sleep(0.2) - text - end - end - - expect(Gitlab::RenderTimeout).to receive(:timeout).and_call_original - expect(Gitlab::ErrorTracking).to receive(:track_exception).with( - instance_of(Timeout::Error), - project_id: context[:project].id, - class_name: described_class.name.demodulize - ) - - result = filter(text) - - expect(result.to_html).to eq text - end - end -end diff --git a/spec/support/banzai/reference_filter_shared_examples.rb b/spec/support/banzai/reference_filter_shared_examples.rb deleted file mode 100644 index 0046d931e7d..00000000000 --- a/spec/support/banzai/reference_filter_shared_examples.rb +++ /dev/null @@ -1,88 +0,0 @@ -# frozen_string_literal: true - -# Specs for reference links containing HTML. 
-# -# Requires a reference: -# let(:reference) { '#42' } -RSpec.shared_examples 'a reference containing an element node' do - let(:inner_html) { 'element node inside' } - let(:reference_with_element) { %{#{inner_html}} } - - it 'does not escape inner html' do - doc = reference_filter(reference_with_element) - expect(doc.children.first.inner_html).to eq(inner_html) - end -end - -# Requires a reference, subject and subject_name: -# subject { create(:user) } -# let(:reference) { subject.to_reference } -# let(:subject_name) { 'user' } -RSpec.shared_examples 'user reference or project reference' do - shared_examples 'it contains a data- attribute' do - it 'includes a data- attribute' do - doc = reference_filter("Hey #{reference}") - link = doc.css('a').first - - expect(link).to have_attribute("data-#{subject_name}") - expect(link.attr("data-#{subject_name}")).to eq subject.id.to_s - end - end - - context 'mentioning a resource' do - it_behaves_like 'a reference containing an element node' - it_behaves_like 'it contains a data- attribute' - - it "links to a resource" do - doc = reference_filter("Hey #{reference}") - expect(doc.css('a').first.attr('href')).to eq urls.send("#{subject_name}_url", subject) - end - - it 'links to a resource with a period' do - subject = create(subject_name.to_sym, name: 'alphA.Beta') - - doc = reference_filter("Hey #{get_reference(subject)}") - expect(doc.css('a').length).to eq 1 - end - - it 'links to a resource with an underscore' do - subject = create(subject_name.to_sym, name: 'ping_pong_king') - - doc = reference_filter("Hey #{get_reference(subject)}") - expect(doc.css('a').length).to eq 1 - end - - it 'links to a resource with different case-sensitivity' do - subject = create(subject_name.to_sym, name: 'RescueRanger') - reference = get_reference(subject) - - doc = reference_filter("Hey #{reference.upcase}") - expect(doc.css('a').length).to eq 1 - expect(doc.css('a').text).to eq(reference) - end - end - - it 'supports an :only_path 
context' do - doc = reference_filter("Hey #{reference}", only_path: true) - link = doc.css('a').first.attr('href') - - expect(link).not_to match %r(https?://) - expect(link).to eq urls.send "#{subject_name}_path", subject - end - - context 'referencing a resource in a link href' do - let(:reference) { %Q{Some text} } - - it_behaves_like 'it contains a data- attribute' - - it 'links to the resource' do - doc = reference_filter("Hey #{reference}") - expect(doc.css('a').first.attr('href')).to eq urls.send "#{subject_name}_url", subject - end - - it 'links with adjacent text' do - doc = reference_filter("Mention me (#{reference}.)") - expect(doc.to_html).to match(%r{\(Some text\.\)}) - end - end -end diff --git a/spec/support/capybara.rb b/spec/support/capybara.rb index fe9bff827dc..f06a8cfe937 100644 --- a/spec/support/capybara.rb +++ b/spec/support/capybara.rb @@ -7,7 +7,7 @@ require 'capybara-screenshot/rspec' require 'selenium-webdriver' # Give CI some extra time -timeout = ENV['CI'] || ENV['CI_SERVER'] ? 30 : 10 +timeout = ENV['CI'] || ENV['CI_SERVER'] ? 
45 : 10 # Support running Capybara on a specific port to allow saving commonly used pages Capybara.server_port = ENV['CAPYBARA_PORT'] if ENV['CAPYBARA_PORT'] diff --git a/spec/support/chunked_io/chunked_io_helpers.rb b/spec/support/chunked_io/chunked_io_helpers.rb deleted file mode 100644 index 278f577f3cb..00000000000 --- a/spec/support/chunked_io/chunked_io_helpers.rb +++ /dev/null @@ -1,13 +0,0 @@ -# frozen_string_literal: true - -module ChunkedIOHelpers - def sample_trace_raw - @sample_trace_raw ||= File.read(expand_fixture_path('trace/sample_trace')) - .force_encoding(Encoding::BINARY) - end - - def stub_buffer_size(size) - stub_const('Ci::BuildTraceChunk::CHUNK_SIZE', size) - stub_const('Gitlab::Ci::Trace::ChunkedIO::CHUNK_SIZE', size) - end -end diff --git a/spec/support/controllers/project_import_rate_limiter_shared_examples.rb b/spec/support/controllers/project_import_rate_limiter_shared_examples.rb deleted file mode 100644 index 66d753a4010..00000000000 --- a/spec/support/controllers/project_import_rate_limiter_shared_examples.rb +++ /dev/null @@ -1,22 +0,0 @@ -# frozen_string_literal: true - -RSpec.shared_examples 'project import rate limiter' do - let(:user) { create(:user) } - - before do - sign_in(user) - end - - context 'when limit exceeds' do - before do - allow(Gitlab::ApplicationRateLimiter).to receive(:throttled?).and_return(true) - end - - it 'notifies and redirects user' do - post :create, params: {} - - expect(flash[:alert]).to eq('This endpoint has been requested too many times. 
Try again later.') - expect(response).to have_gitlab_http_status(:found) - end - end -end diff --git a/spec/support/cycle_analytics_helpers/test_generation.rb b/spec/support/cycle_analytics_helpers/test_generation.rb deleted file mode 100644 index 816caf5f775..00000000000 --- a/spec/support/cycle_analytics_helpers/test_generation.rb +++ /dev/null @@ -1,160 +0,0 @@ -# frozen_string_literal: true - -# rubocop:disable Metrics/AbcSize - -# Note: The ABC size is large here because we have a method generating test cases with -# multiple nested contexts. This shouldn't count as a violation. -module CycleAnalyticsHelpers - module TestGeneration - # Generate the most common set of specs that all value stream analytics phases need to have. - # - # Arguments: - # - # phase: Which phase are we testing? Will call `CycleAnalytics.new.send(phase)` for the final assertion - # data_fn: A function that returns a hash, constituting initial data for the test case - # start_time_conditions: An array of `conditions`. Each condition is an tuple of `condition_name` and `condition_fn`. `condition_fn` is called with - # `context` (no lexical scope, so need to do `context.create` for factories, for example) and `data` (from the `data_fn`). - # Each `condition_fn` is expected to implement a case which consitutes the start of the given value stream analytics phase. - # end_time_conditions: An array of `conditions`. Each condition is an tuple of `condition_name` and `condition_fn`. `condition_fn` is called with - # `context` (no lexical scope, so need to do `context.create` for factories, for example) and `data` (from the `data_fn`). - # Each `condition_fn` is expected to implement a case which consitutes the end of the given value stream analytics phase. - # before_end_fn: This function is run before calling the end time conditions. Used for setup that needs to be run between the start and end conditions. - # post_fn: Code that needs to be run after running the end time conditions. 
- - def generate_cycle_analytics_spec(phase:, data_fn:, start_time_conditions:, end_time_conditions:, before_end_fn: nil, post_fn: nil) - combinations_of_start_time_conditions = (1..start_time_conditions.size).flat_map { |size| start_time_conditions.combination(size).to_a } - combinations_of_end_time_conditions = (1..end_time_conditions.size).flat_map { |size| end_time_conditions.combination(size).to_a } - - scenarios = combinations_of_start_time_conditions.product(combinations_of_end_time_conditions) - scenarios.each do |start_time_conditions, end_time_conditions| - let_it_be(:other_project) { create(:project, :repository) } - - before do - other_project.add_developer(self.user) - end - - context "start condition: #{start_time_conditions.map(&:first).to_sentence}" do - context "end condition: #{end_time_conditions.map(&:first).to_sentence}" do - it "finds the median of available durations between the two conditions", :sidekiq_might_not_need_inline do - time_differences = Array.new(5) do |index| - data = data_fn[self] - start_time = (index * 10).days.from_now - end_time = start_time + rand(1..5).days - - start_time_conditions.each do |condition_name, condition_fn| - travel_to(start_time) { condition_fn[self, data] } - end - - # Run `before_end_fn` at the midpoint between `start_time` and `end_time` - travel_to(start_time + (end_time - start_time) / 2) { before_end_fn[self, data] } if before_end_fn - - end_time_conditions.each do |condition_name, condition_fn| - travel_to(end_time) { condition_fn[self, data] } - end - - travel_to(end_time + 1.day) { post_fn[self, data] } if post_fn - - end_time - start_time - end - - median_time_difference = time_differences.sort[2] - expect(subject[phase].project_median).to be_within(5).of(median_time_difference) - end - - context "when the data belongs to another project" do - it "returns nil" do - # Use a stub to "trick" the data/condition functions - # into using another project. 
This saves us from having to - # define separate data/condition functions for this particular - # test case. - allow(self).to receive(:project) { other_project } - - data = data_fn[self] - start_time = Time.now - end_time = rand(1..10).days.from_now - - start_time_conditions.each do |condition_name, condition_fn| - travel_to(start_time) { condition_fn[self, data] } - end - - end_time_conditions.each do |condition_name, condition_fn| - travel_to(end_time) { condition_fn[self, data] } - end - - travel_to(end_time + 1.day) { post_fn[self, data] } if post_fn - - # Turn off the stub before checking assertions - allow(self).to receive(:project).and_call_original - - expect(subject[phase].project_median).to be_nil - end - end - - context "when the end condition happens before the start condition" do - it 'returns nil' do - data = data_fn[self] - start_time = Time.now - end_time = start_time + rand(1..5).days - - # Run `before_end_fn` at the midpoint between `start_time` and `end_time` - travel_to(start_time + (end_time - start_time) / 2) { before_end_fn[self, data] } if before_end_fn - - end_time_conditions.each do |condition_name, condition_fn| - travel_to(start_time) { condition_fn[self, data] } - end - - start_time_conditions.each do |condition_name, condition_fn| - travel_to(end_time) { condition_fn[self, data] } - end - - travel_to(end_time + 1.day) { post_fn[self, data] } if post_fn - - expect(subject[phase].project_median).to be_nil - end - end - end - end - - context "start condition NOT PRESENT: #{start_time_conditions.map(&:first).to_sentence}" do - context "end condition: #{end_time_conditions.map(&:first).to_sentence}" do - it "returns nil" do - data = data_fn[self] - end_time = rand(1..10).days.from_now - - end_time_conditions.each_with_index do |(_condition_name, condition_fn), index| - travel_to(end_time + index.days) { condition_fn[self, data] } - end - - travel_to(end_time + 1.day) { post_fn[self, data] } if post_fn - - 
expect(subject[phase].project_median).to be_nil - end - end - end - - context "start condition: #{start_time_conditions.map(&:first).to_sentence}" do - context "end condition NOT PRESENT: #{end_time_conditions.map(&:first).to_sentence}" do - it "returns nil" do - data = data_fn[self] - start_time = Time.now - - start_time_conditions.each do |condition_name, condition_fn| - travel_to(start_time) { condition_fn[self, data] } - end - - post_fn[self, data] if post_fn - - expect(subject[phase].project_median).to be_nil - end - end - end - end - - context "when none of the start / end conditions are matched" do - it "returns nil" do - expect(subject[phase].project_median).to be_nil - end - end - end - end -end diff --git a/spec/support/finder_collection_allowlist.yml b/spec/support/finder_collection_allowlist.yml index 750295e16c4..25084ece58d 100644 --- a/spec/support/finder_collection_allowlist.yml +++ b/spec/support/finder_collection_allowlist.yml @@ -24,7 +24,7 @@ - Ci::CommitStatusesFinder - Ci::DailyBuildGroupReportResultsFinder - ClusterAncestorsFinder -- Clusters::AgentAuthorizationsFinder +- Clusters::Agents::Authorizations::CiAccess::Finder - Clusters::KubernetesNamespaceFinder - ComplianceManagement::MergeRequests::ComplianceViolationsFinder - ContainerRepositoriesFinder @@ -69,3 +69,4 @@ - UploaderFinder - UserGroupNotificationSettingsFinder - UserGroupsCounter +- DataTransfer::MockedTransferFinder # Can be removed when https://gitlab.com/gitlab-org/gitlab/-/issues/397693 is closed diff --git a/spec/support/gitlab/usage/metrics_instrumentation_shared_examples.rb b/spec/support/gitlab/usage/metrics_instrumentation_shared_examples.rb deleted file mode 100644 index cef9860fe25..00000000000 --- a/spec/support/gitlab/usage/metrics_instrumentation_shared_examples.rb +++ /dev/null @@ -1,44 +0,0 @@ -# frozen_string_literal: true - -RSpec.shared_examples 'a correct instrumented metric value' do |params| - let(:time_frame) { params[:time_frame] } - let(:options) { 
params[:options] } - let(:metric) { described_class.new(time_frame: time_frame, options: options) } - - around do |example| - freeze_time { example.run } - end - - before do - if metric.respond_to?(:relation, true) && metric.send(:relation).respond_to?(:connection) - allow(metric.send(:relation).connection).to receive(:transaction_open?).and_return(false) - end - end - - it 'has correct value' do - expect(metric.value).to eq(expected_value) - end -end - -RSpec.shared_examples 'a correct instrumented metric query' do |params| - let(:time_frame) { params[:time_frame] } - let(:options) { params[:options] } - let(:metric) { described_class.new(time_frame: time_frame, options: options) } - - around do |example| - freeze_time { example.run } - end - - before do - allow(metric.send(:relation).connection).to receive(:transaction_open?).and_return(false) - end - - it 'has correct generate query' do - expect(metric.to_sql).to eq(expected_query) - end -end - -RSpec.shared_examples 'a correct instrumented metric value and query' do |params| - it_behaves_like 'a correct instrumented metric value', params - it_behaves_like 'a correct instrumented metric query', params -end diff --git a/spec/support/google_api/cloud_platform_helpers.rb b/spec/support/google_api/cloud_platform_helpers.rb deleted file mode 100644 index b9752577c76..00000000000 --- a/spec/support/google_api/cloud_platform_helpers.rb +++ /dev/null @@ -1,166 +0,0 @@ -# frozen_string_literal: true - -module GoogleApi - module CloudPlatformHelpers - def stub_google_api_validate_token - request.session[GoogleApi::CloudPlatform::Client.session_key_for_token] = 'token' - request.session[GoogleApi::CloudPlatform::Client.session_key_for_expires_at] = 1.hour.since.to_i.to_s - end - - def stub_google_api_expired_token - request.session[GoogleApi::CloudPlatform::Client.session_key_for_token] = 'token' - request.session[GoogleApi::CloudPlatform::Client.session_key_for_expires_at] = 1.hour.ago.to_i.to_s - end - - def 
stub_cloud_platform_projects_list(options) - WebMock.stub_request(:get, cloud_platform_projects_list_url) - .to_return(cloud_platform_response(cloud_platform_projects_body(options))) - end - - def stub_cloud_platform_projects_get_billing_info(project_id, billing_enabled) - WebMock.stub_request(:get, cloud_platform_projects_get_billing_info_url(project_id)) - .to_return(cloud_platform_response(cloud_platform_projects_billing_info_body(project_id, billing_enabled))) - end - - def stub_cloud_platform_get_zone_cluster(project_id, zone, cluster_id, options = {}) - WebMock.stub_request(:get, cloud_platform_get_zone_cluster_url(project_id, zone, cluster_id)) - .to_return(cloud_platform_response(cloud_platform_cluster_body(options))) - end - - def stub_cloud_platform_get_zone_cluster_error(project_id, zone, cluster_id) - WebMock.stub_request(:get, cloud_platform_get_zone_cluster_url(project_id, zone, cluster_id)) - .to_return(status: [500, "Internal Server Error"]) - end - - def stub_cloud_platform_create_cluster(project_id, zone, options = {}) - WebMock.stub_request(:post, cloud_platform_create_cluster_url(project_id, zone)) - .to_return(cloud_platform_response(cloud_platform_operation_body(options))) - end - - def stub_cloud_platform_create_cluster_error(project_id, zone) - WebMock.stub_request(:post, cloud_platform_create_cluster_url(project_id, zone)) - .to_return(status: [500, "Internal Server Error"]) - end - - def stub_cloud_platform_get_zone_operation(project_id, zone, operation_id, options = {}) - WebMock.stub_request(:get, cloud_platform_get_zone_operation_url(project_id, zone, operation_id)) - .to_return(cloud_platform_response(cloud_platform_operation_body(options))) - end - - def stub_cloud_platform_get_zone_operation_error(project_id, zone, operation_id) - WebMock.stub_request(:get, cloud_platform_get_zone_operation_url(project_id, zone, operation_id)) - .to_return(status: [500, "Internal Server Error"]) - end - - def cloud_platform_projects_list_url - 
"https://cloudresourcemanager.googleapis.com/v1/projects" - end - - def cloud_platform_projects_get_billing_info_url(project_id) - "https://cloudbilling.googleapis.com/v1/projects/#{project_id}/billingInfo" - end - - def cloud_platform_get_zone_cluster_url(project_id, zone, cluster_id) - "https://container.googleapis.com/v1/projects/#{project_id}/zones/#{zone}/clusters/#{cluster_id}" - end - - def cloud_platform_create_cluster_url(project_id, zone) - "https://container.googleapis.com/v1beta1/projects/#{project_id}/zones/#{zone}/clusters" - end - - def cloud_platform_get_zone_operation_url(project_id, zone, operation_id) - "https://container.googleapis.com/v1/projects/#{project_id}/zones/#{zone}/operations/#{operation_id}" - end - - def cloud_platform_response(body) - { status: 200, headers: { 'Content-Type' => 'application/json' }, body: body.to_json } - end - - def load_sample_cert - pem_file = File.expand_path(Rails.root.join('spec/fixtures/clusters/sample_cert.pem')) - Base64.encode64(File.read(pem_file)) - end - - ## - # gcloud container clusters create - # https://cloud.google.com/kubernetes-engine/docs/reference/rest/v1/projects.zones.clusters/create - # rubocop:disable Metrics/CyclomaticComplexity - # rubocop:disable Metrics/PerceivedComplexity - def cloud_platform_cluster_body(options) - { - "name": options[:name] || 'string', - "description": options[:description] || 'string', - "initialNodeCount": options[:initialNodeCount] || 'number', - "masterAuth": { - "username": options[:username] || 'string', - "password": options[:password] || 'string', - "clusterCaCertificate": options[:clusterCaCertificate] || load_sample_cert, - "clientCertificate": options[:clientCertificate] || 'string', - "clientKey": options[:clientKey] || 'string' - }, - "loggingService": options[:loggingService] || 'string', - "monitoringService": options[:monitoringService] || 'string', - "network": options[:network] || 'string', - "clusterIpv4Cidr": options[:clusterIpv4Cidr] || 
'string', - "subnetwork": options[:subnetwork] || 'string', - "enableKubernetesAlpha": options[:enableKubernetesAlpha] || 'boolean', - "labelFingerprint": options[:labelFingerprint] || 'string', - "selfLink": options[:selfLink] || 'string', - "zone": options[:zone] || 'string', - "endpoint": options[:endpoint] || 'string', - "initialClusterVersion": options[:initialClusterVersion] || 'string', - "currentMasterVersion": options[:currentMasterVersion] || 'string', - "currentNodeVersion": options[:currentNodeVersion] || 'string', - "createTime": options[:createTime] || 'string', - "status": options[:status] || 'RUNNING', - "statusMessage": options[:statusMessage] || 'string', - "nodeIpv4CidrSize": options[:nodeIpv4CidrSize] || 'number', - "servicesIpv4Cidr": options[:servicesIpv4Cidr] || 'string', - "currentNodeCount": options[:currentNodeCount] || 'number', - "expireTime": options[:expireTime] || 'string' - } - end - - def cloud_platform_operation_body(options) - { - "name": options[:name] || 'operation-1234567891234-1234567', - "zone": options[:zone] || 'us-central1-a', - "operationType": options[:operationType] || 'CREATE_CLUSTER', - "status": options[:status] || 'PENDING', - "detail": options[:detail] || 'detail', - "statusMessage": options[:statusMessage] || '', - "selfLink": options[:selfLink] || 'https://container.googleapis.com/v1/projects/123456789101/zones/us-central1-a/operations/operation-1234567891234-1234567', - "targetLink": options[:targetLink] || 'https://container.googleapis.com/v1/projects/123456789101/zones/us-central1-a/clusters/test-cluster', - "startTime": options[:startTime] || '2017-09-13T16:49:13.055601589Z', - "endTime": options[:endTime] || '' - } - end - - def cloud_platform_projects_body(options) - { - "projects": [ - { - "projectNumber": options[:project_number] || "1234", - "projectId": options[:project_id] || "test-project-1234", - "lifecycleState": "ACTIVE", - "name": options[:name] || "test-project", - "createTime": 
"2017-12-16T01:48:29.129Z", - "parent": { - "type": "organization", - "id": "12345" - } - } - ] - } - end - - def cloud_platform_projects_billing_info_body(project_id, billing_enabled) - { - "name": "projects/#{project_id}/billingInfo", - "projectId": project_id.to_s, - "billingAccountName": "account-name", - "billingEnabled": billing_enabled - } - end - end -end diff --git a/spec/support/graphql/arguments.rb b/spec/support/graphql/arguments.rb deleted file mode 100644 index 478a460a0f6..00000000000 --- a/spec/support/graphql/arguments.rb +++ /dev/null @@ -1,71 +0,0 @@ -# frozen_string_literal: true - -module Graphql - class Arguments - delegate :blank?, :empty?, to: :to_h - - def initialize(values) - @values = values - end - - def to_h - @values - end - - def ==(other) - to_h == other&.to_h - end - - alias_method :eql, :== - - def to_s - return '' if empty? - - @values.map do |name, value| - value_str = as_graphql_literal(value) - - "#{GraphqlHelpers.fieldnamerize(name.to_s)}: #{value_str}" - end.join(", ") - end - - def as_graphql_literal(value) - self.class.as_graphql_literal(value) - end - - # Transform values to GraphQL literal arguments. - # Use symbol for Enum values - def self.as_graphql_literal(value) - case value - when ::Graphql::Arguments then "{#{value}}" - when Array then "[#{value.map { |v| as_graphql_literal(v) }.join(',')}]" - when Hash then "{#{new(value)}}" - when Integer, Float, Symbol then value.to_s - when String, GlobalID then "\"#{value.to_s.gsub(/"/, '\\"')}\"" - when Time, Date then "\"#{value.iso8601}\"" - when NilClass then 'null' - when true then 'true' - when false then 'false' - else - value.to_graphql_value - end - rescue NoMethodError - raise ArgumentError, "Cannot represent #{value} (instance of #{value.class}) as GraphQL literal" - end - - def merge(other) - self.class.new(@values.merge(other.to_h)) - end - - def +(other) - if blank? - other - elsif other.blank? 
- self - elsif other.is_a?(String) - [to_s, other].compact.join(', ') - else - merge(other) - end - end - end -end diff --git a/spec/support/graphql/fake_query_type.rb b/spec/support/graphql/fake_query_type.rb deleted file mode 100644 index 18cf2cf3e82..00000000000 --- a/spec/support/graphql/fake_query_type.rb +++ /dev/null @@ -1,22 +0,0 @@ -# frozen_string_literal: true -require 'graphql' - -module Graphql - class FakeQueryType < ::GraphQL::Schema::Object - graphql_name 'FakeQuery' - - field :hello_world, String, null: true do - argument :message, String, required: false - end - - field :breaking_field, String, null: true - - def hello_world(message: "world") - "Hello #{message}!" - end - - def breaking_field - raise "This field is supposed to break" - end - end -end diff --git a/spec/support/graphql/fake_tracer.rb b/spec/support/graphql/fake_tracer.rb deleted file mode 100644 index 58688c9abd0..00000000000 --- a/spec/support/graphql/fake_tracer.rb +++ /dev/null @@ -1,15 +0,0 @@ -# frozen_string_literal: true - -module Graphql - class FakeTracer - def initialize(trace_callback) - @trace_callback = trace_callback - end - - def trace(...) - @trace_callback.call(...) - - yield - end - end -end diff --git a/spec/support/graphql/field_inspection.rb b/spec/support/graphql/field_inspection.rb deleted file mode 100644 index 8730f82b893..00000000000 --- a/spec/support/graphql/field_inspection.rb +++ /dev/null @@ -1,35 +0,0 @@ -# frozen_string_literal: true - -module Graphql - class FieldInspection - def initialize(field) - @field = field - end - - def nested_fields? - !scalar? && !enum? - end - - def scalar? - type.kind.scalar? - end - - def enum? - type.kind.enum? - end - - def type - @type ||= begin - field_type = @field.type - - # The type could be nested. For example `[GraphQL::Types::String]`: - # - List - # - String! 
- # - String - field_type = field_type.of_type while field_type.respond_to?(:of_type) - - field_type - end - end - end -end diff --git a/spec/support/graphql/field_selection.rb b/spec/support/graphql/field_selection.rb deleted file mode 100644 index 432340cfdb5..00000000000 --- a/spec/support/graphql/field_selection.rb +++ /dev/null @@ -1,69 +0,0 @@ -# frozen_string_literal: true - -module Graphql - class FieldSelection - delegate :empty?, :blank?, :to_h, to: :selection - delegate :size, to: :paths - - attr_reader :selection - - def initialize(selection = {}) - @selection = selection.to_h - end - - def to_s - serialize_field_selection(selection) - end - - def paths - selection.flat_map do |field, subselection| - paths_in([field], subselection) - end - end - - private - - def paths_in(path, leaves) - return [path] if leaves.nil? - - leaves.to_a.flat_map do |k, v| - paths_in([k], v).map { |tail| path + tail } - end - end - - def serialize_field_selection(hash, level = 0) - indent = ' ' * level - - hash.map do |field, subselection| - if subselection.nil? - "#{indent}#{field}" - else - subfields = serialize_field_selection(subselection, level + 1) - "#{indent}#{field} {\n#{subfields}\n#{indent}}" - end - end.join("\n") - end - - NO_SKIP = ->(_name, _field) { false } - - def self.select_fields(type, skip = NO_SKIP, max_depth = 3) - return new if max_depth <= 0 || !type.kind.fields? - - new(type.fields.flat_map do |name, field| - next [] if skip[name, field] - - inspected = ::Graphql::FieldInspection.new(field) - singular_field_type = inspected.type - - if inspected.nested_fields? - subselection = select_fields(singular_field_type, skip, max_depth - 1) - next [] if subselection.empty? 
- - [[name, subselection.to_h]] - else - [[name, nil]] - end - end) - end - end -end diff --git a/spec/support/graphql/resolver_factories.rb b/spec/support/graphql/resolver_factories.rb deleted file mode 100644 index 76df4b58943..00000000000 --- a/spec/support/graphql/resolver_factories.rb +++ /dev/null @@ -1,40 +0,0 @@ -# frozen_string_literal: true - -module Graphql - module ResolverFactories - def new_resolver(resolved_value = 'Resolved value', method: :resolve) - case method - when :resolve - simple_resolver(resolved_value) - when :find_object - find_object_resolver(resolved_value) - else - raise "Cannot build a resolver for #{method}" - end - end - - private - - def simple_resolver(resolved_value = 'Resolved value', base_class: Resolvers::BaseResolver) - Class.new(base_class) do - define_method :resolve do |**_args| - resolved_value - end - end - end - - def find_object_resolver(resolved_value = 'Found object') - Class.new(Resolvers::BaseResolver) do - include ::Gitlab::Graphql::Authorize::AuthorizeResource - - def resolve(...) - authorized_find!(...) - end - - define_method :find_object do |**_args| - resolved_value - end - end - end - end -end diff --git a/spec/support/graphql/subscriptions/action_cable/mock_action_cable.rb b/spec/support/graphql/subscriptions/action_cable/mock_action_cable.rb deleted file mode 100644 index 5467564a79e..00000000000 --- a/spec/support/graphql/subscriptions/action_cable/mock_action_cable.rb +++ /dev/null @@ -1,100 +0,0 @@ -# frozen_string_literal: true - -# A stub implementation of ActionCable. -# Any methods to support the mock backend have `mock` in the name. 
-module Graphql - module Subscriptions - module ActionCable - class MockActionCable - class MockChannel - def initialize - @mock_broadcasted_messages = [] - end - - attr_reader :mock_broadcasted_messages - - def stream_from(stream_name, coder: nil, &block) - # Rails uses `coder`, we don't - block ||= ->(msg) { @mock_broadcasted_messages << msg } - MockActionCable.mock_stream_for(stream_name).add_mock_channel(self, block) - end - end - - class MockStream - def initialize - @mock_channels = {} - end - - def add_mock_channel(channel, handler) - @mock_channels[channel] = handler - end - - def mock_broadcast(message) - @mock_channels.each do |channel, handler| - handler && handler.call(message) - end - end - end - - class << self - def clear_mocks - @mock_streams = {} - end - - def server - self - end - - def broadcast(stream_name, message) - stream = @mock_streams[stream_name] - stream && stream.mock_broadcast(message) - end - - def mock_stream_for(stream_name) - @mock_streams[stream_name] ||= MockStream.new - end - - def get_mock_channel - MockChannel.new - end - - def mock_stream_names - @mock_streams.keys - end - end - end - - class MockSchema < GraphQL::Schema - class << self - def find_by_gid(gid) - return unless gid - - if gid.model_class < ApplicationRecord - Gitlab::Graphql::Loaders::BatchModelLoader.new(gid.model_class, gid.model_id).find - elsif gid.model_class.respond_to?(:lazy_find) - gid.model_class.lazy_find(gid.model_id) - else - gid.find - end - end - - def id_from_object(object, _type = nil, _ctx = nil) - unless object.respond_to?(:to_global_id) - # This is an error in our schema and needs to be solved. So raise a - # more meaningful error message - raise "#{object} does not implement `to_global_id`. 
" \ - "Include `GlobalID::Identification` into `#{object.class}" - end - - object.to_global_id - end - end - - query(::Types::QueryType) - subscription(::Types::SubscriptionType) - - use GraphQL::Subscriptions::ActionCableSubscriptions, action_cable: MockActionCable, action_cable_coder: JSON - end - end - end -end diff --git a/spec/support/graphql/subscriptions/action_cable/mock_gitlab_schema.rb b/spec/support/graphql/subscriptions/action_cable/mock_gitlab_schema.rb deleted file mode 100644 index cd5d78cc78b..00000000000 --- a/spec/support/graphql/subscriptions/action_cable/mock_gitlab_schema.rb +++ /dev/null @@ -1,41 +0,0 @@ -# frozen_string_literal: true - -# A stub implementation of ActionCable. -# Any methods to support the mock backend have `mock` in the name. -module Graphql - module Subscriptions - module ActionCable - class MockGitlabSchema < GraphQL::Schema - class << self - def find_by_gid(gid) - return unless gid - - if gid.model_class < ApplicationRecord - Gitlab::Graphql::Loaders::BatchModelLoader.new(gid.model_class, gid.model_id).find - elsif gid.model_class.respond_to?(:lazy_find) - gid.model_class.lazy_find(gid.model_id) - else - gid.find - end - end - - def id_from_object(object, _type = nil, _ctx = nil) - unless object.respond_to?(:to_global_id) - # This is an error in our schema and needs to be solved. So raise a - # more meaningful error message - raise "#{object} does not implement `to_global_id`. 
" \ - "Include `GlobalID::Identification` into `#{object.class}" - end - - object.to_global_id - end - end - - query(::Types::QueryType) - subscription(::Types::SubscriptionType) - - use GraphQL::Subscriptions::ActionCableSubscriptions, action_cable: MockActionCable, action_cable_coder: JSON - end - end - end -end diff --git a/spec/support/graphql/subscriptions/notes/helper.rb b/spec/support/graphql/subscriptions/notes/helper.rb deleted file mode 100644 index 9a552f9879e..00000000000 --- a/spec/support/graphql/subscriptions/notes/helper.rb +++ /dev/null @@ -1,94 +0,0 @@ -# frozen_string_literal: true - -module Graphql - module Subscriptions - module Notes - module Helper - def subscription_response - subscription_channel = subscribe - yield - subscription_channel.mock_broadcasted_messages.first - end - - def notes_subscription(name, noteable, current_user) - mock_channel = Graphql::Subscriptions::ActionCable::MockActionCable.get_mock_channel - - query = case name - when 'workItemNoteDeleted' - note_deleted_subscription_query(name, noteable) - when 'workItemNoteUpdated' - note_updated_subscription_query(name, noteable) - when 'workItemNoteCreated' - note_created_subscription_query(name, noteable) - else - raise "Subscription query unknown: #{name}" - end - - GitlabSchema.execute(query, context: { current_user: current_user, channel: mock_channel }) - - mock_channel - end - - def note_subscription(name, noteable, current_user) - mock_channel = Graphql::Subscriptions::ActionCable::MockActionCable.get_mock_channel - - query = <<~SUBSCRIPTION - subscription { - #{name}(noteableId: \"#{noteable.to_gid}\") { - id - body - } - } - SUBSCRIPTION - - GitlabSchema.execute(query, context: { current_user: current_user, channel: mock_channel }) - - mock_channel - end - - private - - def note_deleted_subscription_query(name, noteable) - <<~SUBSCRIPTION - subscription { - #{name}(noteableId: \"#{noteable.to_gid}\") { - id - discussionId - lastDiscussionNote - } - } - SUBSCRIPTION - 
end - - def note_created_subscription_query(name, noteable) - <<~SUBSCRIPTION - subscription { - #{name}(noteableId: \"#{noteable.to_gid}\") { - id - discussion { - id - notes { - nodes { - id - } - } - } - } - } - SUBSCRIPTION - end - - def note_updated_subscription_query(name, noteable) - <<~SUBSCRIPTION - subscription { - #{name}(noteableId: \"#{noteable.to_gid}\") { - id - body - } - } - SUBSCRIPTION - end - end - end - end -end diff --git a/spec/support/graphql/var.rb b/spec/support/graphql/var.rb deleted file mode 100644 index 4f2c774e898..00000000000 --- a/spec/support/graphql/var.rb +++ /dev/null @@ -1,59 +0,0 @@ -# frozen_string_literal: true - -module Graphql - # Helper to pass variables around generated queries. - # - # e.g.: - # first = var('Int') - # after = var('String') - # - # query = with_signature( - # [first, after], - # query_graphql_path([ - # [:project, { full_path: project.full_path }], - # [:issues, { after: after, first: first }] - # :nodes - # ], all_graphql_fields_for('Issue')) - # ) - # - # post_graphql(query, variables: [first.with(2), after.with(some_cursor)]) - # - class Var - attr_reader :name, :type - attr_accessor :value - - def initialize(name, type) - @name = name - @type = type - end - - def sig - "#{to_graphql_value}: #{type}" - end - - def to_graphql_value - "$#{name}" - end - - # We return a new object so that running the same query twice with - # different values does not risk re-using the value - # - # e.g. - # - # x = var('Int') - # expect { post_graphql(query, variables: x) } - # .to issue_same_number_of_queries_as { post_graphql(query, variables: x.with(1)) } - # - # Here we post the `x` variable once with the value set to 1, and once with - # the value set to `nil`. 
- def with(value) - copy = Var.new(name, type) - copy.value = value - copy - end - - def to_h - { name => value } - end - end -end diff --git a/spec/support/helpers/api_internal_base_helpers.rb b/spec/support/helpers/api_internal_base_helpers.rb index e89716571f9..8299821a699 100644 --- a/spec/support/helpers/api_internal_base_helpers.rb +++ b/spec/support/helpers/api_internal_base_helpers.rb @@ -44,12 +44,14 @@ module APIInternalBaseHelpers end def push(key, container, protocol = 'ssh', env: nil, changes: nil) - push_with_path(key, - full_path: full_path_for(container), - gl_repository: gl_repository_for(container), - protocol: protocol, - env: env, - changes: changes) + push_with_path( + key, + full_path: full_path_for(container), + gl_repository: gl_repository_for(container), + protocol: protocol, + env: env, + changes: changes + ) end def push_with_path(key, full_path:, gl_repository: nil, protocol: 'ssh', env: nil, changes: nil) diff --git a/spec/support/helpers/board_helpers.rb b/spec/support/helpers/board_helpers.rb index d7277ba9a20..c7a7993c52b 100644 --- a/spec/support/helpers/board_helpers.rb +++ b/spec/support/helpers/board_helpers.rb @@ -29,13 +29,15 @@ module BoardHelpers # ensure there is enough horizontal space for four board lists resize_window(2000, 800) - drag_to(selector: selector, - scrollable: '#board-app', - list_from_index: list_from_index, - from_index: from_index, - to_index: to_index, - list_to_index: list_to_index, - perform_drop: perform_drop) + drag_to( + selector: selector, + scrollable: '#board-app', + list_from_index: list_from_index, + from_index: from_index, + to_index: to_index, + list_to_index: list_to_index, + perform_drop: perform_drop + ) end wait_for_requests diff --git a/spec/support/helpers/chunked_io_helpers.rb b/spec/support/helpers/chunked_io_helpers.rb new file mode 100644 index 00000000000..278f577f3cb --- /dev/null +++ b/spec/support/helpers/chunked_io_helpers.rb @@ -0,0 +1,13 @@ +# frozen_string_literal: true + 
+module ChunkedIOHelpers + def sample_trace_raw + @sample_trace_raw ||= File.read(expand_fixture_path('trace/sample_trace')) + .force_encoding(Encoding::BINARY) + end + + def stub_buffer_size(size) + stub_const('Ci::BuildTraceChunk::CHUNK_SIZE', size) + stub_const('Gitlab::Ci::Trace::ChunkedIO::CHUNK_SIZE', size) + end +end diff --git a/spec/support/helpers/ci/source_pipeline_helpers.rb b/spec/support/helpers/ci/source_pipeline_helpers.rb index b99f499cc16..ef3aea7de52 100644 --- a/spec/support/helpers/ci/source_pipeline_helpers.rb +++ b/spec/support/helpers/ci/source_pipeline_helpers.rb @@ -3,11 +3,13 @@ module Ci module SourcePipelineHelpers def create_source_pipeline(upstream, downstream) - create(:ci_sources_pipeline, - source_job: create(:ci_build, pipeline: upstream), - source_project: upstream.project, - pipeline: downstream, - project: downstream.project) + create( + :ci_sources_pipeline, + source_job: create(:ci_build, pipeline: upstream), + source_project: upstream.project, + pipeline: downstream, + project: downstream.project + ) end end end diff --git a/spec/support/helpers/content_editor_helpers.rb b/spec/support/helpers/content_editor_helpers.rb index c12fd1fbbd7..1bbc05cc05a 100644 --- a/spec/support/helpers/content_editor_helpers.rb +++ b/spec/support/helpers/content_editor_helpers.rb @@ -2,7 +2,7 @@ module ContentEditorHelpers def switch_to_content_editor - click_button _('Viewing markdown') + click_button _('Editing markdown') click_button _('Rich text') end @@ -10,8 +10,8 @@ module ContentEditorHelpers find(content_editor_testid).send_keys keys end - def open_insert_media_dropdown - page.find('svg[data-testid="media-icon"]').click + def click_attachment_button + page.find('svg[data-testid="paperclip-icon"]').click end def set_source_editor_content(content) diff --git a/spec/support/helpers/cycle_analytics_helpers.rb b/spec/support/helpers/cycle_analytics_helpers.rb index eba5771e062..0accb341cb9 100644 --- 
a/spec/support/helpers/cycle_analytics_helpers.rb +++ b/spec/support/helpers/cycle_analytics_helpers.rb @@ -91,13 +91,13 @@ module CycleAnalyticsHelpers wait_for_requests end - def create_value_stream_group_aggregation(group) - aggregation = Analytics::CycleAnalytics::Aggregation.safe_create_for_namespace(group) + def create_value_stream_aggregation(group_or_project_namespace) + aggregation = Analytics::CycleAnalytics::Aggregation.safe_create_for_namespace(group_or_project_namespace) Analytics::CycleAnalytics::AggregatorService.new(aggregation: aggregation).execute end def select_group_and_custom_value_stream(group, custom_value_stream_name) - create_value_stream_group_aggregation(group) + create_value_stream_aggregation(group) select_group(group) select_value_stream(custom_value_stream_name) @@ -235,4 +235,13 @@ module CycleAnalyticsHelpers pipeline: dummy_pipeline(project), protected: false) end + + def create_deployment(args) + project = args[:project] + environment = project.environments.production.first || create(:environment, :production, project: project) + create(:deployment, :success, args.merge(environment: environment)) + + # this is needed for the DORA API so we have aggregated data + ::Dora::DailyMetrics::RefreshWorker.new.perform(environment.id, Time.current.to_date.to_s) if Gitlab.ee? + end end diff --git a/spec/support/helpers/cycle_analytics_helpers/test_generation.rb b/spec/support/helpers/cycle_analytics_helpers/test_generation.rb new file mode 100644 index 00000000000..1c7c45c06a1 --- /dev/null +++ b/spec/support/helpers/cycle_analytics_helpers/test_generation.rb @@ -0,0 +1,166 @@ +# frozen_string_literal: true + +# rubocop:disable Layout/LineLength +# rubocop:disable Metrics/CyclomaticComplexity +# rubocop:disable Metrics/PerceivedComplexity +# rubocop:disable Metrics/AbcSize + +# Note: The ABC size is large here because we have a method generating test cases with +# multiple nested contexts. This shouldn't count as a violation. 
+module CycleAnalyticsHelpers + module TestGeneration + # Generate the most common set of specs that all value stream analytics phases need to have. + # + # Arguments: + # + # phase: Which phase are we testing? Will call `CycleAnalytics.new.send(phase)` for the final assertion + # data_fn: A function that returns a hash, constituting initial data for the test case + # start_time_conditions: An array of `conditions`. Each condition is an tuple of `condition_name` and `condition_fn`. `condition_fn` is called with + # `context` (no lexical scope, so need to do `context.create` for factories, for example) and `data` (from the `data_fn`). + # Each `condition_fn` is expected to implement a case which consitutes the start of the given value stream analytics phase. + # end_time_conditions: An array of `conditions`. Each condition is an tuple of `condition_name` and `condition_fn`. `condition_fn` is called with + # `context` (no lexical scope, so need to do `context.create` for factories, for example) and `data` (from the `data_fn`). + # Each `condition_fn` is expected to implement a case which consitutes the end of the given value stream analytics phase. + # before_end_fn: This function is run before calling the end time conditions. Used for setup that needs to be run between the start and end conditions. + # post_fn: Code that needs to be run after running the end time conditions. 
+ + def generate_cycle_analytics_spec(phase:, data_fn:, start_time_conditions:, end_time_conditions:, before_end_fn: nil, post_fn: nil) + combinations_of_start_time_conditions = (1..start_time_conditions.size).flat_map { |size| start_time_conditions.combination(size).to_a } + combinations_of_end_time_conditions = (1..end_time_conditions.size).flat_map { |size| end_time_conditions.combination(size).to_a } + + scenarios = combinations_of_start_time_conditions.product(combinations_of_end_time_conditions) + scenarios.each do |start_time_conditions, end_time_conditions| + let_it_be(:other_project) { create(:project, :repository) } + + before do + other_project.add_developer(user) + end + + context "start condition: #{start_time_conditions.map(&:first).to_sentence}" do + context "end condition: #{end_time_conditions.map(&:first).to_sentence}" do + it "finds the median of available durations between the two conditions", :sidekiq_might_not_need_inline do + time_differences = Array.new(5) do |index| + data = data_fn[self] + start_time = (index * 10).days.from_now + end_time = start_time + rand(1..5).days + + start_time_conditions.each_value do |condition_fn| + travel_to(start_time) { condition_fn[self, data] } + end + + # Run `before_end_fn` at the midpoint between `start_time` and `end_time` + travel_to(start_time + ((end_time - start_time) / 2)) { before_end_fn[self, data] } if before_end_fn + + end_time_conditions.each_value do |condition_fn| + travel_to(end_time) { condition_fn[self, data] } + end + + travel_to(end_time + 1.day) { post_fn[self, data] } if post_fn + + end_time - start_time + end + + median_time_difference = time_differences.sort[2] + expect(subject[phase].project_median).to be_within(5).of(median_time_difference) + end + + context "when the data belongs to another project" do + it "returns nil" do + # Use a stub to "trick" the data/condition functions + # into using another project. 
This saves us from having to + # define separate data/condition functions for this particular + # test case. + allow(self).to receive(:project) { other_project } + + data = data_fn[self] + start_time = Time.now + end_time = rand(1..10).days.from_now + + start_time_conditions.each_value do |condition_fn| + travel_to(start_time) { condition_fn[self, data] } + end + + end_time_conditions.each_value do |condition_fn| + travel_to(end_time) { condition_fn[self, data] } + end + + travel_to(end_time + 1.day) { post_fn[self, data] } if post_fn + + # Turn off the stub before checking assertions + allow(self).to receive(:project).and_call_original + + expect(subject[phase].project_median).to be_nil + end + end + + context "when the end condition happens before the start condition" do + it 'returns nil' do + data = data_fn[self] + start_time = Time.now + end_time = start_time + rand(1..5).days + + # Run `before_end_fn` at the midpoint between `start_time` and `end_time` + travel_to(start_time + ((end_time - start_time) / 2)) { before_end_fn[self, data] } if before_end_fn + + end_time_conditions.each_value do |condition_fn| + travel_to(start_time) { condition_fn[self, data] } + end + + start_time_conditions.each_value do |condition_fn| + travel_to(end_time) { condition_fn[self, data] } + end + + travel_to(end_time + 1.day) { post_fn[self, data] } if post_fn + + expect(subject[phase].project_median).to be_nil + end + end + end + + context "end condition NOT PRESENT: #{end_time_conditions.map(&:first).to_sentence}" do + it "returns nil" do + data = data_fn[self] + start_time = Time.now + + start_time_conditions.each_value do |condition_fn| + travel_to(start_time) { condition_fn[self, data] } + end + + post_fn[self, data] if post_fn + + expect(subject[phase].project_median).to be_nil + end + end + end + + context "start condition NOT PRESENT: #{start_time_conditions.map(&:first).to_sentence}" do + context "end condition: #{end_time_conditions.map(&:first).to_sentence}" do + it 
"returns nil" do + data = data_fn[self] + end_time = rand(1..10).days.from_now + + end_time_conditions.each_with_index do |(_condition_name, condition_fn), index| + travel_to(end_time + index.days) { condition_fn[self, data] } + end + + travel_to(end_time + 1.day) { post_fn[self, data] } if post_fn + + expect(subject[phase].project_median).to be_nil + end + end + end + end + + context "when none of the start / end conditions are matched" do + it "returns nil" do + expect(subject[phase].project_median).to be_nil + end + end + end + end +end + +# rubocop:enable Layout/LineLength +# rubocop:enable Metrics/CyclomaticComplexity +# rubocop:enable Metrics/PerceivedComplexity +# rubocop:enable Metrics/AbcSize diff --git a/spec/support/helpers/database/multiple_databases_helpers.rb b/spec/support/helpers/database/multiple_databases_helpers.rb index 5083ea1ff53..3c9a5762c47 100644 --- a/spec/support/helpers/database/multiple_databases_helpers.rb +++ b/spec/support/helpers/database/multiple_databases_helpers.rb @@ -4,6 +4,28 @@ module Database module MultipleDatabasesHelpers EXTRA_DBS = ::Gitlab::Database::DATABASE_NAMES.map(&:to_sym) - [:main] + def database_exists?(database_name) + ::Gitlab::Database.has_database?(database_name) + end + + def skip_if_shared_database(database_name) + skip "Skipping because #{database_name} is shared or doesn't not exist" unless database_exists?(database_name) + end + + def skip_if_database_exists(database_name) + skip "Skipping because database #{database_name} exists" if database_exists?(database_name) + end + + def execute_on_each_database(query, databases: %I[main ci]) + databases = databases.select { |database_name| database_exists?(database_name) } + + Gitlab::Database::EachDatabase.each_database_connection(only: databases, include_shared: false) do |connection, _| + next unless Gitlab::Database.gitlab_schemas_for_connection(connection).include?(:gitlab_shared) + + connection.execute(query) + end + end + def 
skip_if_multiple_databases_not_setup(*databases) unless (databases - EXTRA_DBS).empty? raise "Unsupported database in #{databases}. It must be one of #{EXTRA_DBS}." diff --git a/spec/support/helpers/email_helpers.rb b/spec/support/helpers/email_helpers.rb index f4bdaa7e425..57386233775 100644 --- a/spec/support/helpers/email_helpers.rb +++ b/spec/support/helpers/email_helpers.rb @@ -76,4 +76,25 @@ module EmailHelpers composed_expectation.and(have_enqueued_mail(mailer_class, mailer_method).with(*arguments)) end end + + def expect_sender(user, sender_email: nil) + sender = subject.header[:from].addrs[0] + expect(sender.display_name).to eq("#{user.name} (@#{user.username})") + expect(sender.address).to eq(sender_email.presence || gitlab_sender) + end + + def expect_service_desk_custom_email_delivery_options(service_desk_setting) + expect(subject.delivery_method).to be_a Mail::SMTP + expect(service_desk_setting.custom_email_credential).to be_present + + credential = service_desk_setting.custom_email_credential + + expect(subject.delivery_method.settings).to include( + address: credential.smtp_address, + port: credential.smtp_port, + user_name: credential.smtp_username, + password: credential.smtp_password, + domain: service_desk_setting.custom_email.split('@').last + ) + end end diff --git a/spec/support/helpers/every_sidekiq_worker_test_helper.rb b/spec/support/helpers/every_sidekiq_worker_test_helper.rb new file mode 100644 index 00000000000..b053ed04b58 --- /dev/null +++ b/spec/support/helpers/every_sidekiq_worker_test_helper.rb @@ -0,0 +1,9 @@ +# frozen_string_literal: true + +module EverySidekiqWorkerTestHelper + def extra_retry_exceptions + {} + end +end + +EverySidekiqWorkerTestHelper.prepend_mod diff --git a/spec/support/helpers/fake_webauthn_device.rb b/spec/support/helpers/fake_webauthn_device.rb index d2c2f7d6bf3..5a535735817 100644 --- a/spec/support/helpers/fake_webauthn_device.rb +++ b/spec/support/helpers/fake_webauthn_device.rb @@ -45,7 +45,7 @@ class 
FakeWebauthnDevice return Promise.resolve(result); }; JS - @page.click_link('Try again?', href: false) + @page.click_button(_('Try again?')) end def fake_webauthn_authentication diff --git a/spec/support/helpers/feature_flag_helpers.rb b/spec/support/helpers/feature_flag_helpers.rb index 4e57002a7c6..3cf611c66e6 100644 --- a/spec/support/helpers/feature_flag_helpers.rb +++ b/spec/support/helpers/feature_flag_helpers.rb @@ -2,22 +2,32 @@ module FeatureFlagHelpers def create_flag(project, name, active = true, description: nil, version: Operations::FeatureFlag.versions['new_version_flag']) - create(:operations_feature_flag, name: name, active: active, version: version, - description: description, project: project) + create( + :operations_feature_flag, + name: name, + active: active, + version: version, + description: description, + project: project + ) end def create_scope(feature_flag, environment_scope, active = true, strategies = [{ name: "default", parameters: {} }]) - create(:operations_feature_flag_scope, + create( + :operations_feature_flag_scope, feature_flag: feature_flag, environment_scope: environment_scope, active: active, - strategies: strategies) + strategies: strategies + ) end def create_strategy(feature_flag, name = 'default', parameters = {}) - create(:operations_strategy, + create( + :operations_strategy, feature_flag: feature_flag, - name: name) + name: name + ) end def within_feature_flag_row(index) @@ -95,6 +105,6 @@ module FeatureFlagHelpers end def expect_user_to_see_feature_flags_index_page - expect(page).to have_text('Feature Flags') + expect(page).to have_text('Feature flags') end end diff --git a/spec/support/helpers/features/access_token_helpers.rb b/spec/support/helpers/features/access_token_helpers.rb index f4bdb70c160..bc839642914 100644 --- a/spec/support/helpers/features/access_token_helpers.rb +++ b/spec/support/helpers/features/access_token_helpers.rb @@ -1,18 +1,15 @@ # frozen_string_literal: true -module Spec - module Support - 
module Helpers - module AccessTokenHelpers - def active_access_tokens - find("[data-testid='active-tokens']") - end - def created_access_token - within('[data-testid=access-token-section]') do - find('[data-testid=toggle-visibility-button]').click - find_field('new-access-token').value - end - end +module Features + module AccessTokenHelpers + def active_access_tokens + find("[data-testid='active-tokens']") + end + + def created_access_token + within('[data-testid=access-token-section]') do + find('[data-testid=toggle-visibility-button]').click + find_field('new-access-token').value end end end diff --git a/spec/support/helpers/features/admin_users_helpers.rb b/spec/support/helpers/features/admin_users_helpers.rb index 99b19eedcff..9a87ccf113a 100644 --- a/spec/support/helpers/features/admin_users_helpers.rb +++ b/spec/support/helpers/features/admin_users_helpers.rb @@ -1,24 +1,18 @@ # frozen_string_literal: true -module Spec - module Support - module Helpers - module Features - module AdminUsersHelpers - def click_user_dropdown_toggle(user_id) - page.within("[data-testid='user-actions-#{user_id}']") do - find("[data-testid='dropdown-toggle']").click - end - end +module Features + module AdminUsersHelpers + def click_user_dropdown_toggle(user_id) + page.within("[data-testid='user-actions-#{user_id}']") do + find("[data-testid='dropdown-toggle']").click + end + end - def click_action_in_user_dropdown(user_id, action) - click_user_dropdown_toggle(user_id) + def click_action_in_user_dropdown(user_id, action) + click_user_dropdown_toggle(user_id) - within find("[data-testid='user-actions-#{user_id}']") do - find('li button', exact_text: action).click - end - end - end + within find("[data-testid='user-actions-#{user_id}']") do + find('li button', exact_text: action).click end end end diff --git a/spec/support/helpers/features/blob_spec_helpers.rb b/spec/support/helpers/features/blob_spec_helpers.rb index 7ccfc9be7e2..8254e1d76bd 100644 --- 
a/spec/support/helpers/features/blob_spec_helpers.rb +++ b/spec/support/helpers/features/blob_spec_helpers.rb @@ -1,14 +1,16 @@ # frozen_string_literal: true -# These helpers help you interact within the blobs page and blobs edit page (Single file editor). -module BlobSpecHelpers - include ActionView::Helpers::JavaScriptHelper +module Features + # These helpers help you interact within the blobs page and blobs edit page (Single file editor). + module BlobSpecHelpers + include ActionView::Helpers::JavaScriptHelper - def set_default_button(type) - evaluate_script("localStorage.setItem('gl-web-ide-button-selected', '#{type}')") - end + def set_default_button(type) + evaluate_script("localStorage.setItem('gl-web-ide-button-selected', '#{type}')") + end - def unset_default_button - set_default_button('') + def unset_default_button + set_default_button('') + end end end diff --git a/spec/support/helpers/features/branches_helpers.rb b/spec/support/helpers/features/branches_helpers.rb index dc4fa448167..9fb6236d052 100644 --- a/spec/support/helpers/features/branches_helpers.rb +++ b/spec/support/helpers/features/branches_helpers.rb @@ -4,31 +4,28 @@ # # Usage: # describe "..." do -# include Spec::Support::Helpers::Features::BranchesHelpers +# include Features::BranchesHelpers # ... 
# # create_branch("feature") # select_branch("master") # -module Spec - module Support - module Helpers - module Features - module BranchesHelpers - def create_branch(branch_name, source_branch_name = "master") - fill_in("branch_name", with: branch_name) - select_branch(source_branch_name) - click_button("Create branch") - end +module Features + module BranchesHelpers + include ListboxHelpers - def select_branch(branch_name) - wait_for_requests + def create_branch(branch_name, source_branch_name = "master") + fill_in("branch_name", with: branch_name) + select_branch(source_branch_name) + click_button("Create branch") + end + + def select_branch(branch_name) + wait_for_requests - click_button branch_name - send_keys branch_name - end - end - end + click_button branch_name + send_keys branch_name + select_listbox_item(branch_name) end end end diff --git a/spec/support/helpers/features/canonical_link_helpers.rb b/spec/support/helpers/features/canonical_link_helpers.rb index da3a28f1cb2..6ef934a924b 100644 --- a/spec/support/helpers/features/canonical_link_helpers.rb +++ b/spec/support/helpers/features/canonical_link_helpers.rb @@ -4,25 +4,19 @@ # # Usage: # describe "..." do -# include Spec::Support::Helpers::Features::CanonicalLinkHelpers +# include Features::CanonicalLinkHelpers # ... 
# # expect(page).to have_canonical_link(url) # -module Spec - module Support - module Helpers - module Features - module CanonicalLinkHelpers - def have_canonical_link(url) - have_xpath("//link[@rel=\"canonical\" and @href=\"#{url}\"]", visible: false) - end +module Features + module CanonicalLinkHelpers + def have_canonical_link(url) + have_xpath("//link[@rel=\"canonical\" and @href=\"#{url}\"]", visible: false) + end - def have_any_canonical_links - have_xpath('//link[@rel="canonical"]', visible: false) - end - end - end + def have_any_canonical_links + have_xpath('//link[@rel="canonical"]', visible: false) end end end diff --git a/spec/support/helpers/features/invite_members_modal_helper.rb b/spec/support/helpers/features/invite_members_modal_helper.rb deleted file mode 100644 index 47cbd6b5208..00000000000 --- a/spec/support/helpers/features/invite_members_modal_helper.rb +++ /dev/null @@ -1,154 +0,0 @@ -# frozen_string_literal: true - -module Spec - module Support - module Helpers - module Features - module InviteMembersModalHelper - def invite_member(names, role: 'Guest', expires_at: nil) - click_on 'Invite members' - - page.within invite_modal_selector do - select_members(names) - choose_options(role, expires_at) - submit_invites - end - - wait_for_requests - end - - def invite_member_by_email(role) - click_on _('Invite members') - - page.within invite_modal_selector do - choose_options(role, nil) - find(member_dropdown_selector).set('new_email@gitlab.com') - wait_for_requests - - find('.dropdown-item', text: 'Invite "new_email@gitlab.com" by email').click - - submit_invites - - wait_for_requests - end - end - - def input_invites(names) - click_on 'Invite members' - - page.within invite_modal_selector do - select_members(names) - end - end - - def select_members(names) - Array.wrap(names).each do |name| - find(member_dropdown_selector).set(name) - - wait_for_requests - click_button name - end - end - - def invite_group(name, role: 'Guest', expires_at: nil) - 
click_on 'Invite a group' - - click_on 'Select a group' - wait_for_requests - click_button name - choose_options(role, expires_at) - - submit_invites - end - - def submit_invites - click_button 'Invite' - end - - def choose_options(role, expires_at) - select role, from: 'Select a role' - fill_in 'YYYY-MM-DD', with: expires_at.strftime('%Y-%m-%d') if expires_at - end - - def click_groups_tab - expect(page).to have_link 'Groups' - click_link "Groups" - end - - def group_dropdown_selector - '[data-testid="group-select-dropdown"]' - end - - def member_dropdown_selector - '[data-testid="members-token-select-input"]' - end - - def invite_modal_selector - '[data-testid="invite-modal"]' - end - - def member_token_error_selector(id) - "[data-testid='error-icon-#{id}']" - end - - def member_token_avatar_selector - "[data-testid='token-avatar']" - end - - def member_token_selector(id) - "[data-token-id='#{id}']" - end - - def more_invite_errors_button_selector - "[data-testid='accordion-button']" - end - - def limited_invite_error_selector - "[data-testid='errors-limited-item']" - end - - def expanded_invite_error_selector - "[data-testid='errors-expanded-item']" - end - - def remove_token(id) - page.within member_token_selector(id) do - find('[data-testid="close-icon"]').click - end - end - - def expect_to_have_successful_invite_indicator(page, user) - expect(page).to have_selector("#{member_token_selector(user.id)} .gl-bg-green-100") - expect(page).not_to have_text("#{user.name}: ") - end - - def expect_to_have_invalid_invite_indicator(page, user, message: true) - expect(page).to have_selector("#{member_token_selector(user.id)} .gl-bg-red-100") - expect(page).to have_selector(member_token_error_selector(user.id)) - expect(page).to have_text("#{user.name}: Access level should be greater than or equal to") if message - end - - def expect_to_have_normal_invite_indicator(page, user) - expect(page).to have_selector(member_token_selector(user.id)) - expect(page).not_to 
have_selector("#{member_token_selector(user.id)} .gl-bg-red-100") - expect(page).not_to have_selector("#{member_token_selector(user.id)} .gl-bg-green-100") - expect(page).not_to have_text("#{user.name}: ") - end - - def expect_to_have_invite_removed(page, user) - expect(page).not_to have_selector(member_token_selector(user.id)) - expect(page).not_to have_text("#{user.name}: Access level should be greater than or equal to") - end - - def expect_to_have_group(group) - expect(page).to have_selector("[entity-id='#{group.id}']") - end - - def expect_not_to_have_group(group) - expect(page).not_to have_selector("[entity-id='#{group.id}']") - end - end - end - end - end -end diff --git a/spec/support/helpers/features/invite_members_modal_helpers.rb b/spec/support/helpers/features/invite_members_modal_helpers.rb new file mode 100644 index 00000000000..75573616686 --- /dev/null +++ b/spec/support/helpers/features/invite_members_modal_helpers.rb @@ -0,0 +1,148 @@ +# frozen_string_literal: true + +module Features + module InviteMembersModalHelpers + def invite_member(names, role: 'Guest', expires_at: nil) + click_on 'Invite members' + + page.within invite_modal_selector do + select_members(names) + choose_options(role, expires_at) + submit_invites + end + + wait_for_requests + end + + def invite_member_by_email(role) + click_on _('Invite members') + + page.within invite_modal_selector do + choose_options(role, nil) + find(member_dropdown_selector).set('new_email@gitlab.com') + wait_for_requests + + find('.dropdown-item', text: 'Invite "new_email@gitlab.com" by email').click + + submit_invites + + wait_for_requests + end + end + + def input_invites(names) + click_on 'Invite members' + + page.within invite_modal_selector do + select_members(names) + end + end + + def select_members(names) + Array.wrap(names).each do |name| + find(member_dropdown_selector).set(name) + + wait_for_requests + click_button name + end + end + + def invite_group(name, role: 'Guest', expires_at: nil) + 
click_on 'Invite a group' + + click_on 'Select a group' + wait_for_requests + click_button name + choose_options(role, expires_at) + + submit_invites + end + + def submit_invites + click_button 'Invite' + end + + def choose_options(role, expires_at) + select role, from: 'Select a role' + fill_in 'YYYY-MM-DD', with: expires_at.strftime('%Y-%m-%d') if expires_at + end + + def click_groups_tab + expect(page).to have_link 'Groups' + click_link "Groups" + end + + def group_dropdown_selector + '[data-testid="group-select-dropdown"]' + end + + def member_dropdown_selector + '[data-testid="members-token-select-input"]' + end + + def invite_modal_selector + '[data-testid="invite-modal"]' + end + + def member_token_error_selector(id) + "[data-testid='error-icon-#{id}']" + end + + def member_token_avatar_selector + "[data-testid='token-avatar']" + end + + def member_token_selector(id) + "[data-token-id='#{id}']" + end + + def more_invite_errors_button_selector + "[data-testid='accordion-button']" + end + + def limited_invite_error_selector + "[data-testid='errors-limited-item']" + end + + def expanded_invite_error_selector + "[data-testid='errors-expanded-item']" + end + + def remove_token(id) + page.within member_token_selector(id) do + find('[data-testid="close-icon"]').click + end + end + + def expect_to_have_successful_invite_indicator(page, user) + expect(page).to have_selector("#{member_token_selector(user.id)} .gl-bg-green-100") + expect(page).not_to have_text("#{user.name}: ") + end + + def expect_to_have_invalid_invite_indicator(page, user, message: true) + expect(page).to have_selector("#{member_token_selector(user.id)} .gl-bg-red-100") + expect(page).to have_selector(member_token_error_selector(user.id)) + expect(page).to have_text("#{user.name}: Access level should be greater than or equal to") if message + end + + def expect_to_have_normal_invite_indicator(page, user) + expect(page).to have_selector(member_token_selector(user.id)) + expect(page).not_to 
have_selector("#{member_token_selector(user.id)} .gl-bg-red-100") + expect(page).not_to have_selector("#{member_token_selector(user.id)} .gl-bg-green-100") + expect(page).not_to have_text("#{user.name}: ") + end + + def expect_to_have_invite_removed(page, user) + expect(page).not_to have_selector(member_token_selector(user.id)) + expect(page).not_to have_text("#{user.name}: Access level should be greater than or equal to") + end + + def expect_to_have_group(group) + expect(page).to have_selector("[entity-id='#{group.id}']") + end + + def expect_not_to_have_group(group) + expect(page).not_to have_selector("[entity-id='#{group.id}']") + end + end +end diff --git a/spec/support/helpers/features/iteration_helpers.rb b/spec/support/helpers/features/iteration_helpers.rb index 8e1d252f55f..fab373a547f 100644 --- a/spec/support/helpers/features/iteration_helpers.rb +++ b/spec/support/helpers/features/iteration_helpers.rb @@ -1,6 +1,9 @@ # frozen_string_literal: true -module IterationHelpers - def iteration_period(iteration) - "#{iteration.start_date.to_s(:medium)} - #{iteration.due_date.to_s(:medium)}" + +module Features + module IterationHelpers + def iteration_period(iteration) + "#{iteration.start_date.to_s(:medium)} - #{iteration.due_date.to_s(:medium)}" + end end end diff --git a/spec/support/helpers/features/list_rows_helpers.rb b/spec/support/helpers/features/list_rows_helpers.rb deleted file mode 100644 index 0626415361c..00000000000 --- a/spec/support/helpers/features/list_rows_helpers.rb +++ /dev/null @@ -1,28 +0,0 @@ -# frozen_string_literal: true -# These helpers allow you to access rows in the list -# -# Usage: -# describe "..." do -# include Spec::Support::Helpers::Features::ListRowsHelpers -# ... 
-# -# expect(first_row.text).to include("John Doe") -# expect(second_row.text).to include("John Smith") -# -module Spec - module Support - module Helpers - module Features - module ListRowsHelpers - def first_row - page.all('ul.content-list > li')[0] - end - - def second_row - page.all('ul.content-list > li')[1] - end - end - end - end - end -end diff --git a/spec/support/helpers/features/members_helpers.rb b/spec/support/helpers/features/members_helpers.rb index 2d3f0902a3c..9882767cecf 100644 --- a/spec/support/helpers/features/members_helpers.rb +++ b/spec/support/helpers/features/members_helpers.rb @@ -1,78 +1,72 @@ # frozen_string_literal: true -module Spec - module Support - module Helpers - module Features - module MembersHelpers - def members_table - page.find('[data-testid="members-table"]') - end +module Features + module MembersHelpers + def members_table + page.find('[data-testid="members-table"]') + end - def all_rows - page.within(members_table) do - page.all('tbody > tr') - end - end + def all_rows + page.within(members_table) do + page.all('tbody > tr') + end + end - def first_row - all_rows[0] - end + def first_row + all_rows[0] + end - def second_row - all_rows[1] - end + def second_row + all_rows[1] + end - def third_row - all_rows[2] - end + def third_row + all_rows[2] + end - def find_row(name) - page.within(members_table) do - page.find('tbody > tr', text: name) - end - end + def find_row(name) + page.within(members_table) do + page.find('tbody > tr', text: name) + end + end - def find_member_row(user) - find_row(user.name) - end + def find_member_row(user) + find_row(user.name) + end - def find_username_row(user) - find_row(user.username) - end + def find_username_row(user) + find_row(user.username) + end - def find_invited_member_row(email) - find_row(email) - end + def find_invited_member_row(email) + find_row(email) + end - def find_group_row(group) - find_row(group.full_name) - end + def find_group_row(group) + find_row(group.full_name) + 
end - def fill_in_filtered_search(label, with:) - page.within '[data-testid="members-filtered-search-bar"]' do - find_field(label).click - find('input').native.send_keys(with) - click_button 'Search' - end - end + def fill_in_filtered_search(label, with:) + page.within '[data-testid="members-filtered-search-bar"]' do + find_field(label).click + find('input').native.send_keys(with) + click_button 'Search' + end + end - def user_action_dropdown - '[data-testid="user-action-dropdown"]' - end + def user_action_dropdown + '[data-testid="user-action-dropdown"]' + end - def show_actions - within user_action_dropdown do - find('button').click - end - end + def show_actions + within user_action_dropdown do + find('button').click + end + end - def show_actions_for_username(user) - within find_username_row(user) do - show_actions - end - end - end + def show_actions_for_username(user) + within find_username_row(user) do + show_actions end end end diff --git a/spec/support/helpers/features/merge_request_helpers.rb b/spec/support/helpers/features/merge_request_helpers.rb index 53896e1fe12..260a55487ea 100644 --- a/spec/support/helpers/features/merge_request_helpers.rb +++ b/spec/support/helpers/features/merge_request_helpers.rb @@ -1,25 +1,19 @@ # frozen_string_literal: true -module Spec - module Support - module Helpers - module Features - module MergeRequestHelpers - def preload_view_requirements(merge_request, note) - # This will load the status fields of the author of the note and merge request - # to avoid queries when rendering the view being tested. - # - merge_request.author.status - note.author.status - end +module Features + module MergeRequestHelpers + def preload_view_requirements(merge_request, note) + # This will load the status fields of the author of the note and merge request + # to avoid queries when rendering the view being tested. 
+ # + merge_request.author.status + note.author.status + end - def serialize_issuable_sidebar(user, project, merge_request) - MergeRequestSerializer - .new(current_user: user, project: project) - .represent(merge_request, serializer: 'sidebar') - end - end - end + def serialize_issuable_sidebar(user, project, merge_request) + MergeRequestSerializer + .new(current_user: user, project: project) + .represent(merge_request, serializer: 'sidebar') end end end diff --git a/spec/support/helpers/features/notes_helpers.rb b/spec/support/helpers/features/notes_helpers.rb index f8252254531..78774b515df 100644 --- a/spec/support/helpers/features/notes_helpers.rb +++ b/spec/support/helpers/features/notes_helpers.rb @@ -4,53 +4,47 @@ # # Usage: # describe "..." do -# include Spec::Support::Helpers::Features::NotesHelpers +# include Features::NotesHelpers # ... # # add_note("Hello world!") # -module Spec - module Support - module Helpers - module Features - module NotesHelpers - def add_note(text) - perform_enqueued_jobs do - page.within(".js-main-target-form") do - fill_in("note[note]", with: text) - find(".js-comment-submit-button").click - end - end - - wait_for_requests - end - - def edit_note(note_text_to_edit, new_note_text) - page.within('#notes-list li.note', text: note_text_to_edit) do - find('.js-note-edit').click - fill_in('note[note]', with: new_note_text) - find('.js-comment-button').click - end - - wait_for_requests - end - - def preview_note(text) - page.within('.js-main-target-form') do - filled_text = fill_in('note[note]', with: text) - - # Wait for quick action prompt to load and then dismiss it with ESC - # because it may block the Preview button - wait_for_requests - filled_text.send_keys(:escape) - - click_on('Preview') - - yield if block_given? 
- end - end +module Features + module NotesHelpers + def add_note(text) + perform_enqueued_jobs do + page.within(".js-main-target-form") do + fill_in("note[note]", with: text) + find(".js-comment-submit-button").click end end + + wait_for_requests + end + + def edit_note(note_text_to_edit, new_note_text) + page.within('#notes-list li.note', text: note_text_to_edit) do + find('.js-note-edit').click + fill_in('note[note]', with: new_note_text) + find('.js-comment-button').click + end + + wait_for_requests + end + + def preview_note(text) + page.within('.js-main-target-form') do + filled_text = fill_in('note[note]', with: text) + + # Wait for quick action prompt to load and then dismiss it with ESC + # because it may block the Preview button + wait_for_requests + filled_text.send_keys(:escape) + + click_on('Preview') + + yield if block_given? + end end end end diff --git a/spec/support/helpers/features/releases_helpers.rb b/spec/support/helpers/features/releases_helpers.rb index 545e12341ef..d5846aad15d 100644 --- a/spec/support/helpers/features/releases_helpers.rb +++ b/spec/support/helpers/features/releases_helpers.rb @@ -4,80 +4,83 @@ # # Usage: # describe "..." do -# include Spec::Support::Helpers::Features::ReleasesHelpers +# include Features::ReleasesHelpers # ... 
# # fill_tag_name("v1.0") # select_create_from("my-feature-branch") # -module Spec - module Support - module Helpers - module Features - module ReleasesHelpers - include ListboxHelpers +module Features + module ReleasesHelpers + include ListboxHelpers - def select_new_tag_name(tag_name) - page.within '[data-testid="tag-name-field"]' do - find('button').click - wait_for_all_requests + def select_new_tag_name(tag_name) + open_tag_popover - find('input[aria-label="Search or create tag"]').set(tag_name) - wait_for_all_requests + page.within '[data-testid="tag-name-search"]' do + find('input[type="search"]').set(tag_name) + wait_for_all_requests - click_button("Create tag #{tag_name}") - click_button tag_name - end - end - - def select_create_from(branch_name) - page.within '[data-testid="create-from-field"]' do - find('button').click + click_button("Create tag #{tag_name}") + end + end - wait_for_all_requests + def select_create_from(branch_name) + open_tag_popover - find('input[aria-label="Search branches, tags, and commits"]').set(branch_name) + page.within '[data-testid="create-from-field"]' do + find('.ref-selector button').click - wait_for_all_requests + wait_for_all_requests - select_listbox_item(branch_name.to_s, exact_text: true) - end - end + find('input[aria-label="Search branches, tags, and commits"]').set(branch_name) - def fill_release_title(release_title) - fill_in('Release title', with: release_title) - end + wait_for_all_requests - def select_milestone(milestone_title) - page.within '[data-testid="milestones-field"]' do - find('button').click + select_listbox_item(branch_name.to_s, exact_text: true) - wait_for_all_requests + click_button _('Save') + end + end - find('input[aria-label="Search Milestones"]').set(milestone_title) + def fill_release_title(release_title) + fill_in('Release title', with: release_title) + end - wait_for_all_requests + def select_milestone(milestone_title) + page.within '[data-testid="milestones-field"]' do + 
find('button').click - find('button', text: milestone_title, match: :first).click - end - end + wait_for_all_requests - def fill_release_notes(release_notes) - fill_in('Release notes', with: release_notes) - end + find('input[aria-label="Search Milestones"]').set(milestone_title) - def fill_asset_link(link) - all('input[name="asset-url"]').last.set(link[:url]) - all('input[name="asset-link-name"]').last.set(link[:title]) - all('select[name="asset-type"]').last.find("option[value=\"#{link[:type]}\"").select_option - end + wait_for_all_requests - # Click "Add another link" and tab back to the beginning of the new row - def add_another_asset_link - click_button('Add another link') - end - end + find('button', text: milestone_title, match: :first).click end end + + def fill_release_notes(release_notes) + fill_in('Release notes', with: release_notes) + end + + def fill_asset_link(link) + all('input[name="asset-url"]').last.set(link[:url]) + all('input[name="asset-link-name"]').last.set(link[:title]) + all('select[name="asset-type"]').last.find("option[value=\"#{link[:type]}\"").select_option + end + + # Click "Add another link" and tab back to the beginning of the new row + def add_another_asset_link + click_button('Add another link') + end + + def open_tag_popover(name = s_('Release|Search or create tag name')) + return if page.has_css? '.release-tag-selector' + + click_button name + wait_for_all_requests + end end end diff --git a/spec/support/helpers/features/responsive_table_helpers.rb b/spec/support/helpers/features/responsive_table_helpers.rb index 7a175219fe9..980f09b7eea 100644 --- a/spec/support/helpers/features/responsive_table_helpers.rb +++ b/spec/support/helpers/features/responsive_table_helpers.rb @@ -3,7 +3,7 @@ # # Usage: # describe "..." do -# include Spec::Support::Helpers::Features::ResponsiveTableHelpers +# include Features::ResponsiveTableHelpers # ... 
# # expect(first_row.text).to include("John Doe") @@ -13,20 +13,14 @@ # index starts at 1 as index 0 is expected to be the table header # # -module Spec - module Support - module Helpers - module Features - module ResponsiveTableHelpers - def first_row - page.all('.gl-responsive-table-row')[1] - end +module Features + module ResponsiveTableHelpers + def first_row + page.all('.gl-responsive-table-row')[1] + end - def second_row - page.all('.gl-responsive-table-row')[2] - end - end - end + def second_row + page.all('.gl-responsive-table-row')[2] end end end diff --git a/spec/support/helpers/features/runners_helpers.rb b/spec/support/helpers/features/runners_helpers.rb index c5d26108953..0504e883b82 100644 --- a/spec/support/helpers/features/runners_helpers.rb +++ b/spec/support/helpers/features/runners_helpers.rb @@ -1,68 +1,62 @@ # frozen_string_literal: true -module Spec - module Support - module Helpers - module Features - module RunnersHelpers - def within_runner_row(runner_id) - within "[data-testid='runner-row-#{runner_id}']" do - yield - end - end - - def search_bar_selector - '[data-testid="runners-filtered-search"]' - end +module Features + module RunnersHelpers + def within_runner_row(runner_id) + within "[data-testid='runner-row-#{runner_id}']" do + yield + end + end - # The filters must be clicked first to be able to receive events - # See: https://gitlab.com/gitlab-org/gitlab-ui/-/issues/1493 - def focus_filtered_search - page.within(search_bar_selector) do - page.find('.gl-filtered-search-term-token').click - end - end + def search_bar_selector + '[data-testid="runners-filtered-search"]' + end - def input_filtered_search_keys(search_term) - focus_filtered_search + # The filters must be clicked first to be able to receive events + # See: https://gitlab.com/gitlab-org/gitlab-ui/-/issues/1493 + def focus_filtered_search + page.within(search_bar_selector) do + page.find('.gl-filtered-search-term-token').click + end + end - page.within(search_bar_selector) 
do - page.find('input').send_keys(search_term) - click_on 'Search' - end + def input_filtered_search_keys(search_term) + focus_filtered_search - wait_for_requests - end + page.within(search_bar_selector) do + page.find('input').send_keys(search_term) + click_on 'Search' + end - def open_filtered_search_suggestions(filter) - focus_filtered_search + wait_for_requests + end - page.within(search_bar_selector) do - click_on filter - end + def open_filtered_search_suggestions(filter) + focus_filtered_search - wait_for_requests - end + page.within(search_bar_selector) do + click_on filter + end - def input_filtered_search_filter_is_only(filter, value) - focus_filtered_search + wait_for_requests + end - page.within(search_bar_selector) do - click_on filter + def input_filtered_search_filter_is_only(filter, value) + focus_filtered_search - # For OPERATORS_IS, clicking the filter - # immediately preselects "=" operator + page.within(search_bar_selector) do + click_on filter - page.find('input').send_keys(value) - page.find('input').send_keys(:enter) + # For OPERATORS_IS, clicking the filter + # immediately preselects "=" operator - click_on 'Search' - end + page.find('input').send_keys(value) + page.find('input').send_keys(:enter) - wait_for_requests - end - end + click_on 'Search' end + + wait_for_requests end end end diff --git a/spec/support/helpers/features/snippet_helpers.rb b/spec/support/helpers/features/snippet_helpers.rb deleted file mode 100644 index 3e32b0e4c67..00000000000 --- a/spec/support/helpers/features/snippet_helpers.rb +++ /dev/null @@ -1,89 +0,0 @@ -# frozen_string_literal: true - -# These helpers help you interact within the Source Editor (single-file editor, snippets, etc.). 
-# - -require Rails.root.join("spec/support/helpers/features/source_editor_spec_helpers.rb") - -module Spec - module Support - module Helpers - module Features - module SnippetSpecHelpers - include ActionView::Helpers::JavaScriptHelper - include Spec::Support::Helpers::Features::SourceEditorSpecHelpers - - def snippet_description_locator - 'snippet-description' - end - - def snippet_blob_path_locator - 'snippet_file_name' - end - - def snippet_description_view_selector - '.snippet-header .snippet-description' - end - - def snippet_description_field_collapsed - find('.js-description-input').find('input,textarea') - end - - def snippet_get_first_blob_path - page.find_field('snippet_file_name', match: :first).value - end - - def snippet_get_first_blob_value - page.find('.gl-source-editor', match: :first) - end - - def snippet_description_value - page.find_field(snippet_description_locator).value - end - - def snippet_fill_in_visibility(text) - page.find('#visibility-level-setting').choose(text) - end - - def snippet_fill_in_title(value) - fill_in 'snippet-title', with: value - end - - def snippet_fill_in_description(value) - # Click placeholder first to expand full description field - snippet_description_field_collapsed.click - fill_in snippet_description_locator, with: value - end - - def snippet_fill_in_content(value) - page.within('.gl-source-editor') do - el = find('.inputarea') - el.send_keys value - end - end - - def snippet_fill_in_file_name(value) - fill_in(snippet_blob_path_locator, match: :first, with: value) - end - - def snippet_fill_in_form(title: nil, content: nil, file_name: nil, description: nil, visibility: nil) - if content - snippet_fill_in_content(content) - # It takes some time after sending keys for the vue component to - # update so let Capybara wait for the content before proceeding - expect(page).to have_content(content) - end - - snippet_fill_in_title(title) if title - - snippet_fill_in_description(description) if description - - 
snippet_fill_in_file_name(file_name) if file_name - - snippet_fill_in_visibility(visibility) if visibility - end - end - end - end - end -end diff --git a/spec/support/helpers/features/snippet_spec_helpers.rb b/spec/support/helpers/features/snippet_spec_helpers.rb new file mode 100644 index 00000000000..19393f6e438 --- /dev/null +++ b/spec/support/helpers/features/snippet_spec_helpers.rb @@ -0,0 +1,83 @@ +# frozen_string_literal: true + +# These helpers help you interact within the Source Editor (single-file editor, snippets, etc.). +# + +require Rails.root.join("spec/support/helpers/features/source_editor_spec_helpers.rb") + +module Features + module SnippetSpecHelpers + include ActionView::Helpers::JavaScriptHelper + include Features::SourceEditorSpecHelpers + + def snippet_description_locator + 'snippet-description' + end + + def snippet_blob_path_locator + 'snippet_file_name' + end + + def snippet_description_view_selector + '.snippet-header .snippet-description' + end + + def snippet_description_field_collapsed + find('.js-description-input').find('input,textarea') + end + + def snippet_get_first_blob_path + page.find_field('snippet_file_name', match: :first).value + end + + def snippet_get_first_blob_value + page.find('.gl-source-editor', match: :first) + end + + def snippet_description_value + page.find_field(snippet_description_locator).value + end + + def snippet_fill_in_visibility(text) + page.find('#visibility-level-setting').choose(text) + end + + def snippet_fill_in_title(value) + fill_in 'snippet-title', with: value + end + + def snippet_fill_in_description(value) + # Click placeholder first to expand full description field + snippet_description_field_collapsed.click + fill_in snippet_description_locator, with: value + end + + def snippet_fill_in_content(value) + page.within('.gl-source-editor') do + el = find('.inputarea') + el.send_keys value + end + end + + def snippet_fill_in_file_name(value) + fill_in(snippet_blob_path_locator, match: :first, 
with: value) + end + + def snippet_fill_in_form(title: nil, content: nil, file_name: nil, description: nil, visibility: nil) + if content + snippet_fill_in_content(content) + # It takes some time after sending keys for the vue component to + # update so let Capybara wait for the content before proceeding + expect(page).to have_content(content) + end + + snippet_fill_in_title(title) if title + + snippet_fill_in_description(description) if description + + snippet_fill_in_file_name(file_name) if file_name + + snippet_fill_in_visibility(visibility) if visibility + end + end +end diff --git a/spec/support/helpers/features/sorting_helpers.rb b/spec/support/helpers/features/sorting_helpers.rb index 504a9b764cf..8dda16af625 100644 --- a/spec/support/helpers/features/sorting_helpers.rb +++ b/spec/support/helpers/features/sorting_helpers.rb @@ -4,33 +4,27 @@ # # Usage: # describe "..." do -# include Spec::Support::Helpers::Features::SortingHelpers +# include Features::SortingHelpers # ... # # sort_by("Last updated") # -module Spec - module Support - module Helpers - module Features - module SortingHelpers - def sort_by(value) - find('.filter-dropdown-container .dropdown').click +module Features + module SortingHelpers + def sort_by(value) + find('.filter-dropdown-container .dropdown').click - page.within('ul.dropdown-menu.dropdown-menu-right li') do - click_link(value) - end - end - - # pajamas_sort_by is used to sort new pajamas dropdowns. When - # all of the dropdowns are converted, pajamas_sort_by can be renamed to sort_by - # https://gitlab.com/groups/gitlab-org/-/epics/7551 - def pajamas_sort_by(value) - find('.filter-dropdown-container .gl-new-dropdown').click - find('.gl-new-dropdown-item', text: value).click - end - end + page.within('ul.dropdown-menu.dropdown-menu-right li') do + click_link(value) end end + + # pajamas_sort_by is used to sort new pajamas dropdowns. 
When + # all of the dropdowns are converted, pajamas_sort_by can be renamed to sort_by + # https://gitlab.com/groups/gitlab-org/-/epics/7551 + def pajamas_sort_by(value) + find('.filter-dropdown-container .gl-new-dropdown').click + find('.gl-new-dropdown-item', text: value).click + end end end diff --git a/spec/support/helpers/features/source_editor_spec_helpers.rb b/spec/support/helpers/features/source_editor_spec_helpers.rb index f7eb2a52507..e20ded60b01 100644 --- a/spec/support/helpers/features/source_editor_spec_helpers.rb +++ b/spec/support/helpers/features/source_editor_spec_helpers.rb @@ -2,24 +2,18 @@ # These helpers help you interact within the Source Editor (single-file editor, snippets, etc.). # -module Spec - module Support - module Helpers - module Features - module SourceEditorSpecHelpers - include ActionView::Helpers::JavaScriptHelper +module Features + module SourceEditorSpecHelpers + include ActionView::Helpers::JavaScriptHelper - def editor_set_value(value) - editor = find('.monaco-editor') - uri = editor['data-uri'] - execute_script("localMonaco.getModel('#{uri}').setValue('#{escape_javascript(value)}')") + def editor_set_value(value) + editor = find('.monaco-editor') + uri = editor['data-uri'] + execute_script("localMonaco.getModel('#{uri}').setValue('#{escape_javascript(value)}')") - # We only check that the first line is present because when the content is long, - # only a part of the text will be rendered in the DOM due to scrolling - page.has_selector?('.gl-source-editor .view-lines', text: value.lines.first) - end - end - end + # We only check that the first line is present because when the content is long, + # only a part of the text will be rendered in the DOM due to scrolling + page.has_selector?('.gl-source-editor .view-lines', text: value.lines.first) end end end diff --git a/spec/support/helpers/features/top_nav_spec_helpers.rb b/spec/support/helpers/features/top_nav_spec_helpers.rb index de495eceabc..ecc05189fb4 100644 --- 
a/spec/support/helpers/features/top_nav_spec_helpers.rb +++ b/spec/support/helpers/features/top_nav_spec_helpers.rb @@ -2,37 +2,31 @@ # These helpers help you interact within the Source Editor (single-file editor, snippets, etc.). # -module Spec - module Support - module Helpers - module Features - module TopNavSpecHelpers - def open_top_nav - find('.js-top-nav-dropdown-toggle').click - end +module Features + module TopNavSpecHelpers + def open_top_nav + find('.js-top-nav-dropdown-toggle').click + end - def within_top_nav - within('.js-top-nav-dropdown-menu') do - yield - end - end + def within_top_nav + within('.js-top-nav-dropdown-menu') do + yield + end + end - def open_top_nav_projects - open_top_nav + def open_top_nav_projects + open_top_nav - within_top_nav do - click_button('Projects') - end - end + within_top_nav do + click_button('Projects') + end + end - def open_top_nav_groups - open_top_nav + def open_top_nav_groups + open_top_nav - within_top_nav do - click_button('Groups') - end - end - end + within_top_nav do + click_button('Groups') end end end diff --git a/spec/support/helpers/features/two_factor_helpers.rb b/spec/support/helpers/features/two_factor_helpers.rb index d5f069a40ea..e0469091d96 100644 --- a/spec/support/helpers/features/two_factor_helpers.rb +++ b/spec/support/helpers/features/two_factor_helpers.rb @@ -4,92 +4,86 @@ # # Usage: # describe "..." do -# include Spec::Support::Helpers::Features::TwoFactorHelpers +# include Features::TwoFactorHelpers # ... 
# # manage_two_factor_authentication # -module Spec - module Support - module Helpers - module Features - module TwoFactorHelpers - def copy_recovery_codes - click_on _('Copy codes') - click_on _('Proceed') - end +module Features + module TwoFactorHelpers + def copy_recovery_codes + click_on _('Copy codes') + click_on _('Proceed') + end - def enable_two_factor_authentication - click_on _('Enable two-factor authentication') - expect(page).to have_content(_('Set up new device')) - wait_for_requests - end + def enable_two_factor_authentication + click_on _('Enable two-factor authentication') + expect(page).to have_content(_('Set up new device')) + wait_for_requests + end - def manage_two_factor_authentication - click_on 'Manage two-factor authentication' - expect(page).to have_content("Set up new device") - wait_for_requests - end + def manage_two_factor_authentication + click_on 'Manage two-factor authentication' + expect(page).to have_content("Set up new device") + wait_for_requests + end + + # Registers webauthn device via UI + # Remove after `webauthn_without_totp` feature flag is deleted. + def register_webauthn_device(webauthn_device = nil, name: 'My device') + webauthn_device ||= FakeWebauthnDevice.new(page, name) + webauthn_device.respond_to_webauthn_registration + click_on 'Set up new device' + expect(page).to have_content('Your device was successfully set up') + fill_in 'Pick a name', with: name + click_on 'Register device' + webauthn_device + end - # Registers webauthn device via UI - # Remove after `webauthn_without_totp` feature flag is deleted. 
- def register_webauthn_device(webauthn_device = nil, name: 'My device') - webauthn_device ||= FakeWebauthnDevice.new(page, name) - webauthn_device.respond_to_webauthn_registration - click_on 'Set up new device' - expect(page).to have_content('Your device was successfully set up') - fill_in 'Pick a name', with: name - click_on 'Register device' - webauthn_device - end + def webauthn_device_registration(webauthn_device: nil, name: 'My device', password: 'fake') + webauthn_device ||= FakeWebauthnDevice.new(page, name) + webauthn_device.respond_to_webauthn_registration + click_on _('Set up new device') + webauthn_fill_form_and_submit(name: name, password: password) + webauthn_device + end - def webauthn_device_registration(webauthn_device: nil, name: 'My device', password: 'fake') - webauthn_device ||= FakeWebauthnDevice.new(page, name) - webauthn_device.respond_to_webauthn_registration - click_on _('Set up new device') - webauthn_fill_form_and_submit(name: name, password: password) - webauthn_device - end + def webauthn_fill_form_and_submit(name: 'My device', password: 'fake') + content = _('Your device was successfully set up! Give it a name and register it with the GitLab server.') + expect(page).to have_content(content) - def webauthn_fill_form_and_submit(name: 'My device', password: 'fake') - expect(page).to have_content( - _('Your device was successfully set up! 
Give it a name and register it with the GitLab server.') - ) - within '[data-testid="create-webauthn"]' do - fill_in _('Device name'), with: name - fill_in _('Current password'), with: password - click_on _('Register device') - end - end + within '[data-testid="create-webauthn"]' do + fill_in _('Device name'), with: name + fill_in _('Current password'), with: password + click_on _('Register device') + end + end - # Adds webauthn device directly via database - def add_webauthn_device(app_id, user, fake_device = nil, name: 'My device') - fake_device ||= WebAuthn::FakeClient.new(app_id) + # Adds webauthn device directly via database + def add_webauthn_device(app_id, user, fake_device = nil, name: 'My device') + fake_device ||= WebAuthn::FakeClient.new(app_id) - options_for_create = WebAuthn::Credential.options_for_create( - user: { id: user.webauthn_xid, name: user.username }, - authenticator_selection: { user_verification: 'discouraged' }, - rp: { name: 'GitLab' } - ) - challenge = options_for_create.challenge + options_for_create = WebAuthn::Credential.options_for_create( + user: { id: user.webauthn_xid, name: user.username }, + authenticator_selection: { user_verification: 'discouraged' }, + rp: { name: 'GitLab' } + ) + challenge = options_for_create.challenge - device_response = fake_device.create(challenge: challenge).to_json # rubocop:disable Rails/SaveBang - device_registration_params = { device_response: device_response, - name: name } + device_response = fake_device.create(challenge: challenge).to_json # rubocop:disable Rails/SaveBang + device_registration_params = { device_response: device_response, + name: name } - Webauthn::RegisterService.new( - user, device_registration_params, challenge).execute - FakeWebauthnDevice.new(page, name, fake_device) - end + Webauthn::RegisterService.new( + user, device_registration_params, challenge).execute + FakeWebauthnDevice.new(page, name, fake_device) + end - def assert_fallback_ui(page) - expect(page).to 
have_button('Verify code') - expect(page).to have_css('#user_otp_attempt') - expect(page).not_to have_link('Sign in via 2FA code') - expect(page).not_to have_css("#js-authenticate-token-2fa") - end - end - end + def assert_fallback_ui(page) + expect(page).to have_button('Verify code') + expect(page).to have_css('#user_otp_attempt') + expect(page).not_to have_link('Sign in via 2FA code') + expect(page).not_to have_css("#js-authenticate-token-2fa") end end end diff --git a/spec/support/helpers/features/web_ide_spec_helpers.rb b/spec/support/helpers/features/web_ide_spec_helpers.rb index 4793c9479fe..c51116b55b2 100644 --- a/spec/support/helpers/features/web_ide_spec_helpers.rb +++ b/spec/support/helpers/features/web_ide_spec_helpers.rb @@ -4,119 +4,120 @@ # # Usage: # describe "..." do -# include WebIdeSpecHelpers +# include Features::WebIdeSpecHelpers # ... # # ide_visit(project) # ide_commit -# -module WebIdeSpecHelpers - include Spec::Support::Helpers::Features::SourceEditorSpecHelpers - - # Open the IDE from anywhere by first visiting the given project's page - def ide_visit(project) - visit project_path(project) - - ide_visit_from_link - end +module Features + module WebIdeSpecHelpers + include Features::SourceEditorSpecHelpers - # Open the IDE from the current page by clicking the Web IDE link - def ide_visit_from_link(link_sel = 'Web IDE') - new_tab = window_opened_by { click_link(link_sel) } + # Open the IDE from anywhere by first visiting the given project's page + def ide_visit(project) + visit project_path(project) - switch_to_window new_tab - end + ide_visit_from_link + end - def ide_tree_body - page.find('.ide-tree-body') - end + # Open the IDE from the current page by clicking the Web IDE link + def ide_visit_from_link(link_sel = 'Web IDE') + new_tab = window_opened_by { click_link(link_sel) } - def ide_tree_actions - page.find('.ide-tree-actions') - end + switch_to_window new_tab + end - def ide_tab_selector(mode) - ".js-ide-#{mode}-mode" - end + def 
ide_tree_body + page.find('.ide-tree-body') + end - def ide_folder_row_open?(row) - row.matches_css?('.folder.is-open') - end + def ide_tree_actions + page.find('.ide-tree-actions') + end - # Deletes a file by traversing to `path` - # then clicking the 'Delete' action. - # - # - Throws an error if the file is not found - def ide_delete_file(path) - container = ide_traverse_to_file(path) + def ide_tab_selector(mode) + ".js-ide-#{mode}-mode" + end - click_file_action(container, 'Delete') - end + def ide_folder_row_open?(row) + row.matches_css?('.folder.is-open') + end - # Opens parent directories until the file at `path` - # is exposed. - # - # - Returns a reference to the file row at `path` - # - Throws an error if the file is not found - def ide_traverse_to_file(path) - paths = path.split('/') - container = nil + # Deletes a file by traversing to `path` + # then clicking the 'Delete' action. + # + # - Throws an error if the file is not found + def ide_delete_file(path) + container = ide_traverse_to_file(path) - paths.each_with_index do |path, index| - ide_open_file_row(container) if container - container = find_file_child(container, path, level: index) + click_file_action(container, 'Delete') end - container - end + # Opens parent directories until the file at `path` + # is exposed. 
+ # + # - Returns a reference to the file row at `path` + # - Throws an error if the file is not found + def ide_traverse_to_file(path) + paths = path.split('/') + container = nil + + paths.each_with_index do |path, index| + ide_open_file_row(container) if container + container = find_file_child(container, path, level: index) + end + + container + end - def ide_open_file_row(row) - return if ide_folder_row_open?(row) + def ide_open_file_row(row) + return if ide_folder_row_open?(row) - row.click - end + row.click + end - def ide_set_editor_value(value) - editor_set_value(value) - end + def ide_set_editor_value(value) + editor_set_value(value) + end - def ide_commit_tab_selector - ide_tab_selector('commit') - end + def ide_commit_tab_selector + ide_tab_selector('commit') + end - def ide_commit - find(ide_commit_tab_selector).click + def ide_commit + find(ide_commit_tab_selector).click - commit_to_current_branch - end + commit_to_current_branch + end - private + private - def file_row_container(row) - row ? row.find(:xpath, '..') : ide_tree_body - end + def file_row_container(row) + row ? 
row.find(:xpath, '..') : ide_tree_body + end - def find_file_child(row, name, level: nil) - container = file_row_container(row) - container.find(".file-row[data-level=\"#{level}\"]", text: name) - end + def find_file_child(row, name, level: nil) + container = file_row_container(row) + container.find(".file-row[data-level=\"#{level}\"]", text: name) + end - def click_file_action(row, text) - row.hover - dropdown = row.find('.ide-new-btn') - dropdown.find('button').click - dropdown.find('button', text: text).click - end + def click_file_action(row, text) + row.hover + dropdown = row.find('.ide-new-btn') + dropdown.find('button').click + dropdown.find('button', text: text).click + end - def commit_to_current_branch(option: 'Commit to master branch', message: '') - within '.multi-file-commit-form' do - fill_in('commit-message', with: message) if message + def commit_to_current_branch(option: 'Commit to master branch', message: '') + within '.multi-file-commit-form' do + fill_in('commit-message', with: message) if message - choose(option) + choose(option) - click_button('Commit') + click_button('Commit') - wait_for_requests + wait_for_requests + end end end end diff --git a/spec/support/helpers/gitaly_setup.rb b/spec/support/helpers/gitaly_setup.rb index bf3c67a1818..7db9e0aaf09 100644 --- a/spec/support/helpers/gitaly_setup.rb +++ b/spec/support/helpers/gitaly_setup.rb @@ -10,7 +10,6 @@ require 'securerandom' require 'socket' require 'logger' require 'fileutils' -require 'bundler' require_relative '../../../lib/gitlab/utils' @@ -50,51 +49,18 @@ module GitalySetup expand_path('.gitlab_shell_secret') end - def gemfile - File.join(tmp_tests_gitaly_dir, 'ruby', 'Gemfile') - end - - def gemfile_dir - File.dirname(gemfile) - end - def gitlab_shell_secret_file File.join(tmp_tests_gitlab_shell_dir, '.gitlab_shell_secret') end def env { - 'GEM_PATH' => Gem.path.join(':'), - 'BUNDLER_SETUP' => nil, - 'BUNDLE_INSTALL_FLAGS' => nil, - 'BUNDLE_IGNORE_CONFIG' => '1', - 'BUNDLE_PATH' 
=> bundle_path, - 'BUNDLE_GEMFILE' => gemfile, - 'BUNDLE_JOBS' => '4', - 'BUNDLE_RETRY' => '3', - 'RUBYOPT' => nil, - # Git hooks can't run during tests as the internal API is not running. 'GITALY_TESTING_NO_GIT_HOOKS' => "1", 'GITALY_TESTING_ENABLE_ALL_FEATURE_FLAGS' => "true" } end - def bundle_path - # Allow the user to override BUNDLE_PATH if they need to - return ENV['GITALY_TEST_BUNDLE_PATH'] if ENV['GITALY_TEST_BUNDLE_PATH'] - - if ENV['CI'] - expand_path('vendor/gitaly-ruby') - else - explicit_path = Bundler.configured_bundle_path.explicit_path - - return unless explicit_path - - expand_path(explicit_path) - end - end - def config_path(service) case service when :gitaly @@ -125,10 +91,6 @@ module GitalySetup system(env, *cmd, exception: true, chdir: tmp_tests_gitaly_dir) end - def install_gitaly_gems - run_command(%W[make #{tmp_tests_gitaly_dir}/.ruby-bundle], env: env) - end - def build_gitaly run_command(%w[make all WITH_BUNDLED_GIT=YesPlease], env: env.merge('GIT_VERSION' => nil)) end @@ -188,35 +150,6 @@ module GitalySetup end end - def check_gitaly_config! - LOGGER.debug "Checking gitaly-ruby Gemfile...\n" - - unless File.exist?(gemfile) - message = "#{gemfile} does not exist." - message += "\n\nThis might have happened if the CI artifacts for this build were destroyed." if ENV['CI'] - abort message - end - - LOGGER.debug "Checking gitaly-ruby bundle...\n" - - bundle_install unless bundle_check - - abort 'bundle check failed' unless bundle_check - end - - def bundle_check - bundle_cmd('check') - end - - def bundle_install - bundle_cmd('install') - end - - def bundle_cmd(cmd) - out = ENV['CI'] ? $stdout : '/dev/null' - system(env, 'bundle', cmd, out: out, chdir: gemfile_dir) - end - def connect_proc(toml) # This code needs to work in an environment where we cannot use bundler, # so we cannot easily use the toml-rb gem. This ad-hoc parser should be @@ -358,8 +291,6 @@ module GitalySetup end def spawn_gitaly(toml = nil) - check_gitaly_config! 
- pids = [] if toml diff --git a/spec/support/helpers/google_api/cloud_platform_helpers.rb b/spec/support/helpers/google_api/cloud_platform_helpers.rb new file mode 100644 index 00000000000..3d4ffe88da9 --- /dev/null +++ b/spec/support/helpers/google_api/cloud_platform_helpers.rb @@ -0,0 +1,168 @@ +# frozen_string_literal: true + +module GoogleApi + module CloudPlatformHelpers + def stub_google_api_validate_token + request.session[GoogleApi::CloudPlatform::Client.session_key_for_token] = 'token' + request.session[GoogleApi::CloudPlatform::Client.session_key_for_expires_at] = 1.hour.since.to_i.to_s + end + + def stub_google_api_expired_token + request.session[GoogleApi::CloudPlatform::Client.session_key_for_token] = 'token' + request.session[GoogleApi::CloudPlatform::Client.session_key_for_expires_at] = 1.hour.ago.to_i.to_s + end + + def stub_cloud_platform_projects_list(options) + WebMock.stub_request(:get, cloud_platform_projects_list_url) + .to_return(cloud_platform_response(cloud_platform_projects_body(options))) + end + + def stub_cloud_platform_projects_get_billing_info(project_id, billing_enabled) + WebMock.stub_request(:get, cloud_platform_projects_get_billing_info_url(project_id)) + .to_return(cloud_platform_response(cloud_platform_projects_billing_info_body(project_id, billing_enabled))) + end + + def stub_cloud_platform_get_zone_cluster(project_id, zone, cluster_id, options = {}) + WebMock.stub_request(:get, cloud_platform_get_zone_cluster_url(project_id, zone, cluster_id)) + .to_return(cloud_platform_response(cloud_platform_cluster_body(options))) + end + + def stub_cloud_platform_get_zone_cluster_error(project_id, zone, cluster_id) + WebMock.stub_request(:get, cloud_platform_get_zone_cluster_url(project_id, zone, cluster_id)) + .to_return(status: [500, "Internal Server Error"]) + end + + def stub_cloud_platform_create_cluster(project_id, zone, options = {}) + WebMock.stub_request(:post, cloud_platform_create_cluster_url(project_id, zone)) + 
.to_return(cloud_platform_response(cloud_platform_operation_body(options))) + end + + def stub_cloud_platform_create_cluster_error(project_id, zone) + WebMock.stub_request(:post, cloud_platform_create_cluster_url(project_id, zone)) + .to_return(status: [500, "Internal Server Error"]) + end + + def stub_cloud_platform_get_zone_operation(project_id, zone, operation_id, options = {}) + WebMock.stub_request(:get, cloud_platform_get_zone_operation_url(project_id, zone, operation_id)) + .to_return(cloud_platform_response(cloud_platform_operation_body(options))) + end + + def stub_cloud_platform_get_zone_operation_error(project_id, zone, operation_id) + WebMock.stub_request(:get, cloud_platform_get_zone_operation_url(project_id, zone, operation_id)) + .to_return(status: [500, "Internal Server Error"]) + end + + def cloud_platform_projects_list_url + "https://cloudresourcemanager.googleapis.com/v1/projects" + end + + def cloud_platform_projects_get_billing_info_url(project_id) + "https://cloudbilling.googleapis.com/v1/projects/#{project_id}/billingInfo" + end + + def cloud_platform_get_zone_cluster_url(project_id, zone, cluster_id) + "https://container.googleapis.com/v1/projects/#{project_id}/zones/#{zone}/clusters/#{cluster_id}" + end + + def cloud_platform_create_cluster_url(project_id, zone) + "https://container.googleapis.com/v1beta1/projects/#{project_id}/zones/#{zone}/clusters" + end + + def cloud_platform_get_zone_operation_url(project_id, zone, operation_id) + "https://container.googleapis.com/v1/projects/#{project_id}/zones/#{zone}/operations/#{operation_id}" + end + + def cloud_platform_response(body) + { status: 200, headers: { 'Content-Type' => 'application/json' }, body: body.to_json } + end + + def load_sample_cert + pem_file = File.expand_path(Rails.root.join('spec/fixtures/clusters/sample_cert.pem')) + Base64.encode64(File.read(pem_file)) + end + + ## + # gcloud container clusters create + # 
https://cloud.google.com/kubernetes-engine/docs/reference/rest/v1/projects.zones.clusters/create + # rubocop:disable Metrics/CyclomaticComplexity + # rubocop:disable Metrics/PerceivedComplexity + def cloud_platform_cluster_body(options) + { + name: options[:name] || 'string', + description: options[:description] || 'string', + initialNodeCount: options[:initialNodeCount] || 'number', + masterAuth: { + username: options[:username] || 'string', + password: options[:password] || 'string', + clusterCaCertificate: options[:clusterCaCertificate] || load_sample_cert, + clientCertificate: options[:clientCertificate] || 'string', + clientKey: options[:clientKey] || 'string' + }, + loggingService: options[:loggingService] || 'string', + monitoringService: options[:monitoringService] || 'string', + network: options[:network] || 'string', + clusterIpv4Cidr: options[:clusterIpv4Cidr] || 'string', + subnetwork: options[:subnetwork] || 'string', + enableKubernetesAlpha: options[:enableKubernetesAlpha] || 'boolean', + labelFingerprint: options[:labelFingerprint] || 'string', + selfLink: options[:selfLink] || 'string', + zone: options[:zone] || 'string', + endpoint: options[:endpoint] || 'string', + initialClusterVersion: options[:initialClusterVersion] || 'string', + currentMasterVersion: options[:currentMasterVersion] || 'string', + currentNodeVersion: options[:currentNodeVersion] || 'string', + createTime: options[:createTime] || 'string', + status: options[:status] || 'RUNNING', + statusMessage: options[:statusMessage] || 'string', + nodeIpv4CidrSize: options[:nodeIpv4CidrSize] || 'number', + servicesIpv4Cidr: options[:servicesIpv4Cidr] || 'string', + currentNodeCount: options[:currentNodeCount] || 'number', + expireTime: options[:expireTime] || 'string' + } + end + # rubocop:enable Metrics/CyclomaticComplexity + # rubocop:enable Metrics/PerceivedComplexity + + def cloud_platform_operation_body(options) + { + name: options[:name] || 'operation-1234567891234-1234567', + zone: 
options[:zone] || 'us-central1-a', + operationType: options[:operationType] || 'CREATE_CLUSTER', + status: options[:status] || 'PENDING', + detail: options[:detail] || 'detail', + statusMessage: options[:statusMessage] || '', + selfLink: options[:selfLink] || 'https://container.googleapis.com/v1/projects/123456789101/zones/us-central1-a/operations/operation-1234567891234-1234567', + targetLink: options[:targetLink] || 'https://container.googleapis.com/v1/projects/123456789101/zones/us-central1-a/clusters/test-cluster', + startTime: options[:startTime] || '2017-09-13T16:49:13.055601589Z', + endTime: options[:endTime] || '' + } + end + + def cloud_platform_projects_body(options) + { + projects: [ + { + projectNumber: options[:project_number] || "1234", + projectId: options[:project_id] || "test-project-1234", + lifecycleState: "ACTIVE", + name: options[:name] || "test-project", + createTime: "2017-12-16T01:48:29.129Z", + parent: { + type: "organization", + id: "12345" + } + } + ] + } + end + + def cloud_platform_projects_billing_info_body(project_id, billing_enabled) + { + name: "projects/#{project_id}/billingInfo", + projectId: project_id.to_s, + billingAccountName: "account-name", + billingEnabled: billing_enabled + } + end + end +end diff --git a/spec/support/helpers/graphql/arguments.rb b/spec/support/helpers/graphql/arguments.rb new file mode 100644 index 00000000000..478a460a0f6 --- /dev/null +++ b/spec/support/helpers/graphql/arguments.rb @@ -0,0 +1,71 @@ +# frozen_string_literal: true + +module Graphql + class Arguments + delegate :blank?, :empty?, to: :to_h + + def initialize(values) + @values = values + end + + def to_h + @values + end + + def ==(other) + to_h == other&.to_h + end + + alias_method :eql, :== + + def to_s + return '' if empty? 
+ + @values.map do |name, value| + value_str = as_graphql_literal(value) + + "#{GraphqlHelpers.fieldnamerize(name.to_s)}: #{value_str}" + end.join(", ") + end + + def as_graphql_literal(value) + self.class.as_graphql_literal(value) + end + + # Transform values to GraphQL literal arguments. + # Use symbol for Enum values + def self.as_graphql_literal(value) + case value + when ::Graphql::Arguments then "{#{value}}" + when Array then "[#{value.map { |v| as_graphql_literal(v) }.join(',')}]" + when Hash then "{#{new(value)}}" + when Integer, Float, Symbol then value.to_s + when String, GlobalID then "\"#{value.to_s.gsub(/"/, '\\"')}\"" + when Time, Date then "\"#{value.iso8601}\"" + when NilClass then 'null' + when true then 'true' + when false then 'false' + else + value.to_graphql_value + end + rescue NoMethodError + raise ArgumentError, "Cannot represent #{value} (instance of #{value.class}) as GraphQL literal" + end + + def merge(other) + self.class.new(@values.merge(other.to_h)) + end + + def +(other) + if blank? + other + elsif other.blank? + self + elsif other.is_a?(String) + [to_s, other].compact.join(', ') + else + merge(other) + end + end + end +end diff --git a/spec/support/helpers/graphql/fake_query_type.rb b/spec/support/helpers/graphql/fake_query_type.rb new file mode 100644 index 00000000000..bdf30908532 --- /dev/null +++ b/spec/support/helpers/graphql/fake_query_type.rb @@ -0,0 +1,23 @@ +# frozen_string_literal: true + +require 'graphql' + +module Graphql + class FakeQueryType < ::GraphQL::Schema::Object + graphql_name 'FakeQuery' + + field :hello_world, String, null: true do + argument :message, String, required: false + end + + field :breaking_field, String, null: true + + def hello_world(message: "world") + "Hello #{message}!" 
+ end + + def breaking_field + raise "This field is supposed to break" + end + end +end diff --git a/spec/support/helpers/graphql/fake_tracer.rb b/spec/support/helpers/graphql/fake_tracer.rb new file mode 100644 index 00000000000..58688c9abd0 --- /dev/null +++ b/spec/support/helpers/graphql/fake_tracer.rb @@ -0,0 +1,15 @@ +# frozen_string_literal: true + +module Graphql + class FakeTracer + def initialize(trace_callback) + @trace_callback = trace_callback + end + + def trace(...) + @trace_callback.call(...) + + yield + end + end +end diff --git a/spec/support/helpers/graphql/field_inspection.rb b/spec/support/helpers/graphql/field_inspection.rb new file mode 100644 index 00000000000..8730f82b893 --- /dev/null +++ b/spec/support/helpers/graphql/field_inspection.rb @@ -0,0 +1,35 @@ +# frozen_string_literal: true + +module Graphql + class FieldInspection + def initialize(field) + @field = field + end + + def nested_fields? + !scalar? && !enum? + end + + def scalar? + type.kind.scalar? + end + + def enum? + type.kind.enum? + end + + def type + @type ||= begin + field_type = @field.type + + # The type could be nested. For example `[GraphQL::Types::String]`: + # - List + # - String! 
+ # - String + field_type = field_type.of_type while field_type.respond_to?(:of_type) + + field_type + end + end + end +end diff --git a/spec/support/helpers/graphql/field_selection.rb b/spec/support/helpers/graphql/field_selection.rb new file mode 100644 index 00000000000..432340cfdb5 --- /dev/null +++ b/spec/support/helpers/graphql/field_selection.rb @@ -0,0 +1,69 @@ +# frozen_string_literal: true + +module Graphql + class FieldSelection + delegate :empty?, :blank?, :to_h, to: :selection + delegate :size, to: :paths + + attr_reader :selection + + def initialize(selection = {}) + @selection = selection.to_h + end + + def to_s + serialize_field_selection(selection) + end + + def paths + selection.flat_map do |field, subselection| + paths_in([field], subselection) + end + end + + private + + def paths_in(path, leaves) + return [path] if leaves.nil? + + leaves.to_a.flat_map do |k, v| + paths_in([k], v).map { |tail| path + tail } + end + end + + def serialize_field_selection(hash, level = 0) + indent = ' ' * level + + hash.map do |field, subselection| + if subselection.nil? + "#{indent}#{field}" + else + subfields = serialize_field_selection(subselection, level + 1) + "#{indent}#{field} {\n#{subfields}\n#{indent}}" + end + end.join("\n") + end + + NO_SKIP = ->(_name, _field) { false } + + def self.select_fields(type, skip = NO_SKIP, max_depth = 3) + return new if max_depth <= 0 || !type.kind.fields? + + new(type.fields.flat_map do |name, field| + next [] if skip[name, field] + + inspected = ::Graphql::FieldInspection.new(field) + singular_field_type = inspected.type + + if inspected.nested_fields? + subselection = select_fields(singular_field_type, skip, max_depth - 1) + next [] if subselection.empty? 
+ + [[name, subselection.to_h]] + else + [[name, nil]] + end + end) + end + end +end diff --git a/spec/support/helpers/graphql/resolver_factories.rb b/spec/support/helpers/graphql/resolver_factories.rb new file mode 100644 index 00000000000..76df4b58943 --- /dev/null +++ b/spec/support/helpers/graphql/resolver_factories.rb @@ -0,0 +1,40 @@ +# frozen_string_literal: true + +module Graphql + module ResolverFactories + def new_resolver(resolved_value = 'Resolved value', method: :resolve) + case method + when :resolve + simple_resolver(resolved_value) + when :find_object + find_object_resolver(resolved_value) + else + raise "Cannot build a resolver for #{method}" + end + end + + private + + def simple_resolver(resolved_value = 'Resolved value', base_class: Resolvers::BaseResolver) + Class.new(base_class) do + define_method :resolve do |**_args| + resolved_value + end + end + end + + def find_object_resolver(resolved_value = 'Found object') + Class.new(Resolvers::BaseResolver) do + include ::Gitlab::Graphql::Authorize::AuthorizeResource + + def resolve(...) + authorized_find!(...) + end + + define_method :find_object do |**_args| + resolved_value + end + end + end + end +end diff --git a/spec/support/helpers/graphql/subscriptions/action_cable/mock_action_cable.rb b/spec/support/helpers/graphql/subscriptions/action_cable/mock_action_cable.rb new file mode 100644 index 00000000000..2ccc62a8729 --- /dev/null +++ b/spec/support/helpers/graphql/subscriptions/action_cable/mock_action_cable.rb @@ -0,0 +1,100 @@ +# frozen_string_literal: true + +# A stub implementation of ActionCable. +# Any methods to support the mock backend have `mock` in the name. 
+module Graphql + module Subscriptions + module ActionCable + class MockActionCable + class MockChannel + def initialize + @mock_broadcasted_messages = [] + end + + attr_reader :mock_broadcasted_messages + + def stream_from(stream_name, coder: nil, &block) # rubocop:disable Lint/UnusedMethodArgument + # Rails uses `coder`, we don't + block ||= ->(msg) { @mock_broadcasted_messages << msg } + MockActionCable.mock_stream_for(stream_name).add_mock_channel(self, block) + end + end + + class MockStream + def initialize + @mock_channels = {} + end + + def add_mock_channel(channel, handler) + @mock_channels[channel] = handler + end + + def mock_broadcast(message) + @mock_channels.each_value do |handler| + handler && handler.call(message) + end + end + end + + class << self + def clear_mocks + @mock_streams = {} + end + + def server + self + end + + def broadcast(stream_name, message) + stream = @mock_streams[stream_name] + stream && stream.mock_broadcast(message) + end + + def mock_stream_for(stream_name) + @mock_streams[stream_name] ||= MockStream.new + end + + def get_mock_channel + MockChannel.new + end + + def mock_stream_names + @mock_streams.keys + end + end + end + + class MockSchema < GraphQL::Schema + class << self + def find_by_gid(gid) + return unless gid + + if gid.model_class < ApplicationRecord + Gitlab::Graphql::Loaders::BatchModelLoader.new(gid.model_class, gid.model_id).find + elsif gid.model_class.respond_to?(:lazy_find) + gid.model_class.lazy_find(gid.model_id) + else + gid.find + end + end + + def id_from_object(object, _type = nil, _ctx = nil) + unless object.respond_to?(:to_global_id) + # This is an error in our schema and needs to be solved. So raise a + # more meaningful error message + raise "#{object} does not implement `to_global_id`. 
" \ + "Include `GlobalID::Identification` into `#{object.class}" + end + + object.to_global_id + end + end + + query(::Types::QueryType) + subscription(::Types::SubscriptionType) + + use GraphQL::Subscriptions::ActionCableSubscriptions, action_cable: MockActionCable, action_cable_coder: JSON + end + end + end +end diff --git a/spec/support/helpers/graphql/subscriptions/action_cable/mock_gitlab_schema.rb b/spec/support/helpers/graphql/subscriptions/action_cable/mock_gitlab_schema.rb new file mode 100644 index 00000000000..cd5d78cc78b --- /dev/null +++ b/spec/support/helpers/graphql/subscriptions/action_cable/mock_gitlab_schema.rb @@ -0,0 +1,41 @@ +# frozen_string_literal: true + +# A stub implementation of ActionCable. +# Any methods to support the mock backend have `mock` in the name. +module Graphql + module Subscriptions + module ActionCable + class MockGitlabSchema < GraphQL::Schema + class << self + def find_by_gid(gid) + return unless gid + + if gid.model_class < ApplicationRecord + Gitlab::Graphql::Loaders::BatchModelLoader.new(gid.model_class, gid.model_id).find + elsif gid.model_class.respond_to?(:lazy_find) + gid.model_class.lazy_find(gid.model_id) + else + gid.find + end + end + + def id_from_object(object, _type = nil, _ctx = nil) + unless object.respond_to?(:to_global_id) + # This is an error in our schema and needs to be solved. So raise a + # more meaningful error message + raise "#{object} does not implement `to_global_id`. 
" \ + "Include `GlobalID::Identification` into `#{object.class}" + end + + object.to_global_id + end + end + + query(::Types::QueryType) + subscription(::Types::SubscriptionType) + + use GraphQL::Subscriptions::ActionCableSubscriptions, action_cable: MockActionCable, action_cable_coder: JSON + end + end + end +end diff --git a/spec/support/helpers/graphql/subscriptions/notes/helper.rb b/spec/support/helpers/graphql/subscriptions/notes/helper.rb new file mode 100644 index 00000000000..9a552f9879e --- /dev/null +++ b/spec/support/helpers/graphql/subscriptions/notes/helper.rb @@ -0,0 +1,94 @@ +# frozen_string_literal: true + +module Graphql + module Subscriptions + module Notes + module Helper + def subscription_response + subscription_channel = subscribe + yield + subscription_channel.mock_broadcasted_messages.first + end + + def notes_subscription(name, noteable, current_user) + mock_channel = Graphql::Subscriptions::ActionCable::MockActionCable.get_mock_channel + + query = case name + when 'workItemNoteDeleted' + note_deleted_subscription_query(name, noteable) + when 'workItemNoteUpdated' + note_updated_subscription_query(name, noteable) + when 'workItemNoteCreated' + note_created_subscription_query(name, noteable) + else + raise "Subscription query unknown: #{name}" + end + + GitlabSchema.execute(query, context: { current_user: current_user, channel: mock_channel }) + + mock_channel + end + + def note_subscription(name, noteable, current_user) + mock_channel = Graphql::Subscriptions::ActionCable::MockActionCable.get_mock_channel + + query = <<~SUBSCRIPTION + subscription { + #{name}(noteableId: \"#{noteable.to_gid}\") { + id + body + } + } + SUBSCRIPTION + + GitlabSchema.execute(query, context: { current_user: current_user, channel: mock_channel }) + + mock_channel + end + + private + + def note_deleted_subscription_query(name, noteable) + <<~SUBSCRIPTION + subscription { + #{name}(noteableId: \"#{noteable.to_gid}\") { + id + discussionId + lastDiscussionNote + } 
+ } + SUBSCRIPTION + end + + def note_created_subscription_query(name, noteable) + <<~SUBSCRIPTION + subscription { + #{name}(noteableId: \"#{noteable.to_gid}\") { + id + discussion { + id + notes { + nodes { + id + } + } + } + } + } + SUBSCRIPTION + end + + def note_updated_subscription_query(name, noteable) + <<~SUBSCRIPTION + subscription { + #{name}(noteableId: \"#{noteable.to_gid}\") { + id + body + } + } + SUBSCRIPTION + end + end + end + end +end diff --git a/spec/support/helpers/graphql/var.rb b/spec/support/helpers/graphql/var.rb new file mode 100644 index 00000000000..4f2c774e898 --- /dev/null +++ b/spec/support/helpers/graphql/var.rb @@ -0,0 +1,59 @@ +# frozen_string_literal: true + +module Graphql + # Helper to pass variables around generated queries. + # + # e.g.: + # first = var('Int') + # after = var('String') + # + # query = with_signature( + # [first, after], + # query_graphql_path([ + # [:project, { full_path: project.full_path }], + # [:issues, { after: after, first: first }] + # :nodes + # ], all_graphql_fields_for('Issue')) + # ) + # + # post_graphql(query, variables: [first.with(2), after.with(some_cursor)]) + # + class Var + attr_reader :name, :type + attr_accessor :value + + def initialize(name, type) + @name = name + @type = type + end + + def sig + "#{to_graphql_value}: #{type}" + end + + def to_graphql_value + "$#{name}" + end + + # We return a new object so that running the same query twice with + # different values does not risk re-using the value + # + # e.g. + # + # x = var('Int') + # expect { post_graphql(query, variables: x) } + # .to issue_same_number_of_queries_as { post_graphql(query, variables: x.with(1)) } + # + # Here we post the `x` variable once with the value set to 1, and once with + # the value set to `nil`. 
+ def with(value) + copy = Var.new(name, type) + copy.value = value + copy + end + + def to_h + { name => value } + end + end +end diff --git a/spec/support/helpers/graphql_helpers.rb b/spec/support/helpers/graphql_helpers.rb index 191e5192a61..a55027d3976 100644 --- a/spec/support/helpers/graphql_helpers.rb +++ b/spec/support/helpers/graphql_helpers.rb @@ -89,13 +89,16 @@ module GraphqlHelpers # All mutations accept a single `:input` argument. Wrap arguments here. args = { input: args } if resolver_class <= ::Mutations::BaseMutation && !args.key?(:input) - resolve_field(field, obj, - args: args, - ctx: ctx, - schema: schema, - object_type: resolver_parent, - extras: { parent: parent, lookahead: lookahead }, - arg_style: arg_style) + resolve_field( + field, + obj, + args: args, + ctx: ctx, + schema: schema, + object_type: resolver_parent, + extras: { parent: parent, lookahead: lookahead }, + arg_style: arg_style + ) end # Resolve the value of a field on an object. @@ -513,20 +516,23 @@ module GraphqlHelpers end def post_graphql_mutation(mutation, current_user: nil, token: {}) - post_graphql(mutation.query, - current_user: current_user, - variables: mutation.variables, - token: token) + post_graphql( + mutation.query, + current_user: current_user, + variables: mutation.variables, + token: token + ) end def post_graphql_mutation_with_uploads(mutation, current_user: nil) file_paths = file_paths_in_mutation(mutation) params = mutation_to_apollo_uploads_param(mutation, files: file_paths) - workhorse_post_with_file(api('/', current_user, version: 'graphql'), - params: params, - file_key: '1' - ) + workhorse_post_with_file( + api('/', current_user, version: 'graphql'), + params: params, + file_key: '1' + ) end def file_paths_in_mutation(mutation) diff --git a/spec/support/helpers/http_io_helpers.rb b/spec/support/helpers/http_io_helpers.rb new file mode 100644 index 00000000000..638d780cdc2 --- /dev/null +++ b/spec/support/helpers/http_io_helpers.rb @@ -0,0 +1,49 @@ +# 
frozen_string_literal: true + +module HttpIOHelpers + def stub_remote_url_206(url, file_path) + WebMock.stub_request(:get, url) + .to_return { |request| remote_url_response(file_path, request, 206) } + end + + def stub_remote_url_200(url, file_path) + WebMock.stub_request(:get, url) + .to_return { |request| remote_url_response(file_path, request, 200) } + end + + def stub_remote_url_500(url) + WebMock.stub_request(:get, url) + .to_return(status: [500, "Internal Server Error"]) + end + + def remote_url_response(file_path, request, response_status) + range = request.headers['Range'].match(/bytes=(\d+)-(\d+)/) + + body = File.read(file_path).force_encoding(Encoding::BINARY) + size = body.bytesize + + { + status: response_status, + headers: remote_url_response_headers(response_status, range[1].to_i, range[2].to_i, size), + body: body[range[1].to_i..range[2].to_i] + } + end + + def remote_url_response_headers(response_status, from, to, size) + { 'Content-Type' => 'text/plain' }.tap do |headers| + headers.merge('Content-Range' => "bytes #{from}-#{to}/#{size}") if response_status == 206 + end + end + + def set_smaller_buffer_size_than(file_size) + blocks = (file_size / 128) + new_size = (blocks / 2) * 128 + stub_const("Gitlab::HttpIO::BUFFER_SIZE", new_size) + end + + def set_larger_buffer_size_than(file_size) + blocks = (file_size / 128) + new_size = (blocks * 2) * 128 + stub_const("Gitlab::HttpIO::BUFFER_SIZE", new_size) + end +end diff --git a/spec/support/helpers/keyset_pagination_helpers.rb b/spec/support/helpers/keyset_pagination_helpers.rb new file mode 100644 index 00000000000..4bc20098e8c --- /dev/null +++ b/spec/support/helpers/keyset_pagination_helpers.rb @@ -0,0 +1,20 @@ +# frozen_string_literal: true + +module KeysetPaginationHelpers + def pagination_links(response) + link = response.headers['LINK'] + return unless link + + link.split(',').filter_map do |link| + match = link.match(/<(?.*)>; rel="(?\w+)"/) + break nil unless match + + { url: match[:url], rel: 
match[:rel] } + end + end + + def pagination_params_from_next_url(response) + next_url = pagination_links(response).find { |link| link[:rel] == 'next' }[:url] + Rack::Utils.parse_query(URI.parse(next_url).query) + end +end diff --git a/spec/support/helpers/login_helpers.rb b/spec/support/helpers/login_helpers.rb index e93d04a0b80..05ef960fda7 100644 --- a/spec/support/helpers/login_helpers.rb +++ b/spec/support/helpers/login_helpers.rb @@ -116,7 +116,7 @@ module LoginHelpers visit new_user_session_path expect(page).to have_content('Sign in with') - check 'remember_me' if remember_me + check 'remember_me_omniauth' if remember_me click_button "oauth-login-#{provider}" end diff --git a/spec/support/helpers/migrations_helpers.rb b/spec/support/helpers/migrations_helpers.rb index 6fc5904fc83..1b8c3388051 100644 --- a/spec/support/helpers/migrations_helpers.rb +++ b/spec/support/helpers/migrations_helpers.rb @@ -92,7 +92,7 @@ module MigrationsHelpers end def reset_column_information(klass) - klass.reset_column_information + klass.reset_column_information if klass.instance_variable_get(:@table_name) end # In some migration tests, we're using factories to create records, diff --git a/spec/support/helpers/migrations_helpers/cluster_helpers.rb b/spec/support/helpers/migrations_helpers/cluster_helpers.rb new file mode 100644 index 00000000000..03104e22bcf --- /dev/null +++ b/spec/support/helpers/migrations_helpers/cluster_helpers.rb @@ -0,0 +1,71 @@ +# frozen_string_literal: true + +module MigrationHelpers + module ClusterHelpers + # Creates a list of cluster projects. 
+ def create_cluster_project_list(quantity) + group = namespaces_table.create!(name: 'gitlab-org', path: 'gitlab-org') + + quantity.times do |id| + create_cluster_project(group, id) + end + end + + # Creates dependencies for a cluster project: + # - Group + # - Project + # - Cluster + # - Project - cluster relationship + # - GCP provider + # - Platform Kubernetes + def create_cluster_project(group, id) + project = projects_table.create!( + name: "project-#{id}", + path: "project-#{id}", + namespace_id: group.id + ) + + cluster = clusters_table.create!( + name: 'test-cluster', + cluster_type: 3, + provider_type: :gcp, + platform_type: :kubernetes + ) + + cluster_projects_table.create!(project_id: project.id, cluster_id: cluster.id) + + provider_gcp_table.create!( + gcp_project_id: "test-gcp-project-#{id}", + endpoint: '111.111.111.111', + cluster_id: cluster.id, + status: 3, + num_nodes: 1, + zone: 'us-central1-a' + ) + + platform_kubernetes_table.create!( + cluster_id: cluster.id, + api_url: 'https://kubernetes.example.com', + encrypted_token: 'a' * 40, + encrypted_token_iv: 'a' * 40 + ) + end + + # Creates a Kubernetes namespace for a list of clusters + def create_kubernetes_namespace(clusters) + clusters.each do |cluster| + cluster_project = cluster_projects_table.find_by(cluster_id: cluster.id) + project = projects_table.find(cluster_project.project_id) + namespace = "#{project.path}-#{project.id}" + + cluster_kubernetes_namespaces_table.create!( + cluster_project_id: cluster_project.id, + cluster_id: cluster.id, + project_id: cluster_project.project_id, + namespace: namespace, + service_account_name: "#{namespace}-service-account" + ) + end + end + end +end diff --git a/spec/support/helpers/migrations_helpers/namespaces_helper.rb b/spec/support/helpers/migrations_helpers/namespaces_helper.rb new file mode 100644 index 00000000000..d9a4e0d1731 --- /dev/null +++ b/spec/support/helpers/migrations_helpers/namespaces_helper.rb @@ -0,0 +1,15 @@ +# 
frozen_string_literal: true + +module MigrationHelpers + module NamespacesHelpers + def create_namespace(name, visibility, options = {}) + table(:namespaces).create!( + { + name: name, + path: name, + type: 'Group', + visibility_level: visibility + }.merge(options)) + end + end +end diff --git a/spec/support/helpers/migrations_helpers/schema_version_finder.rb b/spec/support/helpers/migrations_helpers/schema_version_finder.rb new file mode 100644 index 00000000000..69469959ce5 --- /dev/null +++ b/spec/support/helpers/migrations_helpers/schema_version_finder.rb @@ -0,0 +1,35 @@ +# frozen_string_literal: true + +# Sometimes data migration specs require adding invalid test data in order to test +# the migration (e.g. adding a row with null foreign key). Certain db migrations that +# add constraints (e.g. NOT NULL constraint) prevent invalid records from being added +# and data migration from being tested. For this reason, SchemaVersionFinder can be used +# to find and use schema prior to specified one. +# +# @example +# RSpec.describe CleanupThings, :migration, +# schema: MigrationHelpers::SchemaVersionFinder.migration_prior(AddNotNullConstraint) do ... +# +# SchemaVersionFinder returns schema version prior to the one specified, which allows to then add +# invalid records to the database, which in return allows to properly test data migration. 
+module MigrationHelpers + class SchemaVersionFinder + def self.migrations_paths + ActiveRecord::Migrator.migrations_paths + end + + def self.migration_context + ActiveRecord::MigrationContext.new(migrations_paths, ActiveRecord::SchemaMigration) + end + + def self.migrations + migration_context.migrations + end + + def self.migration_prior(migration_klass) + migrations.each_cons(2) do |previous, migration| + break previous.version if migration.name == migration_klass.name + end + end + end +end diff --git a/spec/support/helpers/migrations_helpers/vulnerabilities_findings_helper.rb b/spec/support/helpers/migrations_helpers/vulnerabilities_findings_helper.rb new file mode 100644 index 00000000000..1f8505978f5 --- /dev/null +++ b/spec/support/helpers/migrations_helpers/vulnerabilities_findings_helper.rb @@ -0,0 +1,118 @@ +# frozen_string_literal: true + +module MigrationHelpers + module VulnerabilitiesFindingsHelper + def attributes_for_vulnerabilities_finding + uuid = SecureRandom.uuid + + { + project_fingerprint: SecureRandom.hex(20), + location_fingerprint: Digest::SHA1.hexdigest(SecureRandom.hex(10)), # rubocop:disable Fips/SHA1 + uuid: uuid, + name: "Vulnerability Finding #{uuid}", + metadata_version: '1.3', + raw_metadata: raw_metadata + } + end + + def raw_metadata + { + "description" => "The cipher does not provide data integrity update 1", + "message" => "The cipher does not provide data integrity", + "cve" => "818bf5dacb291e15d9e6dc3c5ac32178:CIPHER", + "solution" => "GCM mode introduces an HMAC into the resulting encrypted data, providing integrity of the result.", # rubocop:disable Layout/LineLength + "location" => { + "file" => "maven/src/main/java/com/gitlab/security_products/tests/App.java", + "start_line" => 29, + "end_line" => 29, + "class" => "com.gitlab.security_products.tests.App", + "method" => "insecureCypher" + }, + "links" => [ + { + "name" => "Cipher does not check for integrity first?", + "url" => 
"https://crypto.stackexchange.com/questions/31428/pbewithmd5anddes-cipher-does-not-check-for-integrity-first" + } + ], + "assets" => [ + { + "type" => "postman", + "name" => "Test Postman Collection", + "url" => "http://localhost/test.collection" + } + ], + "evidence" => { + "summary" => "Credit card detected", + "request" => { + "method" => "GET", + "url" => "http://goat:8080/WebGoat/logout", + "body" => nil, + "headers" => [ + { + "name" => "Accept", + "value" => "*/*" + } + ] + }, + "response" => { + "reason_phrase" => "OK", + "status_code" => 200, + "body" => nil, + "headers" => [ + { + "name" => "Content-Length", + "value" => "0" + } + ] + }, + "source" => { + "id" => "assert:Response Body Analysis", + "name" => "Response Body Analysis", + "url" => "htpp://hostname/documentation" + }, + "supporting_messages" => [ + { + "name" => "Origional", + "request" => { + "method" => "GET", + "url" => "http://goat:8080/WebGoat/logout", + "body" => "", + "headers" => [ + { + "name" => "Accept", + "value" => "*/*" + } + ] + } + }, + { + "name" => "Recorded", + "request" => { + "method" => "GET", + "url" => "http://goat:8080/WebGoat/logout", + "body" => "", + "headers" => [ + { + "name" => "Accept", + "value" => "*/*" + } + ] + }, + "response" => { + "reason_phrase" => "OK", + "status_code" => 200, + "body" => "", + "headers" => [ + { + "name" => "Content-Length", + "value" => "0" + } + ] + } + } + ] + } + } + end + end +end diff --git a/spec/support/helpers/models/ci/partitioning_testing/cascade_check.rb b/spec/support/helpers/models/ci/partitioning_testing/cascade_check.rb new file mode 100644 index 00000000000..81c2d2cb225 --- /dev/null +++ b/spec/support/helpers/models/ci/partitioning_testing/cascade_check.rb @@ -0,0 +1,34 @@ +# frozen_string_literal: true + +module PartitioningTesting + module CascadeCheck + extend ActiveSupport::Concern + + included do + after_create :check_partition_cascade_value + end + + def check_partition_cascade_value + raise 'Partition value not 
found' unless partition_scope_value + + return if partition_id == partition_scope_value + + raise "partition_id was expected to equal #{partition_scope_value} but it was #{partition_id}." + end + + class_methods do + # Allowing partition callback to be used with BulkInsertSafe + def _bulk_insert_callback_allowed?(name, args) + super || (args.first == :after && args.second == :check_partition_cascade_value) + end + end + end +end + +Ci::Partitionable::Testing::PARTITIONABLE_MODELS.each do |klass| + next if klass == 'Ci::Pipeline' + + model = klass.safe_constantize + + model.include(PartitioningTesting::CascadeCheck) +end diff --git a/spec/support/helpers/models/ci/partitioning_testing/partition_identifiers.rb b/spec/support/helpers/models/ci/partitioning_testing/partition_identifiers.rb new file mode 100644 index 00000000000..aa091095fb6 --- /dev/null +++ b/spec/support/helpers/models/ci/partitioning_testing/partition_identifiers.rb @@ -0,0 +1,13 @@ +# frozen_string_literal: true + +module Ci + module PartitioningTesting + module PartitionIdentifiers + module_function + + def ci_testing_partition_id + 99999 + end + end + end +end diff --git a/spec/support/helpers/models/ci/partitioning_testing/rspec_hooks.rb b/spec/support/helpers/models/ci/partitioning_testing/rspec_hooks.rb new file mode 100644 index 00000000000..3f0a2bb7f3b --- /dev/null +++ b/spec/support/helpers/models/ci/partitioning_testing/rspec_hooks.rb @@ -0,0 +1,23 @@ +# frozen_string_literal: true + +RSpec.configure do |config| + config.include Ci::PartitioningTesting::PartitionIdentifiers + + config.around(:each, :ci_partitionable) do |example| + unless Ci::Build.table_name.to_s.starts_with?('p_') + skip 'Skipping partitioning tests until `ci_builds` is partitioned' + end + + Ci::PartitioningTesting::SchemaHelpers.with_routing_tables do + example.run + end + end + + config.before(:all) do + Ci::PartitioningTesting::SchemaHelpers.setup + end + + config.after(:all) do + 
Ci::PartitioningTesting::SchemaHelpers.teardown + end +end diff --git a/spec/support/helpers/models/ci/partitioning_testing/schema_helpers.rb b/spec/support/helpers/models/ci/partitioning_testing/schema_helpers.rb new file mode 100644 index 00000000000..4107bbcb976 --- /dev/null +++ b/spec/support/helpers/models/ci/partitioning_testing/schema_helpers.rb @@ -0,0 +1,91 @@ +# frozen_string_literal: true + +module Ci + module PartitioningTesting + module SchemaHelpers + DEFAULT_PARTITION = 100 + + module_function + + def with_routing_tables + # model.table_name = :routing_table + yield + # ensure + # model.table_name = :regular_table + end + + # We're dropping the default values here to ensure that the application code + # populates the `partition_id` value and it's not falling back on the + # database default one. We should be able to clean this up after + # partitioning the tables and substituting the routing table in the model: + # https://gitlab.com/gitlab-org/gitlab/-/issues/377822 + # + def setup(connection: Ci::ApplicationRecord.connection) + each_partitionable_table do |table_name| + change_column_default(table_name, from: DEFAULT_PARTITION, to: nil, connection: connection) + change_column_default("p_#{table_name}", from: DEFAULT_PARTITION, to: nil, connection: connection) + create_test_partition("p_#{table_name}", connection: connection) + end + end + + def teardown(connection: Ci::ApplicationRecord.connection) + each_partitionable_table do |table_name| + drop_test_partition("p_#{table_name}", connection: connection) + change_column_default(table_name, from: nil, to: DEFAULT_PARTITION, connection: connection) + change_column_default("p_#{table_name}", from: nil, to: DEFAULT_PARTITION, connection: connection) + end + end + + def each_partitionable_table + ::Ci::Partitionable::Testing::PARTITIONABLE_MODELS.each do |klass| + model = klass.safe_constantize + table_name = model.table_name.delete_prefix('p_') + + yield(table_name) + + model.reset_column_information 
if model.connected? + end + end + + def change_column_default(table_name, from:, to:, connection:) + return unless table_available?(table_name, connection: connection) + + connection.change_column_default(table_name, :partition_id, from: from, to: to) + end + + def create_test_partition(table_name, connection:) + return unless table_available?(table_name, connection: connection) + + drop_test_partition(table_name, connection: connection) + + connection.execute(<<~SQL.squish) + CREATE TABLE #{full_partition_name(table_name)} + PARTITION OF #{table_name} + FOR VALUES IN (#{PartitioningTesting::PartitionIdentifiers.ci_testing_partition_id}); + SQL + end + + def drop_test_partition(table_name, connection:) + return unless table_available?(table_name, connection: connection) + + connection.execute(<<~SQL.squish) + DROP TABLE IF EXISTS #{full_partition_name(table_name)}; + SQL + end + + def table_available?(table_name, connection:) + connection.table_exists?(table_name) && + connection.column_exists?(table_name, :partition_id) + end + + def full_partition_name(table_name) + [ + Gitlab::Database::DYNAMIC_PARTITIONS_SCHEMA, + '._test_gitlab_', + table_name.delete_prefix('p_'), + '_partition' + ].join('') + end + end + end +end diff --git a/spec/support/helpers/models/merge_request_without_merge_request_diff.rb b/spec/support/helpers/models/merge_request_without_merge_request_diff.rb new file mode 100644 index 00000000000..e9f97a2c95a --- /dev/null +++ b/spec/support/helpers/models/merge_request_without_merge_request_diff.rb @@ -0,0 +1,7 @@ +# frozen_string_literal: true + +class MergeRequestWithoutMergeRequestDiff < ::MergeRequest # rubocop:disable Gitlab/NamespacedClass + self.inheritance_column = :_type_disabled + + def ensure_merge_request_diff; end +end diff --git a/spec/support/helpers/navbar_structure_helper.rb b/spec/support/helpers/navbar_structure_helper.rb index 8248ea0bb84..67ea00c6551 100644 --- a/spec/support/helpers/navbar_structure_helper.rb +++ 
b/spec/support/helpers/navbar_structure_helper.rb @@ -73,7 +73,7 @@ module NavbarStructureHelper insert_after_sub_nav_item( _('Package Registry'), within: _('Packages and registries'), - new_sub_nav_item_name: _('Infrastructure Registry') + new_sub_nav_item_name: _('Terraform modules') ) end @@ -114,6 +114,14 @@ module NavbarStructureHelper ) end + def insert_model_experiments_nav(within) + insert_after_sub_nav_item( + within, + within: _('Packages and registries'), + new_sub_nav_item_name: _('Model experiments') + ) + end + def project_analytics_sub_nav_item [ _('Value stream'), diff --git a/spec/support/helpers/project_template_test_helper.rb b/spec/support/helpers/project_template_test_helper.rb index bedbb8601e8..2af4a966f6d 100644 --- a/spec/support/helpers/project_template_test_helper.rb +++ b/spec/support/helpers/project_template_test_helper.rb @@ -9,7 +9,7 @@ module ProjectTemplateTestHelper nfjekyll nfplainhtml nfgitbook nfhexo salesforcedx serverless_framework tencent_serverless_framework jsonnet cluster_management kotlin_native_linux - pelican bridgetown typo3_distribution + pelican bridgetown typo3_distribution laravel ] end end diff --git a/spec/support/helpers/prometheus/metric_builders.rb b/spec/support/helpers/prometheus/metric_builders.rb new file mode 100644 index 00000000000..53329ee8dce --- /dev/null +++ b/spec/support/helpers/prometheus/metric_builders.rb @@ -0,0 +1,29 @@ +# frozen_string_literal: true + +module Prometheus + module MetricBuilders + def simple_query(suffix = 'a', **opts) + { query_range: "query_range_#{suffix}" }.merge(opts) + end + + def simple_queries + [simple_query, simple_query('b', label: 'label', unit: 'unit')] + end + + def simple_metric(title: 'title', required_metrics: [], queries: [simple_query]) + Gitlab::Prometheus::Metric.new(title: title, required_metrics: required_metrics, weight: 1, queries: queries) + end + + def simple_metrics(added_metric_name: 'metric_a') + [ + simple_metric(required_metrics: 
%W[#{added_metric_name} metric_b], queries: simple_queries), + simple_metric(required_metrics: [added_metric_name], queries: [simple_query('empty')]), + simple_metric(required_metrics: %w[metric_c]) + ] + end + + def simple_metric_group(name: 'name', metrics: simple_metrics) + Gitlab::Prometheus::MetricGroup.new(name: name, priority: 1, metrics: metrics) + end + end +end diff --git a/spec/support/helpers/redis_helpers.rb b/spec/support/helpers/redis_helpers.rb new file mode 100644 index 00000000000..2c5ceb2f09e --- /dev/null +++ b/spec/support/helpers/redis_helpers.rb @@ -0,0 +1,9 @@ +# frozen_string_literal: true + +module RedisHelpers + Gitlab::Redis::ALL_CLASSES.each do |instance_class| + define_method("redis_#{instance_class.store_name.underscore}_cleanup!") do + instance_class.with(&:flushdb) + end + end +end diff --git a/spec/support/helpers/search_helpers.rb b/spec/support/helpers/search_helpers.rb index eab30be9243..75853371c0f 100644 --- a/spec/support/helpers/search_helpers.rb +++ b/spec/support/helpers/search_helpers.rb @@ -2,9 +2,6 @@ module SearchHelpers def fill_in_search(text) - # Once the `new_header_search` feature flag has been removed - # We can remove the `.search-input-wrap` selector - # https://gitlab.com/gitlab-org/gitlab/-/issues/339348 page.within('.header-search-new') do find('#search').click fill_in 'search', with: text @@ -14,10 +11,7 @@ module SearchHelpers end def submit_search(query) - # Once the `new_header_search` feature flag has been removed - # We can remove the `.search-form` selector - # https://gitlab.com/gitlab-org/gitlab/-/issues/339348 - page.within('.header-search, .search-form, .search-page-form') do + page.within('.header-search, .search-page-form') do field = find_field('search') field.click field.fill_in(with: query) diff --git a/spec/support/helpers/snowplow_helpers.rb b/spec/support/helpers/snowplow_helpers.rb index 265e1c38b09..a04e5d46df9 100644 --- a/spec/support/helpers/snowplow_helpers.rb +++ 
b/spec/support/helpers/snowplow_helpers.rb @@ -46,7 +46,7 @@ module SnowplowHelpers # } # ] # ) - def expect_snowplow_event(category:, action:, context: nil, **kwargs) + def expect_snowplow_event(category:, action:, context: nil, tracking_method: :event, **kwargs) if context if context.is_a?(Array) kwargs[:context] = [] @@ -60,7 +60,7 @@ module SnowplowHelpers end end - expect(Gitlab::Tracking).to have_received(:event) # rubocop:disable RSpec/ExpectGitlabTracking + expect(Gitlab::Tracking).to have_received(tracking_method) # rubocop:disable RSpec/ExpectGitlabTracking .with(category, action, **kwargs).at_least(:once) end @@ -79,11 +79,11 @@ module SnowplowHelpers # expect_no_snowplow_event # end # end - def expect_no_snowplow_event(category: nil, action: nil, **kwargs) + def expect_no_snowplow_event(category: nil, action: nil, tracking_method: :event, **kwargs) if category && action - expect(Gitlab::Tracking).not_to have_received(:event).with(category, action, **kwargs) # rubocop:disable RSpec/ExpectGitlabTracking + expect(Gitlab::Tracking).not_to have_received(tracking_method).with(category, action, **kwargs) # rubocop:disable RSpec/ExpectGitlabTracking else - expect(Gitlab::Tracking).not_to have_received(:event) # rubocop:disable RSpec/ExpectGitlabTracking + expect(Gitlab::Tracking).not_to have_received(tracking_method) # rubocop:disable RSpec/ExpectGitlabTracking end end end diff --git a/spec/support/helpers/stub_gitlab_calls.rb b/spec/support/helpers/stub_gitlab_calls.rb index e5c30769531..748ea525e40 100644 --- a/spec/support/helpers/stub_gitlab_calls.rb +++ b/spec/support/helpers/stub_gitlab_calls.rb @@ -94,10 +94,10 @@ module StubGitlabCalls end def stub_commonmark_sourcepos_disabled - render_options = Banzai::Filter::MarkdownEngines::CommonMark::RENDER_OPTIONS + engine = Banzai::Filter::MarkdownFilter.render_engine(nil) - allow_next_instance_of(Banzai::Filter::MarkdownEngines::CommonMark) do |instance| - allow(instance).to 
receive(:render_options).and_return(render_options) + allow_next_instance_of(engine) do |instance| + allow(instance).to receive(:sourcepos_disabled?).and_return(true) end end diff --git a/spec/support/helpers/stub_object_storage.rb b/spec/support/helpers/stub_object_storage.rb index d120e1805e3..4efe2a98a45 100644 --- a/spec/support/helpers/stub_object_storage.rb +++ b/spec/support/helpers/stub_object_storage.rb @@ -2,9 +2,11 @@ module StubObjectStorage def stub_dependency_proxy_object_storage(**params) - stub_object_storage_uploader(config: ::Gitlab.config.dependency_proxy.object_store, - uploader: ::DependencyProxy::FileUploader, - **params) + stub_object_storage_uploader( + config: ::Gitlab.config.dependency_proxy.object_store, + uploader: ::DependencyProxy::FileUploader, + **params + ) end def stub_object_storage_uploader( @@ -36,8 +38,10 @@ module StubObjectStorage return unless enabled - stub_object_storage(connection_params: uploader.object_store_credentials, - remote_directory: old_config.remote_directory) + stub_object_storage( + connection_params: uploader.object_store_credentials, + remote_directory: old_config.remote_directory + ) end def stub_object_storage(connection_params:, remote_directory:) @@ -55,63 +59,99 @@ module StubObjectStorage end def stub_artifacts_object_storage(uploader = JobArtifactUploader, **params) - stub_object_storage_uploader(config: Gitlab.config.artifacts.object_store, - uploader: uploader, - **params) + stub_object_storage_uploader( + config: Gitlab.config.artifacts.object_store, + uploader: uploader, + **params + ) end def stub_external_diffs_object_storage(uploader = described_class, **params) - stub_object_storage_uploader(config: Gitlab.config.external_diffs.object_store, - uploader: uploader, - **params) + stub_object_storage_uploader( + config: Gitlab.config.external_diffs.object_store, + uploader: uploader, + **params + ) end def stub_lfs_object_storage(**params) - stub_object_storage_uploader(config: 
Gitlab.config.lfs.object_store, - uploader: LfsObjectUploader, - **params) + stub_object_storage_uploader( + config: Gitlab.config.lfs.object_store, + uploader: LfsObjectUploader, + **params + ) end def stub_package_file_object_storage(**params) - stub_object_storage_uploader(config: Gitlab.config.packages.object_store, - uploader: ::Packages::PackageFileUploader, - **params) + stub_object_storage_uploader( + config: Gitlab.config.packages.object_store, + uploader: ::Packages::PackageFileUploader, + **params + ) end def stub_rpm_repository_file_object_storage(**params) - stub_object_storage_uploader(config: Gitlab.config.packages.object_store, - uploader: ::Packages::Rpm::RepositoryFileUploader, - **params) + stub_object_storage_uploader( + config: Gitlab.config.packages.object_store, + uploader: ::Packages::Rpm::RepositoryFileUploader, + **params + ) end def stub_composer_cache_object_storage(**params) - stub_object_storage_uploader(config: Gitlab.config.packages.object_store, - uploader: ::Packages::Composer::CacheUploader, - **params) + stub_object_storage_uploader( + config: Gitlab.config.packages.object_store, + uploader: ::Packages::Composer::CacheUploader, + **params + ) + end + + def debian_component_file_object_storage(**params) + stub_object_storage_uploader( + config: Gitlab.config.packages.object_store, + uploader: ::Packages::Debian::ComponentFileUploader, + **params + ) + end + + def debian_distribution_release_file_object_storage(**params) + stub_object_storage_uploader( + config: Gitlab.config.packages.object_store, + uploader: ::Packages::Debian::DistributionReleaseFileUploader, + **params + ) end def stub_uploads_object_storage(uploader = described_class, **params) - stub_object_storage_uploader(config: Gitlab.config.uploads.object_store, - uploader: uploader, - **params) + stub_object_storage_uploader( + config: Gitlab.config.uploads.object_store, + uploader: uploader, + **params + ) end def stub_ci_secure_file_object_storage(**params) - 
stub_object_storage_uploader(config: Gitlab.config.ci_secure_files.object_store, - uploader: Ci::SecureFileUploader, - **params) + stub_object_storage_uploader( + config: Gitlab.config.ci_secure_files.object_store, + uploader: Ci::SecureFileUploader, + **params + ) end def stub_terraform_state_object_storage(**params) - stub_object_storage_uploader(config: Gitlab.config.terraform_state.object_store, - uploader: Terraform::StateUploader, - **params) + stub_object_storage_uploader( + config: Gitlab.config.terraform_state.object_store, + uploader: Terraform::StateUploader, + **params + ) end def stub_pages_object_storage(uploader = described_class, **params) - stub_object_storage_uploader(config: Gitlab.config.pages.object_store, - uploader: uploader, - **params) + stub_object_storage_uploader( + config: Gitlab.config.pages.object_store, + uploader: uploader, + **params + ) end def stub_object_storage_multipart_init(endpoint, upload_id = "upload_id") diff --git a/spec/support/helpers/test_env.rb b/spec/support/helpers/test_env.rb index 727b8a6b880..a53e1e1002c 100644 --- a/spec/support/helpers/test_env.rb +++ b/spec/support/helpers/test_env.rb @@ -234,7 +234,7 @@ module TestEnv end def workhorse_dir - @workhorse_path ||= File.join('tmp', 'tests', 'gitlab-workhorse') + @workhorse_path ||= Rails.root.join('tmp', 'tests', 'gitlab-workhorse') end def with_workhorse(host, port, upstream, &blk) diff --git a/spec/support/helpers/test_reports_helper.rb b/spec/support/helpers/test_reports_helper.rb new file mode 100644 index 00000000000..4c5a1cf3c74 --- /dev/null +++ b/spec/support/helpers/test_reports_helper.rb @@ -0,0 +1,103 @@ +# frozen_string_literal: true + +module TestReportsHelper + def create_test_case_rspec_success(name = 'test_spec') + Gitlab::Ci::Reports::TestCase.new( + suite_name: 'rspec', + name: 'Test#sum when a is 1 and b is 3 returns summary', + classname: "spec.#{name}", + file: './spec/test_spec.rb', + execution_time: 1.11, + status: 
Gitlab::Ci::Reports::TestCase::STATUS_SUCCESS) + end + + def create_test_case_rspec_failed(name = 'test_spec', execution_time = 2.22) + Gitlab::Ci::Reports::TestCase.new( + suite_name: 'rspec', + name: 'Test#sum when a is 1 and b is 3 returns summary', + classname: "spec.#{name}", + file: './spec/test_spec.rb', + execution_time: execution_time, + system_output: sample_rspec_failed_message, + status: Gitlab::Ci::Reports::TestCase::STATUS_FAILED) + end + + def create_test_case_rspec_skipped(name = 'test_spec') + Gitlab::Ci::Reports::TestCase.new( + suite_name: 'rspec', + name: 'Test#sum when a is 3 and b is 3 returns summary', + classname: "spec.#{name}", + file: './spec/test_spec.rb', + execution_time: 3.33, + status: Gitlab::Ci::Reports::TestCase::STATUS_SKIPPED) + end + + def create_test_case_rspec_error(name = 'test_spec') + Gitlab::Ci::Reports::TestCase.new( + suite_name: 'rspec', + name: 'Test#sum when a is 4 and b is 4 returns summary', + classname: "spec.#{name}", + file: './spec/test_spec.rb', + execution_time: 4.44, + status: Gitlab::Ci::Reports::TestCase::STATUS_ERROR) + end + + def sample_rspec_failed_message + <<-TEST_REPORT_MESSAGE.strip_heredoc + Failure/Error: is_expected.to eq(3) + + expected: 3 + got: -1 + + (compared using ==) + ./spec/test_spec.rb:12:in `block (4 levels) in <top (required)>' + TEST_REPORT_MESSAGE + end + + def create_test_case_java_success(name = 'addTest') + Gitlab::Ci::Reports::TestCase.new( + suite_name: 'java', + name: name, + classname: 'CalculatorTest', + execution_time: 5.55, + status: Gitlab::Ci::Reports::TestCase::STATUS_SUCCESS) + end + + def create_test_case_java_failed(name = 'addTest') + Gitlab::Ci::Reports::TestCase.new( + suite_name: 'java', + name: name, + classname: 'CalculatorTest', + execution_time: 6.66, + system_output: sample_java_failed_message, + status: Gitlab::Ci::Reports::TestCase::STATUS_FAILED) + end + + def create_test_case_java_skipped(name = 'addTest') + Gitlab::Ci::Reports::TestCase.new( + 
suite_name: 'java', + name: name, + classname: 'CalculatorTest', + execution_time: 7.77, + status: Gitlab::Ci::Reports::TestCase::STATUS_SKIPPED) + end + + def create_test_case_java_error(name = 'addTest') + Gitlab::Ci::Reports::TestCase.new( + suite_name: 'java', + name: name, + classname: 'CalculatorTest', + execution_time: 8.88, + status: Gitlab::Ci::Reports::TestCase::STATUS_ERROR) + end + + def sample_java_failed_message + <<-TEST_REPORT_MESSAGE.strip_heredoc + junit.framework.AssertionFailedError: expected:<1> but was:<3> + at CalculatorTest.subtractExpression(Unknown Source) + at java.base/jdk.internal.database.NativeMethodAccessorImpl.invoke0(Native Method) + at java.base/jdk.internal.database.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) + at java.base/jdk.internal.database.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + TEST_REPORT_MESSAGE + end +end diff --git a/spec/support/helpers/trace_helpers.rb b/spec/support/helpers/trace_helpers.rb new file mode 100644 index 00000000000..9255715ff71 --- /dev/null +++ b/spec/support/helpers/trace_helpers.rb @@ -0,0 +1,29 @@ +# frozen_string_literal: true + +module TraceHelpers + def create_legacy_trace(build, content) + File.open(legacy_trace_path(build), 'wb') { |stream| stream.write(content) } + end + + def create_legacy_trace_in_db(build, content) + build.update_column(:trace, content) + end + + def legacy_trace_path(build) + legacy_trace_dir = File.join(Settings.gitlab_ci.builds_path, + build.created_at.utc.strftime("%Y_%m"), + build.project_id.to_s) + + FileUtils.mkdir_p(legacy_trace_dir) + + File.join(legacy_trace_dir, "#{build.id}.log") + end + + def archived_trace_path(job_artifact) + disk_hash = Digest::SHA2.hexdigest(job_artifact.project_id.to_s) + creation_date = job_artifact.created_at.utc.strftime('%Y_%m_%d') + + File.join(Gitlab.config.artifacts.path, disk_hash[0..1], disk_hash[2..3], disk_hash, + creation_date, job_artifact.job_id.to_s, 
job_artifact.id.to_s, 'job.log') + end +end diff --git a/spec/support/helpers/workhorse_helpers.rb b/spec/support/helpers/workhorse_helpers.rb index f894aff373c..f3b1d3af501 100644 --- a/spec/support/helpers/workhorse_helpers.rb +++ b/spec/support/helpers/workhorse_helpers.rb @@ -29,31 +29,48 @@ module WorkhorseHelpers # workhorse_form_with_file will transform file_key inside params as if it was disk accelerated by workhorse def workhorse_form_with_file(url, file_key:, params:, method: :post) - workhorse_request_with_file(method, url, - file_key: file_key, - params: params, - env: { 'CONTENT_TYPE' => 'multipart/form-data' }, - send_rewritten_field: true + workhorse_request_with_file( + method, url, + file_key: file_key, + params: params, + env: { 'CONTENT_TYPE' => 'multipart/form-data' }, + send_rewritten_field: true ) end # workhorse_finalize will transform file_key inside params as if it was the finalize call of an inline object storage upload. # note that based on the content of the params it can simulate a disc acceleration or an object storage upload def workhorse_finalize(url, file_key:, params:, method: :post, headers: {}, send_rewritten_field: false) - workhorse_finalize_with_multiple_files(url, method: method, file_keys: file_key, params: params, headers: headers, send_rewritten_field: send_rewritten_field) + workhorse_finalize_with_multiple_files( + url, + method: method, + file_keys: file_key, + params: params, + headers: headers, + send_rewritten_field: send_rewritten_field + ) end def workhorse_finalize_with_multiple_files(url, file_keys:, params:, method: :post, headers: {}, send_rewritten_field: false) - workhorse_request_with_multiple_files(method, url, - file_keys: file_keys, - params: params, - extra_headers: headers, - send_rewritten_field: send_rewritten_field + workhorse_request_with_multiple_files( + method, url, + file_keys: file_keys, + params: params, + extra_headers: headers, + send_rewritten_field: send_rewritten_field ) end def 
workhorse_request_with_file(method, url, file_key:, params:, send_rewritten_field:, env: {}, extra_headers: {}) - workhorse_request_with_multiple_files(method, url, file_keys: file_key, params: params, env: env, extra_headers: extra_headers, send_rewritten_field: send_rewritten_field) + workhorse_request_with_multiple_files( + method, + url, + file_keys: file_key, + params: params, + env: env, + extra_headers: extra_headers, + send_rewritten_field: send_rewritten_field + ) end def workhorse_request_with_multiple_files(method, url, file_keys:, params:, send_rewritten_field:, env: {}, extra_headers: {}) @@ -118,14 +135,15 @@ module WorkhorseHelpers end end - def fog_to_uploaded_file(file, sha256: nil) - filename = File.basename(file.key) + def fog_to_uploaded_file(file, filename: nil, sha256: nil, remote_id: nil) + filename ||= File.basename(file.key) - UploadedFile.new(nil, - filename: filename, - remote_id: filename, - size: file.content_length, - sha256: sha256 - ) + UploadedFile.new( + nil, + filename: filename, + remote_id: remote_id || filename, + size: file.content_length, + sha256: sha256 + ) end end diff --git a/spec/support/http_io/http_io_helpers.rb b/spec/support/http_io/http_io_helpers.rb deleted file mode 100644 index 0193db81fa9..00000000000 --- a/spec/support/http_io/http_io_helpers.rb +++ /dev/null @@ -1,51 +0,0 @@ -# frozen_string_literal: true - -module HttpIOHelpers - def stub_remote_url_206(url, file_path) - WebMock.stub_request(:get, url) - .to_return { |request| remote_url_response(file_path, request, 206) } - end - - def stub_remote_url_200(url, file_path) - WebMock.stub_request(:get, url) - .to_return { |request| remote_url_response(file_path, request, 200) } - end - - def stub_remote_url_500(url) - WebMock.stub_request(:get, url) - .to_return(status: [500, "Internal Server Error"]) - end - - def remote_url_response(file_path, request, response_status) - range = request.headers['Range'].match(/bytes=(\d+)-(\d+)/) - - body = 
File.read(file_path).force_encoding(Encoding::BINARY) - size = body.bytesize - - { - status: response_status, - headers: remote_url_response_headers(response_status, range[1].to_i, range[2].to_i, size), - body: body[range[1].to_i..range[2].to_i] - } - end - - def remote_url_response_headers(response_status, from, to, size) - { 'Content-Type' => 'text/plain' }.tap do |headers| - if response_status == 206 - headers.merge('Content-Range' => "bytes #{from}-#{to}/#{size}") - end - end - end - - def set_smaller_buffer_size_than(file_size) - blocks = (file_size / 128) - new_size = (blocks / 2) * 128 - stub_const("Gitlab::HttpIO::BUFFER_SIZE", new_size) - end - - def set_larger_buffer_size_than(file_size) - blocks = (file_size / 128) - new_size = (blocks * 2) * 128 - stub_const("Gitlab::HttpIO::BUFFER_SIZE", new_size) - end -end diff --git a/spec/support/import_export/common_util.rb b/spec/support/import_export/common_util.rb index f8f32fa59d1..53e943dc3bc 100644 --- a/spec/support/import_export/common_util.rb +++ b/spec/support/import_export/common_util.rb @@ -18,14 +18,8 @@ module ImportExport allow(Gitlab::ImportExport).to receive(:export_path) { export_path } end - def setup_reader(reader) - if reader == :ndjson_reader && Feature.enabled?(:project_import_ndjson) - allow_any_instance_of(Gitlab::ImportExport::Json::LegacyReader::File).to receive(:exist?).and_return(false) - allow_any_instance_of(Gitlab::ImportExport::Json::NdjsonReader).to receive(:exist?).and_return(true) - else - allow_any_instance_of(Gitlab::ImportExport::Json::LegacyReader::File).to receive(:exist?).and_return(true) - allow_any_instance_of(Gitlab::ImportExport::Json::NdjsonReader).to receive(:exist?).and_return(false) - end + def setup_reader + allow_any_instance_of(Gitlab::ImportExport::Json::NdjsonReader).to receive(:exist?).and_return(true) end def fixtures_path @@ -36,19 +30,12 @@ module ImportExport "tmp/tests/gitlab-test/import_export" end - def get_json(path, exportable_path, key, 
ndjson_enabled) - if ndjson_enabled - json = if key == :projects - consume_attributes(path, exportable_path) - else - consume_relations(path, exportable_path, key) - end + def get_json(path, exportable_path, key) + if key == :projects + consume_attributes(path, exportable_path) else - json = project_json(path) - json = json[key.to_s] unless key == :projects + consume_relations(path, exportable_path, key) end - - json end def restore_then_save_project(project, user, import_path:, export_path:) diff --git a/spec/support/matchers/have_plain_text_content.rb b/spec/support/matchers/have_plain_text_content.rb new file mode 100644 index 00000000000..94f65ce3771 --- /dev/null +++ b/spec/support/matchers/have_plain_text_content.rb @@ -0,0 +1,16 @@ +# frozen_string_literal: true + +# can be replaced with https://github.com/email-spec/email-spec/pull/196 in the future +RSpec::Matchers.define :have_plain_text_content do |expected_text| + match do |actual_email| + plain_text_body(actual_email).include? 
expected_text + end + + failure_message do |actual_email| + "Expected email\n#{plain_text_body(actual_email).indent(2)}\nto contain\n#{expected_text.indent(2)}" + end + + def plain_text_body(email) + email.text_part.body.to_s + end +end diff --git a/spec/support/matchers/markdown_matchers.rb b/spec/support/matchers/markdown_matchers.rb index a80c269f915..575ae572f25 100644 --- a/spec/support/matchers/markdown_matchers.rb +++ b/spec/support/matchers/markdown_matchers.rb @@ -202,7 +202,7 @@ module MarkdownMatchers match do |actual| expect(actual).to have_selector('[data-math-style="inline"]', count: 4) - expect(actual).to have_selector('[data-math-style="display"]', count: 4) + expect(actual).to have_selector('[data-math-style="display"]', count: 6) end end diff --git a/spec/support/migrations_helpers/cluster_helpers.rb b/spec/support/migrations_helpers/cluster_helpers.rb deleted file mode 100644 index 03104e22bcf..00000000000 --- a/spec/support/migrations_helpers/cluster_helpers.rb +++ /dev/null @@ -1,71 +0,0 @@ -# frozen_string_literal: true - -module MigrationHelpers - module ClusterHelpers - # Creates a list of cluster projects. 
- def create_cluster_project_list(quantity) - group = namespaces_table.create!(name: 'gitlab-org', path: 'gitlab-org') - - quantity.times do |id| - create_cluster_project(group, id) - end - end - - # Creates dependencies for a cluster project: - # - Group - # - Project - # - Cluster - # - Project - cluster relationship - # - GCP provider - # - Platform Kubernetes - def create_cluster_project(group, id) - project = projects_table.create!( - name: "project-#{id}", - path: "project-#{id}", - namespace_id: group.id - ) - - cluster = clusters_table.create!( - name: 'test-cluster', - cluster_type: 3, - provider_type: :gcp, - platform_type: :kubernetes - ) - - cluster_projects_table.create!(project_id: project.id, cluster_id: cluster.id) - - provider_gcp_table.create!( - gcp_project_id: "test-gcp-project-#{id}", - endpoint: '111.111.111.111', - cluster_id: cluster.id, - status: 3, - num_nodes: 1, - zone: 'us-central1-a' - ) - - platform_kubernetes_table.create!( - cluster_id: cluster.id, - api_url: 'https://kubernetes.example.com', - encrypted_token: 'a' * 40, - encrypted_token_iv: 'a' * 40 - ) - end - - # Creates a Kubernetes namespace for a list of clusters - def create_kubernetes_namespace(clusters) - clusters.each do |cluster| - cluster_project = cluster_projects_table.find_by(cluster_id: cluster.id) - project = projects_table.find(cluster_project.project_id) - namespace = "#{project.path}-#{project.id}" - - cluster_kubernetes_namespaces_table.create!( - cluster_project_id: cluster_project.id, - cluster_id: cluster.id, - project_id: cluster_project.project_id, - namespace: namespace, - service_account_name: "#{namespace}-service-account" - ) - end - end - end -end diff --git a/spec/support/migrations_helpers/namespaces_helper.rb b/spec/support/migrations_helpers/namespaces_helper.rb deleted file mode 100644 index c62ef6a4620..00000000000 --- a/spec/support/migrations_helpers/namespaces_helper.rb +++ /dev/null @@ -1,14 +0,0 @@ -# frozen_string_literal: true - -module 
MigrationHelpers - module NamespacesHelpers - def create_namespace(name, visibility, options = {}) - table(:namespaces).create!({ - name: name, - path: name, - type: 'Group', - visibility_level: visibility - }.merge(options)) - end - end -end diff --git a/spec/support/migrations_helpers/schema_version_finder.rb b/spec/support/migrations_helpers/schema_version_finder.rb deleted file mode 100644 index b677db7ea26..00000000000 --- a/spec/support/migrations_helpers/schema_version_finder.rb +++ /dev/null @@ -1,34 +0,0 @@ -# frozen_string_literal: true - -# Sometimes data migration specs require adding invalid test data in order to test -# the migration (e.g. adding a row with null foreign key). Certain db migrations that -# add constraints (e.g. NOT NULL constraint) prevent invalid records from being added -# and data migration from being tested. For this reason, SchemaVersionFinder can be used -# to find and use schema prior to specified one. -# -# @example -# RSpec.describe CleanupThings, :migration, schema: MigrationHelpers::SchemaVersionFinder.migration_prior(AddNotNullConstraint) do ... -# -# SchemaVersionFinder returns schema version prior to the one specified, which allows to then add -# invalid records to the database, which in return allows to properly test data migration. 
-module MigrationHelpers - class SchemaVersionFinder - def self.migrations_paths - ActiveRecord::Migrator.migrations_paths - end - - def self.migration_context - ActiveRecord::MigrationContext.new(migrations_paths, ActiveRecord::SchemaMigration) - end - - def self.migrations - migration_context.migrations - end - - def self.migration_prior(migration_klass) - migrations.each_cons(2) do |previous, migration| - break previous.version if migration.name == migration_klass.name - end - end - end -end diff --git a/spec/support/migrations_helpers/vulnerabilities_findings_helper.rb b/spec/support/migrations_helpers/vulnerabilities_findings_helper.rb deleted file mode 100644 index 9a5313c3fa4..00000000000 --- a/spec/support/migrations_helpers/vulnerabilities_findings_helper.rb +++ /dev/null @@ -1,118 +0,0 @@ -# frozen_string_literal: true - -module MigrationHelpers - module VulnerabilitiesFindingsHelper - def attributes_for_vulnerabilities_finding - uuid = SecureRandom.uuid - - { - project_fingerprint: SecureRandom.hex(20), - location_fingerprint: Digest::SHA1.hexdigest(SecureRandom.hex(10)), - uuid: uuid, - name: "Vulnerability Finding #{uuid}", - metadata_version: '1.3', - raw_metadata: raw_metadata - } - end - - def raw_metadata - { - "description" => "The cipher does not provide data integrity update 1", - "message" => "The cipher does not provide data integrity", - "cve" => "818bf5dacb291e15d9e6dc3c5ac32178:CIPHER", - "solution" => "GCM mode introduces an HMAC into the resulting encrypted data, providing integrity of the result.", - "location" => { - "file" => "maven/src/main/java/com/gitlab/security_products/tests/App.java", - "start_line" => 29, - "end_line" => 29, - "class" => "com.gitlab.security_products.tests.App", - "method" => "insecureCypher" - }, - "links" => [ - { - "name" => "Cipher does not check for integrity first?", - "url" => "https://crypto.stackexchange.com/questions/31428/pbewithmd5anddes-cipher-does-not-check-for-integrity-first" - } - ], - "assets" 
=> [ - { - "type" => "postman", - "name" => "Test Postman Collection", - "url" => "http://localhost/test.collection" - } - ], - "evidence" => { - "summary" => "Credit card detected", - "request" => { - "method" => "GET", - "url" => "http://goat:8080/WebGoat/logout", - "body" => nil, - "headers" => [ - { - "name" => "Accept", - "value" => "*/*" - } - ] - }, - "response" => { - "reason_phrase" => "OK", - "status_code" => 200, - "body" => nil, - "headers" => [ - { - "name" => "Content-Length", - "value" => "0" - } - ] - }, - "source" => { - "id" => "assert:Response Body Analysis", - "name" => "Response Body Analysis", - "url" => "htpp://hostname/documentation" - }, - "supporting_messages" => [ - { - "name" => "Origional", - "request" => { - "method" => "GET", - "url" => "http://goat:8080/WebGoat/logout", - "body" => "", - "headers" => [ - { - "name" => "Accept", - "value" => "*/*" - } - ] - } - }, - { - "name" => "Recorded", - "request" => { - "method" => "GET", - "url" => "http://goat:8080/WebGoat/logout", - "body" => "", - "headers" => [ - { - "name" => "Accept", - "value" => "*/*" - } - ] - }, - "response" => { - "reason_phrase" => "OK", - "status_code" => 200, - "body" => "", - "headers" => [ - { - "name" => "Content-Length", - "value" => "0" - } - ] - } - } - ] - } - } - end - end -end diff --git a/spec/support/models/ci/partitioning_testing/cascade_check.rb b/spec/support/models/ci/partitioning_testing/cascade_check.rb deleted file mode 100644 index bcfc9675476..00000000000 --- a/spec/support/models/ci/partitioning_testing/cascade_check.rb +++ /dev/null @@ -1,34 +0,0 @@ -# frozen_string_literal: true - -module PartitioningTesting - module CascadeCheck - extend ActiveSupport::Concern - - included do - after_create :check_partition_cascade_value - end - - def check_partition_cascade_value - raise 'Partition value not found' unless partition_scope_value - - return if partition_id == partition_scope_value - - raise "partition_id was expected to equal 
#{partition_scope_value} but it was #{partition_id}." - end - - class_methods do - # Allowing partition callback to be used with BulkInsertSafe - def _bulk_insert_callback_allowed?(name, args) - super || args.first == :after && args.second == :check_partition_cascade_value - end - end - end -end - -Ci::Partitionable::Testing::PARTITIONABLE_MODELS.each do |klass| - next if klass == 'Ci::Pipeline' - - model = klass.safe_constantize - - model.include(PartitioningTesting::CascadeCheck) -end diff --git a/spec/support/models/ci/partitioning_testing/partition_identifiers.rb b/spec/support/models/ci/partitioning_testing/partition_identifiers.rb deleted file mode 100644 index aa091095fb6..00000000000 --- a/spec/support/models/ci/partitioning_testing/partition_identifiers.rb +++ /dev/null @@ -1,13 +0,0 @@ -# frozen_string_literal: true - -module Ci - module PartitioningTesting - module PartitionIdentifiers - module_function - - def ci_testing_partition_id - 99999 - end - end - end -end diff --git a/spec/support/models/ci/partitioning_testing/rspec_hooks.rb b/spec/support/models/ci/partitioning_testing/rspec_hooks.rb deleted file mode 100644 index 39b15ba8721..00000000000 --- a/spec/support/models/ci/partitioning_testing/rspec_hooks.rb +++ /dev/null @@ -1,19 +0,0 @@ -# frozen_string_literal: true - -RSpec.configure do |config| - config.include Ci::PartitioningTesting::PartitionIdentifiers - - config.around(:each, :ci_partitionable) do |example| - Ci::PartitioningTesting::SchemaHelpers.with_routing_tables do - example.run - end - end - - config.before(:all) do - Ci::PartitioningTesting::SchemaHelpers.setup - end - - config.after(:all) do - Ci::PartitioningTesting::SchemaHelpers.teardown - end -end diff --git a/spec/support/models/ci/partitioning_testing/schema_helpers.rb b/spec/support/models/ci/partitioning_testing/schema_helpers.rb deleted file mode 100644 index 4107bbcb976..00000000000 --- a/spec/support/models/ci/partitioning_testing/schema_helpers.rb +++ /dev/null @@ 
-1,91 +0,0 @@ -# frozen_string_literal: true - -module Ci - module PartitioningTesting - module SchemaHelpers - DEFAULT_PARTITION = 100 - - module_function - - def with_routing_tables - # model.table_name = :routing_table - yield - # ensure - # model.table_name = :regular_table - end - - # We're dropping the default values here to ensure that the application code - # populates the `partition_id` value and it's not falling back on the - # database default one. We should be able to clean this up after - # partitioning the tables and substituting the routing table in the model: - # https://gitlab.com/gitlab-org/gitlab/-/issues/377822 - # - def setup(connection: Ci::ApplicationRecord.connection) - each_partitionable_table do |table_name| - change_column_default(table_name, from: DEFAULT_PARTITION, to: nil, connection: connection) - change_column_default("p_#{table_name}", from: DEFAULT_PARTITION, to: nil, connection: connection) - create_test_partition("p_#{table_name}", connection: connection) - end - end - - def teardown(connection: Ci::ApplicationRecord.connection) - each_partitionable_table do |table_name| - drop_test_partition("p_#{table_name}", connection: connection) - change_column_default(table_name, from: nil, to: DEFAULT_PARTITION, connection: connection) - change_column_default("p_#{table_name}", from: nil, to: DEFAULT_PARTITION, connection: connection) - end - end - - def each_partitionable_table - ::Ci::Partitionable::Testing::PARTITIONABLE_MODELS.each do |klass| - model = klass.safe_constantize - table_name = model.table_name.delete_prefix('p_') - - yield(table_name) - - model.reset_column_information if model.connected? 
- end - end - - def change_column_default(table_name, from:, to:, connection:) - return unless table_available?(table_name, connection: connection) - - connection.change_column_default(table_name, :partition_id, from: from, to: to) - end - - def create_test_partition(table_name, connection:) - return unless table_available?(table_name, connection: connection) - - drop_test_partition(table_name, connection: connection) - - connection.execute(<<~SQL.squish) - CREATE TABLE #{full_partition_name(table_name)} - PARTITION OF #{table_name} - FOR VALUES IN (#{PartitioningTesting::PartitionIdentifiers.ci_testing_partition_id}); - SQL - end - - def drop_test_partition(table_name, connection:) - return unless table_available?(table_name, connection: connection) - - connection.execute(<<~SQL.squish) - DROP TABLE IF EXISTS #{full_partition_name(table_name)}; - SQL - end - - def table_available?(table_name, connection:) - connection.table_exists?(table_name) && - connection.column_exists?(table_name, :partition_id) - end - - def full_partition_name(table_name) - [ - Gitlab::Database::DYNAMIC_PARTITIONS_SCHEMA, - '._test_gitlab_', - table_name.delete_prefix('p_'), - '_partition' - ].join('') - end - end - end -end diff --git a/spec/support/models/merge_request_without_merge_request_diff.rb b/spec/support/models/merge_request_without_merge_request_diff.rb deleted file mode 100644 index 5cdf1feb7a5..00000000000 --- a/spec/support/models/merge_request_without_merge_request_diff.rb +++ /dev/null @@ -1,7 +0,0 @@ -# frozen_string_literal: true - -class MergeRequestWithoutMergeRequestDiff < ::MergeRequest - self.inheritance_column = :_type_disabled - - def ensure_merge_request_diff; end -end diff --git a/spec/support/prometheus/additional_metrics_shared_examples.rb b/spec/support/prometheus/additional_metrics_shared_examples.rb deleted file mode 100644 index e589baf0909..00000000000 --- a/spec/support/prometheus/additional_metrics_shared_examples.rb +++ /dev/null @@ -1,159 +0,0 @@ -# 
frozen_string_literal: true - -RSpec.shared_examples 'additional metrics query' do - include Prometheus::MetricBuilders - - let(:metric_group_class) { Gitlab::Prometheus::MetricGroup } - let(:metric_class) { Gitlab::Prometheus::Metric } - - let(:metric_names) { %w{metric_a metric_b} } - - let(:query_range_result) do - [{ 'metric': {}, 'values': [[1488758662.506, '0.00002996364761904785'], [1488758722.506, '0.00003090239047619091']] }] - end - - let(:client) { double('prometheus_client') } - let(:query_result) { described_class.new(client).query(*query_params) } - let(:project) { create(:project, :repository) } - let(:environment) { create(:environment, slug: 'environment-slug', project: project) } - - before do - allow(client).to receive(:label_values).and_return(metric_names) - allow(metric_group_class).to receive(:common_metrics).and_return([simple_metric_group(metrics: [simple_metric])]) - end - - context 'metrics query context' do - subject! { described_class.new(client) } - - shared_examples 'query context containing environment slug and filter' do - it 'contains ci_environment_slug' do - expect(subject).to receive(:query_metrics).with(project, environment, hash_including(ci_environment_slug: environment.slug)) - - subject.query(*query_params) - end - - it 'contains environment filter' do - expect(subject).to receive(:query_metrics).with( - project, - environment, - hash_including( - environment_filter: "container_name!=\"POD\",environment=\"#{environment.slug}\"" - ) - ) - - subject.query(*query_params) - end - end - - describe 'project has Kubernetes service' do - context 'when user configured kubernetes from CI/CD > Clusters' do - let!(:cluster) { create(:cluster, :project, :provided_by_gcp, projects: [project]) } - let(:environment) { create(:environment, slug: 'environment-slug', project: project) } - let(:kube_namespace) { environment.deployment_namespace } - - it_behaves_like 'query context containing environment slug and filter' - - it 'query context 
contains kube_namespace' do - expect(subject).to receive(:query_metrics).with(project, environment, hash_including(kube_namespace: kube_namespace)) - - subject.query(*query_params) - end - end - end - - describe 'project without Kubernetes service' do - it_behaves_like 'query context containing environment slug and filter' - - it 'query context contains empty kube_namespace' do - expect(subject).to receive(:query_metrics).with(project, environment, hash_including(kube_namespace: '')) - - subject.query(*query_params) - end - end - end - - context 'with one group where two metrics is found' do - before do - allow(metric_group_class).to receive(:common_metrics).and_return([simple_metric_group]) - end - - context 'some queries return results' do - before do - allow(client).to receive(:query_range).with('query_range_a', any_args).and_return(query_range_result) - allow(client).to receive(:query_range).with('query_range_b', any_args).and_return(query_range_result) - allow(client).to receive(:query_range).with('query_range_empty', any_args).and_return([]) - end - - it 'return group data only for queries with results' do - expected = [ - { - group: 'name', - priority: 1, - metrics: [ - { - title: 'title', weight: 1, y_label: 'Values', queries: [ - { query_range: 'query_range_a', result: query_range_result }, - { query_range: 'query_range_b', label: 'label', unit: 'unit', result: query_range_result } - ] - } - ] - } - ] - - expect(query_result.to_json).to match_schema('prometheus/additional_metrics_query_result') - expect(query_result).to eq(expected) - end - end - end - - context 'with two groups with one metric each' do - let(:metrics) { [simple_metric(queries: [simple_query])] } - - before do - allow(metric_group_class).to receive(:common_metrics).and_return( - [ - simple_metric_group(name: 'group_a', metrics: [simple_metric(queries: [simple_query])]), - simple_metric_group(name: 'group_b', metrics: [simple_metric(title: 'title_b', queries: [simple_query('b')])]) - ]) - 
allow(client).to receive(:label_values).and_return(metric_names) - end - - context 'both queries return results' do - before do - allow(client).to receive(:query_range).with('query_range_a', any_args).and_return(query_range_result) - allow(client).to receive(:query_range).with('query_range_b', any_args).and_return(query_range_result) - end - - it 'return group data both queries' do - queries_with_result_a = { queries: [{ query_range: 'query_range_a', result: query_range_result }] } - queries_with_result_b = { queries: [{ query_range: 'query_range_b', result: query_range_result }] } - - expect(query_result.to_json).to match_schema('prometheus/additional_metrics_query_result') - - expect(query_result.count).to eq(2) - expect(query_result).to all(satisfy { |r| r[:metrics].count == 1 }) - - expect(query_result[0][:metrics].first).to include(queries_with_result_a) - expect(query_result[1][:metrics].first).to include(queries_with_result_b) - end - end - - context 'one query returns result' do - before do - allow(client).to receive(:query_range).with('query_range_a', any_args).and_return(query_range_result) - allow(client).to receive(:query_range).with('query_range_b', any_args).and_return([]) - end - - it 'return group data only for query with results' do - queries_with_result = { queries: [{ query_range: 'query_range_a', result: query_range_result }] } - - expect(query_result.to_json).to match_schema('prometheus/additional_metrics_query_result') - - expect(query_result.count).to eq(1) - expect(query_result).to all(satisfy { |r| r[:metrics].count == 1 }) - - expect(query_result.first[:metrics].first).to include(queries_with_result) - end - end - end -end diff --git a/spec/support/prometheus/metric_builders.rb b/spec/support/prometheus/metric_builders.rb deleted file mode 100644 index 512e32a44d0..00000000000 --- a/spec/support/prometheus/metric_builders.rb +++ /dev/null @@ -1,29 +0,0 @@ -# frozen_string_literal: true - -module Prometheus - module MetricBuilders - def 
simple_query(suffix = 'a', **opts) - { query_range: "query_range_#{suffix}" }.merge(opts) - end - - def simple_queries - [simple_query, simple_query('b', label: 'label', unit: 'unit')] - end - - def simple_metric(title: 'title', required_metrics: [], queries: [simple_query]) - Gitlab::Prometheus::Metric.new(title: title, required_metrics: required_metrics, weight: 1, queries: queries) - end - - def simple_metrics(added_metric_name: 'metric_a') - [ - simple_metric(required_metrics: %W(#{added_metric_name} metric_b), queries: simple_queries), - simple_metric(required_metrics: [added_metric_name], queries: [simple_query('empty')]), - simple_metric(required_metrics: %w{metric_c}) - ] - end - - def simple_metric_group(name: 'name', metrics: simple_metrics) - Gitlab::Prometheus::MetricGroup.new(name: name, priority: 1, metrics: metrics) - end - end -end diff --git a/spec/support/protected_tags/access_control_ce_shared_examples.rb b/spec/support/protected_tags/access_control_ce_shared_examples.rb deleted file mode 100644 index 6aa9647bcec..00000000000 --- a/spec/support/protected_tags/access_control_ce_shared_examples.rb +++ /dev/null @@ -1,32 +0,0 @@ -# frozen_string_literal: true - -RSpec.shared_examples "protected tags > access control > CE" do - ProtectedRefAccess::HUMAN_ACCESS_LEVELS.each do |(access_type_id, access_type_name)| - it "allows creating protected tags that #{access_type_name} can create" do - visit project_protected_tags_path(project) - - set_protected_tag_name('master') - set_allowed_to('create', access_type_name) - click_on_protect - - expect(ProtectedTag.count).to eq(1) - expect(ProtectedTag.last.create_access_levels.map(&:access_level)).to eq([access_type_id]) - end - - it "allows updating protected tags so that #{access_type_name} can create them" do - visit project_protected_tags_path(project) - - set_protected_tag_name('master') - set_allowed_to('create', 'No one') - click_on_protect - - expect(ProtectedTag.count).to eq(1) - - 
set_allowed_to('create', access_type_name, form: '.protected-tags-list') - - wait_for_requests - - expect(ProtectedTag.last.create_access_levels.map(&:access_level)).to include(access_type_id) - end - end -end diff --git a/spec/support/redis/redis_helpers.rb b/spec/support/redis/redis_helpers.rb deleted file mode 100644 index 2c5ceb2f09e..00000000000 --- a/spec/support/redis/redis_helpers.rb +++ /dev/null @@ -1,9 +0,0 @@ -# frozen_string_literal: true - -module RedisHelpers - Gitlab::Redis::ALL_CLASSES.each do |instance_class| - define_method("redis_#{instance_class.store_name.underscore}_cleanup!") do - instance_class.with(&:flushdb) - end - end -end diff --git a/spec/support/redis/redis_new_instance_shared_examples.rb b/spec/support/redis/redis_new_instance_shared_examples.rb deleted file mode 100644 index 435d342fcca..00000000000 --- a/spec/support/redis/redis_new_instance_shared_examples.rb +++ /dev/null @@ -1,111 +0,0 @@ -# frozen_string_literal: true - -require 'spec_helper' - -RSpec.shared_examples "redis_new_instance_shared_examples" do |name, fallback_class| - include TmpdirHelper - - let(:instance_specific_config_file) { "config/redis.#{name}.yml" } - let(:environment_config_file_name) { "GITLAB_REDIS_#{name.upcase}_CONFIG_FILE" } - let(:fallback_config_file) { nil } - let(:rails_root) { mktmpdir } - - before do - allow(fallback_class).to receive(:config_file_name).and_return(fallback_config_file) - end - - it_behaves_like "redis_shared_examples" - - describe '.config_file_name' do - subject { described_class.config_file_name } - - before do - # Undo top-level stub of config_file_name because we are testing that method now. 
- allow(described_class).to receive(:config_file_name).and_call_original - - allow(described_class).to receive(:rails_root).and_return(rails_root) - FileUtils.mkdir_p(File.join(rails_root, 'config')) - end - - context 'and there is a global env override' do - before do - stub_env('GITLAB_REDIS_CONFIG_FILE', 'global override') - end - - it { expect(subject).to eq('global override') } - - context "and #{fallback_class.name.demodulize} has a different config file" do - let(:fallback_config_file) { 'fallback config file' } - - it { expect(subject).to eq('fallback config file') } - end - end - end - - describe '#fetch_config' do - subject { described_class.new('test').send(:fetch_config) } - - before do - FileUtils.mkdir_p(File.join(rails_root, 'config')) - - allow(described_class).to receive(:rails_root).and_return(rails_root) - end - - context 'when redis.yml exists' do - before do - allow(described_class).to receive(:config_file_name).and_call_original - allow(described_class).to receive(:redis_yml_path).and_call_original - end - - context 'when the fallback has a redis.yml entry' do - before do - File.write(File.join(rails_root, 'config/redis.yml'), { - 'test' => { - described_class.config_fallback.store_name.underscore => { 'fallback redis.yml' => 123 } - } - }.to_json) - end - - it { expect(subject).to eq({ 'fallback redis.yml' => 123 }) } - - context 'and an instance config file exists' do - before do - File.write(File.join(rails_root, instance_specific_config_file), { - 'test' => { 'instance specific file' => 456 } - }.to_json) - end - - it { expect(subject).to eq({ 'instance specific file' => 456 }) } - - context 'and the instance has a redis.yml entry' do - before do - File.write(File.join(rails_root, 'config/redis.yml'), { - 'test' => { name => { 'instance redis.yml' => 789 } } - }.to_json) - end - - it { expect(subject).to eq({ 'instance redis.yml' => 789 }) } - end - end - end - end - - context 'when no redis config file exsits' do - it 'returns nil' do - 
expect(subject).to eq(nil) - end - - context 'when resque.yml exists' do - before do - File.write(File.join(rails_root, 'config/resque.yml'), { - 'test' => { 'foobar' => 123 } - }.to_json) - end - - it 'returns the config from resque.yml' do - expect(subject).to eq({ 'foobar' => 123 }) - end - end - end - end -end diff --git a/spec/support/redis/redis_shared_examples.rb b/spec/support/redis/redis_shared_examples.rb deleted file mode 100644 index 8c195a9dbeb..00000000000 --- a/spec/support/redis/redis_shared_examples.rb +++ /dev/null @@ -1,459 +0,0 @@ -# frozen_string_literal: true - -RSpec.shared_examples "redis_shared_examples" do - include StubENV - include TmpdirHelper - - let(:test_redis_url) { "redis://redishost:#{redis_port}" } - let(:test_cluster_config) { { cluster: [{ host: "redis://redishost", port: redis_port }] } } - let(:config_file_name) { instance_specific_config_file } - let(:config_old_format_socket) { "spec/fixtures/config/redis_old_format_socket.yml" } - let(:config_new_format_socket) { "spec/fixtures/config/redis_new_format_socket.yml" } - let(:old_socket_path) { "/path/to/old/redis.sock" } - let(:new_socket_path) { "/path/to/redis.sock" } - let(:config_old_format_host) { "spec/fixtures/config/redis_old_format_host.yml" } - let(:config_new_format_host) { "spec/fixtures/config/redis_new_format_host.yml" } - let(:config_cluster_format_host) { "spec/fixtures/config/redis_cluster_format_host.yml" } - let(:redis_port) { 6379 } - let(:redis_database) { 99 } - let(:sentinel_port) { 26379 } - let(:config_with_environment_variable_inside) { "spec/fixtures/config/redis_config_with_env.yml" } - let(:config_env_variable_url) { "TEST_GITLAB_REDIS_URL" } - let(:rails_root) { mktmpdir } - - before do - allow(described_class).to receive(:config_file_name).and_return(Rails.root.join(config_file_name).to_s) - allow(described_class).to receive(:redis_yml_path).and_return('/dev/null') - end - - describe '.config_file_name' do - subject { 
described_class.config_file_name } - - before do - # Undo top-level stub of config_file_name because we are testing that method now. - allow(described_class).to receive(:config_file_name).and_call_original - - allow(described_class).to receive(:rails_root).and_return(rails_root) - FileUtils.mkdir_p(File.join(rails_root, 'config')) - end - - context 'when there is no config file anywhere' do - it { expect(subject).to be_nil } - - context 'and there is a global env override' do - before do - stub_env('GITLAB_REDIS_CONFIG_FILE', 'global override') - end - - it { expect(subject).to eq('global override') } - - context 'and there is an instance specific config file' do - before do - FileUtils.touch(File.join(rails_root, instance_specific_config_file)) - end - - it { expect(subject).to eq("#{rails_root}/#{instance_specific_config_file}") } - - it 'returns a path that exists' do - expect(File.file?(subject)).to eq(true) - end - - context 'and there is a specific env override' do - before do - stub_env(environment_config_file_name, 'instance specific override') - end - - it { expect(subject).to eq('instance specific override') } - end - end - end - end - end - - describe '.store' do - let(:rails_env) { 'development' } - - subject { described_class.new(rails_env).store } - - shared_examples 'redis store' do - let(:redis_store) { ::Redis::Store } - let(:redis_store_to_s) { "Redis Client connected to #{host} against DB #{redis_database}" } - - it 'instantiates Redis::Store' do - is_expected.to be_a(redis_store) - - expect(subject.to_s).to eq(redis_store_to_s) - end - - context 'with the namespace' do - let(:namespace) { 'namespace_name' } - let(:redis_store_to_s) { "Redis Client connected to #{host} against DB #{redis_database} with namespace #{namespace}" } - - subject { described_class.new(rails_env).store(namespace: namespace) } - - it "uses specified namespace" do - expect(subject.to_s).to eq(redis_store_to_s) - end - end - end - - context 'with old format' do - 
it_behaves_like 'redis store' do - let(:config_file_name) { config_old_format_host } - let(:host) { "localhost:#{redis_port}" } - end - end - - context 'with new format' do - it_behaves_like 'redis store' do - let(:config_file_name) { config_new_format_host } - let(:host) { "development-host:#{redis_port}" } - end - end - end - - describe '.params' do - subject { described_class.new(rails_env).params } - - let(:rails_env) { 'development' } - let(:config_file_name) { config_old_format_socket } - - it 'withstands mutation' do - params1 = described_class.params - params2 = described_class.params - params1[:foo] = :bar - - expect(params2).not_to have_key(:foo) - end - - context 'when url contains unix socket reference' do - context 'with old format' do - let(:config_file_name) { config_old_format_socket } - - it 'returns path key instead' do - is_expected.to include(path: old_socket_path) - is_expected.not_to have_key(:url) - end - end - - context 'with new format' do - let(:config_file_name) { config_new_format_socket } - - it 'returns path key instead' do - is_expected.to include(path: new_socket_path) - is_expected.not_to have_key(:url) - end - end - end - - context 'when url is host based' do - context 'with old format' do - let(:config_file_name) { config_old_format_host } - - it 'returns hash with host, port, db, and password' do - is_expected.to include(host: 'localhost', password: 'mypassword', port: redis_port, db: redis_database) - is_expected.not_to have_key(:url) - end - end - - context 'with new format' do - let(:config_file_name) { config_new_format_host } - - where(:rails_env, :host) do - [ - %w[development development-host], - %w[test test-host], - %w[production production-host] - ] - end - - with_them do - it 'returns hash with host, port, db, and password' do - is_expected.to include(host: host, password: 'mynewpassword', port: redis_port, db: redis_database) - is_expected.not_to have_key(:url) - end - end - end - - context 'with redis cluster format' 
do - let(:config_file_name) { config_cluster_format_host } - - where(:rails_env, :host) do - [ - %w[development development-master], - %w[test test-master], - %w[production production-master] - ] - end - - with_them do - it 'returns hash with cluster and password' do - is_expected.to include(password: 'myclusterpassword', - cluster: [ - { host: "#{host}1", port: redis_port }, - { host: "#{host}2", port: redis_port } - ] - ) - is_expected.not_to have_key(:url) - end - end - end - end - end - - describe '.url' do - let(:config_file_name) { config_old_format_socket } - - it 'withstands mutation' do - url1 = described_class.url - url2 = described_class.url - url1 << 'foobar' unless url1.frozen? - - expect(url2).not_to end_with('foobar') - end - - context 'when yml file with env variable' do - let(:config_file_name) { config_with_environment_variable_inside } - - before do - stub_env(config_env_variable_url, test_redis_url) - end - - it 'reads redis url from env variable' do - expect(described_class.url).to eq test_redis_url - end - end - end - - describe '.version' do - it 'returns a version' do - expect(described_class.version).to be_present - end - end - - describe '.with' do - let(:config_file_name) { config_old_format_socket } - - before do - clear_pool - end - after do - clear_pool - end - - context 'when running on single-threaded runtime' do - before do - allow(Gitlab::Runtime).to receive(:multi_threaded?).and_return(false) - end - - it 'instantiates a connection pool with size 5' do - expect(ConnectionPool).to receive(:new).with(size: 5).and_call_original - - described_class.with { |_redis_shared_example| true } - end - end - - context 'when running on multi-threaded runtime' do - before do - allow(Gitlab::Runtime).to receive(:multi_threaded?).and_return(true) - allow(Gitlab::Runtime).to receive(:max_threads).and_return(18) - end - - it 'instantiates a connection pool with a size based on the concurrency of the worker' do - expect(ConnectionPool).to 
receive(:new).with(size: 18 + 5).and_call_original - - described_class.with { |_redis_shared_example| true } - end - end - - context 'when there is no config at all' do - before do - # Undo top-level stub of config_file_name because we are testing that method now. - allow(described_class).to receive(:config_file_name).and_call_original - - allow(described_class).to receive(:rails_root).and_return(rails_root) - end - - it 'can run an empty block' do - expect { described_class.with { nil } }.not_to raise_error - end - end - end - - describe '#db' do - let(:rails_env) { 'development' } - - subject { described_class.new(rails_env).db } - - context 'with old format' do - let(:config_file_name) { config_old_format_host } - - it 'returns the correct db' do - expect(subject).to eq(redis_database) - end - end - - context 'with new format' do - let(:config_file_name) { config_new_format_host } - - it 'returns the correct db' do - expect(subject).to eq(redis_database) - end - end - - context 'with cluster-mode' do - let(:config_file_name) { config_cluster_format_host } - - it 'returns the correct db' do - expect(subject).to eq(0) - end - end - end - - describe '#sentinels' do - subject { described_class.new(rails_env).sentinels } - - let(:rails_env) { 'development' } - - context 'when sentinels are defined' do - let(:config_file_name) { config_new_format_host } - - where(:rails_env, :hosts) do - [ - ['development', %w[development-replica1 development-replica2]], - ['test', %w[test-replica1 test-replica2]], - ['production', %w[production-replica1 production-replica2]] - ] - end - - with_them do - it 'returns an array of hashes with host and port keys' do - is_expected.to include(host: hosts[0], port: sentinel_port) - is_expected.to include(host: hosts[1], port: sentinel_port) - end - end - end - - context 'when sentinels are not defined' do - let(:config_file_name) { config_old_format_host } - - it 'returns nil' do - is_expected.to be_nil - end - end - - context 'when cluster 
is defined' do - let(:config_file_name) { config_cluster_format_host } - - it 'returns nil' do - is_expected.to be_nil - end - end - end - - describe '#sentinels?' do - subject { described_class.new(Rails.env).sentinels? } - - context 'when sentinels are defined' do - let(:config_file_name) { config_new_format_host } - - it 'returns true' do - is_expected.to be_truthy - end - end - - context 'when sentinels are not defined' do - let(:config_file_name) { config_old_format_host } - - it { expect(subject).to eq(nil) } - end - - context 'when cluster is defined' do - let(:config_file_name) { config_cluster_format_host } - - it 'returns false' do - is_expected.to be_falsey - end - end - end - - describe '#raw_config_hash' do - it 'returns old-style single url config in a hash' do - expect(subject).to receive(:fetch_config) { test_redis_url } - expect(subject.send(:raw_config_hash)).to eq(url: test_redis_url) - end - - it 'returns cluster config without url key in a hash' do - expect(subject).to receive(:fetch_config) { test_cluster_config } - expect(subject.send(:raw_config_hash)).to eq(test_cluster_config) - end - end - - describe '#fetch_config' do - before do - FileUtils.mkdir_p(File.join(rails_root, 'config')) - # Undo top-level stub of config_file_name because we are testing that method now. 
- allow(described_class).to receive(:config_file_name).and_call_original - allow(described_class).to receive(:rails_root).and_return(rails_root) - end - - it 'raises an exception when the config file contains invalid yaml' do - Tempfile.open('bad.yml') do |file| - file.write('{"not":"yaml"') - file.flush - allow(described_class).to receive(:config_file_name) { file.path } - - expect { subject.send(:fetch_config) }.to raise_error(Psych::SyntaxError) - end - end - - it 'has a value for the legacy default URL' do - allow(subject).to receive(:fetch_config) { nil } - - expect(subject.send(:raw_config_hash)).to include(url: a_string_matching(%r{\Aredis://localhost:638[012]\Z})) - end - - context 'when redis.yml exists' do - subject { described_class.new('test').send(:fetch_config) } - - before do - allow(described_class).to receive(:redis_yml_path).and_call_original - end - - it 'uses config/redis.yml' do - File.write(File.join(rails_root, 'config/redis.yml'), { - 'test' => { described_class.store_name.underscore => { 'foobar' => 123 } } - }.to_json) - - expect(subject).to eq({ 'foobar' => 123 }) - end - end - - context 'when no config file exsits' do - subject { described_class.new('test').send(:fetch_config) } - - it 'returns nil' do - expect(subject).to eq(nil) - end - - context 'but resque.yml exists' do - before do - FileUtils.mkdir_p(File.join(rails_root, 'config')) - File.write(File.join(rails_root, 'config/resque.yml'), { - 'test' => { 'foobar' => 123 } - }.to_json) - end - - it 'returns the config from resque.yml' do - expect(subject).to eq({ 'foobar' => 123 }) - end - end - end - end - - def clear_pool - described_class.remove_instance_variable(:@pool) - rescue NameError - # raised if @pool was not set; ignore - end -end diff --git a/spec/support/rspec_order_todo.yml b/spec/support/rspec_order_todo.yml index 74b80c4864c..0426eefebbd 100644 --- a/spec/support/rspec_order_todo.yml +++ b/spec/support/rspec_order_todo.yml @@ -123,7 +123,6 @@ - 
'./ee/spec/controllers/projects/branches_controller_spec.rb' - './ee/spec/controllers/projects/clusters_controller_spec.rb' - './ee/spec/controllers/projects_controller_spec.rb' -- './ee/spec/controllers/projects/dependencies_controller_spec.rb' - './ee/spec/controllers/projects/deploy_keys_controller_spec.rb' - './ee/spec/controllers/projects/environments_controller_spec.rb' - './ee/spec/controllers/projects/feature_flag_issues_controller_spec.rb' @@ -285,7 +284,6 @@ - './ee/spec/features/groups/audit_events_spec.rb' - './ee/spec/features/groups/billing_spec.rb' - './ee/spec/features/groups/contribution_analytics_spec.rb' -- './ee/spec/features/groups/feature_discovery_moments_spec.rb' - './ee/spec/features/groups/group_overview_spec.rb' - './ee/spec/features/groups/group_page_with_external_authorization_service_spec.rb' - './ee/spec/features/groups/group_projects_spec.rb' @@ -496,7 +494,6 @@ - './ee/spec/features/trial_registrations/company_information_spec.rb' - './ee/spec/features/trial_registrations/signin_spec.rb' - './ee/spec/features/trial_registrations/signup_spec.rb' -- './ee/spec/features/trials/select_namespace_spec.rb' - './ee/spec/features/trials/show_trial_banner_spec.rb' - './ee/spec/features/users/arkose_labs_csp_spec.rb' - './ee/spec/features/users/login_spec.rb' @@ -982,14 +979,12 @@ - './ee/spec/helpers/ee/subscribable_banner_helper_spec.rb' - './ee/spec/helpers/ee/system_note_helper_spec.rb' - './ee/spec/helpers/ee/todos_helper_spec.rb' -- './ee/spec/helpers/ee/trial_helper_spec.rb' - './ee/spec/helpers/ee/trial_registration_helper_spec.rb' - './ee/spec/helpers/ee/users/callouts_helper_spec.rb' - './ee/spec/helpers/ee/version_check_helper_spec.rb' - './ee/spec/helpers/ee/wiki_helper_spec.rb' - './ee/spec/helpers/epics_helper_spec.rb' - './ee/spec/helpers/gitlab_subscriptions/upcoming_reconciliation_helper_spec.rb' -- './ee/spec/helpers/groups/feature_discovery_moments_helper_spec.rb' - './ee/spec/helpers/groups/ldap_sync_helper_spec.rb' - 
'./ee/spec/helpers/groups/security_features_helper_spec.rb' - './ee/spec/helpers/groups/sso_helper_spec.rb' @@ -1871,7 +1866,6 @@ - './ee/spec/models/merge_requests/external_status_check_spec.rb' - './ee/spec/models/merge_request_spec.rb' - './ee/spec/models/merge_requests/status_check_response_spec.rb' -- './ee/spec/models/merge_train_spec.rb' - './ee/spec/models/milestone_release_spec.rb' - './ee/spec/models/milestone_spec.rb' - './ee/spec/models/namespace_limit_spec.rb' @@ -2327,7 +2321,6 @@ - './ee/spec/requests/groups_controller_spec.rb' - './ee/spec/requests/groups/epics/epic_links_controller_spec.rb' - './ee/spec/requests/groups/epics/related_epic_links_controller_spec.rb' -- './ee/spec/requests/groups/feature_discovery_moments_spec.rb' - './ee/spec/requests/groups/group_members_controller_spec.rb' - './ee/spec/requests/groups/hook_logs_controller_spec.rb' - './ee/spec/requests/groups/labels_spec.rb' @@ -2812,7 +2805,6 @@ - './ee/spec/services/gitlab_subscriptions/create_hand_raise_lead_service_spec.rb' - './ee/spec/services/gitlab_subscriptions/create_service_spec.rb' - './ee/spec/services/gitlab_subscriptions/create_trial_or_lead_service_spec.rb' -- './ee/spec/services/gitlab_subscriptions/extend_reactivate_trial_service_spec.rb' - './ee/spec/services/gitlab_subscriptions/fetch_purchase_eligible_namespaces_service_spec.rb' - './ee/spec/services/gitlab_subscriptions/fetch_subscription_plans_service_spec.rb' - './ee/spec/services/gitlab_subscriptions/notify_seats_exceeded_service_spec.rb' @@ -3103,7 +3095,6 @@ - './ee/spec/views/groups/_compliance_frameworks.html.haml_spec.rb' - './ee/spec/views/groups/compliance_frameworks/new.html.haml_spec.rb' - './ee/spec/views/groups/edit.html.haml_spec.rb' -- './ee/spec/views/groups/feature_discovery_moments/advanced_features_dashboard.html.haml_spec.rb' - './ee/spec/views/groups/hook_logs/show.html.haml_spec.rb' - './ee/spec/views/groups/hooks/edit.html.haml_spec.rb' - 
'./ee/spec/views/groups/security/discover/show.html.haml_spec.rb' @@ -3113,7 +3104,6 @@ - './ee/spec/views/layouts/checkout.html.haml_spec.rb' - './ee/spec/views/layouts/header/_current_user_dropdown.html.haml_spec.rb' - './ee/spec/views/layouts/header/_ee_subscribable_banner.html.haml_spec.rb' -- './ee/spec/views/layouts/header/help_dropdown/_cross_stage_fdm.html.haml_spec.rb' - './ee/spec/views/layouts/header/_read_only_banner.html.haml_spec.rb' - './ee/spec/views/layouts/nav/sidebar/_admin.html.haml_spec.rb' - './ee/spec/views/layouts/nav/sidebar/_group.html.haml_spec.rb' @@ -3310,7 +3300,6 @@ - './ee/spec/workers/sync_seat_link_worker_spec.rb' - './ee/spec/workers/todos_destroyer/confidential_epic_worker_spec.rb' - './ee/spec/workers/update_all_mirrors_worker_spec.rb' -- './ee/spec/workers/update_max_seats_used_for_gitlab_com_subscriptions_worker_spec.rb' - './ee/spec/workers/vulnerabilities/historical_statistics/deletion_worker_spec.rb' - './ee/spec/workers/vulnerabilities/statistics/adjustment_worker_spec.rb' - './ee/spec/workers/vulnerabilities/statistics/schedule_worker_spec.rb' @@ -3319,7 +3308,6 @@ - './spec/bin/feature_flag_spec.rb' - './spec/bin/sidekiq_cluster_spec.rb' - './spec/channels/application_cable/connection_spec.rb' -- './spec/channels/awareness_channel_spec.rb' - './spec/commands/metrics_server/metrics_server_spec.rb' - './spec/commands/sidekiq_cluster/cli_spec.rb' - './spec/components/diffs/overflow_warning_component_spec.rb' @@ -3614,8 +3602,6 @@ - './spec/experiments/force_company_trial_experiment_spec.rb' - './spec/experiments/in_product_guidance_environments_webide_experiment_spec.rb' - './spec/experiments/ios_specific_templates_experiment_spec.rb' -- './spec/experiments/require_verification_for_namespace_creation_experiment_spec.rb' -- './spec/experiments/security_reports_mr_widget_prompt_experiment_spec.rb' - './spec/features/abuse_report_spec.rb' - './spec/features/action_cable_logging_spec.rb' - 
'./spec/features/admin/admin_abuse_reports_spec.rb' @@ -5737,7 +5723,6 @@ - './spec/lib/gitlab/background_migration/migrate_personal_namespace_project_maintainer_to_owner_spec.rb' - './spec/lib/gitlab/background_migration/migrate_project_taggings_context_from_tags_to_topics_spec.rb' - './spec/lib/gitlab/background_migration/migrate_shimo_confluence_integration_category_spec.rb' -- './spec/lib/gitlab/background_migration/migrate_u2f_webauthn_spec.rb' - './spec/lib/gitlab/background_migration/move_container_registry_enabled_to_project_feature_spec.rb' - './spec/lib/gitlab/background_migration/nullify_orphan_runner_id_on_ci_builds_spec.rb' - './spec/lib/gitlab/background_migration/populate_container_repository_migration_plan_spec.rb' @@ -6443,7 +6428,6 @@ - './spec/lib/gitlab/error_tracking/processor/sanitize_error_message_processor_spec.rb' - './spec/lib/gitlab/error_tracking/processor/sanitizer_processor_spec.rb' - './spec/lib/gitlab/error_tracking/processor/sidekiq_processor_spec.rb' -- './spec/lib/gitlab/error_tracking_spec.rb' - './spec/lib/gitlab/error_tracking/stack_trace_highlight_decorator_spec.rb' - './spec/lib/gitlab/etag_caching/middleware_spec.rb' - './spec/lib/gitlab/etag_caching/router/graphql_spec.rb' @@ -7219,7 +7203,6 @@ - './spec/lib/gitlab/slash_commands/presenters/issue_show_spec.rb' - './spec/lib/gitlab/slash_commands/presenters/run_spec.rb' - './spec/lib/gitlab/slash_commands/run_spec.rb' -- './spec/lib/gitlab/slug/environment_spec.rb' - './spec/lib/gitlab/snippet_search_results_spec.rb' - './spec/lib/gitlab/sourcegraph_spec.rb' - './spec/lib/gitlab/spamcheck/client_spec.rb' @@ -7724,7 +7707,6 @@ - './spec/models/audit_event_spec.rb' - './spec/models/authentication_event_spec.rb' - './spec/models/award_emoji_spec.rb' -- './spec/models/awareness_session_spec.rb' - './spec/models/aws/role_spec.rb' - './spec/models/badges/group_badge_spec.rb' - './spec/models/badge_spec.rb' @@ -7856,7 +7838,6 @@ - './spec/models/concerns/atomic_internal_id_spec.rb' 
- './spec/models/concerns/avatarable_spec.rb' - './spec/models/concerns/awardable_spec.rb' -- './spec/models/concerns/awareness_spec.rb' - './spec/models/concerns/batch_destroy_dependent_associations_spec.rb' - './spec/models/concerns/batch_nullify_dependent_associations_spec.rb' - './spec/models/concerns/blob_language_from_git_attributes_spec.rb' @@ -8232,7 +8213,6 @@ - './spec/models/preloaders/merge_request_diff_preloader_spec.rb' - './spec/models/preloaders/user_max_access_level_in_groups_preloader_spec.rb' - './spec/models/preloaders/user_max_access_level_in_projects_preloader_spec.rb' -- './spec/models/preloaders/users_max_access_level_in_projects_preloader_spec.rb' - './spec/models/product_analytics_event_spec.rb' - './spec/models/programming_language_spec.rb' - './spec/models/project_authorization_spec.rb' @@ -8559,7 +8539,6 @@ - './spec/requests/api/environments_spec.rb' - './spec/requests/api/error_tracking/client_keys_spec.rb' - './spec/requests/api/error_tracking/collector_spec.rb' -- './spec/requests/api/error_tracking/project_settings_spec.rb' - './spec/requests/api/events_spec.rb' - './spec/requests/api/feature_flags_spec.rb' - './spec/requests/api/feature_flags_user_lists_spec.rb' @@ -10049,7 +10028,6 @@ - './spec/tasks/cache/clear/redis_spec.rb' - './spec/tasks/config_lint_spec.rb' - './spec/tasks/dev_rake_spec.rb' -- './spec/tasks/gettext_rake_spec.rb' - './spec/tasks/gitlab/artifacts/check_rake_spec.rb' - './spec/tasks/gitlab/artifacts/migrate_rake_spec.rb' - './spec/tasks/gitlab/background_migrations_rake_spec.rb' diff --git a/spec/support/services/clusters/create_service_shared.rb b/spec/support/services/clusters/create_service_shared.rb deleted file mode 100644 index 80fa7c58515..00000000000 --- a/spec/support/services/clusters/create_service_shared.rb +++ /dev/null @@ -1,64 +0,0 @@ -# frozen_string_literal: true - -RSpec.shared_context 'valid cluster create params' do - let(:clusterable) { Clusters::Instance.new } - let(:params) do - { - 
name: 'test-cluster', - provider_type: :gcp, - provider_gcp_attributes: { - gcp_project_id: 'gcp-project', - zone: 'us-central1-a', - num_nodes: 1, - machine_type: 'machine_type-a', - legacy_abac: 'true' - }, - clusterable: clusterable - } - end -end - -RSpec.shared_context 'invalid cluster create params' do - let(:clusterable) { Clusters::Instance.new } - let(:params) do - { - name: 'test-cluster', - provider_type: :gcp, - provider_gcp_attributes: { - gcp_project_id: '!!!!!!!', - zone: 'us-central1-a', - num_nodes: 1, - machine_type: 'machine_type-a' - }, - clusterable: clusterable - - } - end -end - -RSpec.shared_examples 'create cluster service success' do - it 'creates a cluster object' do - expect { subject } - .to change { Clusters::Cluster.count }.by(1) - .and change { Clusters::Providers::Gcp.count }.by(1) - - expect(subject.name).to eq('test-cluster') - expect(subject.user).to eq(user) - expect(subject.project).to eq(project) - expect(subject.provider.gcp_project_id).to eq('gcp-project') - expect(subject.provider.zone).to eq('us-central1-a') - expect(subject.provider.num_nodes).to eq(1) - expect(subject.provider.machine_type).to eq('machine_type-a') - expect(subject.provider.access_token).to eq(access_token) - expect(subject.provider).to be_legacy_abac - expect(subject.platform).to be_nil - expect(subject.namespace_per_environment).to eq true - end -end - -RSpec.shared_examples 'create cluster service error' do - it 'returns an error' do - expect { subject }.to change { Clusters::Cluster.count }.by(0) - expect(subject.errors[:"provider_gcp.gcp_project_id"]).to be_present - end -end diff --git a/spec/support/services/deploy_token_shared_examples.rb b/spec/support/services/deploy_token_shared_examples.rb deleted file mode 100644 index d322b3fc81d..00000000000 --- a/spec/support/services/deploy_token_shared_examples.rb +++ /dev/null @@ -1,86 +0,0 @@ -# frozen_string_literal: true - -RSpec.shared_examples 'a deploy token creation service' do - let(:user) { 
create(:user) } - let(:deploy_token_params) { attributes_for(:deploy_token) } - - describe '#execute' do - subject { described_class.new(entity, user, deploy_token_params).execute } - - context 'when the deploy token is valid' do - it 'creates a new DeployToken' do - expect { subject }.to change { DeployToken.count }.by(1) - end - - it 'creates a new ProjectDeployToken' do - expect { subject }.to change { deploy_token_class.count }.by(1) - end - - it 'returns a DeployToken' do - expect(subject[:deploy_token]).to be_an_instance_of DeployToken - end - - it 'sets the creator_id as the id of the current_user' do - expect(subject[:deploy_token].read_attribute(:creator_id)).to eq(user.id) - end - end - - context 'when expires at date is not passed' do - let(:deploy_token_params) { attributes_for(:deploy_token, expires_at: '') } - - it 'sets Forever.date' do - expect(subject[:deploy_token].read_attribute(:expires_at)).to eq(Forever.date) - end - end - - context 'when username is empty string' do - let(:deploy_token_params) { attributes_for(:deploy_token, username: '') } - - it 'converts it to nil' do - expect(subject[:deploy_token].read_attribute(:username)).to be_nil - end - end - - context 'when username is provided' do - let(:deploy_token_params) { attributes_for(:deploy_token, username: 'deployer') } - - it 'keeps the provided username' do - expect(subject[:deploy_token].read_attribute(:username)).to eq('deployer') - end - end - - context 'when the deploy token is invalid' do - let(:deploy_token_params) { attributes_for(:deploy_token, read_repository: false, read_registry: false, write_registry: false) } - - it 'does not create a new DeployToken' do - expect { subject }.not_to change { DeployToken.count } - end - - it 'does not create a new ProjectDeployToken' do - expect { subject }.not_to change { deploy_token_class.count } - end - end - end -end - -RSpec.shared_examples 'a deploy token deletion service' do - let(:user) { create(:user) } - let(:deploy_token_params) 
{ { token_id: deploy_token.id } } - - describe '#execute' do - subject { described_class.new(entity, user, deploy_token_params).execute } - - it "destroys a token record and it's associated DeployToken" do - expect { subject }.to change { deploy_token_class.count }.by(-1) - .and change { DeployToken.count }.by(-1) - end - - context 'invalid token id' do - let(:deploy_token_params) { { token_id: 9999 } } - - it 'raises an error' do - expect { subject }.to raise_error(ActiveRecord::RecordNotFound) - end - end - end -end diff --git a/spec/support/services/import_csv_service_shared_examples.rb b/spec/support/services/import_csv_service_shared_examples.rb deleted file mode 100644 index 1555497ae48..00000000000 --- a/spec/support/services/import_csv_service_shared_examples.rb +++ /dev/null @@ -1,38 +0,0 @@ -# frozen_string_literal: true - -require 'spec_helper' - -RSpec.shared_examples_for 'importer with email notification' do - it 'notifies user of import result' do - expect(Notify).to receive_message_chain(email_method, :deliver_later) - - subject - end -end - -RSpec.shared_examples 'correctly handles invalid files' do - shared_examples_for 'invalid file' do - it 'returns invalid file error' do - expect(subject[:success]).to eq(0) - expect(subject[:parse_error]).to eq(true) - end - end - - context 'when given file with unsupported extension' do - let(:file) { fixture_file_upload('spec/fixtures/banana_sample.gif') } - - it_behaves_like 'invalid file' - end - - context 'when given empty file' do - let(:file) { fixture_file_upload('spec/fixtures/csv_empty.csv') } - - it_behaves_like 'invalid file' - end - - context 'when given file without headers' do - let(:file) { fixture_file_upload('spec/fixtures/csv_no_headers.csv') } - - it_behaves_like 'invalid file' - end -end diff --git a/spec/support/services/issuable_description_quick_actions_shared_examples.rb b/spec/support/services/issuable_description_quick_actions_shared_examples.rb deleted file mode 100644 index 
1970301e4c9..00000000000 --- a/spec/support/services/issuable_description_quick_actions_shared_examples.rb +++ /dev/null @@ -1,62 +0,0 @@ -# frozen_string_literal: true - -# Specifications for behavior common to all objects with executable attributes. -# It can take a `default_params`. - -RSpec.shared_examples 'issuable record that supports quick actions' do - let_it_be(:project) { create(:project, :repository) } - let_it_be(:user) { create(:user) } - let_it_be(:assignee) { create(:user) } - let_it_be(:milestone) { create(:milestone, project: project) } - let_it_be(:labels) { create_list(:label, 3, project: project) } - - let(:base_params) { { title: 'My issuable title' } } - let(:params) { base_params.merge(defined?(default_params) ? default_params : {}).merge(example_params) } - - before_all do - project.add_maintainer(user) - project.add_maintainer(assignee) - end - - before do - issuable.reload - end - - context 'with labels in command only' do - let(:example_params) do - { - description: "/label ~#{labels.first.name} ~#{labels.second.name}\n/unlabel ~#{labels.third.name}" - } - end - - it 'attaches labels to issuable' do - expect(issuable.label_ids).to match_array([labels.first.id, labels.second.id]) - end - end - - context 'with labels in params and command' do - let(:example_params) do - { - label_ids: [labels.second.id], - description: "/label ~#{labels.first.name}\n/unlabel ~#{labels.third.name}" - } - end - - it 'attaches all labels to issuable' do - expect(issuable.label_ids).to match_array([labels.first.id, labels.second.id]) - end - end - - context 'with assignee and milestone in command only' do - let(:example_params) do - { - description: %(/assign @#{assignee.username}\n/milestone %"#{milestone.name}") - } - end - - it 'assigns and sets milestone to issuable' do - expect(issuable.assignees).to eq([assignee]) - expect(issuable.milestone).to eq(milestone) - end - end -end diff --git 
a/spec/support/services/issuable_import_csv_service_shared_examples.rb b/spec/support/services/issuable_import_csv_service_shared_examples.rb deleted file mode 100644 index 71740ba8ab2..00000000000 --- a/spec/support/services/issuable_import_csv_service_shared_examples.rb +++ /dev/null @@ -1,107 +0,0 @@ -# frozen_string_literal: true - -require 'spec_helper' - -RSpec.shared_examples 'issuable import csv service' do |issuable_type| - let_it_be_with_refind(:project) { create(:project) } - let_it_be(:user) { create(:user) } - - subject { service.execute } - - shared_examples_for 'an issuable importer' do - if issuable_type == 'issue' - it 'records the import attempt if resource is an issue' do - expect { subject } - .to change { Issues::CsvImport.where(project: project, user: user).count } - .by 1 - end - end - end - - describe '#execute' do - before do - project.add_developer(user) - end - - it_behaves_like 'correctly handles invalid files' do - it_behaves_like 'importer with email notification' - it_behaves_like 'an issuable importer' - end - - context 'with a file generated by Gitlab CSV export' do - let(:file) { fixture_file_upload('spec/fixtures/csv_gitlab_export.csv') } - - it 'imports the CSV without errors' do - expect(subject[:success]).to eq(4) - expect(subject[:error_lines]).to eq([]) - expect(subject[:parse_error]).to eq(false) - end - - it 'correctly sets the issuable attributes' do - expect { subject }.to change { issuables.count }.by 4 - - expect(issuables.reload).to include(have_attributes({ title: 'Test Title', description: 'Test Description' })) - end - - it_behaves_like 'importer with email notification' - it_behaves_like 'an issuable importer' - end - - context 'comma delimited file' do - let(:file) { fixture_file_upload('spec/fixtures/csv_comma.csv') } - - it 'imports CSV without errors' do - expect(subject[:success]).to eq(3) - expect(subject[:error_lines]).to eq([]) - expect(subject[:parse_error]).to eq(false) - end - - it 'correctly sets the 
issuable attributes' do - expect { subject }.to change { issuables.count }.by 3 - - expect(issuables.reload).to include(have_attributes(title: 'Title with quote"', description: 'Description')) - end - - it_behaves_like 'importer with email notification' - it_behaves_like 'an issuable importer' - end - - context 'tab delimited file with error row' do - let(:file) { fixture_file_upload('spec/fixtures/csv_tab.csv') } - - it 'imports CSV with some error rows' do - expect(subject[:success]).to eq(2) - expect(subject[:error_lines]).to eq([3]) - expect(subject[:parse_error]).to eq(false) - end - - it 'correctly sets the issuable attributes' do - expect { subject }.to change { issuables.count }.by 2 - - expect(issuables.reload).to include(have_attributes(title: 'Hello', description: 'World')) - end - - it_behaves_like 'importer with email notification' - it_behaves_like 'an issuable importer' - end - - context 'semicolon delimited file with CRLF' do - let(:file) { fixture_file_upload('spec/fixtures/csv_semicolon.csv') } - - it 'imports CSV with a blank row' do - expect(subject[:success]).to eq(3) - expect(subject[:error_lines]).to eq([4]) - expect(subject[:parse_error]).to eq(false) - end - - it 'correctly sets the issuable attributes' do - expect { subject }.to change { issuables.count }.by 3 - - expect(issuables.reload).to include(have_attributes(title: 'Hello', description: 'World')) - end - - it_behaves_like 'importer with email notification' - it_behaves_like 'an issuable importer' - end - end -end diff --git a/spec/support/services/issuable_update_service_shared_examples.rb b/spec/support/services/issuable_update_service_shared_examples.rb deleted file mode 100644 index feea21be428..00000000000 --- a/spec/support/services/issuable_update_service_shared_examples.rb +++ /dev/null @@ -1,99 +0,0 @@ -# frozen_string_literal: true - -RSpec.shared_examples 'issuable update service' do - def update_issuable(opts) - described_class.new(project, user, 
opts).execute(open_issuable) - end - - context 'changing state' do - let(:hook_event) { :"#{closed_issuable.class.name.underscore.to_sym}_hooks" } - - context 'to reopened' do - let(:expected_payload) do - include( - changes: include( - state_id: { current: 1, previous: 2 }, - updated_at: { current: kind_of(Time), previous: kind_of(Time) } - ), - object_attributes: include( - state: 'opened', - action: 'reopen' - ) - ) - end - - it 'executes hooks' do - expect(project).to receive(:execute_hooks).with(expected_payload, hook_event) - expect(project).to receive(:execute_integrations).with(expected_payload, hook_event) - - described_class.new(**described_class.constructor_container_arg(project), current_user: user, params: { state_event: 'reopen' }).execute(closed_issuable) - end - end - - context 'to closed' do - let(:expected_payload) do - include( - changes: include( - state_id: { current: 2, previous: 1 }, - updated_at: { current: kind_of(Time), previous: kind_of(Time) } - ), - object_attributes: include( - state: 'closed', - action: 'close' - ) - ) - end - - it 'executes hooks' do - expect(project).to receive(:execute_hooks).with(expected_payload, hook_event) - expect(project).to receive(:execute_integrations).with(expected_payload, hook_event) - - described_class.new(**described_class.constructor_container_arg(project), current_user: user, params: { state_event: 'close' }).execute(open_issuable) - end - end - end -end - -RSpec.shared_examples 'keeps issuable labels sorted after update' do - before do - update_issuable(label_ids: [label_b.id]) - end - - context 'when label is changed' do - it 'keeps the labels sorted by title ASC' do - update_issuable({ add_label_ids: [label_a.id] }) - - expect(issuable.labels).to eq([label_a, label_b]) - end - end -end - -RSpec.shared_examples 'broadcasting issuable labels updates' do - before do - update_issuable(label_ids: [label_a.id]) - end - - context 'when label is added' do - it 'triggers the GraphQL subscription' do - 
expect(GraphqlTriggers).to receive(:issuable_labels_updated).with(issuable) - - update_issuable(add_label_ids: [label_b.id]) - end - end - - context 'when label is removed' do - it 'triggers the GraphQL subscription' do - expect(GraphqlTriggers).to receive(:issuable_labels_updated).with(issuable) - - update_issuable(remove_label_ids: [label_a.id]) - end - end - - context 'when label is unchanged' do - it 'does not trigger the GraphQL subscription' do - expect(GraphqlTriggers).not_to receive(:issuable_labels_updated).with(issuable) - - update_issuable(label_ids: [label_a.id]) - end - end -end diff --git a/spec/support/services/issues/move_and_clone_services_shared_examples.rb b/spec/support/services/issues/move_and_clone_services_shared_examples.rb deleted file mode 100644 index 2b2e90c0461..00000000000 --- a/spec/support/services/issues/move_and_clone_services_shared_examples.rb +++ /dev/null @@ -1,22 +0,0 @@ -# frozen_string_literal: true - -RSpec.shared_examples 'copy or reset relative position' do - before do - # ensure we have a relative position and it is known - old_issue.update!(relative_position: 1000) - end - - context 'when moved to a project within same group hierarchy' do - it 'does not reset the relative_position' do - expect(subject.relative_position).to eq(1000) - end - end - - context 'when moved to a project in a different group hierarchy' do - let_it_be(:new_project) { create(:project, group: create(:group)) } - - it 'does reset the relative_position' do - expect(subject.relative_position).to be_nil - end - end -end diff --git a/spec/support/services/migrate_to_ghost_user_service_shared_examples.rb b/spec/support/services/migrate_to_ghost_user_service_shared_examples.rb deleted file mode 100644 index ae98ce689e3..00000000000 --- a/spec/support/services/migrate_to_ghost_user_service_shared_examples.rb +++ /dev/null @@ -1,89 +0,0 @@ -# frozen_string_literal: true - -RSpec.shared_examples "migrating a deleted user's associated records to the ghost 
user" do |record_class, fields| - record_class_name = record_class.to_s.titleize.downcase - - let(:project) do - case record_class - when MergeRequest - create(:project, :repository) - else - create(:project) - end - end - - before do - project.add_developer(user) - end - - context "for a #{record_class_name} the user has created" do - let!(:record) { created_record } - let(:migrated_fields) { fields || [:author] } - - it "does not delete the #{record_class_name}" do - service.execute - - expect(record_class.find_by_id(record.id)).to be_present - end - - it "blocks the user before migrating #{record_class_name}s to the 'Ghost User'" do - service.execute - - expect(user).to be_blocked - end - - it 'migrates all associated fields to the "Ghost user"' do - service.execute - - migrated_record = record_class.find_by_id(record.id) - - migrated_fields.each do |field| - expect(migrated_record.public_send(field)).to eq(User.ghost) - end - end - - it 'will only migrate specific records during a hard_delete' do - service.execute(hard_delete: true) - - migrated_record = record_class.find_by_id(record.id) - - check_user = always_ghost ? 
User.ghost : user - - migrated_fields.each do |field| - expect(migrated_record.public_send(field)).to eq(check_user) - end - end - - context "race conditions" do - context "when #{record_class_name} migration fails and is rolled back" do - before do - allow_any_instance_of(ActiveRecord::Associations::CollectionProxy) - .to receive(:update_all).and_raise(ActiveRecord::StatementTimeout) - end - - it 'rolls back the user block' do - expect { service.execute }.to raise_error(ActiveRecord::StatementTimeout) - - expect(user.reload).not_to be_blocked - end - - it "doesn't unblock a previously-blocked user" do - expect(user.starred_projects).to receive(:update_all).and_call_original - user.block - - expect { service.execute }.to raise_error(ActiveRecord::StatementTimeout) - - expect(user.reload).to be_blocked - end - end - - it "blocks the user before #{record_class_name} migration begins" do - expect(service).to receive("migrate_#{record_class_name.parameterize(separator: '_').pluralize}".to_sym) do - expect(user.reload).to be_blocked - end - - service.execute - end - end - end -end diff --git a/spec/support/services/service_response_shared_examples.rb b/spec/support/services/service_response_shared_examples.rb deleted file mode 100644 index 186627347fb..00000000000 --- a/spec/support/services/service_response_shared_examples.rb +++ /dev/null @@ -1,25 +0,0 @@ -# frozen_string_literal: true - -RSpec.shared_examples 'returning an error service response' do |message: nil| - it 'returns an error service response' do - result = subject - - expect(result).to be_error - - if message - expect(result.message).to eq(message) - end - end -end - -RSpec.shared_examples 'returning a success service response' do |message: nil| - it 'returns a success service response' do - result = subject - - expect(result).to be_success - - if message - expect(result.message).to eq(message) - end - end -end diff --git a/spec/support/shared_contexts/features/integrations/integrations_shared_context.rb 
b/spec/support/shared_contexts/features/integrations/integrations_shared_context.rb index 9612b657093..8c52c14085b 100644 --- a/spec/support/shared_contexts/features/integrations/integrations_shared_context.rb +++ b/spec/support/shared_contexts/features/integrations/integrations_shared_context.rb @@ -76,6 +76,8 @@ Integration.available_integration_names.each do |integration| hash.merge!(k => 'ABC1') elsif integration == 'apple_app_store' && k == :app_store_private_key_file_name hash.merge!(k => 'ssl_key.pem') + elsif integration == 'google_play' && k == :package_name + hash.merge!(k => 'com.gitlab.foo.bar') elsif integration == 'google_play' && k == :service_account_key hash.merge!(k => File.read('spec/fixtures/service_account.json')) elsif integration == 'google_play' && k == :service_account_key_file_name diff --git a/spec/support/shared_contexts/finders/issues_finder_shared_contexts.rb b/spec/support/shared_contexts/finders/issues_finder_shared_contexts.rb index afb3976e3b8..16d23f63fd0 100644 --- a/spec/support/shared_contexts/finders/issues_finder_shared_contexts.rb +++ b/spec/support/shared_contexts/finders/issues_finder_shared_contexts.rb @@ -12,47 +12,55 @@ RSpec.shared_context 'IssuesFinder context' do let_it_be(:milestone) { create(:milestone, project: project1, releases: [release]) } let_it_be(:label) { create(:label, project: project2) } let_it_be(:label2) { create(:label, project: project2) } - let_it_be(:item1, reload: true) do - create(:issue, - author: user, - assignees: [user], - project: project1, - milestone: milestone, - title: 'gitlab', - created_at: 1.week.ago, - updated_at: 1.week.ago) + let_it_be_with_reload(:item1) do + create( + :issue, + author: user, + assignees: [user], + project: project1, + milestone: milestone, + title: 'gitlab', + created_at: 1.week.ago, + updated_at: 1.week.ago + ) end - let_it_be(:item2, reload: true) do - create(:issue, - author: user, - assignees: [user], - project: project2, - description: 'gitlab', - 
created_at: 1.week.from_now, - updated_at: 1.week.from_now) + let_it_be_with_reload(:item2) do + create( + :issue, + author: user, + assignees: [user], + project: project2, + description: 'gitlab', + created_at: 1.week.from_now, + updated_at: 1.week.from_now + ) end - let_it_be(:item3, reload: true) do - create(:issue, - author: user2, - assignees: [user2], - project: project2, - title: 'tanuki', - description: 'tanuki', - created_at: 2.weeks.from_now, - updated_at: 2.weeks.from_now) + let_it_be_with_reload(:item3) do + create( + :issue, + author: user2, + assignees: [user2], + project: project2, + title: 'tanuki', + description: 'tanuki', + created_at: 2.weeks.from_now, + updated_at: 2.weeks.from_now + ) end - let_it_be(:item4, reload: true) { create(:issue, project: project3) } - let_it_be(:item5, reload: true) do - create(:issue, - author: user, - assignees: [user], - project: project1, - title: 'wotnot', - created_at: 3.days.ago, - updated_at: 3.days.ago) + let_it_be_with_reload(:item4) { create(:issue, project: project3) } + let_it_be_with_reload(:item5) do + create( + :issue, + author: user, + assignees: [user], + project: project1, + title: 'wotnot', + created_at: 3.days.ago, + updated_at: 3.days.ago + ) end let_it_be(:award_emoji1) { create(:award_emoji, name: 'thumbsup', user: user, awardable: item1) } diff --git a/spec/support/shared_contexts/finders/merge_requests_finder_shared_contexts.rb b/spec/support/shared_contexts/finders/merge_requests_finder_shared_contexts.rb index 8a64efe9df5..507bcd44ee8 100644 --- a/spec/support/shared_contexts/finders/merge_requests_finder_shared_contexts.rb +++ b/spec/support/shared_contexts/finders/merge_requests_finder_shared_contexts.rb @@ -54,34 +54,44 @@ RSpec.shared_context 'MergeRequestsFinder multiple projects with merge requests let_it_be(:label2) { create(:label, project: project1) } let!(:merge_request1) do - create(:merge_request, assignees: [user], author: user, reviewers: [user2], - source_project: project2, 
target_project: project1, - target_branch: 'merged-target') + create( + :merge_request, assignees: [user], author: user, reviewers: [user2], + source_project: project2, target_project: project1, + target_branch: 'merged-target' + ) end let!(:merge_request2) do - create(:merge_request, :conflict, assignees: [user], author: user, reviewers: [user2], - source_project: project2, target_project: project1, - state: 'closed') + create( + :merge_request, :conflict, assignees: [user], author: user, reviewers: [user2], + source_project: project2, target_project: project1, + state: 'closed' + ) end let!(:merge_request3) do - create(:merge_request, :simple, author: user, assignees: [user2], reviewers: [user], - source_project: project2, target_project: project2, - state: 'locked', - title: 'thing WIP thing') + create( + :merge_request, :simple, author: user, assignees: [user2], reviewers: [user], + source_project: project2, target_project: project2, + state: 'locked', + title: 'thing WIP thing' + ) end let!(:merge_request4) do - create(:merge_request, :simple, author: user, - source_project: project3, target_project: project3, - title: 'WIP thing') + create( + :merge_request, :simple, author: user, + source_project: project3, target_project: project3, + title: 'WIP thing' + ) end let_it_be(:merge_request5) do - create(:merge_request, :simple, author: user, - source_project: project4, target_project: project4, - title: '[WIP]') + create( + :merge_request, :simple, author: user, + source_project: project4, target_project: project4, + title: '[WIP]' + ) end let!(:label_link) { create(:label_link, label: label, target: merge_request2) } diff --git a/spec/support/shared_contexts/finders/work_items_finder_shared_contexts.rb b/spec/support/shared_contexts/finders/work_items_finder_shared_contexts.rb index 8c5bc339db5..1118039d164 100644 --- a/spec/support/shared_contexts/finders/work_items_finder_shared_contexts.rb +++ 
b/spec/support/shared_contexts/finders/work_items_finder_shared_contexts.rb @@ -12,47 +12,55 @@ RSpec.shared_context 'WorkItemsFinder context' do let_it_be(:milestone) { create(:milestone, project: project1, releases: [release]) } let_it_be(:label) { create(:label, project: project2) } let_it_be(:label2) { create(:label, project: project2) } - let_it_be(:item1, reload: true) do - create(:work_item, - author: user, - assignees: [user], - project: project1, - milestone: milestone, - title: 'gitlab', - created_at: 1.week.ago, - updated_at: 1.week.ago) + let_it_be_with_reload(:item1) do + create( + :work_item, + author: user, + assignees: [user], + project: project1, + milestone: milestone, + title: 'gitlab', + created_at: 1.week.ago, + updated_at: 1.week.ago + ) end - let_it_be(:item2, reload: true) do - create(:work_item, - author: user, - assignees: [user], - project: project2, - description: 'gitlab', - created_at: 1.week.from_now, - updated_at: 1.week.from_now) + let_it_be_with_reload(:item2) do + create( + :work_item, + author: user, + assignees: [user], + project: project2, + description: 'gitlab', + created_at: 1.week.from_now, + updated_at: 1.week.from_now + ) end - let_it_be(:item3, reload: true) do - create(:work_item, - author: user2, - assignees: [user2], - project: project2, - title: 'tanuki', - description: 'tanuki', - created_at: 2.weeks.from_now, - updated_at: 2.weeks.from_now) + let_it_be_with_reload(:item3) do + create( + :work_item, + author: user2, + assignees: [user2], + project: project2, + title: 'tanuki', + description: 'tanuki', + created_at: 2.weeks.from_now, + updated_at: 2.weeks.from_now + ) end - let_it_be(:item4, reload: true) { create(:work_item, project: project3) } - let_it_be(:item5, reload: true) do - create(:work_item, - author: user, - assignees: [user], - project: project1, - title: 'wotnot', - created_at: 3.days.ago, - updated_at: 3.days.ago) + let_it_be_with_reload(:item4) { create(:work_item, project: project3) } + 
let_it_be_with_reload(:item5) do + create( + :work_item, + author: user, + assignees: [user], + project: project1, + title: 'wotnot', + created_at: 3.days.ago, + updated_at: 3.days.ago + ) end let_it_be(:award_emoji1) { create(:award_emoji, name: 'thumbsup', user: user, awardable: item1) } diff --git a/spec/support/shared_contexts/issuable/merge_request_shared_context.rb b/spec/support/shared_contexts/issuable/merge_request_shared_context.rb index b9cde12c537..35c1511c96a 100644 --- a/spec/support/shared_contexts/issuable/merge_request_shared_context.rb +++ b/spec/support/shared_contexts/issuable/merge_request_shared_context.rb @@ -1,7 +1,7 @@ # frozen_string_literal: true RSpec.shared_context 'merge request show action' do - include Spec::Support::Helpers::Features::MergeRequestHelpers + include Features::MergeRequestHelpers let_it_be(:user) { create(:user) } let_it_be(:project) { create(:project, :public, :repository) } diff --git a/spec/support/shared_contexts/navbar_structure_context.rb b/spec/support/shared_contexts/navbar_structure_context.rb index 866a97b0e3c..e28da0bc365 100644 --- a/spec/support/shared_contexts/navbar_structure_context.rb +++ b/spec/support/shared_contexts/navbar_structure_context.rb @@ -68,7 +68,7 @@ RSpec.shared_context 'project navbar structure' do nav_item: _('Deployments'), nav_sub_items: [ _('Environments'), - _('Feature Flags'), + s_('FeatureFlags|Feature flags'), _('Releases') ] }, diff --git a/spec/support/shared_contexts/services/clusters/create_service_shared_context.rb b/spec/support/shared_contexts/services/clusters/create_service_shared_context.rb new file mode 100644 index 00000000000..393e90da1d3 --- /dev/null +++ b/spec/support/shared_contexts/services/clusters/create_service_shared_context.rb @@ -0,0 +1,19 @@ +# frozen_string_literal: true + +RSpec.shared_context 'with valid cluster create params' do + let(:clusterable) { Clusters::Instance.new } + let(:params) do + { + name: 'test-cluster', + provider_type: :gcp, + 
provider_gcp_attributes: { + gcp_project_id: 'gcp-project', + zone: 'us-central1-a', + num_nodes: 1, + machine_type: 'machine_type-a', + legacy_abac: 'true' + }, + clusterable: clusterable + } + end +end diff --git a/spec/support/shared_contexts/services/projects/container_repository/delete_tags_service_shared_context.rb b/spec/support/shared_contexts/services/projects/container_repository/delete_tags_service_shared_context.rb index 7db479bcfd2..0cf026749ee 100644 --- a/spec/support/shared_contexts/services/projects/container_repository/delete_tags_service_shared_context.rb +++ b/spec/support/shared_contexts/services/projects/container_repository/delete_tags_service_shared_context.rb @@ -8,9 +8,11 @@ RSpec.shared_context 'container repository delete tags service shared context' d let(:params) { { tags: tags } } before do - stub_container_registry_config(enabled: true, - api_url: 'http://registry.gitlab', - host_port: 'registry.gitlab') + stub_container_registry_config( + enabled: true, + api_url: 'http://registry.gitlab', + host_port: 'registry.gitlab' + ) stub_container_registry_tags( repository: repository.path, diff --git a/spec/support/shared_examples/analytics/cycle_analytics/flow_metrics_examples.rb b/spec/support/shared_examples/analytics/cycle_analytics/flow_metrics_examples.rb index b324a5886a9..cb74d0e8dca 100644 --- a/spec/support/shared_examples/analytics/cycle_analytics/flow_metrics_examples.rb +++ b/spec/support/shared_examples/analytics/cycle_analytics/flow_metrics_examples.rb @@ -1,5 +1,37 @@ # frozen_string_literal: true +RSpec.shared_examples 'validation on Time arguments' do + context 'when `to` parameter is higher than `from`' do + let(:variables) do + { + path: full_path, + from: 1.day.ago.iso8601, + to: 2.days.ago.iso8601 + } + end + + it 'returns error' do + expect(result).to be_nil + expect(graphql_errors.first['message']).to include('`from` argument must be before `to` argument') + end + end + + context 'when from and to parameter range is 
higher than 180 days' do + let(:variables) do + { + path: full_path, + from: Time.now, + to: 181.days.from_now + } + end + + it 'returns error' do + expect(result).to be_nil + expect(graphql_errors.first['message']).to include('Max of 180 days timespan is allowed') + end + end +end + RSpec.shared_examples 'value stream analytics flow metrics issueCount examples' do let_it_be(:milestone) { create(:milestone, group: group) } let_it_be(:label) { create(:group_label, group: group) } @@ -121,6 +153,8 @@ RSpec.shared_examples 'value stream analytics flow metrics issueCount examples' expect(result).to eq(nil) end end + + it_behaves_like 'validation on Time arguments' end RSpec.shared_examples 'value stream analytics flow metrics deploymentCount examples' do @@ -202,6 +236,8 @@ RSpec.shared_examples 'value stream analytics flow metrics deploymentCount examp }) end end + + it_behaves_like 'validation on Time arguments' end RSpec.shared_examples 'value stream analytics flow metrics leadTime examples' do diff --git a/spec/support/shared_examples/analytics/cycle_analytics/request_params_examples.rb b/spec/support/shared_examples/analytics/cycle_analytics/request_params_examples.rb index f1ffddf6507..ef9830fbce8 100644 --- a/spec/support/shared_examples/analytics/cycle_analytics/request_params_examples.rb +++ b/spec/support/shared_examples/analytics/cycle_analytics/request_params_examples.rb @@ -119,9 +119,13 @@ RSpec.shared_examples 'unlicensed cycle analytics request params' do it { is_expected.to eq(false) } end - describe 'enable_tasks_by_type_chart data attribute' do - subject(:value) { described_class.new(params).to_data_attributes[:enable_tasks_by_type_chart] } + describe 'feature availablity data attributes' do + subject(:value) { described_class.new(params).to_data_attributes } - it { is_expected.to eq('false') } + it 'disables all paid features' do + is_expected.to match(a_hash_including(enable_tasks_by_type_chart: 'false', + enable_customizable_stages: 'false', + 
enable_projects_filter: 'false')) + end end end diff --git a/spec/support/shared_examples/banzai/filters/filter_timeout_shared_examples.rb b/spec/support/shared_examples/banzai/filters/filter_timeout_shared_examples.rb new file mode 100644 index 00000000000..1f2ebe6fef6 --- /dev/null +++ b/spec/support/shared_examples/banzai/filters/filter_timeout_shared_examples.rb @@ -0,0 +1,37 @@ +# frozen_string_literal: true + +# This shared_example requires the following variables: +# - text: The text to be run through the filter +# +# Usage: +# +# it_behaves_like 'filter timeout' do +# let(:text) { 'some text' } +# end +RSpec.shared_examples 'filter timeout' do + context 'when rendering takes too long' do + let_it_be(:project) { create(:project) } + let_it_be(:context) { { project: project } } + + it 'times out' do + stub_const("Banzai::Filter::TimeoutHtmlPipelineFilter::RENDER_TIMEOUT", 0.1) + allow_next_instance_of(described_class) do |instance| + allow(instance).to receive(:call_with_timeout) do + sleep(0.2) + text + end + end + + expect(Gitlab::RenderTimeout).to receive(:timeout).and_call_original + expect(Gitlab::ErrorTracking).to receive(:track_exception).with( + instance_of(Timeout::Error), + project_id: context[:project].id, + class_name: described_class.name.demodulize + ) + + result = filter(text) + + expect(result.to_html).to eq text + end + end +end diff --git a/spec/support/shared_examples/banzai/filters/reference_filter_shared_examples.rb b/spec/support/shared_examples/banzai/filters/reference_filter_shared_examples.rb new file mode 100644 index 00000000000..6912bcaee34 --- /dev/null +++ b/spec/support/shared_examples/banzai/filters/reference_filter_shared_examples.rb @@ -0,0 +1,88 @@ +# frozen_string_literal: true + +# Specs for reference links containing HTML. 
+# +# Requires a reference: +# let(:reference) { '#42' } +RSpec.shared_examples 'a reference containing an element node' do + let(:inner_html) { 'element node inside' } + let(:reference_with_element) { %(#{inner_html}) } + + it 'does not escape inner html' do + doc = reference_filter(reference_with_element) + expect(doc.children.first.inner_html).to eq(inner_html) + end +end + +# Requires a reference, subject and subject_name: +# subject { create(:user) } +# let(:reference) { subject.to_reference } +# let(:subject_name) { 'user' } +RSpec.shared_examples 'user reference or project reference' do + shared_examples 'it contains a data- attribute' do + it 'includes a data- attribute' do + doc = reference_filter("Hey #{reference}") + link = doc.css('a').first + + expect(link).to have_attribute("data-#{subject_name}") + expect(link.attr("data-#{subject_name}")).to eq subject.id.to_s + end + end + + context 'when mentioning a resource' do + it_behaves_like 'a reference containing an element node' + it_behaves_like 'it contains a data- attribute' + + it "links to a resource" do + doc = reference_filter("Hey #{reference}") + expect(doc.css('a').first.attr('href')).to eq urls.send("#{subject_name}_url", subject) + end + + it 'links to a resource with a period' do + subject = create(subject_name.to_sym, name: 'alphA.Beta') + + doc = reference_filter("Hey #{get_reference(subject)}") + expect(doc.css('a').length).to eq 1 + end + + it 'links to a resource with an underscore' do + subject = create(subject_name.to_sym, name: 'ping_pong_king') + + doc = reference_filter("Hey #{get_reference(subject)}") + expect(doc.css('a').length).to eq 1 + end + + it 'links to a resource with different case-sensitivity' do + subject = create(subject_name.to_sym, name: 'RescueRanger') + reference = get_reference(subject) + + doc = reference_filter("Hey #{reference.upcase}") + expect(doc.css('a').length).to eq 1 + expect(doc.css('a').text).to eq(reference) + end + end + + it 'supports an :only_path 
context' do + doc = reference_filter("Hey #{reference}", only_path: true) + link = doc.css('a').first.attr('href') + + expect(link).not_to match %r{https?://} + expect(link).to eq urls.send "#{subject_name}_path", subject + end + + describe 'referencing a resource in a link href' do + let(:reference) { %(Some text) } + + it_behaves_like 'it contains a data- attribute' + + it 'links to the resource' do + doc = reference_filter("Hey #{reference}") + expect(doc.css('a').first.attr('href')).to eq urls.send "#{subject_name}_url", subject + end + + it 'links with adjacent text' do + doc = reference_filter("Mention me (#{reference}.)") + expect(doc.to_html).to match(%r{\(Some text\.\)}) + end + end +end diff --git a/spec/support/shared_examples/bulk_imports/visibility_level_examples.rb b/spec/support/shared_examples/bulk_imports/visibility_level_examples.rb index 40e9726f89c..02eae250e6a 100644 --- a/spec/support/shared_examples/bulk_imports/visibility_level_examples.rb +++ b/spec/support/shared_examples/bulk_imports/visibility_level_examples.rb @@ -27,14 +27,6 @@ RSpec.shared_examples 'visibility level settings' do expect(transformed_data[:visibility_level]).to eq(Gitlab::VisibilityLevel::PRIVATE) end end - - context 'when destination is blank' do - let(:destination_namespace) { '' } - - it 'sets visibility level to public' do - expect(transformed_data[:visibility_level]).to eq(Gitlab::VisibilityLevel::PUBLIC) - end - end end context 'when internal' do @@ -63,27 +55,6 @@ RSpec.shared_examples 'visibility level settings' do expect(transformed_data[:visibility_level]).to eq(Gitlab::VisibilityLevel::PRIVATE) end end - - context 'when destination is blank' do - let(:destination_namespace) { '' } - - it 'sets visibility level to internal' do - expect(transformed_data[:visibility_level]).to eq(Gitlab::VisibilityLevel::INTERNAL) - end - - context 'when visibility level is restricted' do - it 'sets visibility level to private' do - stub_application_setting( - 
restricted_visibility_levels: [ - Gitlab::VisibilityLevel::INTERNAL, - Gitlab::VisibilityLevel::PUBLIC - ] - ) - - expect(transformed_data[:visibility_level]).to eq(Gitlab::VisibilityLevel::PRIVATE) - end - end - end end context 'when private' do @@ -112,13 +83,5 @@ RSpec.shared_examples 'visibility level settings' do expect(transformed_data[:visibility_level]).to eq(Gitlab::VisibilityLevel::PRIVATE) end end - - context 'when destination is blank' do - let(:destination_namespace) { '' } - - it 'sets visibility level to private' do - expect(transformed_data[:visibility_level]).to eq(Gitlab::VisibilityLevel::PRIVATE) - end - end end end diff --git a/spec/support/shared_examples/controllers/project_import_rate_limiter_shared_examples.rb b/spec/support/shared_examples/controllers/project_import_rate_limiter_shared_examples.rb new file mode 100644 index 00000000000..66d753a4010 --- /dev/null +++ b/spec/support/shared_examples/controllers/project_import_rate_limiter_shared_examples.rb @@ -0,0 +1,22 @@ +# frozen_string_literal: true + +RSpec.shared_examples 'project import rate limiter' do + let(:user) { create(:user) } + + before do + sign_in(user) + end + + context 'when limit exceeds' do + before do + allow(Gitlab::ApplicationRateLimiter).to receive(:throttled?).and_return(true) + end + + it 'notifies and redirects user' do + post :create, params: {} + + expect(flash[:alert]).to eq('This endpoint has been requested too many times. 
Try again later.') + expect(response).to have_gitlab_http_status(:found) + end + end +end diff --git a/spec/support/shared_examples/controllers/unique_hll_events_examples.rb b/spec/support/shared_examples/controllers/unique_hll_events_examples.rb index 38c3157e898..b5528afa0b5 100644 --- a/spec/support/shared_examples/controllers/unique_hll_events_examples.rb +++ b/spec/support/shared_examples/controllers/unique_hll_events_examples.rb @@ -7,6 +7,9 @@ RSpec.shared_examples 'tracking unique hll events' do it 'tracks unique event' do + # Allow any event tracking before we expect the specific event we want to check below + allow(Gitlab::UsageDataCounters::HLLRedisCounter).to receive(:track_event).and_call_original + expect(Gitlab::UsageDataCounters::HLLRedisCounter).to( receive(:track_event) .with(target_event, values: expected_value) diff --git a/spec/support/shared_examples/features/2fa_shared_examples.rb b/spec/support/shared_examples/features/2fa_shared_examples.rb index b6339607d6b..6c4e98c9989 100644 --- a/spec/support/shared_examples/features/2fa_shared_examples.rb +++ b/spec/support/shared_examples/features/2fa_shared_examples.rb @@ -1,7 +1,7 @@ # frozen_string_literal: true RSpec.shared_examples 'hardware device for 2fa' do |device_type| - include Spec::Support::Helpers::Features::TwoFactorHelpers + include Features::TwoFactorHelpers include Spec::Support::Helpers::ModalHelpers def register_device(device_type, **kwargs) @@ -96,9 +96,7 @@ RSpec.shared_examples 'hardware device for 2fa' do |device_type| end it 'provides a button that shows the fallback otp code UI' do - expect(page).to have_link('Sign in via 2FA code') - - click_link('Sign in via 2FA code') + click_button(_('Sign in via 2FA code')) assert_fallback_ui(page) end diff --git a/spec/support/shared_examples/features/abuse_report_shared_examples.rb b/spec/support/shared_examples/features/abuse_report_shared_examples.rb index 7a520fb0cd2..ea9b4e9f4b2 100644 --- 
a/spec/support/shared_examples/features/abuse_report_shared_examples.rb +++ b/spec/support/shared_examples/features/abuse_report_shared_examples.rb @@ -2,10 +2,14 @@ RSpec.shared_examples 'reports the user with an abuse category' do it 'creates abuse report' do - click_button 'Report abuse to administrator' + click_button 'Report abuse' choose "They're posting spam." click_button 'Next' + page.attach_file('spec/fixtures/dk.png') do + click_button "Choose file" + end + fill_in 'abuse_report_message', with: 'This user sends spam' click_button 'Send report' diff --git a/spec/support/shared_examples/features/access_tokens_shared_examples.rb b/spec/support/shared_examples/features/access_tokens_shared_examples.rb index 32a7b32ac72..3c78869ffaa 100644 --- a/spec/support/shared_examples/features/access_tokens_shared_examples.rb +++ b/spec/support/shared_examples/features/access_tokens_shared_examples.rb @@ -9,7 +9,7 @@ RSpec.shared_examples 'resource access tokens missing access rights' do end RSpec.shared_examples 'resource access tokens creation' do |resource_type| - include Spec::Support::Helpers::AccessTokenHelpers + include Features::AccessTokenHelpers it 'allows creation of an access token', :aggregate_failures do name = 'My access token' diff --git a/spec/support/shared_examples/features/confidential_notes_shared_examples.rb b/spec/support/shared_examples/features/confidential_notes_shared_examples.rb index 289da025af6..cd0e8f94934 100644 --- a/spec/support/shared_examples/features/confidential_notes_shared_examples.rb +++ b/spec/support/shared_examples/features/confidential_notes_shared_examples.rb @@ -3,7 +3,7 @@ require "spec_helper" RSpec.shared_examples 'confidential notes on issuables' do - include Spec::Support::Helpers::Features::NotesHelpers + include Features::NotesHelpers context 'when user does not have permissions' do it 'does not show confidential note checkbox' do diff --git a/spec/support/shared_examples/features/content_editor_shared_examples.rb 
b/spec/support/shared_examples/features/content_editor_shared_examples.rb index 7582e67efbd..55104157f20 100644 --- a/spec/support/shared_examples/features/content_editor_shared_examples.rb +++ b/spec/support/shared_examples/features/content_editor_shared_examples.rb @@ -1,5 +1,7 @@ # frozen_string_literal: true +require 'spec_helper' + RSpec.shared_examples 'edits content using the content editor' do include ContentEditorHelpers @@ -14,7 +16,11 @@ RSpec.shared_examples 'edits content using the content editor' do wait_until_hidden_field_is_updated /Typing text in the content editor/ - refresh + begin + refresh + rescue Selenium::WebDriver::Error::UnexpectedAlertOpenError + page.driver.browser.switch_to.alert.dismiss + end expect(page).to have_text('Typing text in the content editor') end @@ -40,11 +46,233 @@ RSpec.shared_examples 'edits content using the content editor' do end end + describe 'creating and editing links' do + before do + switch_to_content_editor + end + + context 'when clicking the link icon in the toolbar' do + it 'shows the link bubble menu' do + page.find('[data-testid="formatting-toolbar"] [data-testid="link"]').click + + expect(page).to have_css('[data-testid="link-bubble-menu"]') + end + + context 'if no text is selected' do + before do + page.find('[data-testid="formatting-toolbar"] [data-testid="link"]').click + end + + it 'opens an empty inline modal to create a link' do + page.within '[data-testid="link-bubble-menu"]' do + expect(page).to have_field('link-text', with: '') + expect(page).to have_field('link-href', with: '') + end + end + + context 'when the user clicks the apply button' do + it 'applies the changes to the document' do + page.within '[data-testid="link-bubble-menu"]' do + fill_in 'link-text', with: 'Link to GitLab home page' + fill_in 'link-href', with: 'https://gitlab.com' + + click_button 'Apply' + end + + page.within content_editor_testid do + expect(page).to have_css('a[href="https://gitlab.com"]') + expect(page).to 
have_text('Link to GitLab home page') + end + end + end + + context 'when the user clicks the cancel button' do + it 'does not apply the changes to the document' do + page.within '[data-testid="link-bubble-menu"]' do + fill_in 'link-text', with: 'Link to GitLab home page' + fill_in 'link-href', with: 'https://gitlab.com' + + click_button 'Cancel' + end + + page.within content_editor_testid do + expect(page).not_to have_css('a') + end + end + end + end + + context 'if text is selected' do + before do + type_in_content_editor 'The quick brown fox jumps over the lazy dog' + type_in_content_editor [:shift, :left] + type_in_content_editor [:shift, :left] + type_in_content_editor [:shift, :left] + + page.find('[data-testid="formatting-toolbar"] [data-testid="link"]').click + end + + it 'prefills inline modal to create a link' do + page.within '[data-testid="link-bubble-menu"]' do + expect(page).to have_field('link-text', with: 'dog') + expect(page).to have_field('link-href', with: '') + end + end + + context 'when the user clicks the apply button' do + it 'applies the changes to the document' do + page.within '[data-testid="link-bubble-menu"]' do + fill_in 'link-text', with: 'new dog' + fill_in 'link-href', with: 'https://en.wikipedia.org/wiki/Shiba_Inu' + + click_button 'Apply' + end + + page.within content_editor_testid do + expect(page).to have_selector('a[href="https://en.wikipedia.org/wiki/Shiba_Inu"]', + text: 'new dog' + ) + end + end + end + end + end + + context 'if cursor is placed on an existing link' do + before do + type_in_content_editor 'Link to [GitLab home **page**](https://gitlab.com)' + type_in_content_editor :left + end + + it 'prefills inline modal to edit the link' do + page.within '[data-testid="link-bubble-menu"]' do + page.find('[data-testid="edit-link"]').click + + expect(page).to have_field('link-text', with: 'GitLab home page') + expect(page).to have_field('link-href', with: 'https://gitlab.com') + end + end + + it 'updates the link attributes 
if text is not updated' do + page.within '[data-testid="link-bubble-menu"]' do + page.find('[data-testid="edit-link"]').click + + fill_in 'link-href', with: 'https://about.gitlab.com' + + click_button 'Apply' + end + + page.within content_editor_testid do + expect(page).to have_selector('a[href="https://about.gitlab.com"]') + expect(page.find('a')).to have_text('GitLab home page') + expect(page).to have_selector('strong', text: 'page') + end + end + + it 'updates the link attributes and text if text is updated' do + page.within '[data-testid="link-bubble-menu"]' do + page.find('[data-testid="edit-link"]').click + + fill_in 'link-text', with: 'GitLab about page' + fill_in 'link-href', with: 'https://about.gitlab.com' + + click_button 'Apply' + end + + page.within content_editor_testid do + expect(page).to have_selector('a[href="https://about.gitlab.com"]', + text: 'GitLab about page' + ) + expect(page).not_to have_selector('strong') + end + end + + it 'does nothing if Cancel is clicked' do + page.within '[data-testid="link-bubble-menu"]' do + page.find('[data-testid="edit-link"]').click + + click_button 'Cancel' + end + + page.within content_editor_testid do + expect(page).to have_selector('a[href="https://gitlab.com"]', + text: 'GitLab home page' + ) + expect(page).to have_selector('strong') + end + end + + context 'when the user clicks the unlink button' do + it 'removes the link' do + page.within '[data-testid="link-bubble-menu"]' do + page.find('[data-testid="remove-link"]').click + end + + page.within content_editor_testid do + expect(page).not_to have_selector('a') + expect(page).to have_selector('strong', text: 'page') + end + end + end + end + + context 'when selection spans more than a link' do + before do + type_in_content_editor 'a [b **c**](https://gitlab.com)' + + type_in_content_editor [:shift, :left] + type_in_content_editor [:shift, :left] + type_in_content_editor [:shift, :left] + type_in_content_editor [:shift, :left] + type_in_content_editor 
[:shift, :left] + + page.find('[data-testid="formatting-toolbar"] [data-testid="link"]').click + end + + it 'prefills inline modal with the entire selection' do + page.within '[data-testid="link-bubble-menu"]' do + expect(page).to have_field('link-text', with: 'a b c') + expect(page).to have_field('link-href', with: '') + end + end + + it 'expands the link and updates the link attributes if text is not updated' do + page.within '[data-testid="link-bubble-menu"]' do + fill_in 'link-href', with: 'https://about.gitlab.com' + + click_button 'Apply' + end + + page.within content_editor_testid do + expect(page).to have_selector('a[href="https://about.gitlab.com"]') + expect(page.find('a')).to have_text('a b c') + expect(page).to have_selector('strong', text: 'c') + end + end + + it 'expands the link, updates the link attributes and text if text is updated' do + page.within '[data-testid="link-bubble-menu"]' do + fill_in 'link-text', with: 'new text' + fill_in 'link-href', with: 'https://about.gitlab.com' + + click_button 'Apply' + end + + page.within content_editor_testid do + expect(page).to have_selector('a[href="https://about.gitlab.com"]', + text: 'new text' + ) + expect(page).not_to have_selector('strong') + end + end + end + end + describe 'media elements bubble menu' do before do switch_to_content_editor - open_insert_media_dropdown + click_attachment_button end it 'displays correct media bubble menu for images', :js do @@ -151,7 +379,8 @@ RSpec.shared_examples 'edits content using the content editor' do end end - it 'toggles the diagram when preview button is clicked' do + it 'toggles the diagram when preview button is clicked', + quarantine: 'https://gitlab.com/gitlab-org/gitlab/-/issues/397682' do find('[data-testid="preview-diagram"]').click expect(find(content_editor_testid)).not_to have_selector('iframe') @@ -167,6 +396,62 @@ RSpec.shared_examples 'edits content using the content editor' do end end + describe 'pasting text' do + let(:is_mac) { 
page.evaluate_script('navigator.platform').include?('Mac') } + let(:modifier_key) { is_mac ? :command : :control } + + before do + switch_to_content_editor + + type_in_content_editor "Some **rich** _text_ ~~content~~ [link](https://gitlab.com)" + + type_in_content_editor [modifier_key, 'a'] + type_in_content_editor [modifier_key, 'x'] + end + + it 'pastes text with formatting if ctrl + v is pressed' do + type_in_content_editor [modifier_key, 'v'] + + page.within content_editor_testid do + expect(page).to have_selector('strong', text: 'rich') + expect(page).to have_selector('em', text: 'text') + expect(page).to have_selector('s', text: 'content') + expect(page).to have_selector('a[href="https://gitlab.com"]', text: 'link') + end + end + + it 'pastes raw text without formatting if shift + ctrl + v is pressed' do + type_in_content_editor [modifier_key, :shift, 'v'] + + page.within content_editor_testid do + expect(page).to have_text('Some rich text content link') + + expect(page).not_to have_selector('strong') + expect(page).not_to have_selector('em') + expect(page).not_to have_selector('s') + expect(page).not_to have_selector('a') + end + end + + it 'pastes raw text without formatting, stripping whitespaces, if shift + ctrl + v is pressed' do + type_in_content_editor " Some **rich**" + type_in_content_editor :enter + type_in_content_editor " _text_" + type_in_content_editor :enter + type_in_content_editor " ~~content~~" + type_in_content_editor :enter + type_in_content_editor " [link](https://gitlab.com)" + + type_in_content_editor [modifier_key, 'a'] + type_in_content_editor [modifier_key, 'x'] + type_in_content_editor [modifier_key, :shift, 'v'] + + page.within content_editor_testid do + expect(page).to have_text('Some rich text content link') + end + end + end + describe 'autocomplete suggestions' do let(:suggestions_dropdown) { '[data-testid="content-editor-suggestions-dropdown"]' } @@ -289,7 +574,7 @@ RSpec.shared_examples 'inserts diagrams.net diagram using the 
content editor' do before do switch_to_content_editor - open_insert_media_dropdown + click_attachment_button end it 'displays correct media bubble menu with edit diagram button' do diff --git a/spec/support/shared_examples/features/dashboard/sidebar_shared_examples.rb b/spec/support/shared_examples/features/dashboard/sidebar_shared_examples.rb index efbd735c451..9b5d9d66890 100644 --- a/spec/support/shared_examples/features/dashboard/sidebar_shared_examples.rb +++ b/spec/support/shared_examples/features/dashboard/sidebar_shared_examples.rb @@ -1,6 +1,6 @@ # frozen_string_literal: true -RSpec.shared_examples "a dashboard page with sidebar" do |page_path, menu_label| +RSpec.shared_examples 'a "Your work" page with sidebar and breadcrumbs' do |page_path, menu_label| before do sign_in(user) visit send(page_path) @@ -18,4 +18,13 @@ RSpec.shared_examples "a dashboard page with sidebar" do |page_path, menu_label| expect(page).to have_css(active_menu_item_css) end end + + describe "breadcrumbs" do + it 'has "Your work" as its root breadcrumb' do + breadcrumbs = page.find('[data-testid="breadcrumb-links"]') + within breadcrumbs do + expect(page).to have_css("li:first-child a[href=\"#{root_path}\"]", text: "Your work") + end + end + end end diff --git a/spec/support/shared_examples/features/deploy_token_shared_examples.rb b/spec/support/shared_examples/features/deploy_token_shared_examples.rb index 9fe08e5c996..80f5f1d805c 100644 --- a/spec/support/shared_examples/features/deploy_token_shared_examples.rb +++ b/spec/support/shared_examples/features/deploy_token_shared_examples.rb @@ -17,9 +17,11 @@ RSpec.shared_examples 'a deploy token in settings' do it 'add a new deploy token', :js do visit page_path - fill_in _('Name'), with: 'new_deploy_key' - fill_in _('Expiration date (optional)'), with: (Date.today + 1.month).to_s - fill_in _('Username (optional)'), with: 'deployer' + within('#js-deploy-tokens') do + fill_in _('Name'), with: 'new_deploy_key' + fill_in _('Expiration 
date (optional)'), with: (Date.today + 1.month).to_s + fill_in _('Username (optional)'), with: 'deployer' + end check 'read_repository' check 'read_registry' click_button 'Create deploy token' diff --git a/spec/support/shared_examples/features/editable_merge_request_shared_examples.rb b/spec/support/shared_examples/features/editable_merge_request_shared_examples.rb index ea6d1655694..d2dfb468485 100644 --- a/spec/support/shared_examples/features/editable_merge_request_shared_examples.rb +++ b/spec/support/shared_examples/features/editable_merge_request_shared_examples.rb @@ -77,7 +77,7 @@ RSpec.shared_examples 'an editable merge request' do expect(page).to have_selector('.js-quick-submit') end - it 'warns about version conflict' do + it 'warns about version conflict', :js do merge_request.update!(title: "New title") fill_in 'merge_request_title', with: 'bug 345' diff --git a/spec/support/shared_examples/features/explore/sidebar_shared_examples.rb b/spec/support/shared_examples/features/explore/sidebar_shared_examples.rb new file mode 100644 index 00000000000..1754c8bf53d --- /dev/null +++ b/spec/support/shared_examples/features/explore/sidebar_shared_examples.rb @@ -0,0 +1,28 @@ +# frozen_string_literal: true + +RSpec.shared_examples 'an "Explore" page with sidebar and breadcrumbs' do |page_path, menu_label| + before do + visit send(page_path) + end + + let(:sidebar_css) { 'aside.nav-sidebar[aria-label="Explore"]' } + let(:active_menu_item_css) { "li.active[data-track-label=\"#{menu_label}_menu\"]" } + + it 'shows the "Explore" sidebar' do + expect(page).to have_css(sidebar_css) + end + + it 'shows the correct sidebar menu item as active' do + within(sidebar_css) do + expect(page).to have_css(active_menu_item_css) + end + end + + describe 'breadcrumbs' do + it 'has "Explore" as its root breadcrumb' do + within '.breadcrumbs-list' do + expect(page).to have_css("li:first a[href=\"#{explore_root_path}\"]", text: 'Explore') + end + end + end +end diff --git 
a/spec/support/shared_examples/features/issuable_invite_members_shared_examples.rb b/spec/support/shared_examples/features/issuable_invite_members_shared_examples.rb index 13adcfe9191..b8c6b85adb2 100644 --- a/spec/support/shared_examples/features/issuable_invite_members_shared_examples.rb +++ b/spec/support/shared_examples/features/issuable_invite_members_shared_examples.rb @@ -1,7 +1,7 @@ # frozen_string_literal: true RSpec.shared_examples 'issuable invite members' do - include Spec::Support::Helpers::Features::InviteMembersModalHelper + include Features::InviteMembersModalHelpers context 'when a privileged user can invite' do before do diff --git a/spec/support/shared_examples/features/manage_applications_shared_examples.rb b/spec/support/shared_examples/features/manage_applications_shared_examples.rb index 63ba5832771..b8fd58e7efa 100644 --- a/spec/support/shared_examples/features/manage_applications_shared_examples.rb +++ b/spec/support/shared_examples/features/manage_applications_shared_examples.rb @@ -19,7 +19,7 @@ RSpec.shared_examples 'manage applications' do expect(page).to have_content _('This is the only time the secret is accessible. 
Copy the secret and store it securely') expect(page).to have_link('Continue', href: index_path) - expect(page).to have_css("button[title=\"Copy secret\"]", text: 'Copy') + expect(page).to have_button(_('Copy secret')) click_on 'Edit' diff --git a/spec/support/shared_examples/features/master_manages_access_requests_shared_example.rb b/spec/support/shared_examples/features/master_manages_access_requests_shared_example.rb index c2dc87b0fb0..6487e6a94c1 100644 --- a/spec/support/shared_examples/features/master_manages_access_requests_shared_example.rb +++ b/spec/support/shared_examples/features/master_manages_access_requests_shared_example.rb @@ -1,7 +1,7 @@ # frozen_string_literal: true RSpec.shared_examples 'Maintainer manages access requests' do - include Spec::Support::Helpers::Features::MembersHelpers + include Features::MembersHelpers let(:user) { create(:user) } let(:maintainer) { create(:user) } diff --git a/spec/support/shared_examples/features/packages_shared_examples.rb b/spec/support/shared_examples/features/packages_shared_examples.rb index f09cf0613a1..e51ea0e2ad6 100644 --- a/spec/support/shared_examples/features/packages_shared_examples.rb +++ b/spec/support/shared_examples/features/packages_shared_examples.rb @@ -30,6 +30,45 @@ RSpec.shared_examples 'package details link' do |property| expect(page).to have_content('Installation') expect(page).to have_content('Registry setup') + expect(page).to have_content('Other versions 0') + end + + context 'with other versions' do + let_it_be(:npm_package1) { create(:npm_package, project: project, name: 'zzz', version: '1.1.0') } + let_it_be(:npm_package2) { create(:npm_package, project: project, name: 'zzz', version: '1.2.0') } + + before do + page.within(packages_table_selector) do + first(:link, package.name).click + end + end + + it 'shows tab with count' do + expect(page).to have_content('Other versions 2') + end + + it 'visiting tab shows total on page' do + click_link 'Other versions' + + expect(page).to 
have_content('2 versions') + end + + it 'deleting version updates count' do + click_link 'Other versions' + + find('[data-testid="delete-dropdown"]', match: :first).click + find('[data-testid="action-delete"]', match: :first).click + click_button('Permanently delete') + + expect(page).to have_content 'Package deleted successfully' + + expect(page).to have_content('Other versions 1') + expect(page).to have_content('1 version') + + expect(page).not_to have_content('1.0.0') + expect(page).to have_content('1.1.0') + expect(page).to have_content('1.2.0') + end end end diff --git a/spec/support/shared_examples/features/reportable_note_shared_examples.rb b/spec/support/shared_examples/features/reportable_note_shared_examples.rb index bb3fab5b23e..45ad4d5cf71 100644 --- a/spec/support/shared_examples/features/reportable_note_shared_examples.rb +++ b/spec/support/shared_examples/features/reportable_note_shared_examples.rb @@ -20,7 +20,7 @@ RSpec.shared_examples 'reportable note' do |type| dropdown = comment.find(more_actions_selector) open_dropdown(dropdown) - expect(dropdown).to have_button('Report abuse to administrator') + expect(dropdown).to have_button('Report abuse') if type == 'issue' || type == 'merge_request' expect(dropdown).to have_button('Delete comment') @@ -33,7 +33,7 @@ RSpec.shared_examples 'reportable note' do |type| dropdown = comment.find(more_actions_selector) open_dropdown(dropdown) - dropdown.click_button('Report abuse to administrator') + dropdown.click_button('Report abuse') choose "They're posting spam." 
click_button "Next" diff --git a/spec/support/shared_examples/features/rss_shared_examples.rb b/spec/support/shared_examples/features/rss_shared_examples.rb index ad865b084e1..29ecbd0dc0e 100644 --- a/spec/support/shared_examples/features/rss_shared_examples.rb +++ b/spec/support/shared_examples/features/rss_shared_examples.rb @@ -13,6 +13,12 @@ RSpec.shared_examples "it has an RSS button with current_user's feed token" do end end +RSpec.shared_examples "it has an RSS link with current_user's feed token" do + it "shows the RSS link with current_user's feed token" do + expect(page).to have_link 'Subscribe to RSS feed', href: /feed_token=#{user.feed_token}/ + end +end + RSpec.shared_examples "an autodiscoverable RSS feed without a feed token" do it "has an RSS autodiscovery link tag without a feed token" do expect(page).to have_css("link[type*='atom+xml']:not([href*='feed_token'])", visible: false) @@ -26,6 +32,13 @@ RSpec.shared_examples "it has an RSS button without a feed token" do end end +RSpec.shared_examples "it has an RSS link without a feed token" do + it "shows the RSS link without a feed token" do + expect(page).to have_link 'Subscribe to RSS feed' + expect(page).not_to have_link 'Subscribe to RSS feed', href: /feed_token/ + end +end + RSpec.shared_examples "updates atom feed link" do |type| it "for #{type}" do sign_in(user) diff --git a/spec/support/shared_examples/features/runners_shared_examples.rb b/spec/support/shared_examples/features/runners_shared_examples.rb index 63a0832117d..7edf306183e 100644 --- a/spec/support/shared_examples/features/runners_shared_examples.rb +++ b/spec/support/shared_examples/features/runners_shared_examples.rb @@ -1,7 +1,7 @@ # frozen_string_literal: true RSpec.shared_examples 'shows and resets runner registration token' do - include Spec::Support::Helpers::Features::RunnersHelpers + include Features::RunnersHelpers include Spec::Support::Helpers::ModalHelpers before do @@ -63,16 +63,15 @@ RSpec.shared_examples 'shows and 
resets runner registration token' do end RSpec.shared_examples 'shows no runners registered' do - it 'shows total count with 0' do + it 'shows 0 count and the empty state' do expect(find('[data-testid="runner-type-tabs"]')).to have_text "#{s_('Runners|All')} 0" # No stats are shown expect(page).not_to have_text s_('Runners|Online') expect(page).not_to have_text s_('Runners|Offline') expect(page).not_to have_text s_('Runners|Stale') - end - it 'shows "no runners" message' do + # "no runners" message expect(page).to have_text s_('Runners|Get started with runners') end end @@ -84,16 +83,14 @@ RSpec.shared_examples 'shows no runners found' do end RSpec.shared_examples 'shows runner in list' do - it 'does not show empty state' do - expect(page).not_to have_content s_('Runners|Get started with runners') - end - - it 'shows runner row' do + it 'shows runner row and no empty state' do within_runner_row(runner.id) do expect(page).to have_text "##{runner.id}" expect(page).to have_text runner.short_sha expect(page).to have_text runner.description end + + expect(page).not_to have_content s_('Runners|Get started with runners') end end @@ -229,3 +226,33 @@ RSpec.shared_examples 'submits edit runner form' do end end end + +RSpec.shared_examples 'creates runner and shows register page' do + context 'when runner is saved' do + before do + fill_in s_('Runners|Runner description'), with: 'runner-foo' + fill_in s_('Runners|Tags'), with: 'tag1' + click_on _('Submit') + wait_for_requests + end + + it 'navigates to registration page and opens install instructions drawer' do + expect(page.find('[data-testid="alert-success"]')).to have_content(s_('Runners|Runner created.')) + expect(current_url).to match(register_path_pattern) + + click_on 'How do I install GitLab Runner?' 
+ expect(page.find('[data-testid="runner-platforms-drawer"]')).to have_content('gitlab-runner install') + end + + it 'warns from leaving page without finishing registration' do + click_on s_('Runners|Go to runners page') + + alert = page.driver.browser.switch_to.alert + + expect(alert).not_to be_nil + alert.dismiss + + expect(current_url).to match(register_path_pattern) + end + end +end diff --git a/spec/support/shared_examples/features/wiki/user_views_wiki_sidebar_shared_examples.rb b/spec/support/shared_examples/features/wiki/user_views_wiki_sidebar_shared_examples.rb index b3378c76658..21c7e2b6c75 100644 --- a/spec/support/shared_examples/features/wiki/user_views_wiki_sidebar_shared_examples.rb +++ b/spec/support/shared_examples/features/wiki/user_views_wiki_sidebar_shared_examples.rb @@ -108,6 +108,20 @@ RSpec.shared_examples 'User views wiki sidebar' do end end + it 'shows create child page button when hover to the page title in the sidebar', :js do + visit wiki_path(wiki) + + within('.right-sidebar') do + first_wiki_list = first("[data-testid='wiki-list']") + wiki_link = first("[data-testid='wiki-list'] a:last-of-type")['href'] + + first_wiki_list.hover + wiki_new_page_link = first("[data-testid='wiki-list'] a")['href'] + + expect(wiki_new_page_link).to eq "#{wiki_link}/%7Bnew_page_title%7D" + end + end + context 'when there are more than 15 existing pages' do before do create(:wiki_page, wiki: wiki, title: 'my page 16') diff --git a/spec/support/shared_examples/features/work_items_shared_examples.rb b/spec/support/shared_examples/features/work_items_shared_examples.rb index 0b8bfc4d2a2..6d4d0a5dd0a 100644 --- a/spec/support/shared_examples/features/work_items_shared_examples.rb +++ b/spec/support/shared_examples/features/work_items_shared_examples.rb @@ -30,18 +30,110 @@ RSpec.shared_examples 'work items status' do end end -RSpec.shared_examples 'work items comments' do +RSpec.shared_examples 'work items comments' do |type| let(:form_selector) { 
'[data-testid="work-item-add-comment"]' } + let(:textarea_selector) { '[data-testid="work-item-add-comment"] #work-item-add-or-edit-comment' } + let(:is_mac) { page.evaluate_script('navigator.platform').include?('Mac') } + let(:modifier_key) { is_mac ? :command : :control } + let(:comment) { 'Test comment' } + + def set_comment + find(form_selector).fill_in(with: comment) + end it 'successfully creates and shows comments' do - click_button 'Add a reply' + set_comment - find(form_selector).fill_in(with: "Test comment") click_button "Comment" wait_for_requests - expect(page).to have_content "Test comment" + page.within(".main-notes-list") do + expect(page).to have_content comment + end + end + + context 'for work item note actions signed in user with developer role' do + it 'shows work item note actions' do + set_comment + + click_button "Comment" + + wait_for_requests + + page.within(".main-notes-list") do + expect(page).to have_selector('[data-testid="work-item-note-actions"]') + + find('[data-testid="work-item-note-actions"]', match: :first).click + + expect(page).to have_selector('[data-testid="copy-link-action"]') + expect(page).not_to have_selector('[data-testid="assign-note-action"]') + end + end + end + + it 'successfully posts comments using shortcut and checks if textarea is blank when reinitiated' do + set_comment + + send_keys([modifier_key, :enter]) + + wait_for_requests + + page.within(".main-notes-list") do + expect(page).to have_content comment + end + + expect(find(textarea_selector)).to have_content "" + end + + context 'when using quick actions' do + it 'autocompletes quick actions common to all work item types', :aggregate_failures do + click_reply_and_enter_slash + + page.within('#at-view-commands') do + expect(page).to have_text("/title") + expect(page).to have_text("/shrug") + expect(page).to have_text("/tableflip") + expect(page).to have_text("/close") + expect(page).to have_text("/cc") + end + end + + context 'when a widget is enabled' do + 
before do + WorkItems::Type.default_by_type(type).widget_definitions + .find_by_widget_type(:assignees).update!(disabled: false) + end + + it 'autocompletes quick action for the enabled widget' do + click_reply_and_enter_slash + + page.within('#at-view-commands') do + expect(page).to have_text("/assign") + end + end + end + + context 'when a widget is disabled' do + before do + WorkItems::Type.default_by_type(type).widget_definitions + .find_by_widget_type(:assignees).update!(disabled: true) + end + + it 'does not autocomplete quick action for the disabled widget' do + click_reply_and_enter_slash + + page.within('#at-view-commands') do + expect(page).not_to have_text("/assign") + end + end + end + + def click_reply_and_enter_slash + find(form_selector).fill_in(with: "/") + + wait_for_all_requests + end end end @@ -98,7 +190,7 @@ RSpec.shared_examples 'work items description' do wait_for_requests - page.within('.atwho-container') do + page.within('#at-view-commands') do expect(page).to have_text("title") expect(page).to have_text("shrug") expect(page).to have_text("tableflip") @@ -140,7 +232,7 @@ RSpec.shared_examples 'work items description' do end RSpec.shared_examples 'work items invite members' do - include Spec::Support::Helpers::Features::InviteMembersModalHelper + include Features::InviteMembersModalHelpers it 'successfully assigns the current user by searching' do # The button is only when the mouse is over the input @@ -178,3 +270,40 @@ RSpec.shared_examples 'work items milestone' do expect(page.find(milestone_dropdown_selector)).to have_text('Add to milestone') end end + +RSpec.shared_examples 'work items comment actions for guest users' do + context 'for guest user' do + it 'hides other actions other than copy link' do + page.within(".main-notes-list") do + expect(page).to have_selector('[data-testid="work-item-note-actions"]') + + find('[data-testid="work-item-note-actions"]', match: :first).click + + expect(page).to 
have_selector('[data-testid="copy-link-action"]') + expect(page).not_to have_selector('[data-testid="assign-note-action"]') + end + end + end +end + +RSpec.shared_examples 'work items notifications' do + let(:actions_dropdown_selector) { '[data-testid="work-item-actions-dropdown"]' } + let(:notifications_toggle_selector) { '[data-testid="notifications-toggle-action"] > button' } + + it 'displays toast when notification is toggled' do + find(actions_dropdown_selector).click + + page.within('[data-testid="notifications-toggle-form"]') do + expect(page).not_to have_css(".is-checked") + + find(notifications_toggle_selector).click + wait_for_requests + + expect(page).to have_css(".is-checked") + end + + page.within('.gl-toast') do + expect(find('.toast-body')).to have_content(_('Notifications turned on.')) + end + end +end diff --git a/spec/support/shared_examples/finders/issues_finder_shared_examples.rb b/spec/support/shared_examples/finders/issues_finder_shared_examples.rb index 93f9e42241b..c68d53db01e 100644 --- a/spec/support/shared_examples/finders/issues_finder_shared_examples.rb +++ b/spec/support/shared_examples/finders/issues_finder_shared_examples.rb @@ -161,10 +161,12 @@ RSpec.shared_examples 'issues or work items finder' do |factory, execute_context let_it_be(:another_release) { create(:release, project: project1, tag: 'v2.0.0') } let_it_be(:another_milestone) { create(:milestone, project: project1, releases: [another_release]) } let_it_be(:another_item) do - create(factory, - project: project1, - milestone: another_milestone, - title: 'another item') + create( + factory, + project: project1, + milestone: another_milestone, + title: 'another item' + ) end let(:params) { { not: { release_tag: release.tag, project_id: project1.id } } } @@ -421,8 +423,11 @@ RSpec.shared_examples 'issues or work items finder' do |factory, execute_context let!(:created_items) do milestones.map do |milestone| - create(factory, project: milestone.project || project_in_group, - 
milestone: milestone, author: user, assignees: [user]) + create( + factory, + project: milestone.project || project_in_group, + milestone: milestone, author: user, assignees: [user] + ) end end @@ -593,7 +598,7 @@ RSpec.shared_examples 'issues or work items finder' do |factory, execute_context end context 'filtering by no label' do - let(:params) { { label_name: described_class::Params::FILTER_NONE } } + let(:params) { { label_name: IssuableFinder::Params::FILTER_NONE } } it 'returns items with no labels' do expect(items).to contain_exactly(item1, item4, item5) @@ -601,7 +606,7 @@ RSpec.shared_examples 'issues or work items finder' do |factory, execute_context end context 'filtering by any label' do - let(:params) { { label_name: described_class::Params::FILTER_ANY } } + let(:params) { { label_name: IssuableFinder::Params::FILTER_ANY } } it 'returns items that have one or more label' do create_list(:label_link, 2, label: create(:label, project: project2), target: item3) @@ -909,9 +914,9 @@ RSpec.shared_examples 'issues or work items finder' do |factory, execute_context end context 'filtering by item type' do - let_it_be(:incident_item) { create(factory, issue_type: :incident, project: project1) } - let_it_be(:objective) { create(factory, issue_type: :objective, project: project1) } - let_it_be(:key_result) { create(factory, issue_type: :key_result, project: project1) } + let_it_be(:incident_item) { create(factory, :incident, project: project1) } + let_it_be(:objective) { create(factory, :objective, project: project1) } + let_it_be(:key_result) { create(factory, :key_result, project: project1) } context 'no type given' do let(:params) { { issue_types: [] } } diff --git a/spec/support/shared_examples/graphql/resolvers/data_transfer_resolver_shared_examples.rb b/spec/support/shared_examples/graphql/resolvers/data_transfer_resolver_shared_examples.rb new file mode 100644 index 00000000000..8551bd052ce --- /dev/null +++ 
b/spec/support/shared_examples/graphql/resolvers/data_transfer_resolver_shared_examples.rb @@ -0,0 +1,23 @@ +# frozen_string_literal: true + +RSpec.shared_examples 'Data transfer resolver' do + it 'returns mock data' do |_query_object| + mocked_data = ['mocked_data'] + + allow_next_instance_of(DataTransfer::MockedTransferFinder) do |instance| + allow(instance).to receive(:execute).and_return(mocked_data) + end + + expect(resolve_egress[:egress_nodes]).to eq(mocked_data) + end + + context 'when data_transfer_monitoring is disabled' do + before do + stub_feature_flags(data_transfer_monitoring: false) + end + + it 'returns empty result' do + expect(resolve_egress).to eq(egress_nodes: []) + end + end +end diff --git a/spec/support/shared_examples/graphql/types/gitlab_style_deprecations_shared_examples.rb b/spec/support/shared_examples/graphql/types/gitlab_style_deprecations_shared_examples.rb index 4dc2ce61c4d..b346f35bdc9 100644 --- a/spec/support/shared_examples/graphql/types/gitlab_style_deprecations_shared_examples.rb +++ b/spec/support/shared_examples/graphql/types/gitlab_style_deprecations_shared_examples.rb @@ -65,7 +65,7 @@ RSpec.shared_examples 'Gitlab-style deprecations' do deprecable = subject(deprecated: { milestone: '1.10', reason: :alpha }) expect(deprecable.deprecation_reason).to eq( - 'This feature is in Alpha. It can be changed or removed at any time. Introduced in 1.10.' + 'This feature is an Experiment. It can be changed or removed at any time. Introduced in 1.10.' ) end @@ -73,7 +73,7 @@ RSpec.shared_examples 'Gitlab-style deprecations' do deprecable = subject(alpha: { milestone: '1.10' }) expect(deprecable.deprecation_reason).to eq( - 'This feature is in Alpha. It can be changed or removed at any time. Introduced in 1.10.' + 'This feature is an Experiment. It can be changed or removed at any time. Introduced in 1.10.' 
) end @@ -82,7 +82,7 @@ RSpec.shared_examples 'Gitlab-style deprecations' do subject(alpha: { milestone: '1.10' }, deprecated: { milestone: '1.10', reason: 'my reason' } ) end.to raise_error( ArgumentError, - eq("`alpha` and `deprecated` arguments cannot be passed at the same time") + eq("`experiment` and `deprecated` arguments cannot be passed at the same time") ) end diff --git a/spec/support/shared_examples/lib/gitlab/cycle_analytics/deployment_metrics.rb b/spec/support/shared_examples/lib/gitlab/cycle_analytics/deployment_metrics.rb index d471a758f3e..c8d62205c1e 100644 --- a/spec/support/shared_examples/lib/gitlab/cycle_analytics/deployment_metrics.rb +++ b/spec/support/shared_examples/lib/gitlab/cycle_analytics/deployment_metrics.rb @@ -1,14 +1,7 @@ # frozen_string_literal: true RSpec.shared_examples 'deployment metrics examples' do - def create_deployment(args) - project = args[:project] - environment = project.environments.production.first || create(:environment, :production, project: project) - create(:deployment, :success, args.merge(environment: environment)) - - # this is needed for the DORA API so we have aggregated data - ::Dora::DailyMetrics::RefreshWorker.new.perform(environment.id, Time.current.to_date.to_s) if Gitlab.ee? 
- end + include CycleAnalyticsHelpers describe "#deploys" do subject { stage_summary.third } diff --git a/spec/support/shared_examples/lib/gitlab/database/schema_objects_shared_examples.rb b/spec/support/shared_examples/lib/gitlab/database/schema_objects_shared_examples.rb index d5ecab0cb6b..ec7a881f7ce 100644 --- a/spec/support/shared_examples/lib/gitlab/database/schema_objects_shared_examples.rb +++ b/spec/support/shared_examples/lib/gitlab/database/schema_objects_shared_examples.rb @@ -17,4 +17,10 @@ RSpec.shared_examples "schema objects assertions for" do |stmt_name| expect(schema_object.statement).to eq(statement) end end + + describe '#table_name' do + it 'returns schema object table_name' do + expect(schema_object.table_name).to eq(table_name) + end + end end diff --git a/spec/support/shared_examples/lib/gitlab/database/table_validators_shared_examples.rb b/spec/support/shared_examples/lib/gitlab/database/table_validators_shared_examples.rb new file mode 100644 index 00000000000..96e58294675 --- /dev/null +++ b/spec/support/shared_examples/lib/gitlab/database/table_validators_shared_examples.rb @@ -0,0 +1,84 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.shared_examples "table validators" do |validator, expected_result| + subject(:result) { validator.new(structure_file, database).execute } + + let(:structure_file_path) { Rails.root.join('spec/fixtures/structure.sql') } + let(:inconsistency_type) { validator.name.demodulize.underscore } + let(:database_model) { Gitlab::Database.database_base_models['main'] } + let(:connection) { database_model.connection } + let(:schema) { connection.current_schema } + let(:database) { Gitlab::Database::SchemaValidation::Database.new(connection) } + let(:structure_file) { Gitlab::Database::SchemaValidation::StructureSql.new(structure_file_path, schema) } + let(:database_tables) do + [ + { + 'table_name' => 'wrong_table', + 'column_name' => 'id', + 'not_null' => true, + 'data_type' => 'integer', + 
'column_default' => "nextval('audit_events_id_seq'::regclass)" + }, + { + 'table_name' => 'wrong_table', + 'column_name' => 'description', + 'not_null' => true, + 'data_type' => 'character varying', + 'column_default' => nil + }, + { + 'table_name' => 'extra_table', + 'column_name' => 'id', + 'not_null' => true, + 'data_type' => 'integer', + 'column_default' => "nextval('audit_events_id_seq'::regclass)" + }, + { + 'table_name' => 'extra_table', + 'column_name' => 'email', + 'not_null' => true, + 'data_type' => 'character varying', + 'column_default' => nil + }, + { + 'table_name' => 'extra_table_columns', + 'column_name' => 'id', + 'not_null' => true, + 'data_type' => 'bigint', + 'column_default' => "nextval('audit_events_id_seq'::regclass)" + }, + { + 'table_name' => 'extra_table_columns', + 'column_name' => 'name', + 'not_null' => true, + 'data_type' => 'character varying(255)', + 'column_default' => nil + }, + { + 'table_name' => 'extra_table_columns', + 'column_name' => 'extra_column', + 'not_null' => true, + 'data_type' => 'character varying(255)', + 'column_default' => nil + }, + { + 'table_name' => 'missing_table_columns', + 'column_name' => 'id', + 'not_null' => true, + 'data_type' => 'bigint', + 'column_default' => 'NOT NULL' + } + ] + end + + before do + allow(connection).to receive(:exec_query).and_return(database_tables) + end + + it 'returns table inconsistencies' do + expect(result.map(&:object_name)).to match_array(expected_result) + expect(result.map(&:type)).to all(eql inconsistency_type) + end +end diff --git a/spec/support/shared_examples/lib/gitlab/sidekiq_middleware/strategy_shared_examples.rb b/spec/support/shared_examples/lib/gitlab/sidekiq_middleware/strategy_shared_examples.rb index ff03051ed37..74570a4da5c 100644 --- a/spec/support/shared_examples/lib/gitlab/sidekiq_middleware/strategy_shared_examples.rb +++ b/spec/support/shared_examples/lib/gitlab/sidekiq_middleware/strategy_shared_examples.rb @@ -5,7 +5,7 @@ RSpec.shared_examples 
'deduplicating jobs when scheduling' do |strategy_name| instance_double(Gitlab::SidekiqMiddleware::DuplicateJobs::DuplicateJob, duplicate_key_ttl: Gitlab::SidekiqMiddleware::DuplicateJobs::DuplicateJob::DEFAULT_DUPLICATE_KEY_TTL) end - let(:expected_message) { "dropped #{strategy_name.to_s.humanize.downcase}" } + let(:humanized_strategy_name) { strategy_name.to_s.humanize.downcase } subject(:strategy) { Gitlab::SidekiqMiddleware::DuplicateJobs::Strategies.for(strategy_name).new(fake_duplicate_job) } @@ -155,7 +155,7 @@ RSpec.shared_examples 'deduplicating jobs when scheduling' do |strategy_name| fake_logger = instance_double(Gitlab::SidekiqLogging::DeduplicationLogger) expect(Gitlab::SidekiqLogging::DeduplicationLogger).to receive(:instance).and_return(fake_logger) - expect(fake_logger).to receive(:deduplicated_log).with(a_hash_including({ 'jid' => 'new jid' }), expected_message, {}) + expect(fake_logger).to receive(:deduplicated_log).with(a_hash_including({ 'jid' => 'new jid' }), humanized_strategy_name, {}) strategy.schedule({ 'jid' => 'new jid' }) {} end @@ -165,7 +165,7 @@ RSpec.shared_examples 'deduplicating jobs when scheduling' do |strategy_name| expect(Gitlab::SidekiqLogging::DeduplicationLogger).to receive(:instance).and_return(fake_logger) allow(fake_duplicate_job).to receive(:options).and_return({ foo: :bar }) - expect(fake_logger).to receive(:deduplicated_log).with(a_hash_including({ 'jid' => 'new jid' }), expected_message, { foo: :bar }) + expect(fake_logger).to receive(:deduplicated_log).with(a_hash_including({ 'jid' => 'new jid' }), humanized_strategy_name, { foo: :bar }) strategy.schedule({ 'jid' => 'new jid' }) {} end diff --git a/spec/support/shared_examples/lib/gitlab/usage_data_counters/issuable_activity_shared_examples.rb b/spec/support/shared_examples/lib/gitlab/usage_data_counters/issuable_activity_shared_examples.rb index 9873bab1caf..6f7d5784a5c 100644 --- 
a/spec/support/shared_examples/lib/gitlab/usage_data_counters/issuable_activity_shared_examples.rb +++ b/spec/support/shared_examples/lib/gitlab/usage_data_counters/issuable_activity_shared_examples.rb @@ -37,8 +37,8 @@ RSpec.shared_examples 'daily tracked issuable snowplow and service ping events w .to_h end - let(:track_params) { { project: project } } - let(:event_params) { track_params.merge(label: event_label, property: event_property, namespace: project.namespace, context: [context]) } + let(:track_params) { original_params || { project: project } } + let(:event_params) { { project: project }.merge(label: event_label, property: event_property, namespace: project.namespace, context: [context]) } end end diff --git a/spec/support/shared_examples/lib/sidebars/admin/menus/admin_menus_shared_examples.rb b/spec/support/shared_examples/lib/sidebars/admin/menus/admin_menus_shared_examples.rb new file mode 100644 index 00000000000..a9fd66528bd --- /dev/null +++ b/spec/support/shared_examples/lib/sidebars/admin/menus/admin_menus_shared_examples.rb @@ -0,0 +1,74 @@ +# frozen_string_literal: true + +RSpec.shared_examples 'Admin menu' do |link:, title:, icon:| + let_it_be(:user) { build(:user, :admin) } + + before do + allow(user).to receive(:can_admin_all_resources?).and_return(true) + end + + let(:context) { Sidebars::Context.new(current_user: user, container: nil) } + + subject { described_class.new(context) } + + it 'renders the correct link' do + expect(subject.link).to match link + end + + it 'renders the correct title' do + expect(subject.title).to eq title + end + + it 'renders the correct icon' do + expect(subject.sprite_icon).to be icon + end + + describe '#render?' 
do + context 'when user is admin' do + it 'renders' do + expect(subject.render?).to be true + end + end + + context 'when user is not admin' do + it 'does not render' do + expect(described_class.new(Sidebars::Context.new(current_user: build(:user), + container: nil)).render?).to be false + end + end + + context 'when user is not logged in' do + it 'does not render' do + expect(described_class.new(Sidebars::Context.new(current_user: nil, container: nil)).render?).to be false + end + end + end +end + +RSpec.shared_examples 'Admin menu without sub menus' do |active_routes:| + let_it_be(:user) { build(:user, :admin) } + + let(:context) { Sidebars::Context.new(current_user: user, container: nil) } + + subject { described_class.new(context) } + + it 'does not contain any sub menu(s)' do + expect(subject.has_items?).to be false + end + + it 'defines correct active route' do + expect(subject.active_routes).to eq active_routes + end +end + +RSpec.shared_examples 'Admin menu with sub menus' do + let_it_be(:user) { build(:user, :admin) } + + let(:context) { Sidebars::Context.new(current_user: user, container: nil) } + + subject { described_class.new(context) } + + it 'contains submemus' do + expect(subject.has_items?).to be true + end +end diff --git a/spec/support/shared_examples/mailers/notify_shared_examples.rb b/spec/support/shared_examples/mailers/notify_shared_examples.rb index 2e182fb399d..4d039eccbf7 100644 --- a/spec/support/shared_examples/mailers/notify_shared_examples.rb +++ b/spec/support/shared_examples/mailers/notify_shared_examples.rb @@ -294,3 +294,17 @@ RSpec.shared_examples 'does not render a manage notifications link' do end end end + +RSpec.shared_examples 'email with default notification reason' do + it do + is_expected.to have_body_text("You're receiving this email because of your account") + is_expected.to have_plain_text_content("You're receiving this email because of your account") + end +end + +RSpec.shared_examples 'email with link to issue' do + 
it do + is_expected.to have_body_text(%(view it on GitLab)) + is_expected.to have_plain_text_content("view it on GitLab: #{project_issue_url(project, issue)}") + end +end diff --git a/spec/support/shared_examples/metrics_instrumentation_shared_examples.rb b/spec/support/shared_examples/metrics_instrumentation_shared_examples.rb new file mode 100644 index 00000000000..cef9860fe25 --- /dev/null +++ b/spec/support/shared_examples/metrics_instrumentation_shared_examples.rb @@ -0,0 +1,44 @@ +# frozen_string_literal: true + +RSpec.shared_examples 'a correct instrumented metric value' do |params| + let(:time_frame) { params[:time_frame] } + let(:options) { params[:options] } + let(:metric) { described_class.new(time_frame: time_frame, options: options) } + + around do |example| + freeze_time { example.run } + end + + before do + if metric.respond_to?(:relation, true) && metric.send(:relation).respond_to?(:connection) + allow(metric.send(:relation).connection).to receive(:transaction_open?).and_return(false) + end + end + + it 'has correct value' do + expect(metric.value).to eq(expected_value) + end +end + +RSpec.shared_examples 'a correct instrumented metric query' do |params| + let(:time_frame) { params[:time_frame] } + let(:options) { params[:options] } + let(:metric) { described_class.new(time_frame: time_frame, options: options) } + + around do |example| + freeze_time { example.run } + end + + before do + allow(metric.send(:relation).connection).to receive(:transaction_open?).and_return(false) + end + + it 'has correct generate query' do + expect(metric.to_sql).to eq(expected_query) + end +end + +RSpec.shared_examples 'a correct instrumented metric value and query' do |params| + it_behaves_like 'a correct instrumented metric value', params + it_behaves_like 'a correct instrumented metric query', params +end diff --git a/spec/support/shared_examples/migrations/add_work_item_widget_shared_examples.rb 
b/spec/support/shared_examples/migrations/add_work_item_widget_shared_examples.rb new file mode 100644 index 00000000000..28eac52256f --- /dev/null +++ b/spec/support/shared_examples/migrations/add_work_item_widget_shared_examples.rb @@ -0,0 +1,33 @@ +# frozen_string_literal: true + +RSpec.shared_examples 'migration that adds widget to work items definitions' do |widget_name:| + let(:migration) { described_class.new } + let(:work_item_definitions) { table(:work_item_widget_definitions) } + + describe '#up' do + it "creates widget definition in all types" do + work_item_definitions.where(name: widget_name).delete_all + + expect { migrate! }.to change { work_item_definitions.count }.by(7) + expect(work_item_definitions.all.pluck(:name)).to include(widget_name) + end + + it 'logs a warning if the type is missing' do + allow(described_class::WorkItemType).to receive(:find_by_name_and_namespace_id).and_call_original + allow(described_class::WorkItemType).to receive(:find_by_name_and_namespace_id) + .with('Issue', nil).and_return(nil) + + expect(Gitlab::AppLogger).to receive(:warn).with('type Issue is missing, not adding widget') + migrate! + end + end + + describe '#down' do + it "removes definitions for widget" do + migrate! + + expect { migration.down }.to change { work_item_definitions.count }.by(-7) + expect(work_item_definitions.all.pluck(:name)).not_to include(widget_name) + end + end +end diff --git a/spec/support/shared_examples/models/concerns/auto_disabling_hooks_shared_examples.rb b/spec/support/shared_examples/models/concerns/auto_disabling_hooks_shared_examples.rb index a26c20ccc61..a196b63585c 100644 --- a/spec/support/shared_examples/models/concerns/auto_disabling_hooks_shared_examples.rb +++ b/spec/support/shared_examples/models/concerns/auto_disabling_hooks_shared_examples.rb @@ -17,8 +17,12 @@ RSpec.shared_examples 'a hook that gets automatically disabled on failure' do [4, 1.second.from_now], # Exceeded the grace period, set by #backoff! 
[4, Time.current] # Exceeded the grace period, set by #backoff!, edge-case ].map do |(recent_failures, disabled_until)| - create(hook_factory, **default_factory_arguments, recent_failures: recent_failures, -disabled_until: disabled_until) + create( + hook_factory, + **default_factory_arguments, + recent_failures: recent_failures, + disabled_until: disabled_until + ) end end @@ -45,8 +49,12 @@ disabled_until: disabled_until) [0, suspended], [0, expired] ].map do |(recent_failures, disabled_until)| - create(hook_factory, **default_factory_arguments, recent_failures: recent_failures, -disabled_until: disabled_until) + create( + hook_factory, + **default_factory_arguments, + recent_failures: recent_failures, + disabled_until: disabled_until + ) end end diff --git a/spec/support/shared_examples/models/concerns/cascading_namespace_setting_shared_examples.rb b/spec/support/shared_examples/models/concerns/cascading_namespace_setting_shared_examples.rb index 9dec1a5056c..c51e4999e81 100644 --- a/spec/support/shared_examples/models/concerns/cascading_namespace_setting_shared_examples.rb +++ b/spec/support/shared_examples/models/concerns/cascading_namespace_setting_shared_examples.rb @@ -112,9 +112,10 @@ RSpec.shared_examples 'a cascading namespace setting boolean attribute' do it 'does not allow the local value to be saved' do subgroup_settings.send("#{settings_attribute_name}=", nil) - expect { subgroup_settings.save! } - .to raise_error(ActiveRecord::RecordInvalid, - /cannot be changed because it is locked by an ancestor/) + expect { subgroup_settings.save! }.to raise_error( + ActiveRecord::RecordInvalid, + /cannot be changed because it is locked by an ancestor/ + ) end end @@ -321,9 +322,10 @@ RSpec.shared_examples 'a cascading namespace setting boolean attribute' do it 'does not allow the attribute to be saved' do subgroup_settings.send("lock_#{settings_attribute_name}=", true) - expect { subgroup_settings.save! 
} - .to raise_error(ActiveRecord::RecordInvalid, - /cannot be changed because it is locked by an ancestor/) + expect { subgroup_settings.save! }.to raise_error( + ActiveRecord::RecordInvalid, + /cannot be changed because it is locked by an ancestor/ + ) end end @@ -343,9 +345,10 @@ RSpec.shared_examples 'a cascading namespace setting boolean attribute' do it 'does not allow the lock to be saved when the attribute is nil' do subgroup_settings.send("#{settings_attribute_name}=", nil) - expect { subgroup_settings.save! } - .to raise_error(ActiveRecord::RecordInvalid, - /cannot be nil when locking the attribute/) + expect { subgroup_settings.save! }.to raise_error( + ActiveRecord::RecordInvalid, + /cannot be nil when locking the attribute/ + ) end it 'copies the cascaded value when locking the attribute if the local value is nil', :aggregate_failures do @@ -364,9 +367,10 @@ RSpec.shared_examples 'a cascading namespace setting boolean attribute' do it 'does not allow the attribute to be saved' do subgroup_settings.send("lock_#{settings_attribute_name}=", true) - expect { subgroup_settings.save! } - .to raise_error(ActiveRecord::RecordInvalid, - /cannot be changed because it is locked by an ancestor/) + expect { subgroup_settings.save! 
}.to raise_error( + ActiveRecord::RecordInvalid, + /cannot be changed because it is locked by an ancestor/ + ) end end diff --git a/spec/support/shared_examples/models/concerns/integrations/slack_mattermost_notifier_shared_examples.rb b/spec/support/shared_examples/models/concerns/integrations/slack_mattermost_notifier_shared_examples.rb index 0ef9ab25505..28d2d4f1597 100644 --- a/spec/support/shared_examples/models/concerns/integrations/slack_mattermost_notifier_shared_examples.rb +++ b/spec/support/shared_examples/models/concerns/integrations/slack_mattermost_notifier_shared_examples.rb @@ -465,10 +465,13 @@ RSpec.shared_examples Integrations::SlackMattermostNotifier do |integration_name context 'when commit comment event executed' do let(:commit_note) do - create(:note_on_commit, author: user, - project: project, - commit_id: project.repository.commit.id, - note: 'a comment on a commit') + create( + :note_on_commit, + author: user, + project: project, + commit_id: project.repository.commit.id, + note: 'a comment on a commit' + ) end let(:data) do @@ -480,8 +483,7 @@ RSpec.shared_examples Integrations::SlackMattermostNotifier do |integration_name context 'when merge request comment event executed' do let(:merge_request_note) do - create(:note_on_merge_request, project: project, - note: 'a comment on a merge request') + create(:note_on_merge_request, project: project, note: 'a comment on a merge request') end let(:data) do @@ -493,8 +495,7 @@ RSpec.shared_examples Integrations::SlackMattermostNotifier do |integration_name context 'when issue comment event executed' do let(:issue_note) do - create(:note_on_issue, project: project, - note: 'a comment on an issue') + create(:note_on_issue, project: project, note: 'a comment on an issue') end let(:data) do @@ -506,8 +507,7 @@ RSpec.shared_examples Integrations::SlackMattermostNotifier do |integration_name context 'when snippet comment event executed' do let(:snippet_note) do - create(:note_on_project_snippet, project: 
project, - note: 'a comment on a snippet') + create(:note_on_project_snippet, project: project, note: 'a comment on a snippet') end let(:data) do @@ -522,9 +522,7 @@ RSpec.shared_examples Integrations::SlackMattermostNotifier do |integration_name let_it_be(:user) { create(:user) } let_it_be_with_refind(:project) { create(:project, :repository, creator: user) } let(:pipeline) do - create(:ci_pipeline, - project: project, status: status, - sha: project.commit.sha, ref: project.default_branch) + create(:ci_pipeline, project: project, status: status, sha: project.commit.sha, ref: project.default_branch) end before do @@ -557,9 +555,7 @@ RSpec.shared_examples Integrations::SlackMattermostNotifier do |integration_name context 'with failed pipeline' do context 'on default branch' do let(:pipeline) do - create(:ci_pipeline, - project: project, status: :failed, - sha: project.commit.sha, ref: project.default_branch) + create(:ci_pipeline, project: project, status: :failed, sha: project.commit.sha, ref: project.default_branch) end let(:data) { Gitlab::DataBuilder::Pipeline.build(pipeline) } @@ -587,9 +583,7 @@ RSpec.shared_examples Integrations::SlackMattermostNotifier do |integration_name end let(:pipeline) do - create(:ci_pipeline, - project: project, status: :failed, - sha: project.commit.sha, ref: 'a-protected-branch') + create(:ci_pipeline, project: project, status: :failed, sha: project.commit.sha, ref: 'a-protected-branch') end let(:data) { Gitlab::DataBuilder::Pipeline.build(pipeline) } @@ -617,9 +611,7 @@ RSpec.shared_examples Integrations::SlackMattermostNotifier do |integration_name end let(:pipeline) do - create(:ci_pipeline, - project: project, status: :failed, - sha: project.commit.sha, ref: '1-stable') + create(:ci_pipeline, project: project, status: :failed, sha: project.commit.sha, ref: '1-stable') end let(:data) { Gitlab::DataBuilder::Pipeline.build(pipeline) } @@ -643,9 +635,7 @@ RSpec.shared_examples Integrations::SlackMattermostNotifier do 
|integration_name context 'on a neither protected nor default branch' do let(:pipeline) do - create(:ci_pipeline, - project: project, status: :failed, - sha: project.commit.sha, ref: 'a-random-branch') + create(:ci_pipeline, project: project, status: :failed, sha: project.commit.sha, ref: 'a-random-branch') end let(:data) { Gitlab::DataBuilder::Pipeline.build(pipeline) } diff --git a/spec/support/shared_examples/models/concerns/timebox_shared_examples.rb b/spec/support/shared_examples/models/concerns/timebox_shared_examples.rb index e4958779957..b04ac40b309 100644 --- a/spec/support/shared_examples/models/concerns/timebox_shared_examples.rb +++ b/spec/support/shared_examples/models/concerns/timebox_shared_examples.rb @@ -84,9 +84,12 @@ RSpec.shared_examples 'a timebox' do |timebox_type| let(:max_date) { mid_point + 10.days } def box(from, to) - create(factory, *timebox_args, - start_date: from || open_on_left, - due_date: to || open_on_right) + create( + factory, + *timebox_args, + start_date: from || open_on_left, + due_date: to || open_on_right + ) end it 'can find overlapping timeboxes' do diff --git a/spec/support/shared_examples/models/concerns/unstoppable_hooks_shared_examples.rb b/spec/support/shared_examples/models/concerns/unstoppable_hooks_shared_examples.rb index 24d114bbe23..f98528ffedc 100644 --- a/spec/support/shared_examples/models/concerns/unstoppable_hooks_shared_examples.rb +++ b/spec/support/shared_examples/models/concerns/unstoppable_hooks_shared_examples.rb @@ -18,8 +18,12 @@ RSpec.shared_examples 'a hook that does not get automatically disabled on failur [3, nil], [3, 1.day.ago] ].map do |(recent_failures, disabled_until)| - create(hook_factory, **default_factory_arguments, recent_failures: recent_failures, -disabled_until: disabled_until) + create( + hook_factory, + **default_factory_arguments, + recent_failures: recent_failures, + disabled_until: disabled_until + ) end end diff --git 
a/spec/support/shared_examples/models/issue_tracker_service_shared_examples.rb b/spec/support/shared_examples/models/issue_tracker_service_shared_examples.rb index 6d519e561ee..d438918eb60 100644 --- a/spec/support/shared_examples/models/issue_tracker_service_shared_examples.rb +++ b/spec/support/shared_examples/models/issue_tracker_service_shared_examples.rb @@ -10,19 +10,19 @@ end RSpec.shared_examples 'allows project key on reference pattern' do |url_attr| it 'allows underscores in the project name' do - expect(described_class.reference_pattern.match('EXT_EXT-1234')[0]).to eq 'EXT_EXT-1234' + expect(subject.reference_pattern.match('EXT_EXT-1234')[0]).to eq 'EXT_EXT-1234' end it 'allows numbers in the project name' do - expect(described_class.reference_pattern.match('EXT3_EXT-1234')[0]).to eq 'EXT3_EXT-1234' + expect(subject.reference_pattern.match('EXT3_EXT-1234')[0]).to eq 'EXT3_EXT-1234' end it 'requires the project name to begin with A-Z' do - expect(described_class.reference_pattern.match('3EXT_EXT-1234')).to eq nil - expect(described_class.reference_pattern.match('EXT_EXT-1234')[0]).to eq 'EXT_EXT-1234' + expect(subject.reference_pattern.match('3EXT_EXT-1234')).to eq nil + expect(subject.reference_pattern.match('EXT_EXT-1234')[0]).to eq 'EXT_EXT-1234' end it 'does not allow issue number to finish with a letter' do - expect(described_class.reference_pattern.match('EXT-123A')).to eq(nil) + expect(subject.reference_pattern.match('EXT-123A')).to eq(nil) end end diff --git a/spec/support/shared_examples/models/member_shared_examples.rb b/spec/support/shared_examples/models/member_shared_examples.rb index 7159c55e303..32b1cf24c0f 100644 --- a/spec/support/shared_examples/models/member_shared_examples.rb +++ b/spec/support/shared_examples/models/member_shared_examples.rb @@ -392,6 +392,30 @@ RSpec.shared_examples_for "bulk member creation" do expect(members.first).to be_invite end + context 'with different source types' do + shared_examples 'supports multiple 
sources' do + specify do + members = described_class.add_members(sources, [user1, user2], :maintainer) + + expect(members.map(&:user)).to contain_exactly(user1, user2, user1, user2) + expect(members).to all(be_a(member_type)) + expect(members).to all(be_persisted) + end + end + + context 'with an array of sources' do + let_it_be(:sources) { [source, source2] } + + it_behaves_like 'supports multiple sources' + end + + context 'with a query producing sources' do + let_it_be(:sources) { source_type.id_in([source, source2]) } + + it_behaves_like 'supports multiple sources' + end + end + context 'with de-duplication' do it 'has the same user by id and user' do members = described_class.add_members(source, [user1.id, user1, user1.id, user2, user2.id, user2], :maintainer) diff --git a/spec/support/shared_examples/models/members_notifications_shared_example.rb b/spec/support/shared_examples/models/members_notifications_shared_example.rb index e28220334ac..329cb812a08 100644 --- a/spec/support/shared_examples/models/members_notifications_shared_example.rb +++ b/spec/support/shared_examples/models/members_notifications_shared_example.rb @@ -69,7 +69,7 @@ RSpec.shared_examples 'members notifications' do |entity_type| let(:member) { create(:"#{entity_type}_member", :invited) } it "calls NotificationService.decline_#{entity_type}_invite" do - expect(notification_service).to receive(:"decline_#{entity_type}_invite").with(member) + expect(notification_service).to receive(:decline_invite).with(member) member.decline_invite! 
end diff --git a/spec/support/shared_examples/models/resource_event_shared_examples.rb b/spec/support/shared_examples/models/resource_event_shared_examples.rb index 038ff33c68a..1409f7caea8 100644 --- a/spec/support/shared_examples/models/resource_event_shared_examples.rb +++ b/spec/support/shared_examples/models/resource_event_shared_examples.rb @@ -10,6 +10,8 @@ RSpec.shared_examples 'a resource event' do let_it_be(:issue2) { create(:issue, author: user1) } let_it_be(:issue3) { create(:issue, author: user2) } + let(:resource_event) { described_class.name.demodulize.underscore.to_sym } + describe 'importable' do it { is_expected.to respond_to(:importing?) } it { is_expected.to respond_to(:imported?) } @@ -36,9 +38,9 @@ RSpec.shared_examples 'a resource event' do let!(:created_at2) { 2.days.ago } let!(:created_at3) { 3.days.ago } - let!(:event1) { create(described_class.name.underscore.to_sym, issue: issue1, created_at: created_at1) } - let!(:event2) { create(described_class.name.underscore.to_sym, issue: issue2, created_at: created_at2) } - let!(:event3) { create(described_class.name.underscore.to_sym, issue: issue2, created_at: created_at3) } + let!(:event1) { create(resource_event, issue: issue1, created_at: created_at1) } + let!(:event2) { create(resource_event, issue: issue2, created_at: created_at2) } + let!(:event3) { create(resource_event, issue: issue2, created_at: created_at3) } it 'returns the expected events' do events = described_class.created_after(created_at3) @@ -62,9 +64,10 @@ RSpec.shared_examples 'a resource event for issues' do let_it_be(:issue2) { create(:issue, author: user1) } let_it_be(:issue3) { create(:issue, author: user2) } - let_it_be(:event1) { create(described_class.name.underscore.to_sym, issue: issue1) } - let_it_be(:event2) { create(described_class.name.underscore.to_sym, issue: issue2) } - let_it_be(:event3) { create(described_class.name.underscore.to_sym, issue: issue1) } + let_it_be(:resource_event) { 
described_class.name.demodulize.underscore.to_sym } + let_it_be(:event1) { create(resource_event, issue: issue1) } + let_it_be(:event2) { create(resource_event, issue: issue2) } + let_it_be(:event3) { create(resource_event, issue: issue1) } describe 'associations' do it { is_expected.to belong_to(:issue) } @@ -93,9 +96,9 @@ RSpec.shared_examples 'a resource event for issues' do end describe '.by_created_at_earlier_or_equal_to' do - let_it_be(:event1) { create(described_class.name.underscore.to_sym, issue: issue1, created_at: '2020-03-10') } - let_it_be(:event2) { create(described_class.name.underscore.to_sym, issue: issue2, created_at: '2020-03-10') } - let_it_be(:event3) { create(described_class.name.underscore.to_sym, issue: issue1, created_at: '2020-03-12') } + let_it_be(:event1) { create(resource_event, issue: issue1, created_at: '2020-03-10') } + let_it_be(:event2) { create(resource_event, issue: issue2, created_at: '2020-03-10') } + let_it_be(:event3) { create(resource_event, issue: issue1, created_at: '2020-03-12') } it 'returns the expected events' do events = described_class.by_created_at_earlier_or_equal_to('2020-03-11 23:59:59') @@ -112,7 +115,7 @@ RSpec.shared_examples 'a resource event for issues' do if described_class.method_defined?(:issuable) describe '#issuable' do - let_it_be(:event1) { create(described_class.name.underscore.to_sym, issue: issue2) } + let_it_be(:event1) { create(resource_event, issue: issue2) } it 'returns the expected issuable' do expect(event1.issuable).to eq(issue2) @@ -125,6 +128,7 @@ RSpec.shared_examples 'a resource event for merge requests' do let_it_be(:user1) { create(:user) } let_it_be(:user2) { create(:user) } + let_it_be(:resource_event) { described_class.name.demodulize.underscore.to_sym } let_it_be(:merge_request1) { create(:merge_request, author: user1) } let_it_be(:merge_request2) { create(:merge_request, author: user1) } let_it_be(:merge_request3) { create(:merge_request, author: user2) } @@ -134,9 +138,9 @@ 
RSpec.shared_examples 'a resource event for merge requests' do end describe '.by_merge_request' do - let_it_be(:event1) { create(described_class.name.underscore.to_sym, merge_request: merge_request1) } - let_it_be(:event2) { create(described_class.name.underscore.to_sym, merge_request: merge_request2) } - let_it_be(:event3) { create(described_class.name.underscore.to_sym, merge_request: merge_request1) } + let_it_be(:event1) { create(resource_event, merge_request: merge_request1) } + let_it_be(:event2) { create(resource_event, merge_request: merge_request2) } + let_it_be(:event3) { create(resource_event, merge_request: merge_request1) } it 'returns the expected records for an issue with events' do events = described_class.by_merge_request(merge_request1) @@ -153,7 +157,7 @@ RSpec.shared_examples 'a resource event for merge requests' do if described_class.method_defined?(:issuable) describe '#issuable' do - let_it_be(:event1) { create(described_class.name.underscore.to_sym, merge_request: merge_request2) } + let_it_be(:event1) { create(resource_event, merge_request: merge_request2) } it 'returns the expected issuable' do expect(event1.issuable).to eq(merge_request2) @@ -163,7 +167,7 @@ RSpec.shared_examples 'a resource event for merge requests' do context 'on callbacks' do it 'does not trigger note created subscription' do - event = build(described_class.name.underscore.to_sym, merge_request: merge_request1) + event = build(resource_event, merge_request: merge_request1) expect(GraphqlTriggers).not_to receive(:work_item_note_created) expect(event).not_to receive(:trigger_note_subscription_create) @@ -177,15 +181,17 @@ RSpec.shared_examples 'a note for work item resource event' do let_it_be(:project) { create(:project) } let_it_be(:work_item) { create(:work_item, :task, project: project, author: user) } + let(:resource_event) { described_class.name.demodulize.underscore.to_sym } + it 'builds synthetic note with correct synthetic_note_class' do - event = 
build(described_class.name.underscore.to_sym, issue: work_item) + event = build(resource_event, issue: work_item) expect(event.work_item_synthetic_system_note.class.name).to eq(event.synthetic_note_class.name) end context 'on callbacks' do it 'triggers note created subscription' do - event = build(described_class.name.underscore.to_sym, issue: work_item) + event = build(resource_event, issue: work_item) expect(GraphqlTriggers).to receive(:work_item_note_created) expect(event).to receive(:trigger_note_subscription_create).and_call_original diff --git a/spec/support/shared_examples/prometheus/additional_metrics_shared_examples.rb b/spec/support/shared_examples/prometheus/additional_metrics_shared_examples.rb new file mode 100644 index 00000000000..d196114b227 --- /dev/null +++ b/spec/support/shared_examples/prometheus/additional_metrics_shared_examples.rb @@ -0,0 +1,161 @@ +# frozen_string_literal: true + +RSpec.shared_examples 'additional metrics query' do + include Prometheus::MetricBuilders + + let(:metric_group_class) { Gitlab::Prometheus::MetricGroup } + let(:metric_class) { Gitlab::Prometheus::Metric } + + let(:metric_names) { %w[metric_a metric_b] } + + let(:query_range_result) do + [{ metric: {}, values: [[1488758662.506, '0.00002996364761904785'], [1488758722.506, '0.00003090239047619091']] }] + end + + let(:client) { instance_double('Gitlab::PrometheusClient') } + let(:query_result) { described_class.new(client).query(*query_params) } + let(:project) { create(:project, :repository) } + let(:environment) { create(:environment, slug: 'environment-slug', project: project) } + + before do + allow(client).to receive(:label_values).and_return(metric_names) + allow(metric_group_class).to receive(:common_metrics).and_return([simple_metric_group(metrics: [simple_metric])]) + end + + describe 'metrics query context' do + subject! 
{ described_class.new(client) } + + shared_examples 'query context containing environment slug and filter' do + it 'contains ci_environment_slug' do + expect(subject) + .to receive(:query_metrics).with(project, environment, hash_including(ci_environment_slug: environment.slug)) + + subject.query(*query_params) + end + + it 'contains environment filter' do + expect(subject).to receive(:query_metrics).with( + project, + environment, + hash_including( + environment_filter: "container_name!=\"POD\",environment=\"#{environment.slug}\"" + ) + ) + + subject.query(*query_params) + end + end + + describe 'project has Kubernetes service' do + context 'when user configured kubernetes from CI/CD > Clusters' do + let!(:cluster) { create(:cluster, :project, :provided_by_gcp, projects: [project]) } + let(:environment) { create(:environment, slug: 'environment-slug', project: project) } + let(:kube_namespace) { environment.deployment_namespace } + + it_behaves_like 'query context containing environment slug and filter' + + it 'query context contains kube_namespace' do + expect(subject) + .to receive(:query_metrics).with(project, environment, hash_including(kube_namespace: kube_namespace)) + + subject.query(*query_params) + end + end + end + + describe 'project without Kubernetes service' do + it_behaves_like 'query context containing environment slug and filter' + + it 'query context contains empty kube_namespace' do + expect(subject).to receive(:query_metrics).with(project, environment, hash_including(kube_namespace: '')) + + subject.query(*query_params) + end + end + end + + context 'with one group where two metrics is found' do + before do + allow(metric_group_class).to receive(:common_metrics).and_return([simple_metric_group]) + end + + context 'when some queries return results' do + before do + allow(client).to receive(:query_range).with('query_range_a', any_args).and_return(query_range_result) + allow(client).to receive(:query_range).with('query_range_b', 
any_args).and_return(query_range_result) + allow(client).to receive(:query_range).with('query_range_empty', any_args).and_return([]) + end + + it 'return group data only for queries with results' do + expected = [ + { + group: 'name', + priority: 1, + metrics: [ + { + title: 'title', weight: 1, y_label: 'Values', queries: [ + { query_range: 'query_range_a', result: query_range_result }, + { query_range: 'query_range_b', label: 'label', unit: 'unit', result: query_range_result } + ] + } + ] + } + ] + + expect(query_result.to_json).to match_schema('prometheus/additional_metrics_query_result') + expect(query_result).to eq(expected) + end + end + end + + context 'with two groups with one metric each' do + let(:metrics) { [simple_metric(queries: [simple_query])] } + + before do + allow(metric_group_class).to receive(:common_metrics).and_return( + [ + simple_metric_group(name: 'group_a', metrics: [simple_metric(queries: [simple_query])]), + simple_metric_group(name: 'group_b', metrics: [simple_metric(title: 'title_b', queries: [simple_query('b')])]) + ]) + allow(client).to receive(:label_values).and_return(metric_names) + end + + context 'when both queries return results' do + before do + allow(client).to receive(:query_range).with('query_range_a', any_args).and_return(query_range_result) + allow(client).to receive(:query_range).with('query_range_b', any_args).and_return(query_range_result) + end + + it 'return group data both queries' do + queries_with_result_a = { queries: [{ query_range: 'query_range_a', result: query_range_result }] } + queries_with_result_b = { queries: [{ query_range: 'query_range_b', result: query_range_result }] } + + expect(query_result.to_json).to match_schema('prometheus/additional_metrics_query_result') + + expect(query_result.count).to eq(2) + expect(query_result).to all(satisfy { |r| r[:metrics].count == 1 }) + + expect(query_result[0][:metrics].first).to include(queries_with_result_a) + expect(query_result[1][:metrics].first).to 
include(queries_with_result_b) + end + end + + context 'when one query returns result' do + before do + allow(client).to receive(:query_range).with('query_range_a', any_args).and_return(query_range_result) + allow(client).to receive(:query_range).with('query_range_b', any_args).and_return([]) + end + + it 'return group data only for query with results' do + queries_with_result = { queries: [{ query_range: 'query_range_a', result: query_range_result }] } + + expect(query_result.to_json).to match_schema('prometheus/additional_metrics_query_result') + + expect(query_result.count).to eq(1) + expect(query_result).to all(satisfy { |r| r[:metrics].count == 1 }) + + expect(query_result.first[:metrics].first).to include(queries_with_result) + end + end + end +end diff --git a/spec/support/shared_examples/protected_tags/access_control_ce_shared_examples.rb b/spec/support/shared_examples/protected_tags/access_control_ce_shared_examples.rb new file mode 100644 index 00000000000..6aa9647bcec --- /dev/null +++ b/spec/support/shared_examples/protected_tags/access_control_ce_shared_examples.rb @@ -0,0 +1,32 @@ +# frozen_string_literal: true + +RSpec.shared_examples "protected tags > access control > CE" do + ProtectedRefAccess::HUMAN_ACCESS_LEVELS.each do |(access_type_id, access_type_name)| + it "allows creating protected tags that #{access_type_name} can create" do + visit project_protected_tags_path(project) + + set_protected_tag_name('master') + set_allowed_to('create', access_type_name) + click_on_protect + + expect(ProtectedTag.count).to eq(1) + expect(ProtectedTag.last.create_access_levels.map(&:access_level)).to eq([access_type_id]) + end + + it "allows updating protected tags so that #{access_type_name} can create them" do + visit project_protected_tags_path(project) + + set_protected_tag_name('master') + set_allowed_to('create', 'No one') + click_on_protect + + expect(ProtectedTag.count).to eq(1) + + set_allowed_to('create', access_type_name, form: 
'.protected-tags-list') + + wait_for_requests + + expect(ProtectedTag.last.create_access_levels.map(&:access_level)).to include(access_type_id) + end + end +end diff --git a/spec/support/shared_examples/quick_actions/issuable/close_quick_action_shared_examples.rb b/spec/support/shared_examples/quick_actions/issuable/close_quick_action_shared_examples.rb index d8690356f81..7cbaf40721a 100644 --- a/spec/support/shared_examples/quick_actions/issuable/close_quick_action_shared_examples.rb +++ b/spec/support/shared_examples/quick_actions/issuable/close_quick_action_shared_examples.rb @@ -1,7 +1,7 @@ # frozen_string_literal: true RSpec.shared_examples 'close quick action' do |issuable_type| - include Spec::Support::Helpers::Features::NotesHelpers + include Features::NotesHelpers before do project.add_maintainer(maintainer) diff --git a/spec/support/shared_examples/quick_actions/issue/promote_to_incident_quick_action_shared_examples.rb b/spec/support/shared_examples/quick_actions/issue/promote_to_incident_quick_action_shared_examples.rb index 3f1a98ca08e..7bd7500d546 100644 --- a/spec/support/shared_examples/quick_actions/issue/promote_to_incident_quick_action_shared_examples.rb +++ b/spec/support/shared_examples/quick_actions/issue/promote_to_incident_quick_action_shared_examples.rb @@ -1,6 +1,8 @@ # frozen_string_literal: true RSpec.shared_examples 'promote_to_incident quick action' do + include ListboxHelpers + describe '/promote_to_incident' do context 'when issue can be promoted' do it 'promotes issue to incident' do @@ -52,9 +54,11 @@ RSpec.shared_examples 'promote_to_incident quick action' do context 'when incident is selected for issue type' do it 'promotes issue to incident' do visit new_project_issue_path(project) + wait_for_requests + fill_in('Title', with: 'Title') find('.js-issuable-type-filter-dropdown-wrap').click - click_link('Incident') + select_listbox_item(_('Incident')) fill_in('Description', with: '/promote_to_incident') click_button('Create issue') 
diff --git a/spec/support/shared_examples/redis/redis_new_instance_shared_examples.rb b/spec/support/shared_examples/redis/redis_new_instance_shared_examples.rb new file mode 100644 index 00000000000..4c3aad95a56 --- /dev/null +++ b/spec/support/shared_examples/redis/redis_new_instance_shared_examples.rb @@ -0,0 +1,111 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.shared_examples "redis_new_instance_shared_examples" do |name, fallback_class| + include TmpdirHelper + + let(:instance_specific_config_file) { "config/redis.#{name}.yml" } + let(:environment_config_file_name) { "GITLAB_REDIS_#{name.upcase}_CONFIG_FILE" } + let(:fallback_config_file) { nil } + let(:rails_root) { mktmpdir } + + before do + allow(fallback_class).to receive(:config_file_name).and_return(fallback_config_file) + end + + it_behaves_like "redis_shared_examples" + + describe '.config_file_name' do + subject { described_class.config_file_name } + + before do + # Undo top-level stub of config_file_name because we are testing that method now. 
+ allow(described_class).to receive(:config_file_name).and_call_original + + allow(described_class).to receive(:rails_root).and_return(rails_root) + FileUtils.mkdir_p(File.join(rails_root, 'config')) + end + + context 'and there is a global env override' do + before do + stub_env('GITLAB_REDIS_CONFIG_FILE', 'global override') + end + + it { expect(subject).to eq('global override') } + + context "and #{fallback_class.name.demodulize} has a different config file" do + let(:fallback_config_file) { 'fallback config file' } + + it { expect(subject).to eq('fallback config file') } + end + end + end + + describe '#fetch_config' do + subject { described_class.new('test').send(:fetch_config) } + + before do + FileUtils.mkdir_p(File.join(rails_root, 'config')) + + allow(described_class).to receive(:rails_root).and_return(rails_root) + end + + context 'when redis.yml exists' do + before do + allow(described_class).to receive(:config_file_name).and_call_original + allow(described_class).to receive(:redis_yml_path).and_call_original + end + + context 'when the fallback has a redis.yml entry' do + before do + File.write(File.join(rails_root, 'config/redis.yml'), { + 'test' => { + described_class.config_fallback.store_name.underscore => { 'fallback redis.yml' => 123 } + } + }.to_json) + end + + it { expect(subject).to eq({ 'fallback redis.yml' => 123 }) } + + context 'and an instance config file exists' do + before do + File.write(File.join(rails_root, instance_specific_config_file), { + 'test' => { 'instance specific file' => 456 } + }.to_json) + end + + it { expect(subject).to eq({ 'instance specific file' => 456 }) } + + context 'and the instance has a redis.yml entry' do + before do + File.write(File.join(rails_root, 'config/redis.yml'), { + 'test' => { name => { 'instance redis.yml' => 789 } } + }.to_json) + end + + it { expect(subject).to eq({ 'instance redis.yml' => 789 }) } + end + end + end + end + + context 'when no redis config file exsits' do + it 'returns nil' do + 
expect(subject).to eq(nil) + end + + context 'when resque.yml exists' do + before do + File.write(File.join(rails_root, 'config/resque.yml'), { + 'test' => { 'foobar' => 123 } + }.to_json) + end + + it 'returns the config from resque.yml' do + expect(subject).to eq({ 'foobar' => 123 }) + end + end + end + end +end diff --git a/spec/support/shared_examples/redis/redis_shared_examples.rb b/spec/support/shared_examples/redis/redis_shared_examples.rb new file mode 100644 index 00000000000..34d8ba5c30d --- /dev/null +++ b/spec/support/shared_examples/redis/redis_shared_examples.rb @@ -0,0 +1,463 @@ +# frozen_string_literal: true + +RSpec.shared_examples "redis_shared_examples" do + include StubENV + include TmpdirHelper + + let(:test_redis_url) { "redis://redishost:#{redis_port}" } + let(:test_cluster_config) { { cluster: [{ host: "redis://redishost", port: redis_port }] } } + let(:config_file_name) { instance_specific_config_file } + let(:config_old_format_socket) { "spec/fixtures/config/redis_old_format_socket.yml" } + let(:config_new_format_socket) { "spec/fixtures/config/redis_new_format_socket.yml" } + let(:old_socket_path) { "/path/to/old/redis.sock" } + let(:new_socket_path) { "/path/to/redis.sock" } + let(:config_old_format_host) { "spec/fixtures/config/redis_old_format_host.yml" } + let(:config_new_format_host) { "spec/fixtures/config/redis_new_format_host.yml" } + let(:config_cluster_format_host) { "spec/fixtures/config/redis_cluster_format_host.yml" } + let(:redis_port) { 6379 } + let(:redis_database) { 99 } + let(:sentinel_port) { 26379 } + let(:config_with_environment_variable_inside) { "spec/fixtures/config/redis_config_with_env.yml" } + let(:config_env_variable_url) { "TEST_GITLAB_REDIS_URL" } + let(:rails_root) { mktmpdir } + + before do + allow(described_class).to receive(:config_file_name).and_return(Rails.root.join(config_file_name).to_s) + allow(described_class).to receive(:redis_yml_path).and_return('/dev/null') + end + + describe 
'.config_file_name' do + subject { described_class.config_file_name } + + before do + # Undo top-level stub of config_file_name because we are testing that method now. + allow(described_class).to receive(:config_file_name).and_call_original + + allow(described_class).to receive(:rails_root).and_return(rails_root) + FileUtils.mkdir_p(File.join(rails_root, 'config')) + end + + context 'when there is no config file anywhere' do + it { expect(subject).to be_nil } + + context 'and there is a global env override' do + before do + stub_env('GITLAB_REDIS_CONFIG_FILE', 'global override') + end + + it { expect(subject).to eq('global override') } + + context 'and there is an instance specific config file' do + before do + FileUtils.touch(File.join(rails_root, instance_specific_config_file)) + end + + it { expect(subject).to eq("#{rails_root}/#{instance_specific_config_file}") } + + it 'returns a path that exists' do + expect(File.file?(subject)).to eq(true) + end + + context 'and there is a specific env override' do + before do + stub_env(environment_config_file_name, 'instance specific override') + end + + it { expect(subject).to eq('instance specific override') } + end + end + end + end + end + + describe '.store' do + let(:rails_env) { 'development' } + + subject { described_class.new(rails_env).store } + + shared_examples 'redis store' do + let(:redis_store) { ::Redis::Store } + let(:redis_store_to_s) { "Redis Client connected to #{host} against DB #{redis_database}" } + + it 'instantiates Redis::Store' do + is_expected.to be_a(redis_store) + + expect(subject.to_s).to eq(redis_store_to_s) + end + + context 'with the namespace' do + let(:namespace) { 'namespace_name' } + let(:redis_store_to_s) do + "Redis Client connected to #{host} against DB #{redis_database} with namespace #{namespace}" + end + + subject { described_class.new(rails_env).store(namespace: namespace) } + + it "uses specified namespace" do + expect(subject.to_s).to eq(redis_store_to_s) + end + end + end + + 
context 'with old format' do + it_behaves_like 'redis store' do + let(:config_file_name) { config_old_format_host } + let(:host) { "localhost:#{redis_port}" } + end + end + + context 'with new format' do + it_behaves_like 'redis store' do + let(:config_file_name) { config_new_format_host } + let(:host) { "development-host:#{redis_port}" } + end + end + end + + describe '.params' do + subject { described_class.new(rails_env).params } + + let(:rails_env) { 'development' } + let(:config_file_name) { config_old_format_socket } + + it 'withstands mutation' do + params1 = described_class.params + params2 = described_class.params + params1[:foo] = :bar + + expect(params2).not_to have_key(:foo) + end + + context 'when url contains unix socket reference' do + context 'with old format' do + let(:config_file_name) { config_old_format_socket } + + it 'returns path key instead' do + is_expected.to include(path: old_socket_path) + is_expected.not_to have_key(:url) + end + end + + context 'with new format' do + let(:config_file_name) { config_new_format_socket } + + it 'returns path key instead' do + is_expected.to include(path: new_socket_path) + is_expected.not_to have_key(:url) + end + end + end + + context 'when url is host based' do + context 'with old format' do + let(:config_file_name) { config_old_format_host } + + it 'returns hash with host, port, db, and password' do + is_expected.to include(host: 'localhost', password: 'mypassword', port: redis_port, db: redis_database) + is_expected.not_to have_key(:url) + end + end + + context 'with new format' do + let(:config_file_name) { config_new_format_host } + + where(:rails_env, :host) do + [ + %w[development development-host], + %w[test test-host], + %w[production production-host] + ] + end + + with_them do + it 'returns hash with host, port, db, and password' do + is_expected.to include(host: host, password: 'mynewpassword', port: redis_port, db: redis_database) + is_expected.not_to have_key(:url) + end + end + end + + 
context 'with redis cluster format' do + let(:config_file_name) { config_cluster_format_host } + + where(:rails_env, :host) do + [ + %w[development development-master], + %w[test test-master], + %w[production production-master] + ] + end + + with_them do + it 'returns hash with cluster and password' do + is_expected.to include( + password: 'myclusterpassword', + cluster: [ + { host: "#{host}1", port: redis_port }, + { host: "#{host}2", port: redis_port } + ] + ) + is_expected.not_to have_key(:url) + end + end + end + end + end + + describe '.url' do + let(:config_file_name) { config_old_format_socket } + + it 'withstands mutation' do + url1 = described_class.url + url2 = described_class.url + url1 << 'foobar' unless url1.frozen? + + expect(url2).not_to end_with('foobar') + end + + context 'when yml file with env variable' do + let(:config_file_name) { config_with_environment_variable_inside } + + before do + stub_env(config_env_variable_url, test_redis_url) + end + + it 'reads redis url from env variable' do + expect(described_class.url).to eq test_redis_url + end + end + end + + describe '.version' do + it 'returns a version' do + expect(described_class.version).to be_present + end + end + + describe '.with' do + let(:config_file_name) { config_old_format_socket } + + before do + clear_pool + end + + after do + clear_pool + end + + context 'when running on single-threaded runtime' do + before do + allow(Gitlab::Runtime).to receive(:multi_threaded?).and_return(false) + end + + it 'instantiates a connection pool with size 5' do + expect(ConnectionPool).to receive(:new).with(size: 5).and_call_original + + described_class.with { |_redis_shared_example| true } + end + end + + context 'when running on multi-threaded runtime' do + before do + allow(Gitlab::Runtime).to receive(:multi_threaded?).and_return(true) + allow(Gitlab::Runtime).to receive(:max_threads).and_return(18) + end + + it 'instantiates a connection pool with a size based on the concurrency of the worker' 
do + expect(ConnectionPool).to receive(:new).with(size: 18 + 5).and_call_original + + described_class.with { |_redis_shared_example| true } + end + end + + context 'when there is no config at all' do + before do + # Undo top-level stub of config_file_name because we are testing that method now. + allow(described_class).to receive(:config_file_name).and_call_original + + allow(described_class).to receive(:rails_root).and_return(rails_root) + end + + it 'can run an empty block' do + expect { described_class.with { nil } }.not_to raise_error + end + end + end + + describe '#db' do + let(:rails_env) { 'development' } + + subject { described_class.new(rails_env).db } + + context 'with old format' do + let(:config_file_name) { config_old_format_host } + + it 'returns the correct db' do + expect(subject).to eq(redis_database) + end + end + + context 'with new format' do + let(:config_file_name) { config_new_format_host } + + it 'returns the correct db' do + expect(subject).to eq(redis_database) + end + end + + context 'with cluster-mode' do + let(:config_file_name) { config_cluster_format_host } + + it 'returns the correct db' do + expect(subject).to eq(0) + end + end + end + + describe '#sentinels' do + subject { described_class.new(rails_env).sentinels } + + let(:rails_env) { 'development' } + + context 'when sentinels are defined' do + let(:config_file_name) { config_new_format_host } + + where(:rails_env, :hosts) do + [ + ['development', %w[development-replica1 development-replica2]], + ['test', %w[test-replica1 test-replica2]], + ['production', %w[production-replica1 production-replica2]] + ] + end + + with_them do + it 'returns an array of hashes with host and port keys' do + is_expected.to include(host: hosts[0], port: sentinel_port) + is_expected.to include(host: hosts[1], port: sentinel_port) + end + end + end + + context 'when sentinels are not defined' do + let(:config_file_name) { config_old_format_host } + + it 'returns nil' do + is_expected.to be_nil + end + 
end + + context 'when cluster is defined' do + let(:config_file_name) { config_cluster_format_host } + + it 'returns nil' do + is_expected.to be_nil + end + end + end + + describe '#sentinels?' do + subject { described_class.new(Rails.env).sentinels? } + + context 'when sentinels are defined' do + let(:config_file_name) { config_new_format_host } + + it 'returns true' do + is_expected.to be_truthy + end + end + + context 'when sentinels are not defined' do + let(:config_file_name) { config_old_format_host } + + it { expect(subject).to eq(nil) } + end + + context 'when cluster is defined' do + let(:config_file_name) { config_cluster_format_host } + + it 'returns false' do + is_expected.to be_falsey + end + end + end + + describe '#raw_config_hash' do + it 'returns old-style single url config in a hash' do + expect(subject).to receive(:fetch_config) { test_redis_url } + expect(subject.send(:raw_config_hash)).to eq(url: test_redis_url) + end + + it 'returns cluster config without url key in a hash' do + expect(subject).to receive(:fetch_config) { test_cluster_config } + expect(subject.send(:raw_config_hash)).to eq(test_cluster_config) + end + end + + describe '#fetch_config' do + before do + FileUtils.mkdir_p(File.join(rails_root, 'config')) + # Undo top-level stub of config_file_name because we are testing that method now. 
+ allow(described_class).to receive(:config_file_name).and_call_original + allow(described_class).to receive(:rails_root).and_return(rails_root) + end + + it 'raises an exception when the config file contains invalid yaml' do + Tempfile.open('bad.yml') do |file| + file.write('{"not":"yaml"') + file.flush + allow(described_class).to receive(:config_file_name) { file.path } + + expect { subject.send(:fetch_config) }.to raise_error(Psych::SyntaxError) + end + end + + it 'has a value for the legacy default URL' do + allow(subject).to receive(:fetch_config).and_return(nil) + + expect(subject.send(:raw_config_hash)).to include(url: a_string_matching(%r{\Aredis://localhost:638[012]\Z})) + end + + context 'when redis.yml exists' do + subject { described_class.new('test').send(:fetch_config) } + + before do + allow(described_class).to receive(:redis_yml_path).and_call_original + end + + it 'uses config/redis.yml' do + File.write(File.join(rails_root, 'config/redis.yml'), { + 'test' => { described_class.store_name.underscore => { 'foobar' => 123 } } + }.to_json) + + expect(subject).to eq({ 'foobar' => 123 }) + end + end + + context 'when no config file exsits' do + subject { described_class.new('test').send(:fetch_config) } + + it 'returns nil' do + expect(subject).to eq(nil) + end + + context 'when resque.yml exists' do + before do + FileUtils.mkdir_p(File.join(rails_root, 'config')) + File.write(File.join(rails_root, 'config/resque.yml'), { + 'test' => { 'foobar' => 123 } + }.to_json) + end + + it 'returns the config from resque.yml' do + expect(subject).to eq({ 'foobar' => 123 }) + end + end + end + end + + def clear_pool + described_class.remove_instance_variable(:@pool) + rescue NameError + # raised if @pool was not set; ignore + end +end diff --git a/spec/support/shared_examples/requests/access_tokens_controller_shared_examples.rb b/spec/support/shared_examples/requests/access_tokens_controller_shared_examples.rb index 2170025824f..74dbec063e0 100644 --- 
a/spec/support/shared_examples/requests/access_tokens_controller_shared_examples.rb +++ b/spec/support/shared_examples/requests/access_tokens_controller_shared_examples.rb @@ -14,7 +14,7 @@ RSpec.shared_examples 'GET resource access tokens available' do it 'lists all available scopes' do get_access_tokens - expect(assigns(:scopes)).to eq(Gitlab::Auth.resource_bot_scopes) + expect(assigns(:scopes)).to eq(Gitlab::Auth.available_scopes_for(resource)) end it 'returns for json response' do diff --git a/spec/support/shared_examples/requests/admin_mode_shared_examples.rb b/spec/support/shared_examples/requests/admin_mode_shared_examples.rb index ceb57fca786..4f198dfb740 100644 --- a/spec/support/shared_examples/requests/admin_mode_shared_examples.rb +++ b/spec/support/shared_examples/requests/admin_mode_shared_examples.rb @@ -1,103 +1,79 @@ # frozen_string_literal: true -RSpec.shared_examples 'GET request permissions for admin mode' do |failed_status_code = :forbidden| - it_behaves_like 'GET request permissions for admin mode when user', failed_status_code - it_behaves_like 'GET request permissions for admin mode when admin', failed_status_code -end - -RSpec.shared_examples 'PUT request permissions for admin mode' do |failed_status_code = :forbidden| - it_behaves_like 'PUT request permissions for admin mode when user', failed_status_code - it_behaves_like 'PUT request permissions for admin mode when admin', failed_status_code -end - -RSpec.shared_examples 'POST request permissions for admin mode' do |failed_status_code = :forbidden| - it_behaves_like 'POST request permissions for admin mode when user', failed_status_code - it_behaves_like 'POST request permissions for admin mode when admin', failed_status_code -end - -RSpec.shared_examples 'DELETE request permissions for admin mode' do |success_status_code: :no_content, - failed_status_code: :forbidden| - - it_behaves_like 'DELETE request permissions for admin mode when user', failed_status_code - it_behaves_like 'DELETE 
request permissions for admin mode when admin', success_status_code: success_status_code, - failed_status_code: failed_status_code -end -RSpec.shared_examples 'GET request permissions for admin mode when user' do |failed_status_code = :forbidden| - subject { get api(path, current_user, admin_mode: admin_mode) } +RSpec.shared_examples 'DELETE request permissions for admin mode' do + subject { delete api(path, current_user, admin_mode: admin_mode) } - let_it_be(:current_user) { create(:user) } + let_it_be(:success_status_code) { :no_content } + let_it_be(:failed_status_code) { :forbidden } - it_behaves_like 'admin mode on', true, failed_status_code - it_behaves_like 'admin mode on', false, failed_status_code + it_behaves_like 'when admin' + it_behaves_like 'when user' end -RSpec.shared_examples 'GET request permissions for admin mode when admin' do |failed_status_code = :forbidden| +RSpec.shared_examples 'GET request permissions for admin mode' do subject { get api(path, current_user, admin_mode: admin_mode) } - let_it_be(:current_user) { create(:admin) } - - it_behaves_like 'admin mode on', true, :ok - it_behaves_like 'admin mode on', false, failed_status_code -end - -RSpec.shared_examples 'PUT request permissions for admin mode when user' do |failed_status_code = :forbidden| - subject { put api(path, current_user, admin_mode: admin_mode), params: params } - - let_it_be(:current_user) { create(:user) } + let_it_be(:success_status_code) { :ok } + let_it_be(:failed_status_code) { :forbidden } - it_behaves_like 'admin mode on', true, failed_status_code - it_behaves_like 'admin mode on', false, failed_status_code + it_behaves_like 'when admin' + it_behaves_like 'when user' end -RSpec.shared_examples 'PUT request permissions for admin mode when admin' do |failed_status_code = :forbidden| +RSpec.shared_examples 'PUT request permissions for admin mode' do subject { put api(path, current_user, admin_mode: admin_mode), params: params } - let_it_be(:current_user) { 
create(:admin) } + let_it_be(:success_status_code) { :ok } + let_it_be(:failed_status_code) { :forbidden } - it_behaves_like 'admin mode on', true, :ok - it_behaves_like 'admin mode on', false, failed_status_code + it_behaves_like 'when admin' + it_behaves_like 'when user' end -RSpec.shared_examples 'POST request permissions for admin mode when user' do |failed_status_code = :forbidden| +RSpec.shared_examples 'POST request permissions for admin mode' do subject { post api(path, current_user, admin_mode: admin_mode), params: params } - let_it_be(:current_user) { create(:user) } + let_it_be(:success_status_code) { :created } + let_it_be(:failed_status_code) { :forbidden } - it_behaves_like 'admin mode on', true, failed_status_code - it_behaves_like 'admin mode on', false, failed_status_code + it_behaves_like 'when admin' + it_behaves_like 'when user' end -RSpec.shared_examples 'POST request permissions for admin mode when admin' do |failed_status_code = :forbidden| - subject { post api(path, current_user, admin_mode: admin_mode), params: params } +RSpec.shared_examples 'when user' do + let_it_be(:current_user) { create(:user) } - let_it_be(:current_user) { create(:admin) } + include_examples 'makes request' do + let(:status) { failed_status_code } + let(:admin_mode) { true } + end - it_behaves_like 'admin mode on', true, :created - it_behaves_like 'admin mode on', false, failed_status_code + it_behaves_like 'makes request' do + let(:status) { failed_status_code } + let(:admin_mode) { false } + end end -RSpec.shared_examples 'DELETE request permissions for admin mode when user' do |failed_status_code = :forbidden| - subject { delete api(path, current_user, admin_mode: admin_mode) } +RSpec.shared_examples 'when admin' do + let_it_be(:current_user) { create(:admin) } - let_it_be(:current_user) { create(:user) } + it_behaves_like 'makes request' do + let(:status) { success_status_code } + let(:admin_mode) { true } + end - it_behaves_like 'admin mode on', true, 
failed_status_code - it_behaves_like 'admin mode on', false, failed_status_code + it_behaves_like 'makes request' do + let(:status) { failed_status_code } + let(:admin_mode) { false } + end end -RSpec.shared_examples 'DELETE request permissions for admin mode when admin' do |success_status_code: :no_content, - failed_status_code: :forbidden| - - subject { delete api(path, current_user, admin_mode: admin_mode) } - - let_it_be(:current_user) { create(:admin) } - - it_behaves_like 'admin mode on', true, success_status_code - it_behaves_like 'admin mode on', false, failed_status_code -end +RSpec.shared_examples "makes request" do + let_it_be(:status) { nil } -RSpec.shared_examples "admin mode on" do |admin_mode, status| - let_it_be(:admin_mode) { admin_mode } + it "returns" do + subject - it_behaves_like 'returning response status', status + expect(response).to have_gitlab_http_status(status) + end end diff --git a/spec/support/shared_examples/requests/api/custom_attributes_shared_examples.rb b/spec/support/shared_examples/requests/api/custom_attributes_shared_examples.rb index f31cbcfdec1..e3ba51addaf 100644 --- a/spec/support/shared_examples/requests/api/custom_attributes_shared_examples.rb +++ b/spec/support/shared_examples/requests/api/custom_attributes_shared_examples.rb @@ -4,7 +4,7 @@ RSpec.shared_examples 'custom attributes endpoints' do |attributable_name| let!(:custom_attribute1) { attributable.custom_attributes.create! key: 'foo', value: 'foo' } let!(:custom_attribute2) { attributable.custom_attributes.create! 
key: 'bar', value: 'bar' } - describe "GET /#{attributable_name} with custom attributes filter" do + describe "GET /#{attributable_name} with custom attributes filter", :aggregate_failures do before do other_attributable end @@ -20,7 +20,7 @@ RSpec.shared_examples 'custom attributes endpoints' do |attributable_name| context 'with an authorized user' do it 'filters by custom attributes' do - get api("/#{attributable_name}", admin), params: { custom_attributes: { foo: 'foo', bar: 'bar' } } + get api("/#{attributable_name}", admin, admin_mode: true), params: { custom_attributes: { foo: 'foo', bar: 'bar' } } expect(response).to have_gitlab_http_status(:ok) expect(json_response.size).to be 1 @@ -29,7 +29,7 @@ RSpec.shared_examples 'custom attributes endpoints' do |attributable_name| end end - describe "GET /#{attributable_name} with custom attributes" do + describe "GET /#{attributable_name} with custom attributes", :aggregate_failures do before do other_attributable end @@ -46,7 +46,7 @@ RSpec.shared_examples 'custom attributes endpoints' do |attributable_name| context 'with an authorized user' do it 'does not include custom attributes by default' do - get api("/#{attributable_name}", admin) + get api("/#{attributable_name}", admin, admin_mode: true) expect(response).to have_gitlab_http_status(:ok) expect(json_response).not_to be_empty @@ -54,7 +54,7 @@ RSpec.shared_examples 'custom attributes endpoints' do |attributable_name| end it 'includes custom attributes if requested' do - get api("/#{attributable_name}", admin), params: { with_custom_attributes: true } + get api("/#{attributable_name}", admin, admin_mode: true), params: { with_custom_attributes: true } expect(response).to have_gitlab_http_status(:ok) expect(json_response).not_to be_empty @@ -72,7 +72,7 @@ RSpec.shared_examples 'custom attributes endpoints' do |attributable_name| end end - describe "GET /#{attributable_name}/:id with custom attributes" do + describe "GET /#{attributable_name}/:id with custom 
attributes", :aggregate_failures do context 'with an unauthorized user' do it 'does not include custom attributes' do get api("/#{attributable_name}/#{attributable.id}", user), params: { with_custom_attributes: true } @@ -84,14 +84,14 @@ RSpec.shared_examples 'custom attributes endpoints' do |attributable_name| context 'with an authorized user' do it 'does not include custom attributes by default' do - get api("/#{attributable_name}/#{attributable.id}", admin) + get api("/#{attributable_name}/#{attributable.id}", admin, admin_mode: true) expect(response).to have_gitlab_http_status(:ok) expect(json_response).not_to include 'custom_attributes' end it 'includes custom attributes if requested' do - get api("/#{attributable_name}/#{attributable.id}", admin), params: { with_custom_attributes: true } + get api("/#{attributable_name}/#{attributable.id}", admin, admin_mode: true), params: { with_custom_attributes: true } expect(response).to have_gitlab_http_status(:ok) expect(json_response['custom_attributes']).to contain_exactly( @@ -102,7 +102,7 @@ RSpec.shared_examples 'custom attributes endpoints' do |attributable_name| end end - describe "GET /#{attributable_name}/:id/custom_attributes" do + describe "GET /#{attributable_name}/:id/custom_attributes", :aggregate_failures do context 'with an unauthorized user' do subject { get api("/#{attributable_name}/#{attributable.id}/custom_attributes", user) } @@ -111,7 +111,7 @@ RSpec.shared_examples 'custom attributes endpoints' do |attributable_name| context 'with an authorized user' do it 'returns all custom attributes' do - get api("/#{attributable_name}/#{attributable.id}/custom_attributes", admin) + get api("/#{attributable_name}/#{attributable.id}/custom_attributes", admin, admin_mode: true) expect(response).to have_gitlab_http_status(:ok) expect(json_response).to contain_exactly( @@ -122,7 +122,7 @@ RSpec.shared_examples 'custom attributes endpoints' do |attributable_name| end end - describe "GET 
/#{attributable_name}/:id/custom_attributes/:key" do + describe "GET /#{attributable_name}/:id/custom_attributes/:key", :aggregate_failures do context 'with an unauthorized user' do subject { get api("/#{attributable_name}/#{attributable.id}/custom_attributes/foo", user) } @@ -131,7 +131,7 @@ RSpec.shared_examples 'custom attributes endpoints' do |attributable_name| context 'with an authorized user' do it 'returns a single custom attribute' do - get api("/#{attributable_name}/#{attributable.id}/custom_attributes/foo", admin) + get api("/#{attributable_name}/#{attributable.id}/custom_attributes/foo", admin, admin_mode: true) expect(response).to have_gitlab_http_status(:ok) expect(json_response).to eq({ 'key' => 'foo', 'value' => 'foo' }) @@ -139,7 +139,7 @@ RSpec.shared_examples 'custom attributes endpoints' do |attributable_name| end end - describe "PUT /#{attributable_name}/:id/custom_attributes/:key" do + describe "PUT /#{attributable_name}/:id/custom_attributes/:key", :aggregate_failures do context 'with an unauthorized user' do subject { put api("/#{attributable_name}/#{attributable.id}/custom_attributes/foo", user), params: { value: 'new' } } @@ -149,7 +149,7 @@ RSpec.shared_examples 'custom attributes endpoints' do |attributable_name| context 'with an authorized user' do it 'creates a new custom attribute' do expect do - put api("/#{attributable_name}/#{attributable.id}/custom_attributes/new", admin), params: { value: 'new' } + put api("/#{attributable_name}/#{attributable.id}/custom_attributes/new", admin, admin_mode: true), params: { value: 'new' } end.to change { attributable.custom_attributes.count }.by(1) expect(response).to have_gitlab_http_status(:ok) @@ -159,7 +159,7 @@ RSpec.shared_examples 'custom attributes endpoints' do |attributable_name| it 'updates an existing custom attribute' do expect do - put api("/#{attributable_name}/#{attributable.id}/custom_attributes/foo", admin), params: { value: 'new' } + put 
api("/#{attributable_name}/#{attributable.id}/custom_attributes/foo", admin, admin_mode: true), params: { value: 'new' } end.not_to change { attributable.custom_attributes.count } expect(response).to have_gitlab_http_status(:ok) @@ -169,7 +169,7 @@ RSpec.shared_examples 'custom attributes endpoints' do |attributable_name| end end - describe "DELETE /#{attributable_name}/:id/custom_attributes/:key" do + describe "DELETE /#{attributable_name}/:id/custom_attributes/:key", :aggregate_failures do context 'with an unauthorized user' do subject { delete api("/#{attributable_name}/#{attributable.id}/custom_attributes/foo", user) } @@ -179,7 +179,7 @@ RSpec.shared_examples 'custom attributes endpoints' do |attributable_name| context 'with an authorized user' do it 'deletes an existing custom attribute' do expect do - delete api("/#{attributable_name}/#{attributable.id}/custom_attributes/foo", admin) + delete api("/#{attributable_name}/#{attributable.id}/custom_attributes/foo", admin, admin_mode: true) end.to change { attributable.custom_attributes.count }.by(-1) expect(response).to have_gitlab_http_status(:no_content) diff --git a/spec/support/shared_examples/requests/api/hooks_shared_examples.rb b/spec/support/shared_examples/requests/api/hooks_shared_examples.rb index 797c5be802e..44bd943950a 100644 --- a/spec/support/shared_examples/requests/api/hooks_shared_examples.rb +++ b/spec/support/shared_examples/requests/api/hooks_shared_examples.rb @@ -1,13 +1,13 @@ # frozen_string_literal: true RSpec.shared_examples 'web-hook API endpoints test hook' do |prefix| - describe "POST #{prefix}/:hook_id" do + describe "POST #{prefix}/:hook_id", :aggregate_failures do it 'tests the hook' do expect(WebHookService) .to receive(:new).with(hook, anything, String, force: false) .and_return(instance_double(WebHookService, execute: nil)) - post api(hook_uri, user) + post api(hook_uri, user, admin_mode: user.admin?) 
expect(response).to have_gitlab_http_status(:created) end @@ -17,7 +17,7 @@ end RSpec.shared_examples 'web-hook API endpoints with branch-filter' do |prefix| describe "POST #{prefix}/hooks" do it "returns a 422 error if branch filter is not valid" do - post api(collection_uri, user), + post api(collection_uri, user, admin_mode: user.admin?), params: { url: "http://example.com", push_events_branch_filter: '~badbranchname/' } expect(response).to have_gitlab_http_status(:unprocessable_entity) @@ -58,10 +58,10 @@ RSpec.shared_examples 'web-hook API endpoints' do |prefix| let(:default_values) { {} } - describe "GET #{prefix}/hooks" do + describe "GET #{prefix}/hooks", :aggregate_failures do context "authorized user" do it "returns all hooks" do - get api(collection_uri, user) + get api(collection_uri, user, admin_mode: user.admin?) expect(response).to have_gitlab_http_status(:ok) expect(response).to match_collection_schema @@ -70,7 +70,7 @@ RSpec.shared_examples 'web-hook API endpoints' do |prefix| context "when user is forbidden" do it "prevents access to hooks" do - get api(collection_uri, unauthorized_user) + get api(collection_uri, unauthorized_user, admin_mode: true) expect(response).to have_gitlab_http_status(:forbidden) end @@ -90,7 +90,7 @@ RSpec.shared_examples 'web-hook API endpoints' do |prefix| end it 'returns the names of the url variables' do - get api(collection_uri, user) + get api(collection_uri, user, admin_mode: user.admin?) expect(response).to have_gitlab_http_status(:ok) expect(json_response).to contain_exactly( @@ -102,10 +102,10 @@ RSpec.shared_examples 'web-hook API endpoints' do |prefix| end end - describe "GET #{prefix}/hooks/:hook_id" do + describe "GET #{prefix}/hooks/:hook_id", :aggregate_failures do context "authorized user" do it "returns a project hook" do - get api(hook_uri, user) + get api(hook_uri, user, admin_mode: user.admin?) 
expect(response).to have_gitlab_http_status(:ok) expect(response).to match_hook_schema @@ -114,7 +114,7 @@ RSpec.shared_examples 'web-hook API endpoints' do |prefix| end it "returns a 404 error if hook id is not available" do - get api(hook_uri(non_existing_record_id), user) + get api(hook_uri(non_existing_record_id), user, admin_mode: user.admin?) expect(response).to have_gitlab_http_status(:not_found) end @@ -125,7 +125,7 @@ RSpec.shared_examples 'web-hook API endpoints' do |prefix| end it "has the correct alert status", :aggregate_failures do - get api(hook_uri, user) + get api(hook_uri, user, admin_mode: user.admin?) expect(response).to have_gitlab_http_status(:ok) @@ -140,7 +140,7 @@ RSpec.shared_examples 'web-hook API endpoints' do |prefix| end it "has the correct alert status", :aggregate_failures do - get api(hook_uri, user) + get api(hook_uri, user, admin_mode: user.admin?) expect(response).to have_gitlab_http_status(:ok) @@ -156,7 +156,7 @@ RSpec.shared_examples 'web-hook API endpoints' do |prefix| context "when user is forbidden" do it "does not access an existing hook" do - get api(hook_uri, unauthorized_user) + get api(hook_uri, unauthorized_user, admin_mode: true) expect(response).to have_gitlab_http_status(:forbidden) end @@ -171,12 +171,12 @@ RSpec.shared_examples 'web-hook API endpoints' do |prefix| end end - describe "POST #{prefix}/hooks" do + describe "POST #{prefix}/hooks", :aggregate_failures do let(:hook_creation_params) { hook_params } it "adds hook", :aggregate_failures do expect do - post api(collection_uri, user), + post api(collection_uri, user, admin_mode: user.admin?), params: hook_creation_params end.to change { hooks_count }.by(1) @@ -201,7 +201,7 @@ RSpec.shared_examples 'web-hook API endpoints' do |prefix| token = "secret token" expect do - post api(collection_uri, user), + post api(collection_uri, user, admin_mode: user.admin?), params: { url: "http://example.com", token: token } end.to change { hooks_count }.by(1) @@ -216,19 
+216,19 @@ RSpec.shared_examples 'web-hook API endpoints' do |prefix| end it "returns a 400 error if url not given" do - post api(collection_uri, user), params: { event_names.first => true } + post api(collection_uri, user, admin_mode: user.admin?), params: { event_names.first => true } expect(response).to have_gitlab_http_status(:bad_request) end it "returns a 400 error if no parameters are provided" do - post api(collection_uri, user) + post api(collection_uri, user, admin_mode: user.admin?) expect(response).to have_gitlab_http_status(:bad_request) end it 'sets default values for events', :aggregate_failures do - post api(collection_uri, user), params: { url: 'http://mep.mep' } + post api(collection_uri, user, admin_mode: user.admin?), params: { url: 'http://mep.mep' } expect(response).to have_gitlab_http_status(:created) expect(response).to match_hook_schema @@ -239,22 +239,22 @@ RSpec.shared_examples 'web-hook API endpoints' do |prefix| end it "returns a 422 error if token not valid" do - post api(collection_uri, user), + post api(collection_uri, user, admin_mode: user.admin?), params: { url: "http://example.com", token: "foo\nbar" } expect(response).to have_gitlab_http_status(:unprocessable_entity) end it "returns a 422 error if url not valid" do - post api(collection_uri, user), params: { url: "ftp://example.com" } + post api(collection_uri, user, admin_mode: user.admin?), params: { url: "ftp://example.com" } expect(response).to have_gitlab_http_status(:unprocessable_entity) end end - describe "PUT #{prefix}/hooks/:hook_id" do + describe "PUT #{prefix}/hooks/:hook_id", :aggregate_failures do it "updates an existing hook" do - put api(hook_uri, user), params: update_params + put api(hook_uri, user, admin_mode: user.admin?), params: update_params expect(response).to have_gitlab_http_status(:ok) expect(response).to match_hook_schema @@ -267,7 +267,7 @@ RSpec.shared_examples 'web-hook API endpoints' do |prefix| it 'updates the URL variables' do 
hook.update!(url_variables: { 'abc' => 'some value' }) - put api(hook_uri, user), + put api(hook_uri, user, admin_mode: user.admin?), params: { url_variables: [{ key: 'def', value: 'other value' }] } expect(response).to have_gitlab_http_status(:ok) @@ -280,7 +280,7 @@ RSpec.shared_examples 'web-hook API endpoints' do |prefix| it "adds the token without including it in the response" do token = "secret token" - put api(hook_uri, user), params: { url: "http://example.org", token: token } + put api(hook_uri, user, admin_mode: user.admin?), params: { url: "http://example.org", token: token } expect(response).to have_gitlab_http_status(:ok) expect(json_response["url"]).to eq("http://example.org") @@ -291,67 +291,67 @@ RSpec.shared_examples 'web-hook API endpoints' do |prefix| end it "returns 404 error if hook id not found" do - put api(hook_uri(non_existing_record_id), user), params: { url: 'http://example.org' } + put api(hook_uri(non_existing_record_id), user, admin_mode: user.admin?), params: { url: 'http://example.org' } expect(response).to have_gitlab_http_status(:not_found) end it "returns 400 error if no parameters are provided" do - put api(hook_uri, user) + put api(hook_uri, user, admin_mode: user.admin?) 
expect(response).to have_gitlab_http_status(:bad_request) end it "returns a 422 error if url is not valid" do - put api(hook_uri, user), params: { url: 'ftp://example.com' } + put api(hook_uri, user, admin_mode: user.admin?), params: { url: 'ftp://example.com' } expect(response).to have_gitlab_http_status(:unprocessable_entity) end it "returns a 422 error if token is not valid" do - put api(hook_uri, user), params: { token: %w[foo bar].join("\n") } + put api(hook_uri, user, admin_mode: user.admin?), params: { token: %w[foo bar].join("\n") } expect(response).to have_gitlab_http_status(:unprocessable_entity) end end - describe "DELETE /projects/:id/hooks/:hook_id" do + describe "DELETE /projects/:id/hooks/:hook_id", :aggregate_failures do it "deletes hook from project" do expect do - delete api(hook_uri, user) + delete api(hook_uri, user, admin_mode: user.admin?) expect(response).to have_gitlab_http_status(:no_content) end.to change { hooks_count }.by(-1) end it "returns a 404 error when deleting non existent hook" do - delete api(hook_uri(non_existing_record_id), user) + delete api(hook_uri(non_existing_record_id), user, admin_mode: user.admin?) expect(response).to have_gitlab_http_status(:not_found) end it "returns a 404 error if hook id not given" do - delete api(collection_uri, user) + delete api(collection_uri, user, admin_mode: user.admin?) expect(response).to have_gitlab_http_status(:not_found) end it "returns forbidden if a user attempts to delete hooks they do not own" do - delete api(hook_uri, unauthorized_user) + delete api(hook_uri, unauthorized_user, admin_mode: true) expect(response).to have_gitlab_http_status(:forbidden) expect(WebHook.exists?(hook.id)).to be_truthy end it_behaves_like '412 response' do - let(:request) { api(hook_uri, user) } + let(:request) { api(hook_uri, user, admin_mode: user.admin?) 
} end end describe "PUT #{prefix}/hooks/:hook_id/url_variables/:key", :aggregate_failures do it 'sets the variable' do expect do - put api("#{hook_uri}/url_variables/abc", user), + put api("#{hook_uri}/url_variables/abc", user, admin_mode: user.admin?), params: { value: 'some secret value' } end.to change { hook.reload.url_variables }.to(eq('abc' => 'some secret value')) @@ -361,7 +361,7 @@ RSpec.shared_examples 'web-hook API endpoints' do |prefix| it 'overwrites existing values' do hook.update!(url_variables: { 'abc' => 'xyz', 'def' => 'other value' }) - put api("#{hook_uri}/url_variables/abc", user), + put api("#{hook_uri}/url_variables/abc", user, admin_mode: user.admin?), params: { value: 'some secret value' } expect(response).to have_gitlab_http_status(:no_content) @@ -369,21 +369,21 @@ RSpec.shared_examples 'web-hook API endpoints' do |prefix| end it "returns a 404 error when editing non existent hook" do - put api("#{hook_uri(non_existing_record_id)}/url_variables/abc", user), + put api("#{hook_uri(non_existing_record_id)}/url_variables/abc", user, admin_mode: user.admin?), params: { value: 'xyz' } expect(response).to have_gitlab_http_status(:not_found) end it "returns a 422 error when the key is illegal" do - put api("#{hook_uri}/url_variables/abc%20def", user), + put api("#{hook_uri}/url_variables/abc%20def", user, admin_mode: user.admin?), params: { value: 'xyz' } expect(response).to have_gitlab_http_status(:unprocessable_entity) end it "returns a 422 error when the value is illegal" do - put api("#{hook_uri}/url_variables/abc", user), + put api("#{hook_uri}/url_variables/abc", user, admin_mode: user.admin?), params: { value: '' } expect(response).to have_gitlab_http_status(:unprocessable_entity) @@ -397,7 +397,7 @@ RSpec.shared_examples 'web-hook API endpoints' do |prefix| it 'unsets the variable' do expect do - delete api("#{hook_uri}/url_variables/abc", user) + delete api("#{hook_uri}/url_variables/abc", user, admin_mode: user.admin?) 
end.to change { hook.reload.url_variables }.to(eq({ 'def' => 'other value' })) expect(response).to have_gitlab_http_status(:no_content) @@ -406,13 +406,13 @@ RSpec.shared_examples 'web-hook API endpoints' do |prefix| it 'returns 404 for keys that do not exist' do hook.update!(url_variables: { 'def' => 'other value' }) - delete api("#{hook_uri}/url_variables/abc", user) + delete api("#{hook_uri}/url_variables/abc", user, admin_mode: user.admin?) expect(response).to have_gitlab_http_status(:not_found) end it "returns a 404 error when deleting a variable from a non existent hook" do - delete api(hook_uri(non_existing_record_id) + "/url_variables/abc", user) + delete api(hook_uri(non_existing_record_id) + "/url_variables/abc", user, admin_mode: user.admin?) expect(response).to have_gitlab_http_status(:not_found) end diff --git a/spec/support/shared_examples/requests/api/issuable_update_shared_examples.rb b/spec/support/shared_examples/requests/api/issuable_update_shared_examples.rb index 1045a92f332..e2c9874e7fc 100644 --- a/spec/support/shared_examples/requests/api/issuable_update_shared_examples.rb +++ b/spec/support/shared_examples/requests/api/issuable_update_shared_examples.rb @@ -34,5 +34,14 @@ RSpec.shared_examples 'issuable update endpoint' do expect(json_response['labels']).to include '&' expect(json_response['labels']).to include '?' 
end + + it 'clears milestone when milestone_id=0' do + entity.update!(milestone: milestone) + + put api(url, user), params: { milestone_id: 0 } + + expect(response).to have_gitlab_http_status(:ok) + expect(json_response['milestone']).to be_nil + end end end diff --git a/spec/support/shared_examples/requests/api/notes_shared_examples.rb b/spec/support/shared_examples/requests/api/notes_shared_examples.rb index efe5ed3bcf9..1299899ecd2 100644 --- a/spec/support/shared_examples/requests/api/notes_shared_examples.rb +++ b/spec/support/shared_examples/requests/api/notes_shared_examples.rb @@ -1,7 +1,7 @@ # frozen_string_literal: true RSpec.shared_examples 'noteable API' do |parent_type, noteable_type, id_name| - describe "GET /#{parent_type}/:id/#{noteable_type}/:noteable_id/notes" do + describe "GET /#{parent_type}/:id/#{noteable_type}/:noteable_id/notes", :aggregate_failures do context 'sorting' do before do params = { noteable: noteable, author: user } @@ -12,7 +12,7 @@ RSpec.shared_examples 'noteable API' do |parent_type, noteable_type, id_name| context 'without sort params' do it 'sorts by created_at in descending order by default' do - get api("/#{parent_type}/#{parent.id}/#{noteable_type}/#{noteable[id_name]}/notes", user) + get api("/#{parent_type}/#{parent.id}/#{noteable_type}/#{noteable[id_name]}/notes", user, admin_mode: user.admin?) response_dates = json_response.map { |note| note['created_at'] } @@ -23,7 +23,7 @@ RSpec.shared_examples 'noteable API' do |parent_type, noteable_type, id_name| it 'fetches notes using parent path as id paremeter' do parent_id = CGI.escape(parent.full_path) - get api("/#{parent_type}/#{parent_id}/#{noteable_type}/#{noteable[id_name]}/notes", user) + get api("/#{parent_type}/#{parent_id}/#{noteable_type}/#{noteable[id_name]}/notes", user, admin_mode: user.admin?) 
expect(response).to have_gitlab_http_status(:ok) end @@ -40,7 +40,7 @@ RSpec.shared_examples 'noteable API' do |parent_type, noteable_type, id_name| end it 'page breaks first page correctly' do - get api("/#{parent_type}/#{parent.id}/#{noteable_type}/#{noteable[id_name]}/notes?per_page=4", user) + get api("/#{parent_type}/#{parent.id}/#{noteable_type}/#{noteable[id_name]}/notes?per_page=4", user, admin_mode: user.admin?) response_ids = json_response.map { |note| note['id'] } @@ -49,7 +49,7 @@ RSpec.shared_examples 'noteable API' do |parent_type, noteable_type, id_name| end it 'page breaks second page correctly' do - get api("/#{parent_type}/#{parent.id}/#{noteable_type}/#{noteable[id_name]}/notes?per_page=4&page=2", user) + get api("/#{parent_type}/#{parent.id}/#{noteable_type}/#{noteable[id_name]}/notes?per_page=4&page=2", user, admin_mode: user.admin?) response_ids = json_response.map { |note| note['id'] } @@ -60,7 +60,7 @@ RSpec.shared_examples 'noteable API' do |parent_type, noteable_type, id_name| end it 'sorts by ascending order when requested' do - get api("/#{parent_type}/#{parent.id}/#{noteable_type}/#{noteable[id_name]}/notes?sort=asc", user) + get api("/#{parent_type}/#{parent.id}/#{noteable_type}/#{noteable[id_name]}/notes?sort=asc", user, admin_mode: user.admin?) response_dates = json_response.map { |note| note['created_at'] } @@ -69,7 +69,7 @@ RSpec.shared_examples 'noteable API' do |parent_type, noteable_type, id_name| end it 'sorts by updated_at in descending order when requested' do - get api("/#{parent_type}/#{parent.id}/#{noteable_type}/#{noteable[id_name]}/notes?order_by=updated_at", user) + get api("/#{parent_type}/#{parent.id}/#{noteable_type}/#{noteable[id_name]}/notes?order_by=updated_at", user, admin_mode: user.admin?) 
response_dates = json_response.map { |note| note['updated_at'] } @@ -78,7 +78,7 @@ RSpec.shared_examples 'noteable API' do |parent_type, noteable_type, id_name| end it 'sorts by updated_at in ascending order when requested' do - get api("/#{parent_type}/#{parent.id}/#{noteable_type}/#{noteable[id_name]}/notes?order_by=updated_at&sort=asc", user) + get api("/#{parent_type}/#{parent.id}/#{noteable_type}/#{noteable[id_name]}/notes?order_by=updated_at&sort=asc", user, admin_mode: user.admin?) response_dates = json_response.map { |note| note['updated_at'] } @@ -88,7 +88,7 @@ RSpec.shared_examples 'noteable API' do |parent_type, noteable_type, id_name| end it "returns an array of notes" do - get api("/#{parent_type}/#{parent.id}/#{noteable_type}/#{noteable[id_name]}/notes", user) + get api("/#{parent_type}/#{parent.id}/#{noteable_type}/#{noteable[id_name]}/notes", user, admin_mode: user.admin?) expect(response).to have_gitlab_http_status(:ok) expect(response).to include_pagination_headers @@ -97,7 +97,7 @@ RSpec.shared_examples 'noteable API' do |parent_type, noteable_type, id_name| end it "returns a 404 error when noteable id not found" do - get api("/#{parent_type}/#{parent.id}/#{noteable_type}/#{non_existing_record_id}/notes", user) + get api("/#{parent_type}/#{parent.id}/#{noteable_type}/#{non_existing_record_id}/notes", user, admin_mode: user.admin?) expect(response).to have_gitlab_http_status(:not_found) end @@ -105,36 +105,36 @@ RSpec.shared_examples 'noteable API' do |parent_type, noteable_type, id_name| it "returns 404 when not authorized" do parent.update!(visibility_level: Gitlab::VisibilityLevel::PRIVATE) - get api("/#{parent_type}/#{parent.id}/#{noteable_type}/#{noteable[id_name]}/notes", private_user) + get api("/#{parent_type}/#{parent.id}/#{noteable_type}/#{noteable[id_name]}/notes", private_user, admin_mode: private_user.admin?) 
expect(response).to have_gitlab_http_status(:not_found) end end - describe "GET /#{parent_type}/:id/#{noteable_type}/:noteable_id/notes/:note_id" do + describe "GET /#{parent_type}/:id/#{noteable_type}/:noteable_id/notes/:note_id", :aggregate_failures do it "returns a note by id" do - get api("/#{parent_type}/#{parent.id}/#{noteable_type}/#{noteable[id_name]}/notes/#{note.id}", user) + get api("/#{parent_type}/#{parent.id}/#{noteable_type}/#{noteable[id_name]}/notes/#{note.id}", user, admin_mode: user.admin?) expect(response).to have_gitlab_http_status(:ok) expect(json_response['body']).to eq(note.note) end it "returns a 404 error if note not found" do - get api("/#{parent_type}/#{parent.id}/#{noteable_type}/#{noteable[id_name]}/notes/#{non_existing_record_id}", user) + get api("/#{parent_type}/#{parent.id}/#{noteable_type}/#{noteable[id_name]}/notes/#{non_existing_record_id}", user, admin_mode: user.admin?) expect(response).to have_gitlab_http_status(:not_found) end end - describe "POST /#{parent_type}/:id/#{noteable_type}/:noteable_id/notes" do + describe "POST /#{parent_type}/:id/#{noteable_type}/:noteable_id/notes", :aggregate_failures do let(:params) { { body: 'hi!' } } subject do - post api("/#{parent_type}/#{parent.id}/#{noteable_type}/#{noteable[id_name]}/notes", user), params: params + post api("/#{parent_type}/#{parent.id}/#{noteable_type}/#{noteable[id_name]}/notes", user, admin_mode: user.admin?), params: params end it "creates a new note" do - post api("/#{parent_type}/#{parent.id}/#{noteable_type}/#{noteable[id_name]}/notes", user), params: { body: 'hi!' } + post api("/#{parent_type}/#{parent.id}/#{noteable_type}/#{noteable[id_name]}/notes", user, admin_mode: user.admin?), params: { body: 'hi!' 
} expect(response).to have_gitlab_http_status(:created) expect(json_response['body']).to eq('hi!') @@ -143,7 +143,7 @@ RSpec.shared_examples 'noteable API' do |parent_type, noteable_type, id_name| end it "returns a 400 bad request error if body not given" do - post api("/#{parent_type}/#{parent.id}/#{noteable_type}/#{noteable[id_name]}/notes", user) + post api("/#{parent_type}/#{parent.id}/#{noteable_type}/#{noteable[id_name]}/notes", user, admin_mode: user.admin?) expect(response).to have_gitlab_http_status(:bad_request) end @@ -158,7 +158,7 @@ RSpec.shared_examples 'noteable API' do |parent_type, noteable_type, id_name| uri = "/#{parent_type}/#{parent.id}/#{noteable_type}/#{noteable[id_name]}/notes" expect do - post api(uri, user), params: { body: 'hi!' } + post api(uri, user, admin_mode: user.admin?), params: { body: 'hi!' } end.to change { Event.count }.by(1) end @@ -169,7 +169,7 @@ RSpec.shared_examples 'noteable API' do |parent_type, noteable_type, id_name| context 'by an admin' do it 'sets the creation time on the new note' do admin = create(:admin) - post api("/#{parent_type}/#{parent.id}/#{noteable_type}/#{noteable[id_name]}/notes", admin), params: params + post api("/#{parent_type}/#{parent.id}/#{noteable_type}/#{noteable[id_name]}/notes", admin, admin_mode: true), params: params expect(response).to have_gitlab_http_status(:created) expect(json_response['body']).to eq('hi!') @@ -185,7 +185,7 @@ RSpec.shared_examples 'noteable API' do |parent_type, noteable_type, id_name| let(:user) { project.first_owner } it 'sets the creation time on the new note' do - post api("/#{parent_type}/#{parent.id}/#{noteable_type}/#{noteable[id_name]}/notes", user), params: params + post api("/#{parent_type}/#{parent.id}/#{noteable_type}/#{noteable[id_name]}/notes", user, admin_mode: user.admin?), params: params expect(response).to have_gitlab_http_status(:created) expect(json_response['body']).to eq('hi!') @@ -215,7 +215,7 @@ RSpec.shared_examples 'noteable API' do 
|parent_type, noteable_type, id_name| when 'groups' context 'by a group owner' do it 'sets the creation time on the new note' do - post api("/#{parent_type}/#{parent.id}/#{noteable_type}/#{noteable[id_name]}/notes", user), params: params + post api("/#{parent_type}/#{parent.id}/#{noteable_type}/#{noteable[id_name]}/notes", user, admin_mode: user.admin?), params: params expect(response).to have_gitlab_http_status(:created) expect(json_response['body']).to eq('hi!') @@ -253,7 +253,7 @@ RSpec.shared_examples 'noteable API' do |parent_type, noteable_type, id_name| context 'when the user is posting an award emoji on their own noteable' do it 'creates a new note' do - post api("/#{parent_type}/#{parent.id}/#{noteable_type}/#{noteable[id_name]}/notes", user), params: { body: ':+1:' } + post api("/#{parent_type}/#{parent.id}/#{noteable_type}/#{noteable[id_name]}/notes", user, admin_mode: user.admin?), params: { body: ':+1:' } expect(response).to have_gitlab_http_status(:created) expect(json_response['body']).to eq(':+1:') @@ -266,7 +266,7 @@ RSpec.shared_examples 'noteable API' do |parent_type, noteable_type, id_name| end it 'responds with 404' do - post api("/#{parent_type}/#{parent.id}/#{noteable_type}/#{noteable[id_name]}/notes", private_user), + post api("/#{parent_type}/#{parent.id}/#{noteable_type}/#{noteable[id_name]}/notes", private_user, admin_mode: private_user.admin?), params: { body: 'Foo' } expect(response).to have_gitlab_http_status(:not_found) @@ -299,11 +299,11 @@ RSpec.shared_examples 'noteable API' do |parent_type, noteable_type, id_name| end end - describe "PUT /#{parent_type}/:id/#{noteable_type}/:noteable_id/notes/:note_id" do + describe "PUT /#{parent_type}/:id/#{noteable_type}/:noteable_id/notes/:note_id", :aggregate_failures do let(:params) { { body: 'Hello!' 
} } subject do - put api("/#{parent_type}/#{parent.id}/#{noteable_type}/#{noteable[id_name]}/notes/#{note.id}", user), params: params + put api("/#{parent_type}/#{parent.id}/#{noteable_type}/#{noteable[id_name]}/notes/#{note.id}", user, admin_mode: user.admin?), params: params end context 'when only body param is present' do @@ -329,7 +329,7 @@ RSpec.shared_examples 'noteable API' do |parent_type, noteable_type, id_name| end it 'returns a 404 error when note id not found' do - put api("/#{parent_type}/#{parent.id}/#{noteable_type}/#{noteable[id_name]}/notes/#{non_existing_record_id}", user), + put api("/#{parent_type}/#{parent.id}/#{noteable_type}/#{noteable[id_name]}/notes/#{non_existing_record_id}", user, admin_mode: user.admin?), params: { body: 'Hello!' } expect(response).to have_gitlab_http_status(:not_found) @@ -337,32 +337,32 @@ RSpec.shared_examples 'noteable API' do |parent_type, noteable_type, id_name| it 'returns a 400 bad request error if body is empty' do put api("/#{parent_type}/#{parent.id}/#{noteable_type}/#{noteable[id_name]}/"\ - "notes/#{note.id}", user), params: { body: '' } + "notes/#{note.id}", user, admin_mode: user.admin?), params: { body: '' } expect(response).to have_gitlab_http_status(:bad_request) end end - describe "DELETE /#{parent_type}/:id/#{noteable_type}/:noteable_id/notes/:note_id" do + describe "DELETE /#{parent_type}/:id/#{noteable_type}/:noteable_id/notes/:note_id", :aggregate_failures do it 'deletes a note' do delete api("/#{parent_type}/#{parent.id}/#{noteable_type}/#{noteable[id_name]}/"\ - "notes/#{note.id}", user) + "notes/#{note.id}", user, admin_mode: user.admin?) expect(response).to have_gitlab_http_status(:no_content) # Check if note is really deleted delete api("/#{parent_type}/#{parent.id}/#{noteable_type}/#{noteable[id_name]}/"\ - "notes/#{note.id}", user) + "notes/#{note.id}", user, admin_mode: user.admin?) 
expect(response).to have_gitlab_http_status(:not_found) end it 'returns a 404 error when note id not found' do - delete api("/#{parent_type}/#{parent.id}/#{noteable_type}/#{noteable[id_name]}/notes/#{non_existing_record_id}", user) + delete api("/#{parent_type}/#{parent.id}/#{noteable_type}/#{noteable[id_name]}/notes/#{non_existing_record_id}", user, admin_mode: user.admin?) expect(response).to have_gitlab_http_status(:not_found) end it_behaves_like '412 response' do - let(:request) { api("/#{parent_type}/#{parent.id}/#{noteable_type}/#{noteable[id_name]}/notes/#{note.id}", user) } + let(:request) { api("/#{parent_type}/#{parent.id}/#{noteable_type}/#{noteable[id_name]}/notes/#{note.id}", user, admin_mode: user.admin?) } end end end @@ -370,16 +370,16 @@ end RSpec.shared_examples 'noteable API with confidential notes' do |parent_type, noteable_type, id_name| it_behaves_like 'noteable API', parent_type, noteable_type, id_name - describe "POST /#{parent_type}/:id/#{noteable_type}/:noteable_id/notes" do + describe "POST /#{parent_type}/:id/#{noteable_type}/:noteable_id/notes", :aggregate_failures do let(:params) { { body: 'hi!' 
} } subject do - post api("/#{parent_type}/#{parent.id}/#{noteable_type}/#{noteable[id_name]}/notes", user), params: params + post api("/#{parent_type}/#{parent.id}/#{noteable_type}/#{noteable[id_name]}/notes", user, admin_mode: user.admin?), params: params end context 'with internal param' do it "creates a confidential note if internal is set to true" do - post api("/#{parent_type}/#{parent.id}/#{noteable_type}/#{noteable[id_name]}/notes", user), params: params.merge(internal: true) + post api("/#{parent_type}/#{parent.id}/#{noteable_type}/#{noteable[id_name]}/notes", user, admin_mode: user.admin?), params: params.merge(internal: true) expect(response).to have_gitlab_http_status(:created) expect(json_response['body']).to eq('hi!') @@ -391,7 +391,7 @@ RSpec.shared_examples 'noteable API with confidential notes' do |parent_type, no context 'with deprecated confidential param' do it "creates a confidential note if confidential is set to true" do - post api("/#{parent_type}/#{parent.id}/#{noteable_type}/#{noteable[id_name]}/notes", user), params: params.merge(confidential: true) + post api("/#{parent_type}/#{parent.id}/#{noteable_type}/#{noteable[id_name]}/notes", user, admin_mode: user.admin?), params: params.merge(confidential: true) expect(response).to have_gitlab_http_status(:created) expect(json_response['body']).to eq('hi!') diff --git a/spec/support/shared_examples/requests/api/npm_packages_shared_examples.rb b/spec/support/shared_examples/requests/api/npm_packages_shared_examples.rb index ace76b5ef84..f53532d00d7 100644 --- a/spec/support/shared_examples/requests/api/npm_packages_shared_examples.rb +++ b/spec/support/shared_examples/requests/api/npm_packages_shared_examples.rb @@ -725,77 +725,66 @@ RSpec.shared_examples 'handling different package names, visibilities and user r role = action == :create ? 
:developer : :maintainer where(:auth, :package_name_type, :visibility, :user_role, :expected_result, :expected_status) do - :oauth | :scoped_naming_convention | :public | nil | :reject | :forbidden + nil | :scoped_naming_convention | :public | nil | :reject | :unauthorized + nil | :scoped_no_naming_convention | :public | nil | :reject | :unauthorized + nil | :unscoped | :public | nil | :reject | :unauthorized + nil | :non_existing | :public | nil | :reject | :unauthorized + nil | :scoped_naming_convention | :private | nil | :reject | :unauthorized + nil | :scoped_no_naming_convention | :private | nil | :reject | :unauthorized + nil | :unscoped | :private | nil | :reject | :unauthorized + nil | :non_existing | :private | nil | :reject | :unauthorized + nil | :scoped_naming_convention | :internal | nil | :reject | :unauthorized + nil | :scoped_no_naming_convention | :internal | nil | :reject | :unauthorized + nil | :unscoped | :internal | nil | :reject | :unauthorized + nil | :non_existing | :internal | nil | :reject | :unauthorized + :oauth | :scoped_naming_convention | :public | :guest | :reject | :forbidden :oauth | :scoped_naming_convention | :public | role | :accept | :ok - :oauth | :scoped_no_naming_convention | :public | nil | :reject | :forbidden :oauth | :scoped_no_naming_convention | :public | :guest | :reject | :forbidden :oauth | :scoped_no_naming_convention | :public | role | :accept | :ok - :oauth | :unscoped | :public | nil | :reject | :forbidden :oauth | :unscoped | :public | :guest | :reject | :forbidden :oauth | :unscoped | :public | role | :accept | :ok - :oauth | :non_existing | :public | nil | :reject | :forbidden :oauth | :non_existing | :public | :guest | :reject | :forbidden :oauth | :non_existing | :public | role | :reject | :not_found - :oauth | :scoped_naming_convention | :private | nil | :reject | :not_found :oauth | :scoped_naming_convention | :private | :guest | :reject | :forbidden :oauth | :scoped_naming_convention | :private | role | 
:accept | :ok - :oauth | :scoped_no_naming_convention | :private | nil | :reject | :not_found :oauth | :scoped_no_naming_convention | :private | :guest | :reject | :forbidden :oauth | :scoped_no_naming_convention | :private | role | :accept | :ok - :oauth | :unscoped | :private | nil | :reject | :not_found :oauth | :unscoped | :private | :guest | :reject | :forbidden :oauth | :unscoped | :private | role | :accept | :ok - :oauth | :non_existing | :private | nil | :reject | :not_found :oauth | :non_existing | :private | :guest | :reject | :forbidden :oauth | :non_existing | :private | role | :reject | :not_found - :oauth | :scoped_naming_convention | :internal | nil | :reject | :forbidden :oauth | :scoped_naming_convention | :internal | :guest | :reject | :forbidden :oauth | :scoped_naming_convention | :internal | role | :accept | :ok - :oauth | :scoped_no_naming_convention | :internal | nil | :reject | :forbidden :oauth | :scoped_no_naming_convention | :internal | :guest | :reject | :forbidden :oauth | :scoped_no_naming_convention | :internal | role | :accept | :ok - :oauth | :unscoped | :internal | nil | :reject | :forbidden :oauth | :unscoped | :internal | :guest | :reject | :forbidden :oauth | :unscoped | :internal | role | :accept | :ok - :oauth | :non_existing | :internal | nil | :reject | :forbidden :oauth | :non_existing | :internal | :guest | :reject | :forbidden :oauth | :non_existing | :internal | role | :reject | :not_found - :personal_access_token | :scoped_naming_convention | :public | nil | :reject | :forbidden :personal_access_token | :scoped_naming_convention | :public | :guest | :reject | :forbidden :personal_access_token | :scoped_naming_convention | :public | role | :accept | :ok - :personal_access_token | :scoped_no_naming_convention | :public | nil | :reject | :forbidden :personal_access_token | :scoped_no_naming_convention | :public | :guest | :reject | :forbidden :personal_access_token | :scoped_no_naming_convention | :public | role | :accept 
| :ok - :personal_access_token | :unscoped | :public | nil | :reject | :forbidden :personal_access_token | :unscoped | :public | :guest | :reject | :forbidden :personal_access_token | :unscoped | :public | role | :accept | :ok - :personal_access_token | :non_existing | :public | nil | :reject | :forbidden :personal_access_token | :non_existing | :public | :guest | :reject | :forbidden :personal_access_token | :non_existing | :public | role | :reject | :not_found - :personal_access_token | :scoped_naming_convention | :private | nil | :reject | :not_found :personal_access_token | :scoped_naming_convention | :private | :guest | :reject | :forbidden :personal_access_token | :scoped_naming_convention | :private | role | :accept | :ok - :personal_access_token | :scoped_no_naming_convention | :private | nil | :reject | :not_found :personal_access_token | :scoped_no_naming_convention | :private | :guest | :reject | :forbidden :personal_access_token | :scoped_no_naming_convention | :private | role | :accept | :ok - :personal_access_token | :unscoped | :private | nil | :reject | :not_found :personal_access_token | :unscoped | :private | :guest | :reject | :forbidden :personal_access_token | :unscoped | :private | role | :accept | :ok - :personal_access_token | :non_existing | :private | nil | :reject | :not_found :personal_access_token | :non_existing | :private | :guest | :reject | :forbidden :personal_access_token | :non_existing | :private | role | :reject | :not_found - :personal_access_token | :scoped_naming_convention | :internal | nil | :reject | :forbidden :personal_access_token | :scoped_naming_convention | :internal | :guest | :reject | :forbidden :personal_access_token | :scoped_naming_convention | :internal | role | :accept | :ok - :personal_access_token | :scoped_no_naming_convention | :internal | nil | :reject | :forbidden :personal_access_token | :scoped_no_naming_convention | :internal | :guest | :reject | :forbidden :personal_access_token | 
:scoped_no_naming_convention | :internal | role | :accept | :ok - :personal_access_token | :unscoped | :internal | nil | :reject | :forbidden :personal_access_token | :unscoped | :internal | :guest | :reject | :forbidden :personal_access_token | :unscoped | :internal | role | :accept | :ok - :personal_access_token | :non_existing | :internal | nil | :reject | :forbidden :personal_access_token | :non_existing | :internal | :guest | :reject | :forbidden :personal_access_token | :non_existing | :internal | role | :reject | :not_found @@ -837,6 +826,8 @@ RSpec.shared_examples 'handling different package names, visibilities and user r build_token_auth_header(job.token) when :deploy_token build_token_auth_header(deploy_token.token) + else + {} end end @@ -850,7 +841,9 @@ RSpec.shared_examples 'handling different package names, visibilities and user r if scope == :instance && params[:package_name_type] != :scoped_naming_convention example_name = "reject #{action} package tag request" - status = :not_found + # Due to #authenticate_non_get, anonymous requests on private resources + # are rejected with unauthorized status + status = params[:auth].nil? ? 
:unauthorized : :not_found end it_behaves_like example_name, status: status diff --git a/spec/support/shared_examples/requests/api/npm_packages_tags_shared_examples.rb b/spec/support/shared_examples/requests/api/npm_packages_tags_shared_examples.rb index 1d79a61fbb0..17e48d6b581 100644 --- a/spec/support/shared_examples/requests/api/npm_packages_tags_shared_examples.rb +++ b/spec/support/shared_examples/requests/api/npm_packages_tags_shared_examples.rb @@ -23,6 +23,7 @@ RSpec.shared_examples 'accept package tags request' do |status:| end it_behaves_like 'returning response status', status + it_behaves_like 'track event', :list_tags it 'returns a valid json response' do subject @@ -63,6 +64,7 @@ RSpec.shared_examples 'accept create package tag request' do |user_type| end it_behaves_like 'returning response status', :no_content + it_behaves_like 'track event', :create_tag it 'creates the package tag' do expect { subject }.to change { Packages::Tag.count }.by(1) @@ -145,6 +147,7 @@ RSpec.shared_examples 'accept delete package tag request' do |user_type| end it_behaves_like 'returning response status', :no_content + it_behaves_like 'track event', :delete_tag it 'returns a valid response' do subject @@ -190,3 +193,21 @@ RSpec.shared_examples 'accept delete package tag request' do |user_type| end end end + +RSpec.shared_examples 'track event' do |event_name| + let(:event_user) do + if auth == :deploy_token + deploy_token + elsif user_role + user + end + end + + let(:snowplow_gitlab_standard_context) do + { project: project, namespace: project.namespace, property: 'i_package_npm_user' }.tap do |context| + context[:user] = event_user if event_user + end + end + + it_behaves_like 'a package tracking event', described_class.name, event_name.to_s +end diff --git a/spec/support/shared_examples/requests/api/packages_shared_examples.rb b/spec/support/shared_examples/requests/api/packages_shared_examples.rb index 98264baa61d..1f2450c864b 100644 --- 
a/spec/support/shared_examples/requests/api/packages_shared_examples.rb +++ b/spec/support/shared_examples/requests/api/packages_shared_examples.rb @@ -143,17 +143,13 @@ RSpec.shared_examples 'job token for package uploads' do |authorize_endpoint: fa end RSpec.shared_examples 'a package tracking event' do |category, action, service_ping_context = true| - before do - stub_feature_flags(collect_package_events: true) - end - let(:context) do [Gitlab::Tracking::ServicePingContext.new(data_source: :redis_hll, event: snowplow_gitlab_standard_context[:property]).to_h] end it "creates a gitlab tracking event #{action}", :snowplow, :aggregate_failures do - expect { subject }.to change { Packages::Event.count }.by(1) + subject if service_ping_context expect_snowplow_event(category: category, action: action, @@ -166,12 +162,8 @@ RSpec.shared_examples 'a package tracking event' do |category, action, service_p end RSpec.shared_examples 'not a package tracking event' do - before do - stub_feature_flags(collect_package_events: true) - end - it 'does not create a gitlab tracking event', :snowplow, :aggregate_failures do - expect { subject }.not_to change { Packages::Event.count } + subject expect_no_snowplow_event end @@ -183,3 +175,15 @@ RSpec.shared_examples 'bumping the package last downloaded at field' do .to change { package.reload.last_downloaded_at }.from(nil).to(instance_of(ActiveSupport::TimeWithZone)) end end + +RSpec.shared_examples 'a successful package creation' do + it 'creates npm package with file' do + expect { subject } + .to change { project.packages.count }.by(1) + .and change { Packages::PackageFile.count }.by(1) + .and change { Packages::Tag.count }.by(1) + .and change { Packages::Npm::Metadatum.count }.by(1) + + expect(response).to have_gitlab_http_status(:ok) + end +end diff --git a/spec/support/shared_examples/requests/api/pipelines/visibility_table_shared_examples.rb b/spec/support/shared_examples/requests/api/pipelines/visibility_table_shared_examples.rb 
index 8dd2ef6ccc6..9847ea4e1e2 100644 --- a/spec/support/shared_examples/requests/api/pipelines/visibility_table_shared_examples.rb +++ b/spec/support/shared_examples/requests/api/pipelines/visibility_table_shared_examples.rb @@ -224,10 +224,10 @@ RSpec.shared_examples 'pipelines visibility table' do project.project_feature.update!(project_feature_attributes) project.add_role(ci_user, user_role) if user_role && user_role != :non_member - get api(pipelines_api_path, api_user) + get api(pipelines_api_path, api_user, admin_mode: is_admin) end - it do + specify do expect(response).to have_gitlab_http_status(response_status) expect(api_response).to match(expected_response) end diff --git a/spec/support/shared_examples/requests/api/repository_storage_moves_shared_examples.rb b/spec/support/shared_examples/requests/api/repository_storage_moves_shared_examples.rb index 2154a76d765..7df8d6a513d 100644 --- a/spec/support/shared_examples/requests/api/repository_storage_moves_shared_examples.rb +++ b/spec/support/shared_examples/requests/api/repository_storage_moves_shared_examples.rb @@ -9,7 +9,7 @@ RSpec.shared_examples 'repository_storage_moves API' do |container_type| let(:repository_storage_move_id) { storage_move.id } def get_container_repository_storage_move - get api(url, user) + get api(url, user, admin_mode: user.admin?) end it 'returns a container repository storage move', :aggregate_failures do @@ -39,7 +39,7 @@ RSpec.shared_examples 'repository_storage_moves API' do |container_type| shared_examples 'get container repository storage move list' do def get_container_repository_storage_moves - get api(url, user) + get api(url, user, admin_mode: user.admin?) end it 'returns container repository storage moves', :aggregate_failures do @@ -90,7 +90,7 @@ RSpec.shared_examples 'repository_storage_moves API' do |container_type| let(:container_id) { non_existing_record_id } it 'returns not found' do - get api(url, user) + get api(url, user, admin_mode: user.admin?) 
expect(response).to have_gitlab_http_status(:not_found) end @@ -108,7 +108,7 @@ RSpec.shared_examples 'repository_storage_moves API' do |container_type| let(:repository_storage_move_id) { storage_move.id } it 'returns not found' do - get api(url, user) + get api(url, user, admin_mode: user.admin?) expect(response).to have_gitlab_http_status(:not_found) end @@ -127,20 +127,20 @@ RSpec.shared_examples 'repository_storage_moves API' do |container_type| end end - describe "POST /#{container_type}/:id/repository_storage_moves" do + describe "POST /#{container_type}/:id/repository_storage_moves", :aggregate_failures do let(:container_id) { container.id } let(:url) { "/#{container_type}/#{container_id}/repository_storage_moves" } let(:destination_storage_name) { 'test_second_storage' } def create_container_repository_storage_move - post api(url, user), params: { destination_storage_name: destination_storage_name } + post api(url, user, admin_mode: user.admin?), params: { destination_storage_name: destination_storage_name } end before do stub_storage_settings('test_second_storage' => { 'path' => 'tmp/tests/extra_storage' }) end - it 'schedules a container repository storage move', :aggregate_failures do + it 'schedules a container repository storage move' do create_container_repository_storage_move storage_move = container.repository_storage_moves.last @@ -158,7 +158,7 @@ RSpec.shared_examples 'repository_storage_moves API' do |container_type| it { expect { create_container_repository_storage_move }.to be_denied_for(:user) } end - context 'destination_storage_name is missing', :aggregate_failures do + context 'destination_storage_name is missing' do let(:destination_storage_name) { nil } it 'schedules a container repository storage move' do @@ -192,7 +192,7 @@ RSpec.shared_examples 'repository_storage_moves API' do |container_type| let(:destination_storage_name) { 'test_second_storage' } def create_container_repository_storage_moves - post api(url, user), params: { + post 
api(url, user, admin_mode: user.admin?), params: { source_storage_name: source_storage_name, destination_storage_name: destination_storage_name } diff --git a/spec/support/shared_examples/requests/api/snippets_shared_examples.rb b/spec/support/shared_examples/requests/api/snippets_shared_examples.rb index 1b92eb56f54..5187609da25 100644 --- a/spec/support/shared_examples/requests/api/snippets_shared_examples.rb +++ b/spec/support/shared_examples/requests/api/snippets_shared_examples.rb @@ -1,12 +1,19 @@ # frozen_string_literal: true RSpec.shared_examples 'raw snippet files' do - let_it_be(:user_token) { create(:personal_access_token, user: snippet.author) } let(:snippet_id) { snippet.id } - let(:user) { snippet.author } + let_it_be(:user) { snippet.author } let(:file_path) { '%2Egitattributes' } let(:ref) { 'master' } + let_it_be(:user_token) do + if user.admin? + create(:personal_access_token, :admin_mode, user: user) + else + create(:personal_access_token, user: user) + end + end + subject { get api(api_path, personal_access_token: user_token) } context 'with an invalid snippet ID' do @@ -15,8 +22,10 @@ RSpec.shared_examples 'raw snippet files' do it 'returns 404' do subject - expect(response).to have_gitlab_http_status(:not_found) - expect(json_response['message']).to eq('404 Snippet Not Found') + aggregate_failures do + expect(response).to have_gitlab_http_status(:not_found) + expect(json_response['message']).to eq('404 Snippet Not Found') + end end end @@ -185,7 +194,7 @@ RSpec.shared_examples 'snippet individual non-file updates' do end RSpec.shared_examples 'invalid snippet updates' do - it 'returns 404 for invalid snippet id' do + it 'returns 404 for invalid snippet id', :aggregate_failures do update_snippet(snippet_id: non_existing_record_id, params: { title: 'foo' }) expect(response).to have_gitlab_http_status(:not_found) @@ -204,7 +213,7 @@ RSpec.shared_examples 'invalid snippet updates' do expect(response).to have_gitlab_http_status(:bad_request) end - 
it 'returns 400 if title is blank' do + it 'returns 400 if title is blank', :aggregate_failures do update_snippet(params: { title: '' }) expect(response).to have_gitlab_http_status(:bad_request) @@ -236,7 +245,9 @@ RSpec.shared_examples 'snippet access with different users' do it 'returns the correct response' do request_user = user_for(requester) - get api(path, request_user) + admin_mode = requester == :admin + + get api(path, request_user, admin_mode: admin_mode) expect(response).to have_gitlab_http_status(status) end diff --git a/spec/support/shared_examples/serializers/diff_file_entity_shared_examples.rb b/spec/support/shared_examples/serializers/diff_file_entity_shared_examples.rb index 32adf98969c..df01f9a5b0b 100644 --- a/spec/support/shared_examples/serializers/diff_file_entity_shared_examples.rb +++ b/spec/support/shared_examples/serializers/diff_file_entity_shared_examples.rb @@ -2,13 +2,15 @@ RSpec.shared_examples 'diff file base entity' do it 'exposes essential attributes' do - expect(subject).to include(:content_sha, :submodule, :submodule_link, - :submodule_tree_url, :old_path_html, - :new_path_html, :blob, :can_modify_blob, - :file_hash, :file_path, :old_path, :new_path, - :viewer, :diff_refs, :stored_externally, - :external_storage, :renamed_file, :deleted_file, - :a_mode, :b_mode, :new_file, :file_identifier_hash) + expect(subject).to include( + :content_sha, :submodule, :submodule_link, + :submodule_tree_url, :old_path_html, + :new_path_html, :blob, :can_modify_blob, + :file_hash, :file_path, :old_path, :new_path, + :viewer, :diff_refs, :stored_externally, + :external_storage, :renamed_file, :deleted_file, + :a_mode, :b_mode, :new_file, :file_identifier_hash + ) end # Converted diff files from GitHub import does not contain blob file @@ -30,13 +32,70 @@ RSpec.shared_examples 'diff file entity' do it_behaves_like 'diff file base entity' it 'exposes correct attributes' do - expect(subject).to include(:added_lines, :removed_lines, - 
:context_lines_path) + expect(subject).to include(:added_lines, :removed_lines, :context_lines_path) end - it 'includes viewer' do - expect(subject[:viewer].with_indifferent_access) + context 'when a viewer' do + let(:collapsed) { false } + let(:added_lines) { 1 } + let(:removed_lines) { 0 } + let(:highlighted_lines) { nil } + + before do + allow(diff_file).to receive(:diff_lines_for_serializer) + .and_return(highlighted_lines) + + allow(diff_file).to receive(:added_lines) + .and_return(added_lines) + + allow(diff_file).to receive(:removed_lines) + .and_return(removed_lines) + + allow(diff_file).to receive(:collapsed?) + .and_return(collapsed) + end + + it 'matches the schema' do + expect(subject[:viewer].with_indifferent_access) .to match_schema('entities/diff_viewer') + end + + context 'when it is a whitespace only change' do + it 'has whitespace_only true' do + expect(subject[:viewer][:whitespace_only]) + .to eq(true) + end + end + + context 'when the highlighted lines arent shown' do + before do + allow(diff_file).to receive(:text?) 
+ .and_return(false) + end + + it 'has whitespace_only nil' do + expect(subject[:viewer][:whitespace_only]) + .to eq(nil) + end + end + + context 'when it is a new file' do + let(:added_lines) { 0 } + + it 'has whitespace_only false' do + expect(subject[:viewer][:whitespace_only]) + .to eq(false) + end + end + + context 'when it is a collapsed file' do + let(:collapsed) { true } + + it 'has whitespace_only false' do + expect(subject[:viewer][:whitespace_only]) + .to eq(false) + end + end end context 'diff files' do diff --git a/spec/support/shared_examples/services/base_helm_service_shared_examples.rb b/spec/support/shared_examples/services/base_helm_service_shared_examples.rb deleted file mode 100644 index c2252c83140..00000000000 --- a/spec/support/shared_examples/services/base_helm_service_shared_examples.rb +++ /dev/null @@ -1,22 +0,0 @@ -# frozen_string_literal: true - -RSpec.shared_examples 'logs kubernetes errors' do - let(:error_hash) do - { - service: service.class.name, - app_id: application.id, - project_ids: application.cluster.project_ids, - group_ids: [], - error_code: error_code - } - end - - it 'logs into kubernetes.log and Sentry' do - expect(Gitlab::ErrorTracking).to receive(:track_exception).with( - error, - hash_including(error_hash) - ) - - service.execute - end -end diff --git a/spec/support/shared_examples/services/clusters/create_service_shared_examples.rb b/spec/support/shared_examples/services/clusters/create_service_shared_examples.rb new file mode 100644 index 00000000000..7cd76e45ecd --- /dev/null +++ b/spec/support/shared_examples/services/clusters/create_service_shared_examples.rb @@ -0,0 +1,28 @@ +# frozen_string_literal: true + +RSpec.shared_examples 'create cluster service success' do + it 'creates a cluster object' do + expect { subject } + .to change { Clusters::Cluster.count }.by(1) + .and change { Clusters::Providers::Gcp.count }.by(1) + + expect(subject.name).to eq('test-cluster') + expect(subject.user).to eq(user) + 
expect(subject.project).to eq(project) + expect(subject.provider.gcp_project_id).to eq('gcp-project') + expect(subject.provider.zone).to eq('us-central1-a') + expect(subject.provider.num_nodes).to eq(1) + expect(subject.provider.machine_type).to eq('machine_type-a') + expect(subject.provider.access_token).to eq(access_token) + expect(subject.provider).to be_legacy_abac + expect(subject.platform).to be_nil + expect(subject.namespace_per_environment).to eq true + end +end + +RSpec.shared_examples 'create cluster service error' do + it 'returns an error' do + expect { subject }.to change { Clusters::Cluster.count }.by(0) + expect(subject.errors[:"provider_gcp.gcp_project_id"]).to be_present + end +end diff --git a/spec/support/shared_examples/services/deploy_token_shared_examples.rb b/spec/support/shared_examples/services/deploy_token_shared_examples.rb new file mode 100644 index 00000000000..814b6565497 --- /dev/null +++ b/spec/support/shared_examples/services/deploy_token_shared_examples.rb @@ -0,0 +1,88 @@ +# frozen_string_literal: true + +RSpec.shared_examples 'a deploy token creation service' do + let(:user) { create(:user) } + let(:deploy_token_params) { attributes_for(:deploy_token) } + + describe '#execute' do + subject { described_class.new(entity, user, deploy_token_params).execute } + + context 'when the deploy token is valid' do + it 'creates a new DeployToken' do + expect { subject }.to change { DeployToken.count }.by(1) + end + + it 'creates a new ProjectDeployToken' do + expect { subject }.to change { deploy_token_class.count }.by(1) + end + + it 'returns a DeployToken' do + expect(subject[:deploy_token]).to be_an_instance_of DeployToken + end + + it 'sets the creator_id as the id of the current_user' do + expect(subject[:deploy_token].read_attribute(:creator_id)).to eq(user.id) + end + end + + context 'when expires at date is not passed' do + let(:deploy_token_params) { attributes_for(:deploy_token, expires_at: '') } + + it 'sets Forever.date' do + 
expect(subject[:deploy_token].read_attribute(:expires_at)).to eq(Forever.date) + end + end + + context 'when username is empty string' do + let(:deploy_token_params) { attributes_for(:deploy_token, username: '') } + + it 'converts it to nil' do + expect(subject[:deploy_token].read_attribute(:username)).to be_nil + end + end + + context 'when username is provided' do + let(:deploy_token_params) { attributes_for(:deploy_token, username: 'deployer') } + + it 'keeps the provided username' do + expect(subject[:deploy_token].read_attribute(:username)).to eq('deployer') + end + end + + context 'when the deploy token is invalid' do + let(:deploy_token_params) do + attributes_for(:deploy_token, read_repository: false, read_registry: false, write_registry: false) + end + + it 'does not create a new DeployToken' do + expect { subject }.not_to change { DeployToken.count } + end + + it 'does not create a new ProjectDeployToken' do + expect { subject }.not_to change { deploy_token_class.count } + end + end + end +end + +RSpec.shared_examples 'a deploy token deletion service' do + let(:user) { create(:user) } + let(:deploy_token_params) { { token_id: deploy_token.id } } + + describe '#execute' do + subject { described_class.new(entity, user, deploy_token_params).execute } + + it "destroys a token record and it's associated DeployToken" do + expect { subject }.to change { deploy_token_class.count }.by(-1) + .and change { DeployToken.count }.by(-1) + end + + context 'with invalid token id' do + let(:deploy_token_params) { { token_id: 9999 } } + + it 'raises an error' do + expect { subject }.to raise_error(ActiveRecord::RecordNotFound) + end + end + end +end diff --git a/spec/support/shared_examples/services/import_csv_service_shared_examples.rb b/spec/support/shared_examples/services/import_csv_service_shared_examples.rb new file mode 100644 index 00000000000..1555497ae48 --- /dev/null +++ b/spec/support/shared_examples/services/import_csv_service_shared_examples.rb @@ -0,0 +1,38 
@@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.shared_examples_for 'importer with email notification' do + it 'notifies user of import result' do + expect(Notify).to receive_message_chain(email_method, :deliver_later) + + subject + end +end + +RSpec.shared_examples 'correctly handles invalid files' do + shared_examples_for 'invalid file' do + it 'returns invalid file error' do + expect(subject[:success]).to eq(0) + expect(subject[:parse_error]).to eq(true) + end + end + + context 'when given file with unsupported extension' do + let(:file) { fixture_file_upload('spec/fixtures/banana_sample.gif') } + + it_behaves_like 'invalid file' + end + + context 'when given empty file' do + let(:file) { fixture_file_upload('spec/fixtures/csv_empty.csv') } + + it_behaves_like 'invalid file' + end + + context 'when given file without headers' do + let(:file) { fixture_file_upload('spec/fixtures/csv_no_headers.csv') } + + it_behaves_like 'invalid file' + end +end diff --git a/spec/support/shared_examples/services/issuable/issuable_description_quick_actions_shared_examples.rb b/spec/support/shared_examples/services/issuable/issuable_description_quick_actions_shared_examples.rb new file mode 100644 index 00000000000..1970301e4c9 --- /dev/null +++ b/spec/support/shared_examples/services/issuable/issuable_description_quick_actions_shared_examples.rb @@ -0,0 +1,62 @@ +# frozen_string_literal: true + +# Specifications for behavior common to all objects with executable attributes. +# It can take a `default_params`. + +RSpec.shared_examples 'issuable record that supports quick actions' do + let_it_be(:project) { create(:project, :repository) } + let_it_be(:user) { create(:user) } + let_it_be(:assignee) { create(:user) } + let_it_be(:milestone) { create(:milestone, project: project) } + let_it_be(:labels) { create_list(:label, 3, project: project) } + + let(:base_params) { { title: 'My issuable title' } } + let(:params) { base_params.merge(defined?(default_params) ? 
default_params : {}).merge(example_params) } + + before_all do + project.add_maintainer(user) + project.add_maintainer(assignee) + end + + before do + issuable.reload + end + + context 'with labels in command only' do + let(:example_params) do + { + description: "/label ~#{labels.first.name} ~#{labels.second.name}\n/unlabel ~#{labels.third.name}" + } + end + + it 'attaches labels to issuable' do + expect(issuable.label_ids).to match_array([labels.first.id, labels.second.id]) + end + end + + context 'with labels in params and command' do + let(:example_params) do + { + label_ids: [labels.second.id], + description: "/label ~#{labels.first.name}\n/unlabel ~#{labels.third.name}" + } + end + + it 'attaches all labels to issuable' do + expect(issuable.label_ids).to match_array([labels.first.id, labels.second.id]) + end + end + + context 'with assignee and milestone in command only' do + let(:example_params) do + { + description: %(/assign @#{assignee.username}\n/milestone %"#{milestone.name}") + } + end + + it 'assigns and sets milestone to issuable' do + expect(issuable.assignees).to eq([assignee]) + expect(issuable.milestone).to eq(milestone) + end + end +end diff --git a/spec/support/shared_examples/services/issuable/issuable_import_csv_service_shared_examples.rb b/spec/support/shared_examples/services/issuable/issuable_import_csv_service_shared_examples.rb new file mode 100644 index 00000000000..5336e0f4c2f --- /dev/null +++ b/spec/support/shared_examples/services/issuable/issuable_import_csv_service_shared_examples.rb @@ -0,0 +1,107 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.shared_examples 'issuable import csv service' do |issuable_type| + let_it_be_with_refind(:project) { create(:project) } + let_it_be(:user) { create(:user) } + + subject { service.execute } + + shared_examples_for 'an issuable importer' do + if issuable_type == 'issue' + it 'records the import attempt if resource is an issue' do + expect { subject } + .to change { 
Issues::CsvImport.where(project: project, user: user).count } + .by 1 + end + end + end + + describe '#execute' do + before do + project.add_developer(user) + end + + it_behaves_like 'correctly handles invalid files' do + it_behaves_like 'importer with email notification' + it_behaves_like 'an issuable importer' + end + + context 'with a file generated by Gitlab CSV export' do + let(:file) { fixture_file_upload('spec/fixtures/csv_gitlab_export.csv') } + + it 'imports the CSV without errors' do + expect(subject[:success]).to eq(4) + expect(subject[:error_lines]).to eq([]) + expect(subject[:parse_error]).to eq(false) + end + + it 'correctly sets the issuable attributes' do + expect { subject }.to change { issuables.count }.by 4 + + expect(issuables.reload).to include(have_attributes({ title: 'Test Title', description: 'Test Description' })) + end + + it_behaves_like 'importer with email notification' + it_behaves_like 'an issuable importer' + end + + context 'with comma delimited file' do + let(:file) { fixture_file_upload('spec/fixtures/csv_comma.csv') } + + it 'imports CSV without errors' do + expect(subject[:success]).to eq(3) + expect(subject[:error_lines]).to eq([]) + expect(subject[:parse_error]).to eq(false) + end + + it 'correctly sets the issuable attributes' do + expect { subject }.to change { issuables.count }.by 3 + + expect(issuables.reload).to include(have_attributes(title: 'Title with quote"', description: 'Description')) + end + + it_behaves_like 'importer with email notification' + it_behaves_like 'an issuable importer' + end + + context 'with tab delimited file with error row' do + let(:file) { fixture_file_upload('spec/fixtures/csv_tab.csv') } + + it 'imports CSV with some error rows' do + expect(subject[:success]).to eq(2) + expect(subject[:error_lines]).to eq([3]) + expect(subject[:parse_error]).to eq(false) + end + + it 'correctly sets the issuable attributes' do + expect { subject }.to change { issuables.count }.by 2 + + 
expect(issuables.reload).to include(have_attributes(title: 'Hello', description: 'World')) + end + + it_behaves_like 'importer with email notification' + it_behaves_like 'an issuable importer' + end + + context 'with semicolon delimited file with CRLF' do + let(:file) { fixture_file_upload('spec/fixtures/csv_semicolon.csv') } + + it 'imports CSV with a blank row' do + expect(subject[:success]).to eq(3) + expect(subject[:error_lines]).to eq([4]) + expect(subject[:parse_error]).to eq(false) + end + + it 'correctly sets the issuable attributes' do + expect { subject }.to change { issuables.count }.by 3 + + expect(issuables.reload).to include(have_attributes(title: 'Hello', description: 'World')) + end + + it_behaves_like 'importer with email notification' + it_behaves_like 'an issuable importer' + end + end +end diff --git a/spec/support/shared_examples/services/issuable/issuable_update_service_shared_examples.rb b/spec/support/shared_examples/services/issuable/issuable_update_service_shared_examples.rb new file mode 100644 index 00000000000..85a05bbe56d --- /dev/null +++ b/spec/support/shared_examples/services/issuable/issuable_update_service_shared_examples.rb @@ -0,0 +1,137 @@ +# frozen_string_literal: true + +RSpec.shared_examples 'issuable update service' do + def update_issuable(opts) + described_class.new(project, user, opts).execute(open_issuable) + end + + describe 'changing state' do + let(:hook_event) { :"#{closed_issuable.class.name.underscore.to_sym}_hooks" } + + describe 'to reopened' do + let(:expected_payload) do + include( + changes: include( + state_id: { current: 1, previous: 2 }, + updated_at: { current: kind_of(Time), previous: kind_of(Time) } + ), + object_attributes: include( + state: 'opened', + action: 'reopen' + ) + ) + end + + it 'executes hooks' do + hooks_container = described_class < Issues::BaseService ? 
project.project_namespace : project + expect(hooks_container).to receive(:execute_hooks).with(expected_payload, hook_event) + expect(hooks_container).to receive(:execute_integrations).with(expected_payload, hook_event) + + described_class.new( + **described_class.constructor_container_arg(project), + current_user: user, + params: { state_event: 'reopen' } + ).execute(closed_issuable) + end + end + + describe 'to closed' do + let(:expected_payload) do + include( + changes: include( + state_id: { current: 2, previous: 1 }, + updated_at: { current: kind_of(Time), previous: kind_of(Time) } + ), + object_attributes: include( + state: 'closed', + action: 'close' + ) + ) + end + + it 'executes hooks' do + hooks_container = described_class < Issues::BaseService ? project.project_namespace : project + expect(hooks_container).to receive(:execute_hooks).with(expected_payload, hook_event) + expect(hooks_container).to receive(:execute_integrations).with(expected_payload, hook_event) + + described_class.new( + **described_class.constructor_container_arg(project), + current_user: user, + params: { state_event: 'close' } + ).execute(open_issuable) + end + end + end +end + +RSpec.shared_examples 'keeps issuable labels sorted after update' do + before do + update_issuable(label_ids: [label_b.id]) + end + + context 'when label is changed' do + it 'keeps the labels sorted by title ASC' do + update_issuable({ add_label_ids: [label_a.id] }) + + expect(issuable.labels).to eq([label_a, label_b]) + end + end +end + +RSpec.shared_examples 'broadcasting issuable labels updates' do + before do + update_issuable(label_ids: [label_a.id]) + end + + context 'when label is added' do + it 'triggers the GraphQL subscription' do + expect(GraphqlTriggers).to receive(:issuable_labels_updated).with(issuable) + + update_issuable(add_label_ids: [label_b.id]) + end + end + + context 'when label is removed' do + it 'triggers the GraphQL subscription' do + expect(GraphqlTriggers).to 
receive(:issuable_labels_updated).with(issuable) + + update_issuable(remove_label_ids: [label_a.id]) + end + end + + context 'when label is unchanged' do + it 'does not trigger the GraphQL subscription' do + expect(GraphqlTriggers).not_to receive(:issuable_labels_updated).with(issuable) + + update_issuable(label_ids: [label_a.id]) + end + end +end + +RSpec.shared_examples_for 'issuable update service updating last_edited_at values' do + context 'when updating the title of the issuable' do + let(:update_params) { { title: 'updated title' } } + + it 'does not update last_edited values' do + expect { update_issuable }.to change { issuable.title }.from(issuable.title).to('updated title').and( + not_change(issuable, :last_edited_at) + ).and( + not_change(issuable, :last_edited_by) + ) + end + end + + context 'when updating the description of the issuable' do + let(:update_params) { { description: 'updated description' } } + + it 'updates last_edited values' do + expect do + update_issuable + end.to change { issuable.description }.from(issuable.description).to('updated description').and( + change { issuable.last_edited_at } + ).and( + change { issuable.last_edited_by } + ) + end + end +end diff --git a/spec/support/shared_examples/services/issuable/update_service_shared_examples.rb b/spec/support/shared_examples/services/issuable/update_service_shared_examples.rb deleted file mode 100644 index ff7acc7e907..00000000000 --- a/spec/support/shared_examples/services/issuable/update_service_shared_examples.rb +++ /dev/null @@ -1,29 +0,0 @@ -# frozen_string_literal: true - -RSpec.shared_examples_for 'issuable update service updating last_edited_at values' do - context 'when updating the title of the issuable' do - let(:update_params) { { title: 'updated title' } } - - it 'does not update last_edited values' do - expect { update_issuable }.to change { issuable.title }.from(issuable.title).to('updated title').and( - not_change(issuable, :last_edited_at) - ).and( - 
not_change(issuable, :last_edited_by) - ) - end - end - - context 'when updating the description of the issuable' do - let(:update_params) { { description: 'updated description' } } - - it 'updates last_edited values' do - expect do - update_issuable - end.to change { issuable.description }.from(issuable.description).to('updated description').and( - change { issuable.last_edited_at } - ).and( - change { issuable.last_edited_by } - ) - end - end -end diff --git a/spec/support/shared_examples/services/issues/move_and_clone_services_shared_examples.rb b/spec/support/shared_examples/services/issues/move_and_clone_services_shared_examples.rb new file mode 100644 index 00000000000..2b2e90c0461 --- /dev/null +++ b/spec/support/shared_examples/services/issues/move_and_clone_services_shared_examples.rb @@ -0,0 +1,22 @@ +# frozen_string_literal: true + +RSpec.shared_examples 'copy or reset relative position' do + before do + # ensure we have a relative position and it is known + old_issue.update!(relative_position: 1000) + end + + context 'when moved to a project within same group hierarchy' do + it 'does not reset the relative_position' do + expect(subject.relative_position).to eq(1000) + end + end + + context 'when moved to a project in a different group hierarchy' do + let_it_be(:new_project) { create(:project, group: create(:group)) } + + it 'does reset the relative_position' do + expect(subject.relative_position).to be_nil + end + end +end diff --git a/spec/support/shared_examples/services/migrate_to_ghost_user_service_shared_examples.rb b/spec/support/shared_examples/services/migrate_to_ghost_user_service_shared_examples.rb new file mode 100644 index 00000000000..e77d73d1c72 --- /dev/null +++ b/spec/support/shared_examples/services/migrate_to_ghost_user_service_shared_examples.rb @@ -0,0 +1,89 @@ +# frozen_string_literal: true + +RSpec.shared_examples "migrating a deleted user's associated records to the ghost user" do |record_class, fields| + record_class_name = 
record_class.to_s.titleize.downcase + + let(:project) do + case record_class + when MergeRequest + create(:project, :repository) + else + create(:project) + end + end + + before do + project.add_developer(user) + end + + context "for a #{record_class_name} the user has created" do + let!(:record) { created_record } + let(:migrated_fields) { fields || [:author] } + + it "does not delete the #{record_class_name}" do + service.execute + + expect(record_class.find_by_id(record.id)).to be_present + end + + it "blocks the user before migrating #{record_class_name}s to the 'Ghost User'" do + service.execute + + expect(user).to be_blocked + end + + it 'migrates all associated fields to the "Ghost user"' do + service.execute + + migrated_record = record_class.find_by_id(record.id) + + migrated_fields.each do |field| + expect(migrated_record.public_send(field)).to eq(User.ghost) + end + end + + it 'will only migrate specific records during a hard_delete' do + service.execute(hard_delete: true) + + migrated_record = record_class.find_by_id(record.id) + + check_user = always_ghost ? 
User.ghost : user + + migrated_fields.each do |field| + expect(migrated_record.public_send(field)).to eq(check_user) + end + end + + describe "race conditions" do + context "when #{record_class_name} migration fails and is rolled back" do + before do + allow_next_instance_of(ActiveRecord::Associations::CollectionProxy) + .to receive(:update_all).and_raise(ActiveRecord::StatementTimeout) + end + + it 'rolls back the user block' do + expect { service.execute }.to raise_error(ActiveRecord::StatementTimeout) + + expect(user.reload).not_to be_blocked + end + + it "doesn't unblock a previously-blocked user" do + expect(user.starred_projects).to receive(:update_all).and_call_original + user.block + + expect { service.execute }.to raise_error(ActiveRecord::StatementTimeout) + + expect(user.reload).to be_blocked + end + end + + it "blocks the user before #{record_class_name} migration begins" do + expect(service).to receive("migrate_#{record_class_name.parameterize(separator: '_').pluralize}".to_sym) do + expect(user.reload).to be_blocked + end + + service.execute + end + end + end +end diff --git a/spec/support/shared_examples/services/packages/debian/generate_distribution_shared_examples.rb b/spec/support/shared_examples/services/packages/debian/generate_distribution_shared_examples.rb index 5abdac07431..cb544f42765 100644 --- a/spec/support/shared_examples/services/packages/debian/generate_distribution_shared_examples.rb +++ b/spec/support/shared_examples/services/packages/debian/generate_distribution_shared_examples.rb @@ -207,7 +207,7 @@ RSpec.shared_examples 'Generate Debian Distribution and component files' do check_component_file(current_time.round, 'contrib', :sources, nil, nil) - expected_main_amd64_size = expected_main_amd64_content.length + expected_main_amd64_size = expected_main_amd64_content.bytesize expected_main_amd64_sha256 = Digest::SHA256.hexdigest(expected_main_amd64_content) expected_main_amd64_di_size = expected_main_amd64_di_content.length diff --git 
a/spec/support/shared_examples/services/security/ci_configuration/create_service_shared_examples.rb b/spec/support/shared_examples/services/security/ci_configuration/create_service_shared_examples.rb index 9fcdd296ebe..094c91f2ab5 100644 --- a/spec/support/shared_examples/services/security/ci_configuration/create_service_shared_examples.rb +++ b/spec/support/shared_examples/services/security/ci_configuration/create_service_shared_examples.rb @@ -114,7 +114,8 @@ RSpec.shared_examples_for 'services security ci configuration create service' do it 'fails with error' do expect(project).to receive(:ci_config_for).and_return(unsupported_yaml) - expect { result }.to raise_error(Gitlab::Graphql::Errors::MutationError, '.gitlab-ci.yml with aliases/anchors is not supported. Please change the CI configuration manually.') + expect { result }.to raise_error(Gitlab::Graphql::Errors::MutationError, Gitlab::Utils::ErrorMessage.to_user_facing( + _(".gitlab-ci.yml with aliases/anchors is not supported. Please change the CI configuration manually."))) end end diff --git a/spec/support/shared_examples/services/service_response_shared_examples.rb b/spec/support/shared_examples/services/service_response_shared_examples.rb new file mode 100644 index 00000000000..e55f16a2994 --- /dev/null +++ b/spec/support/shared_examples/services/service_response_shared_examples.rb @@ -0,0 +1,21 @@ +# frozen_string_literal: true + +RSpec.shared_examples 'returning an error service response' do |message: nil| + it 'returns an error service response' do + result = subject + + expect(result).to be_error + + expect(result.message).to eq(message) if message + end +end + +RSpec.shared_examples 'returning a success service response' do |message: nil| + it 'returns a success service response' do + result = subject + + expect(result).to be_success + + expect(result.message).to eq(message) if message + end +end diff --git 
a/spec/support/shared_examples/services/work_items/widgets/milestone_service_shared_examples.rb b/spec/support/shared_examples/services/work_items/widgets/milestone_service_shared_examples.rb deleted file mode 100644 index ac064ed4c33..00000000000 --- a/spec/support/shared_examples/services/work_items/widgets/milestone_service_shared_examples.rb +++ /dev/null @@ -1,42 +0,0 @@ -# frozen_string_literal: true - -RSpec.shared_examples "setting work item's milestone" do - context "when 'milestone' param does not exist" do - let(:params) { {} } - - it "does not set the work item's milestone" do - expect { execute_callback }.to not_change(work_item, :milestone) - end - end - - context "when 'milestone' is not in the work item's project's hierarchy" do - let(:another_group_milestone) { create(:milestone, group: create(:group)) } - let(:params) { { milestone_id: another_group_milestone.id } } - - it "does not set the work item's milestone" do - expect { execute_callback }.to not_change(work_item, :milestone) - end - end - - context 'when assigning a group milestone' do - let(:params) { { milestone_id: group_milestone.id } } - - it "sets the work item's milestone" do - expect { execute_callback } - .to change { work_item.milestone } - .from(nil) - .to(group_milestone) - end - end - - context 'when assigning a project milestone' do - let(:params) { { milestone_id: project_milestone.id } } - - it "sets the work item's milestone" do - expect { execute_callback } - .to change { work_item.milestone } - .from(nil) - .to(project_milestone) - end - end -end diff --git a/spec/support/shared_examples/work_items/export_and_import_shared_examples.rb b/spec/support/shared_examples/work_items/export_and_import_shared_examples.rb new file mode 100644 index 00000000000..bbbfacfdf53 --- /dev/null +++ b/spec/support/shared_examples/work_items/export_and_import_shared_examples.rb @@ -0,0 +1,39 @@ +# frozen_string_literal: true + +RSpec.shared_examples_for 'a exported file that can be imported' 
do + before do + origin_project.add_reporter(user) + target_project.add_reporter(user) + end + + def export_work_items_for(project) + origin_work_items = WorkItem.where(project: origin_project) + export = described_class.new(origin_work_items, project) + export.email(user) + attachment = ActionMailer::Base.deliveries.last.attachments.first + file = Tempfile.new('temp_work_item_export.csv') + file.write(attachment.read) + + file + end + + def import_file_for(project, file) + uploader = FileUploader.new(project) + uploader.store!(file) + import_service = WorkItems::ImportCsvService.new(user, target_project, uploader) + + import_service.execute + end + + it 'imports work item with correct attributes', :aggregate_failures do + csv_file = export_work_items_for(origin_project) + + imported_work_items = ::WorkItems::WorkItemsFinder.new(user, project: target_project).execute + expect { import_file_for(target_project, csv_file) }.to change { imported_work_items.count }.by 1 + imported_work_item = imported_work_items.first + expect(imported_work_item.author).to eq(user) + expected_matching_fields.each do |field| + expect(imported_work_item.public_send(field)).to eq(work_item.public_send(field)) + end + end +end diff --git a/spec/support/shared_examples/workers/batched_background_migration_execution_worker_shared_example.rb b/spec/support/shared_examples/workers/batched_background_migration_execution_worker_shared_example.rb index e224b71da91..095c32c3136 100644 --- a/spec/support/shared_examples/workers/batched_background_migration_execution_worker_shared_example.rb +++ b/spec/support/shared_examples/workers/batched_background_migration_execution_worker_shared_example.rb @@ -50,14 +50,20 @@ RSpec.shared_examples 'batched background migrations execution worker' do end describe '.max_running_jobs' do - it 'returns MAX_RUNNING_MIGRATIONS' do - expect(described_class.max_running_jobs).to eq(described_class::MAX_RUNNING_MIGRATIONS) + it 'returns 
database_max_running_batched_background_migrations application setting' do + stub_application_setting(database_max_running_batched_background_migrations: 3) + + expect(described_class.max_running_jobs) + .to eq(Gitlab::CurrentSettings.database_max_running_batched_background_migrations) end end describe '#max_running_jobs' do - it 'returns MAX_RUNNING_MIGRATIONS' do - expect(described_class.new.max_running_jobs).to eq(described_class::MAX_RUNNING_MIGRATIONS) + it 'returns database_max_running_batched_background_migrations application setting' do + stub_application_setting(database_max_running_batched_background_migrations: 3) + + expect(described_class.new.max_running_jobs) + .to eq(Gitlab::CurrentSettings.database_max_running_batched_background_migrations) end end diff --git a/spec/support/shared_examples/workers/batched_background_migration_worker_shared_examples.rb b/spec/support/shared_examples/workers/batched_background_migration_worker_shared_examples.rb index 8ec955940c0..06877aee565 100644 --- a/spec/support/shared_examples/workers/batched_background_migration_worker_shared_examples.rb +++ b/spec/support/shared_examples/workers/batched_background_migration_worker_shared_examples.rb @@ -88,9 +88,9 @@ RSpec.shared_examples 'it runs batched background migration jobs' do |tracking_d end end - context 'when the feature flag is disabled' do + context 'when the tracking database is shared' do before do - stub_feature_flags(execute_batched_migrations_on_schedule: false) + skip_if_database_exists(tracking_database) end it 'does nothing' do @@ -101,22 +101,17 @@ RSpec.shared_examples 'it runs batched background migration jobs' do |tracking_d end end - context 'when the feature flag is enabled' do - let(:base_model) { Gitlab::Database.database_base_models[tracking_database] } - + context 'when the tracking database is not shared' do before do - stub_feature_flags(execute_batched_migrations_on_schedule: true) - - 
allow(Gitlab::Database::BackgroundMigration::BatchedMigration).to receive(:active_migration) - .with(connection: base_model.connection) - .and_return(nil) + skip_if_shared_database(tracking_database) end - context 'when database config is shared' do - it 'does nothing' do - expect(Gitlab::Database).to receive(:db_config_share_with) - .with(base_model.connection_db_config).and_return('main') + context 'when the feature flag is disabled' do + before do + stub_feature_flags(execute_batched_migrations_on_schedule: false) + end + it 'does nothing' do expect(worker).not_to receive(:active_migration) expect(worker).not_to receive(:run_active_migration) @@ -124,123 +119,146 @@ RSpec.shared_examples 'it runs batched background migration jobs' do |tracking_d end end - context 'when no active migrations exist' do - context 'when parallel execution is disabled' do - before do - stub_feature_flags(batched_migrations_parallel_execution: false) - end + context 'when the feature flag is enabled' do + let(:base_model) { Gitlab::Database.database_base_models[tracking_database] } + let(:connection) { base_model.connection } + + before do + stub_feature_flags(execute_batched_migrations_on_schedule: true) + allow(Gitlab::Database::BackgroundMigration::BatchedMigration).to receive(:active_migration) + .with(connection: connection) + .and_return(nil) + end + + context 'when database config is shared' do it 'does nothing' do + expect(Gitlab::Database).to receive(:db_config_share_with) + .with(base_model.connection_db_config).and_return('main') + + expect(worker).not_to receive(:active_migration) expect(worker).not_to receive(:run_active_migration) worker.perform end end - context 'when parallel execution is enabled' do - before do - stub_feature_flags(batched_migrations_parallel_execution: true) - end + context 'when no active migrations exist' do + context 'when parallel execution is disabled' do + before do + stub_feature_flags(batched_migrations_parallel_execution: false) + end - it 
'does nothing' do - expect(worker).not_to receive(:queue_migrations_for_execution) + it 'does nothing' do + expect(worker).not_to receive(:run_active_migration) - worker.perform + worker.perform + end end - end - end - context 'when active migrations exist' do - let(:job_interval) { 5.minutes } - let(:lease_timeout) { 15.minutes } - let(:lease_key) { described_class.name.demodulize.underscore } - let(:migration_id) { 123 } - let(:migration) do - build( - :batched_background_migration, :active, - id: migration_id, interval: job_interval, table_name: table_name - ) - end + context 'when parallel execution is enabled' do + before do + stub_feature_flags(batched_migrations_parallel_execution: true) + end - let(:execution_worker_class) do - case tracking_database - when :main - Database::BatchedBackgroundMigration::MainExecutionWorker - when :ci - Database::BatchedBackgroundMigration::CiExecutionWorker + it 'does nothing' do + expect(worker).not_to receive(:queue_migrations_for_execution) + + worker.perform + end end end - before do - allow(Gitlab::Database::BackgroundMigration::BatchedMigration).to receive(:active_migration) - .with(connection: base_model.connection) - .and_return(migration) - end + context 'when active migrations exist' do + let(:job_interval) { 5.minutes } + let(:lease_timeout) { 15.minutes } + let(:lease_key) { described_class.name.demodulize.underscore } + let(:migration_id) { 123 } + let(:migration) do + build( + :batched_background_migration, :active, + id: migration_id, interval: job_interval, table_name: table_name + ) + end + + let(:execution_worker_class) do + case tracking_database + when :main + Database::BatchedBackgroundMigration::MainExecutionWorker + when :ci + Database::BatchedBackgroundMigration::CiExecutionWorker + end + end - context 'when parallel execution is disabled' do before do - stub_feature_flags(batched_migrations_parallel_execution: false) + allow(Gitlab::Database::BackgroundMigration::BatchedMigration).to 
receive(:active_migration) + .with(connection: connection) + .and_return(migration) end - let(:execution_worker) { instance_double(execution_worker_class) } + context 'when parallel execution is disabled' do + before do + stub_feature_flags(batched_migrations_parallel_execution: false) + end - context 'when the calculated timeout is less than the minimum allowed' do - let(:minimum_timeout) { described_class::MINIMUM_LEASE_TIMEOUT } - let(:job_interval) { 2.minutes } + let(:execution_worker) { instance_double(execution_worker_class) } - it 'sets the lease timeout to the minimum value' do - expect_to_obtain_exclusive_lease(lease_key, timeout: minimum_timeout) + context 'when the calculated timeout is less than the minimum allowed' do + let(:minimum_timeout) { described_class::MINIMUM_LEASE_TIMEOUT } + let(:job_interval) { 2.minutes } - expect(execution_worker_class).to receive(:new).and_return(execution_worker) - expect(execution_worker).to receive(:perform_work).with(tracking_database, migration_id) + it 'sets the lease timeout to the minimum value' do + expect_to_obtain_exclusive_lease(lease_key, timeout: minimum_timeout) - expect(worker).to receive(:run_active_migration).and_call_original + expect(execution_worker_class).to receive(:new).and_return(execution_worker) + expect(execution_worker).to receive(:perform_work).with(tracking_database, migration_id) - worker.perform - end - end + expect(worker).to receive(:run_active_migration).and_call_original - it 'always cleans up the exclusive lease' do - lease = stub_exclusive_lease_taken(lease_key, timeout: lease_timeout) + worker.perform + end + end - expect(lease).to receive(:try_obtain).and_return(true) + it 'always cleans up the exclusive lease' do + lease = stub_exclusive_lease_taken(lease_key, timeout: lease_timeout) - expect(worker).to receive(:run_active_migration).and_raise(RuntimeError, 'I broke') - expect(lease).to receive(:cancel) + expect(lease).to receive(:try_obtain).and_return(true) - expect { 
worker.perform }.to raise_error(RuntimeError, 'I broke') - end + expect(worker).to receive(:run_active_migration).and_raise(RuntimeError, 'I broke') + expect(lease).to receive(:cancel) - it 'delegetes the execution to ExecutionWorker' do - base_model = Gitlab::Database.database_base_models[tracking_database] + expect { worker.perform }.to raise_error(RuntimeError, 'I broke') + end - expect(Gitlab::Database::SharedModel).to receive(:using_connection).with(base_model.connection).and_yield - expect(execution_worker_class).to receive(:new).and_return(execution_worker) - expect(execution_worker).to receive(:perform_work).with(tracking_database, migration_id) + it 'delegetes the execution to ExecutionWorker' do + expect(Gitlab::Database::SharedModel).to receive(:using_connection).with(connection).and_yield + expect(execution_worker_class).to receive(:new).and_return(execution_worker) + expect(execution_worker).to receive(:perform_work).with(tracking_database, migration_id) - worker.perform + worker.perform + end end - end - context 'when parallel execution is enabled' do - before do - stub_feature_flags(batched_migrations_parallel_execution: true) - end + context 'when parallel execution is enabled' do + before do + stub_feature_flags(batched_migrations_parallel_execution: true) + end - it 'delegetes the execution to ExecutionWorker' do - expect(Gitlab::Database::BackgroundMigration::BatchedMigration) - .to receive(:active_migrations_distinct_on_table).with( - connection: base_model.connection, - limit: execution_worker_class.max_running_jobs - ).and_return([migration]) + it 'delegetes the execution to ExecutionWorker' do + expect(Gitlab::Database::BackgroundMigration::BatchedMigration) + .to receive(:active_migrations_distinct_on_table).with( + connection: base_model.connection, + limit: execution_worker_class.max_running_jobs + ).and_return([migration]) - expected_arguments = [ - [tracking_database.to_s, migration_id] - ] + expected_arguments = [ + 
[tracking_database.to_s, migration_id] + ] - expect(execution_worker_class).to receive(:perform_with_capacity).with(expected_arguments) + expect(execution_worker_class).to receive(:perform_with_capacity).with(expected_arguments) - worker.perform + worker.perform + end end end end @@ -248,7 +266,7 @@ RSpec.shared_examples 'it runs batched background migration jobs' do |tracking_d end end - describe 'executing an entire migration', :freeze_time, if: Gitlab::Database.has_config?(tracking_database) do + describe 'executing an entire migration', :freeze_time, if: Gitlab::Database.has_database?(tracking_database) do include Gitlab::Database::DynamicModelHelpers include Database::DatabaseHelpers diff --git a/spec/support/stub_member_access_level.rb b/spec/support/stub_member_access_level.rb new file mode 100644 index 00000000000..62e932ee1fc --- /dev/null +++ b/spec/support/stub_member_access_level.rb @@ -0,0 +1,46 @@ +# frozen_string_literal: true + +module StubMemberAccessLevel + # Stubs access level of a member of +object+. + # + # The following types are supported: + # * `Project` - stubs `project.team.max_member_access(user.id)` + # * `Group` - stubs `group.max_member_access_for_user(user)` + # + # @example + # + # stub_member_access_level(project, maintainer: user) + # project.team.max_member_access(user.id) # => Gitlab::Access::MAINTAINER + # + # stub_member_access_level(group, developer: user) + # group.max_member_access_for_user(user) # => Gitlab::Access::DEVELOPER + # + # stub_member_access_level(project, reporter: user, guest: [guest1, guest2]) + # project.team.max_member_access(user.id) # => Gitlab::Access::REPORTER + # project.team.max_member_access(guests.first.id) # => Gitlab::Access::GUEST + # project.team.max_member_access(guests.last.id) # => Gitlab::Access::GUEST + # + # @param object [Project, Group] Object to be stubbed. 
+ # @param access_levels [Hash, Hash] Map of access level to users + def stub_member_access_level(object, **access_levels) + expectation = case object + when Project + ->(user) { expect(object.team).to receive(:max_member_access).with(user.id) } + when Group + ->(user) { expect(object).to receive(:max_member_access_for_user).with(user) } + else + raise ArgumentError, + "Stubbing member access level unsupported for #{object.inspect} (#{object.class})" + end + + access_levels.each do |access_level, users| + access_level = Gitlab::Access.sym_options_with_owner.fetch(access_level) do + raise ArgumentError, "Invalid access level #{access_level.inspect}" + end + + Array(users).each do |user| + expectation.call(user).at_least(1).times.and_return(access_level) + end + end + end +end diff --git a/spec/support/test_reports/test_reports_helper.rb b/spec/support/test_reports/test_reports_helper.rb deleted file mode 100644 index 85483062958..00000000000 --- a/spec/support/test_reports/test_reports_helper.rb +++ /dev/null @@ -1,103 +0,0 @@ -# frozen_string_literal: true - -module TestReportsHelper - def create_test_case_rspec_success(name = 'test_spec') - Gitlab::Ci::Reports::TestCase.new( - suite_name: 'rspec', - name: 'Test#sum when a is 1 and b is 3 returns summary', - classname: "spec.#{name}", - file: './spec/test_spec.rb', - execution_time: 1.11, - status: Gitlab::Ci::Reports::TestCase::STATUS_SUCCESS) - end - - def create_test_case_rspec_failed(name = 'test_spec', execution_time = 2.22) - Gitlab::Ci::Reports::TestCase.new( - suite_name: 'rspec', - name: 'Test#sum when a is 1 and b is 3 returns summary', - classname: "spec.#{name}", - file: './spec/test_spec.rb', - execution_time: execution_time, - system_output: sample_rspec_failed_message, - status: Gitlab::Ci::Reports::TestCase::STATUS_FAILED) - end - - def create_test_case_rspec_skipped(name = 'test_spec') - Gitlab::Ci::Reports::TestCase.new( - suite_name: 'rspec', - name: 'Test#sum when a is 3 and b is 3 returns 
summary', - classname: "spec.#{name}", - file: './spec/test_spec.rb', - execution_time: 3.33, - status: Gitlab::Ci::Reports::TestCase::STATUS_SKIPPED) - end - - def create_test_case_rspec_error(name = 'test_spec') - Gitlab::Ci::Reports::TestCase.new( - suite_name: 'rspec', - name: 'Test#sum when a is 4 and b is 4 returns summary', - classname: "spec.#{name}", - file: './spec/test_spec.rb', - execution_time: 4.44, - status: Gitlab::Ci::Reports::TestCase::STATUS_ERROR) - end - - def sample_rspec_failed_message - <<-EOF.strip_heredoc - Failure/Error: is_expected.to eq(3) - - expected: 3 - got: -1 - - (compared using ==) - ./spec/test_spec.rb:12:in `block (4 levels) in <top (required)>' - EOF - end - - def create_test_case_java_success(name = 'addTest') - Gitlab::Ci::Reports::TestCase.new( - suite_name: 'java', - name: name, - classname: 'CalculatorTest', - execution_time: 5.55, - status: Gitlab::Ci::Reports::TestCase::STATUS_SUCCESS) - end - - def create_test_case_java_failed(name = 'addTest') - Gitlab::Ci::Reports::TestCase.new( - suite_name: 'java', - name: name, - classname: 'CalculatorTest', - execution_time: 6.66, - system_output: sample_java_failed_message, - status: Gitlab::Ci::Reports::TestCase::STATUS_FAILED) - end - - def create_test_case_java_skipped(name = 'addTest') - Gitlab::Ci::Reports::TestCase.new( - suite_name: 'java', - name: name, - classname: 'CalculatorTest', - execution_time: 7.77, - status: Gitlab::Ci::Reports::TestCase::STATUS_SKIPPED) - end - - def create_test_case_java_error(name = 'addTest') - Gitlab::Ci::Reports::TestCase.new( - suite_name: 'java', - name: name, - classname: 'CalculatorTest', - execution_time: 8.88, - status: Gitlab::Ci::Reports::TestCase::STATUS_ERROR) - end - - def sample_java_failed_message - <<-EOF.strip_heredoc - junit.framework.AssertionFailedError: expected:<1> but was:<3> - at CalculatorTest.subtractExpression(Unknown Source) - at java.base/jdk.internal.database.NativeMethodAccessorImpl.invoke0(Native Method) - at 
java.base/jdk.internal.database.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) - at java.base/jdk.internal.database.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) - EOF - end -end diff --git a/spec/support/tmpdir.rb b/spec/support/tmpdir.rb index ea8e26d2878..92126ec1522 100644 --- a/spec/support/tmpdir.rb +++ b/spec/support/tmpdir.rb @@ -1,5 +1,7 @@ # frozen_string_literal: true +require 'tmpdir' + module TmpdirHelper def mktmpdir @tmpdir_helper_dirs ||= [] diff --git a/spec/support/trace/trace_helpers.rb b/spec/support/trace/trace_helpers.rb deleted file mode 100644 index 9255715ff71..00000000000 --- a/spec/support/trace/trace_helpers.rb +++ /dev/null @@ -1,29 +0,0 @@ -# frozen_string_literal: true - -module TraceHelpers - def create_legacy_trace(build, content) - File.open(legacy_trace_path(build), 'wb') { |stream| stream.write(content) } - end - - def create_legacy_trace_in_db(build, content) - build.update_column(:trace, content) - end - - def legacy_trace_path(build) - legacy_trace_dir = File.join(Settings.gitlab_ci.builds_path, - build.created_at.utc.strftime("%Y_%m"), - build.project_id.to_s) - - FileUtils.mkdir_p(legacy_trace_dir) - - File.join(legacy_trace_dir, "#{build.id}.log") - end - - def archived_trace_path(job_artifact) - disk_hash = Digest::SHA2.hexdigest(job_artifact.project_id.to_s) - creation_date = job_artifact.created_at.utc.strftime('%Y_%m_%d') - - File.join(Gitlab.config.artifacts.path, disk_hash[0..1], disk_hash[2..3], disk_hash, - creation_date, job_artifact.job_id.to_s, job_artifact.id.to_s, 'job.log') - end -end diff --git a/spec/support_specs/helpers/migrations_helpers_spec.rb b/spec/support_specs/helpers/migrations_helpers_spec.rb index 5d44dac8eb7..2af16151350 100644 --- a/spec/support_specs/helpers/migrations_helpers_spec.rb +++ b/spec/support_specs/helpers/migrations_helpers_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe MigrationsHelpers do +RSpec.describe 
MigrationsHelpers, feature_category: :database do let(:helper_class) do Class.new.tap do |klass| klass.include described_class @@ -71,4 +71,40 @@ RSpec.describe MigrationsHelpers do end end end + + describe '#reset_column_information' do + context 'with a regular ActiveRecord model class' do + let(:klass) { Project } + + it 'calls reset_column_information' do + expect(klass).to receive(:reset_column_information) + + helper.reset_column_information(klass) + end + end + + context 'with an anonymous class with table name defined' do + let(:klass) do + Class.new(ActiveRecord::Base) do + self.table_name = :projects + end + end + + it 'calls reset_column_information' do + expect(klass).to receive(:reset_column_information) + + helper.reset_column_information(klass) + end + end + + context 'with an anonymous class with no table name defined' do + let(:klass) { Class.new(ActiveRecord::Base) } + + it 'does not call reset_column_information' do + expect(klass).not_to receive(:reset_column_information) + + helper.reset_column_information(klass) + end + end + end end diff --git a/spec/support_specs/matchers/event_store_spec.rb b/spec/support_specs/matchers/event_store_spec.rb index 3614d05fde8..bd77f7124c1 100644 --- a/spec/support_specs/matchers/event_store_spec.rb +++ b/spec/support_specs/matchers/event_store_spec.rb @@ -5,7 +5,7 @@ require 'json_schemer' load File.expand_path('../../../spec/support/matchers/event_store.rb', __dir__) -RSpec.describe 'event store matchers', :aggregate_errors do +RSpec.describe 'event store matchers', feature_category: :shared do let(:event_type1) do Class.new(Gitlab::EventStore::Event) do def schema diff --git a/spec/support_specs/stub_member_access_level_spec.rb b/spec/support_specs/stub_member_access_level_spec.rb new file mode 100644 index 00000000000..c76bd2ee417 --- /dev/null +++ b/spec/support_specs/stub_member_access_level_spec.rb @@ -0,0 +1,69 @@ +# frozen_string_literal: true + +require 'spec_helper' + +require_relative 
'../support/stub_member_access_level' + +RSpec.describe StubMemberAccessLevel, feature_category: :system_access do + include described_class + + describe 'stub_member_access_level' do + shared_examples 'access level stubs' do + let(:guests) { build_stubbed_list(:user, 2) } + let(:maintainer) { build_stubbed(:user) } + let(:no_access) { build_stubbed(:user) } + + it 'stubs max member access level per user' do + stub_member_access_level(object, maintainer: maintainer, guest: guests) + + # Ensure that multple calls are allowed + 2.times do + expect(access_level_for(maintainer)).to eq(Gitlab::Access::MAINTAINER) + expect(access_level_for(guests.first)).to eq(Gitlab::Access::GUEST) + expect(access_level_for(guests.last)).to eq(Gitlab::Access::GUEST) + + # Partially stub so we expect a mock error. + expect { access_level_for(no_access) }.to raise_error(RSpec::Mocks::MockExpectationError) + end + end + + it 'fails for unstubbed access' do + expect(access_level_for(no_access)).to eq(Gitlab::Access::NO_ACCESS) + end + + it 'fails for invalid access level' do + expect { stub_member_access_level(object, unknown: :anything) } + .to raise_error(ArgumentError, "Invalid access level :unknown") + end + end + + context 'with project' do + let(:object) { build_stubbed(:project) } + + it_behaves_like 'access level stubs' do + def access_level_for(user) + object.team.max_member_access(user.id) + end + end + end + + context 'with group' do + let(:object) { build_stubbed(:group) } + + it_behaves_like 'access level stubs' do + def access_level_for(user) + object.max_member_access_for_user(user) + end + end + end + + context 'with unsupported object' do + let(:object) { :a_symbol } + + it 'raises an error' do + expect { stub_member_access_level(object) } + .to raise_error(ArgumentError, "Stubbing member access level unsupported for :a_symbol (Symbol)") + end + end + end +end diff --git a/spec/tasks/dev_rake_spec.rb b/spec/tasks/dev_rake_spec.rb index ef047b383a6..82c9bb4faa2 100644 --- 
a/spec/tasks/dev_rake_spec.rb +++ b/spec/tasks/dev_rake_spec.rb @@ -121,7 +121,7 @@ RSpec.describe 'dev rake tasks' do context 'when a database is not found' do before do - skip_if_multiple_databases_not_setup + skip_if_shared_database(:ci) end it 'continues to next connection' do @@ -135,7 +135,7 @@ RSpec.describe 'dev rake tasks' do context 'multiple databases' do before do - skip_if_multiple_databases_not_setup(:ci) + skip_if_shared_database(:ci) end context 'with a valid database' do diff --git a/spec/tasks/gettext_rake_spec.rb b/spec/tasks/gettext_rake_spec.rb index 29caa363f7b..c44c1734432 100644 --- a/spec/tasks/gettext_rake_spec.rb +++ b/spec/tasks/gettext_rake_spec.rb @@ -1,8 +1,10 @@ # frozen_string_literal: true require 'rake_helper' +require_relative '../../tooling/lib/tooling/gettext_extractor' +require_relative '../support/matchers/abort_matcher' -RSpec.describe 'gettext', :silence_stdout do +RSpec.describe 'gettext', :silence_stdout, feature_category: :internationalization do let(:locale_path) { Rails.root.join('tmp/gettext_spec') } let(:pot_file_path) { File.join(locale_path, 'gitlab.pot') } @@ -21,28 +23,43 @@ RSpec.describe 'gettext', :silence_stdout do end describe ':compile' do - before do - allow(Rake::Task).to receive(:[]).and_call_original + let(:compile_command) do + [ + "node", "./scripts/frontend/po_to_json.js", + "--locale-root", Rails.root.join('locale').to_s, + "--output-dir", Rails.root.join('app/assets/javascripts/locale').to_s + ] end - it 'creates a pot file and invokes the \'gettext:po_to_json\' task' do - expect(Rake::Task).to receive(:[]).with('gettext:po_to_json').and_return(double(invoke: true)) + it 'creates a pot file and runs po-to-json conversion via node script' do + expect(Kernel).to receive(:system).with(*compile_command).and_return(true) expect { run_rake_task('gettext:compile') } .to change { File.exist?(pot_file_path) } .to be_truthy end + + it 'aborts with non-successful po-to-json conversion via node script' do + 
expect(Kernel).to receive(:system).with(*compile_command).and_return(false) + + expect { run_rake_task('gettext:compile') }.to abort_execution + end end describe ':regenerate' do + let(:locale_nz_path) { File.join(locale_path, 'en_NZ') } + let(:po_file_path) { File.join(locale_nz_path, 'gitlab.po') } + let(:extractor) { instance_double(Tooling::GettextExtractor, generate_pot: '') } + before do + FileUtils.mkdir(locale_nz_path) + File.write(po_file_path, fixture_file('valid.po')) + # this task takes a *really* long time to complete, so stub it for the spec - allow(Rake::Task['gettext:find']).to receive(:invoke) { invoke_find.call } + allow(Tooling::GettextExtractor).to receive(:new).and_return(extractor) end context 'when the locale folder is not found' do - let(:invoke_find) { -> { true } } - before do FileUtils.rm_r(locale_path) if Dir.exist?(locale_path) end @@ -53,67 +70,14 @@ RSpec.describe 'gettext', :silence_stdout do end end - context 'where there are existing /**/gitlab.po files' do - let(:locale_nz_path) { File.join(locale_path, 'en_NZ') } - let(:po_file_path) { File.join(locale_nz_path, 'gitlab.po') } - - let(:invoke_find) { -> { File.write pot_file_path, 'pot file test updates' } } - - before do - FileUtils.mkdir(locale_nz_path) - File.write(po_file_path, fixture_file('valid.po')) - end - - it 'does not remove that locale' do - expect { run_rake_task('gettext:regenerate') } - .not_to change { Dir.exist?(locale_nz_path) } - end - end - - context 'when there are locale folders without a gitlab.po file' do - let(:empty_locale_path) { File.join(locale_path, 'en_NZ') } - - let(:invoke_find) { -> { File.write pot_file_path, 'pot file test updates' } } - - before do - FileUtils.mkdir(empty_locale_path) - end - - it 'removes those folders' do - expect { run_rake_task('gettext:regenerate') } - .to change { Dir.exist?(empty_locale_path) } - .to eq false - end - end - context 'when the gitlab.pot file cannot be generated' do - let(:invoke_find) { -> { true } } - it 
'prints an error' do + allow(File).to receive(:exist?).and_return(false) + expect { run_rake_task('gettext:regenerate') } .to raise_error(/gitlab.pot file not generated/) end end - - context 'when gettext:find changes the revision dates' do - let(:invoke_find) { -> { File.write pot_file_path, fixture_file('valid.po') } } - - before do - File.write pot_file_path, fixture_file('valid.po') - end - - it 'resets the changes' do - pot_file = File.read(pot_file_path) - expect(pot_file).to include('PO-Revision-Date: 2017-07-13 12:10-0500') - expect(pot_file).to include('PO-Creation-Date: 2016-07-13 12:11-0500') - - run_rake_task('gettext:regenerate') - - pot_file = File.read(pot_file_path) - expect(pot_file).not_to include('PO-Revision-Date: 2017-07-13 12:10-0500') - expect(pot_file).not_to include('PO-Creation-Date: 2016-07-13 12:11-0500') - end - end end describe ':lint' do diff --git a/spec/tasks/gitlab/background_migrations_rake_spec.rb b/spec/tasks/gitlab/background_migrations_rake_spec.rb index 876b56d1208..04be713e0d4 100644 --- a/spec/tasks/gitlab/background_migrations_rake_spec.rb +++ b/spec/tasks/gitlab/background_migrations_rake_spec.rb @@ -2,7 +2,8 @@ require 'rake_helper' -RSpec.describe 'gitlab:background_migrations namespace rake tasks', :suppress_gitlab_schemas_validate_connection do +RSpec.describe 'gitlab:background_migrations namespace rake tasks', :suppress_gitlab_schemas_validate_connection, + feature_category: :database do before do Rake.application.rake_require 'tasks/gitlab/background_migrations' end @@ -62,7 +63,7 @@ RSpec.describe 'gitlab:background_migrations namespace rake tasks', :suppress_gi let(:databases) { [Gitlab::Database::MAIN_DATABASE_NAME, ci_database_name] } before do - skip_if_multiple_databases_not_setup(:ci) + skip_if_shared_database(:ci) allow(Gitlab::Database).to receive(:database_base_models).and_return(base_models) end @@ -114,12 +115,6 @@ RSpec.describe 'gitlab:background_migrations namespace rake tasks', :suppress_gi 
let(:connection) { double(:connection) } let(:base_models) { { 'main' => model }.with_indifferent_access } - around do |example| - Gitlab::Database::SharedModel.using_connection(model.connection) do - example.run - end - end - it 'outputs the status of background migrations' do allow(Gitlab::Database).to receive(:database_base_models).and_return(base_models) @@ -130,15 +125,33 @@ RSpec.describe 'gitlab:background_migrations namespace rake tasks', :suppress_gi OUTPUT end - context 'when multiple database feature is enabled' do + context 'when running the rake task against one database in multiple databases setup' do before do - skip_if_multiple_databases_not_setup + skip_if_shared_database(:ci) end - context 'with a single database' do - subject(:status_task) { run_rake_task("gitlab:background_migrations:status:#{main_database_name}") } + subject(:status_task) { run_rake_task("gitlab:background_migrations:status:#{main_database_name}") } - it 'outputs the status of background migrations' do + it 'outputs the status of background migrations' do + expect { status_task }.to output(<<~OUTPUT).to_stdout + Database: #{main_database_name} + finished | #{migration1.job_class_name},#{migration1.table_name},#{migration1.column_name},[["id1","id2"]] + failed | #{migration2.job_class_name},#{migration2.table_name},#{migration2.column_name},[] + OUTPUT + end + end + + context 'when multiple databases are configured' do + before do + skip_if_multiple_databases_not_setup(:ci) + end + + context 'with two connections sharing the same database' do + before do + skip_if_database_exists(:ci) + end + + it 'skips the shared database' do expect { status_task }.to output(<<~OUTPUT).to_stdout Database: #{main_database_name} finished | #{migration1.job_class_name},#{migration1.table_name},#{migration1.column_name},[["id1","id2"]] @@ -153,6 +166,10 @@ RSpec.describe 'gitlab:background_migrations namespace rake tasks', :suppress_gi end context 'with multiple databases' do + before do + 
skip_if_shared_database(:ci) + end + subject(:status_task) { run_rake_task('gitlab:background_migrations:status') } let(:base_models) { { main: main_model, ci: ci_model } } @@ -161,6 +178,8 @@ RSpec.describe 'gitlab:background_migrations namespace rake tasks', :suppress_gi it 'outputs the status for each database' do allow(Gitlab::Database).to receive(:database_base_models).and_return(base_models) + allow(Gitlab::Database).to receive(:has_database?).with(:main).and_return(true) + allow(Gitlab::Database).to receive(:has_database?).with(:ci).and_return(true) expect(Gitlab::Database::SharedModel).to receive(:using_connection).with(main_model.connection).and_yield expect(Gitlab::Database::BackgroundMigration::BatchedMigration).to receive(:find_each).and_yield(migration1) diff --git a/spec/tasks/gitlab/backup_rake_spec.rb b/spec/tasks/gitlab/backup_rake_spec.rb index c0196c09e3c..8422e781fd2 100644 --- a/spec/tasks/gitlab/backup_rake_spec.rb +++ b/spec/tasks/gitlab/backup_rake_spec.rb @@ -107,7 +107,7 @@ RSpec.describe 'gitlab:backup namespace rake tasks', :delete, feature_category: with_them do before do allow(Kernel).to receive(:system).and_return(true) - allow(YAML).to receive(:load_file).and_return({ gitlab_version: Gitlab::VERSION }) + allow(YAML).to receive(:safe_load_file).and_return({ gitlab_version: Gitlab::VERSION }) allow(File).to receive(:delete).with(backup_restore_pid_path).and_return(1) allow(File).to receive(:open).and_call_original allow(File).to receive(:open).with(backup_restore_pid_path, any_args).and_yield(pid_file) @@ -158,7 +158,7 @@ RSpec.describe 'gitlab:backup namespace rake tasks', :delete, feature_category: context 'when restore matches gitlab version' do before do - allow(YAML).to receive(:load_file) + allow(YAML).to receive(:safe_load_file) .and_return({ gitlab_version: gitlab_version }) expect_next_instance_of(::Backup::Manager) do |instance| backup_types.each do |subtask| @@ -212,7 +212,7 @@ RSpec.describe 'gitlab:backup namespace rake 
tasks', :delete, feature_category: allow(Kernel).to receive(:system).and_return(true) allow(FileUtils).to receive(:cp_r).and_return(true) allow(FileUtils).to receive(:mv).and_return(true) - allow(YAML).to receive(:load_file) + allow(YAML).to receive(:safe_load_file) .and_return({ gitlab_version: Gitlab::VERSION }) expect_next_instance_of(::Backup::Manager) do |instance| diff --git a/spec/tasks/gitlab/db/decomposition/connection_status_spec.rb b/spec/tasks/gitlab/db/decomposition/connection_status_spec.rb index 55a50035222..78f86049ebb 100644 --- a/spec/tasks/gitlab/db/decomposition/connection_status_spec.rb +++ b/spec/tasks/gitlab/db/decomposition/connection_status_spec.rb @@ -2,7 +2,7 @@ require 'rake_helper' -RSpec.describe 'gitlab:db:decomposition:connection_status', feature_category: :pods do +RSpec.describe 'gitlab:db:decomposition:connection_status', feature_category: :cell do let(:max_connections) { 500 } let(:current_connections) { 300 } diff --git a/spec/tasks/gitlab/db/decomposition/rollback/bump_ci_sequences_rake_spec.rb b/spec/tasks/gitlab/db/decomposition/rollback/bump_ci_sequences_rake_spec.rb index 0682a4b39cf..4533ec28e43 100644 --- a/spec/tasks/gitlab/db/decomposition/rollback/bump_ci_sequences_rake_spec.rb +++ b/spec/tasks/gitlab/db/decomposition/rollback/bump_ci_sequences_rake_spec.rb @@ -3,7 +3,7 @@ require 'rake_helper' RSpec.describe 'gitlab:db:decomposition:rollback:bump_ci_sequences', :silence_stdout, - :suppress_gitlab_schemas_validate_connection, feature_category: :pods do + :suppress_gitlab_schemas_validate_connection, feature_category: :cell do before :all do Rake.application.rake_require 'tasks/gitlab/db/decomposition/rollback/bump_ci_sequences' @@ -86,7 +86,7 @@ RSpec.describe 'gitlab:db:decomposition:rollback:bump_ci_sequences', :silence_st context 'when multiple databases' do before do - skip_if_multiple_databases_not_setup(:ci) + skip_if_shared_database(:ci) end it 'does not change ci sequences on the ci database' do diff --git 
a/spec/tasks/gitlab/db/lock_writes_rake_spec.rb b/spec/tasks/gitlab/db/lock_writes_rake_spec.rb index 9d54241aa7f..90612bcf9f7 100644 --- a/spec/tasks/gitlab/db/lock_writes_rake_spec.rb +++ b/spec/tasks/gitlab/db/lock_writes_rake_spec.rb @@ -2,7 +2,7 @@ require 'rake_helper' -RSpec.describe 'gitlab:db:lock_writes', :reestablished_active_record_base, feature_category: :pods do +RSpec.describe 'gitlab:db:lock_writes', :reestablished_active_record_base, feature_category: :cell do before :all do Rake.application.rake_require 'active_record/railties/databases' Rake.application.rake_require 'tasks/seed_fu' diff --git a/spec/tasks/gitlab/db/truncate_legacy_tables_rake_spec.rb b/spec/tasks/gitlab/db/truncate_legacy_tables_rake_spec.rb index 6e245b6f227..301da891244 100644 --- a/spec/tasks/gitlab/db/truncate_legacy_tables_rake_spec.rb +++ b/spec/tasks/gitlab/db/truncate_legacy_tables_rake_spec.rb @@ -3,7 +3,7 @@ require 'rake_helper' RSpec.describe 'gitlab:db:truncate_legacy_tables', :silence_stdout, :reestablished_active_record_base, - :suppress_gitlab_schemas_validate_connection, feature_category: :pods do + :suppress_gitlab_schemas_validate_connection, feature_category: :cell do let(:main_connection) { ApplicationRecord.connection } let(:ci_connection) { Ci::ApplicationRecord.connection } let(:test_gitlab_main_table) { '_test_gitlab_main_table' } @@ -20,19 +20,16 @@ RSpec.describe 'gitlab:db:truncate_legacy_tables', :silence_stdout, :reestablish end before do - skip_if_multiple_databases_not_setup(:ci) - - # Filling the table on both databases main and ci - Gitlab::Database.database_base_models.each_value do |base_model| - base_model.connection.execute(<<~SQL) - CREATE TABLE #{test_gitlab_main_table} (id integer NOT NULL); - INSERT INTO #{test_gitlab_main_table} VALUES(generate_series(1, 50)); - SQL - base_model.connection.execute(<<~SQL) - CREATE TABLE #{test_gitlab_ci_table} (id integer NOT NULL); - INSERT INTO #{test_gitlab_ci_table} VALUES(generate_series(1, 50)); - 
SQL - end + skip_if_shared_database(:ci) + + execute_on_each_database(<<~SQL) + CREATE TABLE #{test_gitlab_main_table} (id integer NOT NULL); + INSERT INTO #{test_gitlab_main_table} VALUES(generate_series(1, 50)); + SQL + execute_on_each_database(<<~SQL) + CREATE TABLE #{test_gitlab_ci_table} (id integer NOT NULL); + INSERT INTO #{test_gitlab_ci_table} VALUES(generate_series(1, 50)); + SQL allow(Gitlab::Database::GitlabSchema).to receive(:tables_to_schema).and_return( { diff --git a/spec/tasks/gitlab/db/validate_config_rake_spec.rb b/spec/tasks/gitlab/db/validate_config_rake_spec.rb index cc90345c7e0..94808232d7e 100644 --- a/spec/tasks/gitlab/db/validate_config_rake_spec.rb +++ b/spec/tasks/gitlab/db/validate_config_rake_spec.rb @@ -2,7 +2,7 @@ require 'rake_helper' -RSpec.describe 'gitlab:db:validate_config', :silence_stdout, :suppress_gitlab_schemas_validate_connection, feature_category: :pods do +RSpec.describe 'gitlab:db:validate_config', :silence_stdout, :suppress_gitlab_schemas_validate_connection, feature_category: :cell do # We don't need to delete this data since it only modifies `ar_internal_metadata` # which would not be cleaned either by `DbCleaner` self.use_transactional_tests = false diff --git a/spec/tasks/gitlab/db_rake_spec.rb b/spec/tasks/gitlab/db_rake_spec.rb index 0f13840ae01..45d0c050949 100644 --- a/spec/tasks/gitlab/db_rake_spec.rb +++ b/spec/tasks/gitlab/db_rake_spec.rb @@ -25,7 +25,7 @@ RSpec.describe 'gitlab:db namespace rake task', :silence_stdout, feature_categor let(:main_model) { ApplicationRecord } before do - skip_if_multiple_databases_are_setup + skip_if_database_exists(:ci) end it 'marks the migration complete on the given database' do @@ -43,7 +43,7 @@ RSpec.describe 'gitlab:db namespace rake task', :silence_stdout, feature_categor let(:base_models) { { 'main' => main_model, 'ci' => ci_model } } before do - skip_unless_ci_uses_database_tasks + skip_if_shared_database(:ci) allow(Gitlab::Database).to 
receive(:database_base_models_with_gitlab_shared).and_return(base_models) end @@ -130,7 +130,7 @@ RSpec.describe 'gitlab:db namespace rake task', :silence_stdout, feature_categor let(:main_config) { double(:config, name: 'main') } before do - skip_if_multiple_databases_are_setup + skip_if_database_exists(:ci) end context 'when geo is not configured' do @@ -259,7 +259,7 @@ RSpec.describe 'gitlab:db namespace rake task', :silence_stdout, feature_categor let(:ci_config) { double(:config, name: 'ci') } before do - skip_unless_ci_uses_database_tasks + skip_if_shared_database(:ci) allow(Gitlab::Database).to receive(:database_base_models_with_gitlab_shared).and_return(base_models) end @@ -352,6 +352,40 @@ RSpec.describe 'gitlab:db namespace rake task', :silence_stdout, feature_categor end end + describe 'schema inconsistencies' do + let(:expected_value) do + [ + { inconsistency_type: 'wrong_indexes', object_name: 'index_1' }, + { inconsistency_type: 'missing_indexes', object_name: 'index_2' } + ] + end + + let(:runner) { instance_double(Gitlab::Database::SchemaValidation::Runner, execute: inconsistencies) } + let(:inconsistency_class) { Gitlab::Database::SchemaValidation::Inconsistency } + + let(:inconsistencies) do + [ + instance_double(inconsistency_class, inspect: 'index_statement_1'), + instance_double(inconsistency_class, inspect: 'index_statement_2') + ] + end + + let(:rake_output) do + <<~MSG + index_statement_1 + index_statement_2 + MSG + end + + before do + allow(Gitlab::Database::SchemaValidation::Runner).to receive(:new).and_return(runner) + end + + it 'prints the inconsistency message' do + expect { run_rake_task('gitlab:db:schema_checker:run') }.to output(rake_output).to_stdout + end + end + describe 'dictionary generate' do let(:db_config) { instance_double(ActiveRecord::DatabaseConfigurations::HashConfig, name: 'fake_db') } @@ -581,7 +615,7 @@ RSpec.describe 'gitlab:db namespace rake task', :silence_stdout, feature_categor let(:base_models) { { 'main' => 
main_model, 'ci' => ci_model } } before do - skip_unless_ci_uses_database_tasks + skip_if_shared_database(:ci) allow(Gitlab::Database).to receive(:database_base_models_with_gitlab_shared).and_return(base_models) @@ -653,7 +687,7 @@ RSpec.describe 'gitlab:db namespace rake task', :silence_stdout, feature_categor context 'with multiple databases' do before do - skip_unless_ci_uses_database_tasks + skip_if_shared_database(:ci) end context 'when running the multi-database variant' do @@ -688,7 +722,7 @@ RSpec.describe 'gitlab:db namespace rake task', :silence_stdout, feature_categor describe 'reindex' do context 'with a single database' do before do - skip_if_multiple_databases_are_setup + skip_if_shared_database(:ci) end it 'delegates to Gitlab::Database::Reindexing' do @@ -724,7 +758,7 @@ RSpec.describe 'gitlab:db namespace rake task', :silence_stdout, feature_categor context 'when the single database task is used' do before do - skip_unless_ci_uses_database_tasks + skip_if_shared_database(:ci) end it 'delegates to Gitlab::Database::Reindexing with a specific database' do @@ -776,7 +810,7 @@ RSpec.describe 'gitlab:db namespace rake task', :silence_stdout, feature_categor describe 'execute_async_index_operations' do before do - skip_if_multiple_databases_not_setup + skip_if_shared_database(:ci) end it 'delegates ci task to Gitlab::Database::AsyncIndexes' do @@ -850,7 +884,7 @@ RSpec.describe 'gitlab:db namespace rake task', :silence_stdout, feature_categor describe 'validate_async_constraints' do before do - skip_if_multiple_databases_not_setup + skip_if_shared_database(:ci) end it 'delegates ci task to Gitlab::Database::AsyncConstraints' do @@ -1089,7 +1123,7 @@ RSpec.describe 'gitlab:db namespace rake task', :silence_stdout, feature_categor context 'with multiple databases', :reestablished_active_record_base do before do - skip_unless_ci_uses_database_tasks + skip_if_shared_database(:ci) end describe 'db:schema:dump against a single database' do @@ -1171,14 +1205,6 
@@ RSpec.describe 'gitlab:db namespace rake task', :silence_stdout, feature_categor run_rake_task(test_task_name) end - def skip_unless_ci_uses_database_tasks - skip "Skipping because database tasks won't run against the ci database" unless ci_database_tasks? - end - - def ci_database_tasks? - !!ActiveRecord::Base.configurations.configs_for(env_name: Rails.env, name: 'ci')&.database_tasks? - end - def skip_unless_geo_configured skip 'Skipping because the geo database is not configured' unless geo_configured? end diff --git a/spec/tasks/gitlab/gitaly_rake_spec.rb b/spec/tasks/gitlab/gitaly_rake_spec.rb index d2f4fa0b8ef..a161f33373d 100644 --- a/spec/tasks/gitlab/gitaly_rake_spec.rb +++ b/spec/tasks/gitlab/gitaly_rake_spec.rb @@ -66,7 +66,7 @@ RSpec.describe 'gitlab:gitaly namespace rake task', :silence_stdout do .with(%w[which gmake]) .and_return(['/usr/bin/gmake', 0]) expect(Gitlab::Popen).to receive(:popen) - .with(%w[gmake clean-build all], nil, { "BUNDLE_GEMFILE" => nil, "RUBYOPT" => nil }) + .with(%w[gmake clean all]) .and_return(['ok', 0]) subject @@ -78,7 +78,7 @@ RSpec.describe 'gitlab:gitaly namespace rake task', :silence_stdout do .with(%w[which gmake]) .and_return(['/usr/bin/gmake', 0]) expect(Gitlab::Popen).to receive(:popen) - .with(%w[gmake clean-build all], nil, { "BUNDLE_GEMFILE" => nil, "RUBYOPT" => nil }) + .with(%w[gmake clean all]) .and_return(['output', 1]) expect { subject }.to raise_error /Gitaly failed to compile: output/ @@ -95,27 +95,11 @@ RSpec.describe 'gitlab:gitaly namespace rake task', :silence_stdout do it 'calls make in the gitaly directory' do expect(Gitlab::Popen).to receive(:popen) - .with(%w[make clean-build all], nil, { "BUNDLE_GEMFILE" => nil, "RUBYOPT" => nil }) + .with(%w[make clean all]) .and_return(['output', 0]) subject end - - context 'when Rails.env is test' do - let(:command) { %w[make clean-build all] } - - before do - stub_rails_env('test') - end - - it 'calls make in the gitaly directory with BUNDLE_DEPLOYMENT and 
GEM_HOME variables' do - expect(Gitlab::Popen).to receive(:popen) - .with(command, nil, { "BUNDLE_GEMFILE" => nil, "RUBYOPT" => nil, "BUNDLE_DEPLOYMENT" => 'false', "GEM_HOME" => Bundler.bundle_path.to_s }) - .and_return(['/usr/bin/gmake', 0]) - - subject - end - end end end end diff --git a/spec/tasks/gitlab/refresh_project_statistics_build_artifacts_size_rake_spec.rb b/spec/tasks/gitlab/refresh_project_statistics_build_artifacts_size_rake_spec.rb index 3ee01977cba..f0fc3c501c5 100644 --- a/spec/tasks/gitlab/refresh_project_statistics_build_artifacts_size_rake_spec.rb +++ b/spec/tasks/gitlab/refresh_project_statistics_build_artifacts_size_rake_spec.rb @@ -2,7 +2,7 @@ require 'rake_helper' -RSpec.describe 'gitlab:refresh_project_statistics_build_artifacts_size rake task', :silence_stdout do +RSpec.describe 'gitlab:refresh_project_statistics_build_artifacts_size rake task', :silence_stdout, feature_category: :build_artifacts do let(:rake_task) { 'gitlab:refresh_project_statistics_build_artifacts_size' } describe 'enqueuing build artifacts size statistics refresh for given list of project IDs' do @@ -10,8 +10,6 @@ RSpec.describe 'gitlab:refresh_project_statistics_build_artifacts_size rake task let_it_be(:project_2) { create(:project) } let_it_be(:project_3) { create(:project) } - let(:string_of_ids) { "#{project_1.id} #{project_2.id} #{project_3.id} 999999" } - let(:csv_url) { 'https://www.example.com/foo.csv' } let(:csv_body) do <<~BODY PROJECT_ID @@ -26,13 +24,12 @@ RSpec.describe 'gitlab:refresh_project_statistics_build_artifacts_size rake task stub_const("BUILD_ARTIFACTS_SIZE_REFRESH_ENQUEUE_BATCH_SIZE", 2) - stub_request(:get, csv_url).to_return(status: 200, body: csv_body) allow(Kernel).to receive(:sleep).with(1) end - context 'when given a list of space-separated IDs through rake argument' do + shared_examples_for 'recalculates project statistics successfully' do it 'enqueues the projects for refresh' do - expect { run_rake_task(rake_task, csv_url) }.to 
output(/Done/).to_stdout + expect { run_rake_task(rake_task, csv_path) }.to output(/Done/).to_stdout expect(Projects::BuildArtifactsSizeRefresh.all.map(&:project)).to match_array([project_1, project_2, project_3]) end @@ -42,11 +39,11 @@ RSpec.describe 'gitlab:refresh_project_statistics_build_artifacts_size rake task expect(Kernel).to receive(:sleep).with(1) expect(Projects::BuildArtifactsSizeRefresh).to receive(:enqueue_refresh).with([project_3]).ordered - run_rake_task(rake_task, csv_url) + run_rake_task(rake_task, csv_path) end end - context 'when CSV has invalid header' do + shared_examples_for 'raises error for invalid header' do let(:csv_body) do <<~BODY projectid @@ -57,8 +54,34 @@ RSpec.describe 'gitlab:refresh_project_statistics_build_artifacts_size rake task end it 'returns an error message' do - expect { run_rake_task(rake_task, csv_url) }.to output(/Project IDs must be listed in the CSV under the header PROJECT_ID/).to_stdout + expect { run_rake_task(rake_task, csv_path) }.to output(/Project IDs must be listed in the CSV under the header PROJECT_ID/).to_stdout end end + + context 'when given a remote CSV file' do + let(:csv_path) { 'https://www.example.com/foo.csv' } + + before do + stub_request(:get, csv_path).to_return(status: 200, body: csv_body) + end + + it_behaves_like 'recalculates project statistics successfully' + it_behaves_like 'raises error for invalid header' + end + + context 'when given a local CSV file' do + before do + File.write(csv_path, csv_body, mode: 'w') + end + + after do + FileUtils.rm_f(csv_path) + end + + let(:csv_path) { 'foo.csv' } + + it_behaves_like 'recalculates project statistics successfully' + it_behaves_like 'raises error for invalid header' + end end end diff --git a/spec/tasks/gitlab/setup_rake_spec.rb b/spec/tasks/gitlab/setup_rake_spec.rb index c31546fc259..80e997fcf88 100644 --- a/spec/tasks/gitlab/setup_rake_spec.rb +++ b/spec/tasks/gitlab/setup_rake_spec.rb @@ -7,6 +7,8 @@ RSpec.describe 'gitlab:setup namespace 
rake tasks', :silence_stdout do Rake.application.rake_require 'active_record/railties/databases' Rake.application.rake_require 'tasks/seed_fu' Rake.application.rake_require 'tasks/dev' + Rake.application.rake_require 'tasks/gitlab/db/validate_config' + Rake.application.rake_require 'tasks/gitlab/db/lock_writes' Rake.application.rake_require 'tasks/gitlab/setup' end @@ -115,11 +117,13 @@ RSpec.describe 'gitlab:setup namespace rake tasks', :silence_stdout do def expect_database_to_be_setup expect(Rake::Task['db:reset']).to receive(:invoke) + expect(Rake::Task['gitlab:db:lock_writes']).to receive(:invoke) expect(Rake::Task['db:seed_fu']).to receive(:invoke) end def expect_database_not_to_be_setup expect(Rake::Task['db:reset']).not_to receive(:invoke) + expect(Rake::Task['gitlab:db:lock_writes']).not_to receive(:invoke) expect(Rake::Task['db:seed_fu']).not_to receive(:invoke) end end diff --git a/spec/tasks/gitlab/storage_rake_spec.rb b/spec/tasks/gitlab/storage_rake_spec.rb index a2546b8d033..cd520673143 100644 --- a/spec/tasks/gitlab/storage_rake_spec.rb +++ b/spec/tasks/gitlab/storage_rake_spec.rb @@ -2,7 +2,7 @@ require 'rake_helper' -RSpec.describe 'rake gitlab:storage:*', :silence_stdout, feature_category: :pods do +RSpec.describe 'rake gitlab:storage:*', :silence_stdout, feature_category: :cell do before do Rake.application.rake_require 'tasks/gitlab/storage' diff --git a/spec/tooling/danger/feature_flag_spec.rb b/spec/tooling/danger/feature_flag_spec.rb index 4575d8ca981..f4df2e1226c 100644 --- a/spec/tooling/danger/feature_flag_spec.rb +++ b/spec/tooling/danger/feature_flag_spec.rb @@ -83,6 +83,28 @@ RSpec.describe Tooling::Danger::FeatureFlag do end end + describe '#stage_label' do + before do + allow(fake_helper).to receive(:mr_labels).and_return(labels) + end + + context 'when there is no stage label' do + let(:labels) { [] } + + it 'returns nil' do + expect(feature_flag.stage_label).to be_nil + end + end + + context 'when there is a stage label' do + 
let(:labels) { ['devops::verify', 'group::pipeline execution'] } + + it 'returns the stage label' do + expect(feature_flag.stage_label).to eq(labels.first) + end + end + end + describe described_class::Found do let(:feature_flag_path) { 'config/feature_flags/development/entry.yml' } let(:group) { 'group::source code' } diff --git a/spec/tooling/danger/multiversion_spec.rb b/spec/tooling/danger/multiversion_spec.rb new file mode 100644 index 00000000000..90edad61d47 --- /dev/null +++ b/spec/tooling/danger/multiversion_spec.rb @@ -0,0 +1,79 @@ +# frozen_string_literal: true + +require 'rspec-parameterized' +require 'gitlab-dangerfiles' +require 'gitlab/dangerfiles/spec_helper' + +require_relative '../../../tooling/danger/multiversion' +require_relative '../../../tooling/danger/project_helper' + +RSpec.describe Tooling::Danger::Multiversion, feature_category: :shared do + include_context "with dangerfile" + + subject(:multiversion) { fake_danger.new(helper: fake_helper, git: fake_git) } + + let(:fake_danger) { DangerSpecHelper.fake_danger.include(described_class) } + let(:ci_env) { true } + + before do + allow(fake_helper).to receive(:ci?).and_return(ci_env) + allow(fake_git).to receive(:modified_files).and_return(modified_files) + allow(fake_git).to receive(:added_files).and_return(added_files) + end + + describe '#check!' do + using RSpec::Parameterized::TableSyntax + + context 'when not in ci environment' do + let(:ci_env) { false } + + it 'does not add the warning markdown section' do + expect(multiversion).not_to receive(:markdown) + + multiversion.check! 
+ end + end + + context 'when GraphQL API and frontend assets have not been simultaneously updated' do + where(:modified_files, :added_files) do + %w[app/assets/helloworld.vue] | %w[] + %w[app/assets/helloworld.vue] | %w[app/type.rb] + %w[app/assets/helloworld.js] | %w[app/graphql.rb] + %w[app/assets/helloworld.graphql] | %w[app/models/graphql.rb] + %w[] | %w[app/graphql/type.rb] + %w[app/vue.txt] | %w[app/graphql/type.rb] + %w[app/views/foo.haml] | %w[app/graphql/type.rb] + %w[foo] | %w[] + %w[] | %w[] + end + + with_them do + it 'does not add the warning markdown section' do + expect(multiversion).not_to receive(:markdown) + + multiversion.check! + end + end + end + + context 'when GraphQL API and frontend assets have been simultaneously updated' do + where(:modified_files, :added_files) do + %w[app/assets/helloworld.vue] | %w[app/graphql/type.rb] + %w[app/assets/helloworld.vue] | %w[app/graphql/type.rb] + %w[app/assets/helloworld.js] | %w[app/graphql/type.rb] + %w[ee/app/assets/helloworld.js] | %w[app/graphql/type.rb] + %w[app/assets/helloworld.graphql] | %w[ee/app/graphql/type.rb] + %w[ee/app/assets/helloworld.graphql] | %w[ee/app/graphql/type.rb] + %w[ee/app/assets/helloworld.graphql] | %w[jh/app/graphql/type.rb] + end + + with_them do + it 'adds the warning markdown section' do + expect(multiversion).to receive(:markdown) + + multiversion.check! 
+ end + end + end + end +end diff --git a/spec/tooling/danger/specs/feature_category_suggestion_spec.rb b/spec/tooling/danger/specs/feature_category_suggestion_spec.rb new file mode 100644 index 00000000000..3956553f488 --- /dev/null +++ b/spec/tooling/danger/specs/feature_category_suggestion_spec.rb @@ -0,0 +1,99 @@ +# frozen_string_literal: true + +require 'gitlab/dangerfiles/spec_helper' + +require_relative '../../../../tooling/danger/specs' +require_relative '../../../../tooling/danger/project_helper' + +RSpec.describe Tooling::Danger::Specs::FeatureCategorySuggestion, feature_category: :tooling do + include_context "with dangerfile" + + let(:fake_danger) { DangerSpecHelper.fake_danger.include(Tooling::Danger::Specs) } + let(:fake_project_helper) { instance_double('Tooling::Danger::ProjectHelper') } + let(:filename) { 'spec/foo_spec.rb' } + + let(:template) do + <<~SUGGESTION_MARKDOWN.chomp + ```suggestion + %s + ``` + + Consider adding `feature_category: ` for this example if it is not set already. + See [testing best practices](https://docs.gitlab.com/ee/development/testing_guide/best_practices.html#feature-category-metadata). 
+ SUGGESTION_MARKDOWN + end + + let(:file_lines) do + [ + " require 'spec_helper'", + " \n", + " RSpec.describe Projects::SummaryController, feature_category: :planning_analytics do", + " end", + "RSpec.describe Projects::SummaryController do", + " let_it_be(:user) { create(:user) }", + " end", + " describe 'GET \"time_summary\"' do", + " end", + " RSpec.describe Projects::SummaryController do", + " let_it_be(:user) { create(:user) }", + " end", + " describe 'GET \"time_summary\"' do", + " end", + " \n", + "RSpec.describe Projects :aggregate_failures,", + " feature_category: planning_analytics do", + " \n", + "RSpec.describe Epics :aggregate_failures,", + " ee: true do", + "\n", + "RSpec.describe Issues :aggregate_failures,", + " feature_category: :team_planning do", + "\n", + "RSpec.describe MergeRequest :aggregate_failures,", + " :js,", + " feature_category: :team_planning do" + ] + end + + let(:changed_lines) do + [ + "+ RSpec.describe Projects::SummaryController, feature_category: :planning_analytics do", + "+RSpec.describe Projects::SummaryController do", + "+ let_it_be(:user) { create(:user) }", + "- end", + "+ describe 'GET \"time_summary\"' do", + "+ RSpec.describe Projects::SummaryController do", + "+RSpec.describe Projects :aggregate_failures,", + "+ feature_category: planning_analytics do", + "+RSpec.describe Epics :aggregate_failures,", + "+ ee: true do", + "+RSpec.describe Issues :aggregate_failures,", + "+RSpec.describe MergeRequest :aggregate_failures,", + "+ :js,", + "+ feature_category: :team_planning do", + "+RSpec.describe 'line in commit diff but no longer in working copy' do" + ] + end + + subject(:specs) { fake_danger.new(helper: fake_helper) } + + before do + allow(specs).to receive(:project_helper).and_return(fake_project_helper) + allow(specs.helper).to receive(:changed_lines).with(filename).and_return(changed_lines) + allow(specs.project_helper).to receive(:file_lines).and_return(file_lines) + end + + it 'adds suggestions at the correct 
lines', :aggregate_failures do + [ + { suggested_line: "RSpec.describe Projects::SummaryController do", number: 5 }, + { suggested_line: " RSpec.describe Projects::SummaryController do", number: 10 }, + { suggested_line: "RSpec.describe Epics :aggregate_failures,", number: 19 } + + ].each do |test_case| + comment = format(template, suggested_line: test_case[:suggested_line]) + expect(specs).to receive(:markdown).with(comment, file: filename, line: test_case[:number]) + end + + specs.add_suggestions_for(filename) + end +end diff --git a/spec/tooling/danger/specs/match_with_array_suggestion_spec.rb b/spec/tooling/danger/specs/match_with_array_suggestion_spec.rb new file mode 100644 index 00000000000..b065772a09b --- /dev/null +++ b/spec/tooling/danger/specs/match_with_array_suggestion_spec.rb @@ -0,0 +1,99 @@ +# frozen_string_literal: true + +require 'gitlab/dangerfiles/spec_helper' + +require_relative '../../../../tooling/danger/specs' +require_relative '../../../../tooling/danger/project_helper' + +RSpec.describe Tooling::Danger::Specs::MatchWithArraySuggestion, feature_category: :tooling do + include_context "with dangerfile" + + let(:fake_danger) { DangerSpecHelper.fake_danger.include(Tooling::Danger::Specs) } + let(:fake_project_helper) { instance_double('Tooling::Danger::ProjectHelper') } + let(:filename) { 'spec/foo_spec.rb' } + + let(:file_lines) do + [ + " describe 'foo' do", + " expect(foo).to match(['bar', 'baz'])", + " end", + " expect(foo).to match(['bar', 'baz'])", # same line as line 1 above, we expect two different suggestions + " ", + " expect(foo).to match ['bar', 'baz']", + " expect(foo).to eq(['bar', 'baz'])", + " expect(foo).to eq ['bar', 'baz']", + " expect(foo).to(match(['bar', 'baz']))", + " expect(foo).to(eq(['bar', 'baz']))", + " expect(foo).to(eq([bar, baz]))", + " expect(foo).to(eq(['bar']))", + " foo.eq(['bar'])" + ] + end + + let(:matching_lines) do + [ + "+ expect(foo).to match(['should not error'])", + "+ expect(foo).to match(['bar', 
'baz'])", + "+ expect(foo).to match(['bar', 'baz'])", + "+ expect(foo).to match ['bar', 'baz']", + "+ expect(foo).to eq(['bar', 'baz'])", + "+ expect(foo).to eq ['bar', 'baz']", + "+ expect(foo).to(match(['bar', 'baz']))", + "+ expect(foo).to(eq(['bar', 'baz']))", + "+ expect(foo).to(eq([bar, baz]))" + ] + end + + let(:changed_lines) do + [ + " expect(foo).to match(['bar', 'baz'])", + " expect(foo).to match(['bar', 'baz'])", + " expect(foo).to match ['bar', 'baz']", + " expect(foo).to eq(['bar', 'baz'])", + " expect(foo).to eq ['bar', 'baz']", + "- expect(foo).to match(['bar', 'baz'])", + "- expect(foo).to match(['bar', 'baz'])", + "- expect(foo).to match ['bar', 'baz']", + "- expect(foo).to eq(['bar', 'baz'])", + "- expect(foo).to eq ['bar', 'baz']", + "- expect(foo).to eq [bar, foo]", + "+ expect(foo).to eq([])" + ] + matching_lines + end + + let(:template) do + <<~MARKDOWN.chomp + ```suggestion + %s + ``` + + If order of the result is not important, please consider using `match_array` to avoid flakiness. 
+ MARKDOWN + end + + subject(:specs) { fake_danger.new(helper: fake_helper) } + + before do + allow(specs).to receive(:project_helper).and_return(fake_project_helper) + allow(specs.helper).to receive(:changed_lines).with(filename).and_return(changed_lines) + allow(specs.project_helper).to receive(:file_lines).and_return(file_lines) + end + + it 'adds suggestions at the correct lines' do + [ + { suggested_line: " expect(foo).to match_array(['bar', 'baz'])", number: 2 }, + { suggested_line: " expect(foo).to match_array(['bar', 'baz'])", number: 4 }, + { suggested_line: " expect(foo).to match_array ['bar', 'baz']", number: 6 }, + { suggested_line: " expect(foo).to match_array(['bar', 'baz'])", number: 7 }, + { suggested_line: " expect(foo).to match_array ['bar', 'baz']", number: 8 }, + { suggested_line: " expect(foo).to(match_array(['bar', 'baz']))", number: 9 }, + { suggested_line: " expect(foo).to(match_array(['bar', 'baz']))", number: 10 }, + { suggested_line: " expect(foo).to(match_array([bar, baz]))", number: 11 } + ].each do |test_case| + comment = format(template, suggested_line: test_case[:suggested_line]) + expect(specs).to receive(:markdown).with(comment, file: filename, line: test_case[:number]) + end + + specs.add_suggestions_for(filename) + end +end diff --git a/spec/tooling/danger/specs/project_factory_suggestion_spec.rb b/spec/tooling/danger/specs/project_factory_suggestion_spec.rb new file mode 100644 index 00000000000..95ffcfb1460 --- /dev/null +++ b/spec/tooling/danger/specs/project_factory_suggestion_spec.rb @@ -0,0 +1,104 @@ +# frozen_string_literal: true + +require 'gitlab/dangerfiles/spec_helper' + +require_relative '../../../../tooling/danger/specs' +require_relative '../../../../tooling/danger/project_helper' + +RSpec.describe Tooling::Danger::Specs::ProjectFactorySuggestion, feature_category: :tooling do + include_context "with dangerfile" + + let(:fake_danger) { DangerSpecHelper.fake_danger.include(Tooling::Danger::Specs) } + 
let(:fake_project_helper) { instance_double('Tooling::Danger::ProjectHelper') } + let(:filename) { 'spec/foo_spec.rb' } + + let(:template) do + <<~MARKDOWN.chomp + ```suggestion + %s + ``` + + Project creations are very slow. Use `let_it_be`, `build` or `build_stubbed` if possible. + See [testing best practices](https://docs.gitlab.com/ee/development/testing_guide/best_practices.html#optimize-factory-usage) + for background information and alternative options. + MARKDOWN + end + + let(:file_lines) do + [ + " let(:project) { create(:project) }", + " let_it_be(:project) { create(:project, :repository)", + " let!(:project) { create(:project) }", + " let(:var) { create(:project) }", + " let(:merge_request) { create(:merge_request, project: project)", + " context 'when merge request exists' do", + " it { is_expected.to be_success }", + " end", + " let!(:var) { create(:project) }", + " let(:project) { create(:thing) }", + " let(:project) { build(:project) }", + " let(:project) do", + " create(:project)", + " end", + " let(:project) { create(:project, :repository) }", + " str = 'let(:project) { create(:project) }'", + " let(:project) { create(:project_empty_repo) }", + " let(:project) { create(:forked_project_with_submodules) }", + " let(:project) { create(:project_with_design) }", + " let(:authorization) { create(:project_authorization) }" + ] + end + + let(:matching_lines) do + [ + "+ let(:should_not_error) { create(:project) }", + "+ let(:project) { create(:project) }", + "+ let!(:project) { create(:project) }", + "+ let(:var) { create(:project) }", + "+ let!(:var) { create(:project) }", + "+ let(:project) { create(:project, :repository) }", + "+ let(:project) { create(:project_empty_repo) }", + "+ let(:project) { create(:forked_project_with_submodules) }", + "+ let(:project) { create(:project_with_design) }" + ] + end + + let(:changed_lines) do + [ + "+ line which doesn't exist in the file and should not cause an error", + "+ let_it_be(:project) { create(:project, 
:repository)", + "+ let(:project) { create(:thing) }", + "+ let(:project) do", + "+ create(:project)", + "+ end", + "+ str = 'let(:project) { create(:project) }'", + "+ let(:authorization) { create(:project_authorization) }" + ] + matching_lines + end + + subject(:specs) { fake_danger.new(helper: fake_helper) } + + before do + allow(specs).to receive(:project_helper).and_return(fake_project_helper) + allow(specs.helper).to receive(:changed_lines).with(filename).and_return(changed_lines) + allow(specs.project_helper).to receive(:file_lines).and_return(file_lines) + end + + it 'adds suggestions at the correct lines', :aggregate_failures do + [ + { suggested_line: " let_it_be(:project) { create(:project) }", number: 1 }, + { suggested_line: " let_it_be(:project) { create(:project) }", number: 3 }, + { suggested_line: " let_it_be(:var) { create(:project) }", number: 4 }, + { suggested_line: " let_it_be(:var) { create(:project) }", number: 9 }, + { suggested_line: " let_it_be(:project) { create(:project, :repository) }", number: 15 }, + { suggested_line: " let_it_be(:project) { create(:project_empty_repo) }", number: 17 }, + { suggested_line: " let_it_be(:project) { create(:forked_project_with_submodules) }", number: 18 }, + { suggested_line: " let_it_be(:project) { create(:project_with_design) }", number: 19 } + ].each do |test_case| + comment = format(template, suggested_line: test_case[:suggested_line]) + expect(specs).to receive(:markdown).with(comment, file: filename, line: test_case[:number]) + end + + specs.add_suggestions_for(filename) + end +end diff --git a/spec/tooling/danger/specs_spec.rb b/spec/tooling/danger/specs_spec.rb index 09550f037d6..b4953858ef7 100644 --- a/spec/tooling/danger/specs_spec.rb +++ b/spec/tooling/danger/specs_spec.rb @@ -1,80 +1,24 @@ # frozen_string_literal: true -require 'rspec-parameterized' -require 'gitlab-dangerfiles' -require 'danger' -require 'danger/plugins/internal/helper' require 'gitlab/dangerfiles/spec_helper' 
require_relative '../../../tooling/danger/specs' -require_relative '../../../tooling/danger/project_helper' RSpec.describe Tooling::Danger::Specs, feature_category: :tooling do include_context "with dangerfile" let(:fake_danger) { DangerSpecHelper.fake_danger.include(described_class) } - let(:fake_project_helper) { instance_double('Tooling::Danger::ProjectHelper') } let(:filename) { 'spec/foo_spec.rb' } - let(:file_lines) do - [ - " describe 'foo' do", - " expect(foo).to match(['bar', 'baz'])", - " end", - " expect(foo).to match(['bar', 'baz'])", # same line as line 1 above, we expect two different suggestions - " ", - " expect(foo).to match ['bar', 'baz']", - " expect(foo).to eq(['bar', 'baz'])", - " expect(foo).to eq ['bar', 'baz']", - " expect(foo).to(match(['bar', 'baz']))", - " expect(foo).to(eq(['bar', 'baz']))", - " expect(foo).to(eq([bar, baz]))", - " expect(foo).to(eq(['bar']))", - " foo.eq(['bar'])" - ] - end - - let(:matching_lines) do - [ - "+ expect(foo).to match(['should not error'])", - "+ expect(foo).to match(['bar', 'baz'])", - "+ expect(foo).to match(['bar', 'baz'])", - "+ expect(foo).to match ['bar', 'baz']", - "+ expect(foo).to eq(['bar', 'baz'])", - "+ expect(foo).to eq ['bar', 'baz']", - "+ expect(foo).to(match(['bar', 'baz']))", - "+ expect(foo).to(eq(['bar', 'baz']))", - "+ expect(foo).to(eq([bar, baz]))" - ] - end - - let(:changed_lines) do - [ - " expect(foo).to match(['bar', 'baz'])", - " expect(foo).to match(['bar', 'baz'])", - " expect(foo).to match ['bar', 'baz']", - " expect(foo).to eq(['bar', 'baz'])", - " expect(foo).to eq ['bar', 'baz']", - "- expect(foo).to match(['bar', 'baz'])", - "- expect(foo).to match(['bar', 'baz'])", - "- expect(foo).to match ['bar', 'baz']", - "- expect(foo).to eq(['bar', 'baz'])", - "- expect(foo).to eq ['bar', 'baz']", - "- expect(foo).to eq [bar, foo]", - "+ expect(foo).to eq([])" - ] + matching_lines - end - subject(:specs) { fake_danger.new(helper: fake_helper) } - before do - allow(specs).to 
receive(:project_helper).and_return(fake_project_helper) - allow(specs.helper).to receive(:changed_lines).with(filename).and_return(matching_lines) - allow(specs.project_helper).to receive(:file_lines).and_return(file_lines) - end - describe '#changed_specs_files' do - let(:base_expected_files) { %w[spec/foo_spec.rb ee/spec/foo_spec.rb spec/bar_spec.rb ee/spec/bar_spec.rb spec/zab_spec.rb ee/spec/zab_spec.rb] } + let(:base_expected_files) do + %w[ + spec/foo_spec.rb ee/spec/foo_spec.rb spec/bar_spec.rb + ee/spec/bar_spec.rb spec/zab_spec.rb ee/spec/zab_spec.rb + ] + end before do all_changed_files = %w[ @@ -98,211 +42,16 @@ RSpec.describe Tooling::Danger::Specs, feature_category: :tooling do context 'with include_ee: :exclude' do it 'returns spec files without EE-specific files' do - expect(specs.changed_specs_files(ee: :exclude)).not_to include(%w[ee/spec/foo_spec.rb ee/spec/bar_spec.rb ee/spec/zab_spec.rb]) + expect(specs.changed_specs_files(ee: :exclude)) + .not_to include(%w[ee/spec/foo_spec.rb ee/spec/bar_spec.rb ee/spec/zab_spec.rb]) end end context 'with include_ee: :only' do it 'returns EE-specific spec files only' do - expect(specs.changed_specs_files(ee: :only)).to match_array(%w[ee/spec/foo_spec.rb ee/spec/bar_spec.rb ee/spec/zab_spec.rb]) - end - end - end - - describe '#add_suggestions_for_match_with_array' do - let(:template) do - <<~MARKDOWN.chomp - ```suggestion - %s - ``` - - If order of the result is not important, please consider using `match_array` to avoid flakiness. 
- MARKDOWN - end - - it 'adds suggestions at the correct lines' do - [ - { suggested_line: " expect(foo).to match_array(['bar', 'baz'])", number: 2 }, - { suggested_line: " expect(foo).to match_array(['bar', 'baz'])", number: 4 }, - { suggested_line: " expect(foo).to match_array ['bar', 'baz']", number: 6 }, - { suggested_line: " expect(foo).to match_array(['bar', 'baz'])", number: 7 }, - { suggested_line: " expect(foo).to match_array ['bar', 'baz']", number: 8 }, - { suggested_line: " expect(foo).to(match_array(['bar', 'baz']))", number: 9 }, - { suggested_line: " expect(foo).to(match_array(['bar', 'baz']))", number: 10 }, - { suggested_line: " expect(foo).to(match_array([bar, baz]))", number: 11 } - ].each do |test_case| - comment = format(template, suggested_line: test_case[:suggested_line]) - expect(specs).to receive(:markdown).with(comment, file: filename, line: test_case[:number]) - end - - specs.add_suggestions_for_match_with_array(filename) - end - end - - describe '#add_suggestions_for_project_factory_usage' do - let(:template) do - <<~MARKDOWN.chomp - ```suggestion - %s - ``` - - Project creations are very slow. Use `let_it_be`, `build` or `build_stubbed` if possible. - See [testing best practices](https://docs.gitlab.com/ee/development/testing_guide/best_practices.html#optimize-factory-usage) - for background information and alternative options. 
- MARKDOWN - end - - let(:file_lines) do - [ - " let(:project) { create(:project) }", - " let_it_be(:project) { create(:project, :repository)", - " let!(:project) { create(:project) }", - " let(:var) { create(:project) }", - " let(:merge_request) { create(:merge_request, project: project)", - " context 'when merge request exists' do", - " it { is_expected.to be_success }", - " end", - " let!(:var) { create(:project) }", - " let(:project) { create(:thing) }", - " let(:project) { build(:project) }", - " let(:project) do", - " create(:project)", - " end", - " let(:project) { create(:project, :repository) }", - " str = 'let(:project) { create(:project) }'", - " let(:project) { create(:project_empty_repo) }", - " let(:project) { create(:forked_project_with_submodules) }", - " let(:project) { create(:project_with_design) }", - " let(:authorization) { create(:project_authorization) }" - ] - end - - let(:matching_lines) do - [ - "+ let(:should_not_error) { create(:project) }", - "+ let(:project) { create(:project) }", - "+ let!(:project) { create(:project) }", - "+ let(:var) { create(:project) }", - "+ let!(:var) { create(:project) }", - "+ let(:project) { create(:project, :repository) }", - "+ let(:project) { create(:project_empty_repo) }", - "+ let(:project) { create(:forked_project_with_submodules) }", - "+ let(:project) { create(:project_with_design) }" - ] - end - - let(:changed_lines) do - [ - "+ line which doesn't exist in the file and should not cause an error", - "+ let_it_be(:project) { create(:project, :repository)", - "+ let(:project) { create(:thing) }", - "+ let(:project) do", - "+ create(:project)", - "+ end", - "+ str = 'let(:project) { create(:project) }'", - "+ let(:authorization) { create(:project_authorization) }" - ] + matching_lines - end - - it 'adds suggestions at the correct lines', :aggregate_failures do - [ - { suggested_line: " let_it_be(:project) { create(:project) }", number: 1 }, - { suggested_line: " let_it_be(:project) { create(:project) 
}", number: 3 }, - { suggested_line: " let_it_be(:var) { create(:project) }", number: 4 }, - { suggested_line: " let_it_be(:var) { create(:project) }", number: 9 }, - { suggested_line: " let_it_be(:project) { create(:project, :repository) }", number: 15 }, - { suggested_line: " let_it_be(:project) { create(:project_empty_repo) }", number: 17 }, - { suggested_line: " let_it_be(:project) { create(:forked_project_with_submodules) }", number: 18 }, - { suggested_line: " let_it_be(:project) { create(:project_with_design) }", number: 19 } - ].each do |test_case| - comment = format(template, suggested_line: test_case[:suggested_line]) - expect(specs).to receive(:markdown).with(comment, file: filename, line: test_case[:number]) - end - - specs.add_suggestions_for_project_factory_usage(filename) - end - end - - describe '#add_suggestions_for_feature_category' do - let(:template) do - <<~SUGGESTION_MARKDOWN.chomp - ```suggestion - %s - ``` - - Consider adding `feature_category: ` for this example if it is not set already. - See [testing best practices](https://docs.gitlab.com/ee/development/testing_guide/best_practices.html#feature-category-metadata). 
- SUGGESTION_MARKDOWN - end - - let(:file_lines) do - [ - " require 'spec_helper'", - " \n", - " RSpec.describe Projects::Analytics::CycleAnalytics::SummaryController, feature_category: :planning_analytics do", - " end", - "RSpec.describe Projects::Analytics::CycleAnalytics::SummaryController do", - " let_it_be(:user) { create(:user) }", - " end", - " describe 'GET \"time_summary\"' do", - " end", - " RSpec.describe Projects::Analytics::CycleAnalytics::SummaryController do", - " let_it_be(:user) { create(:user) }", - " end", - " describe 'GET \"time_summary\"' do", - " end", - " \n", - "RSpec.describe Projects :aggregate_failures,", - " feature_category: planning_analytics do", - " \n", - "RSpec.describe Epics :aggregate_failures,", - " ee: true do", - "\n", - "RSpec.describe Issues :aggregate_failures,", - " feature_category: :team_planning do", - "\n", - "RSpec.describe MergeRequest :aggregate_failures,", - " :js,", - " feature_category: :team_planning do" - ] - end - - let(:changed_lines) do - [ - "+ RSpec.describe Projects::Analytics::CycleAnalytics::SummaryController, feature_category: :planning_analytics do", - "+RSpec.describe Projects::Analytics::CycleAnalytics::SummaryController do", - "+ let_it_be(:user) { create(:user) }", - "- end", - "+ describe 'GET \"time_summary\"' do", - "+ RSpec.describe Projects::Analytics::CycleAnalytics::SummaryController do", - "+RSpec.describe Projects :aggregate_failures,", - "+ feature_category: planning_analytics do", - "+RSpec.describe Epics :aggregate_failures,", - "+ ee: true do", - "+RSpec.describe Issues :aggregate_failures,", - "+RSpec.describe MergeRequest :aggregate_failures,", - "+ :js,", - "+ feature_category: :team_planning do", - "+RSpec.describe 'line in commit diff but no longer in working copy' do" - ] - end - - before do - allow(specs.helper).to receive(:changed_lines).with(filename).and_return(changed_lines) - end - - it 'adds suggestions at the correct lines', :aggregate_failures do - [ - { 
suggested_line: "RSpec.describe Projects::Analytics::CycleAnalytics::SummaryController do", number: 5 }, - { suggested_line: " RSpec.describe Projects::Analytics::CycleAnalytics::SummaryController do", number: 10 }, - { suggested_line: "RSpec.describe Epics :aggregate_failures,", number: 19 } - - ].each do |test_case| - comment = format(template, suggested_line: test_case[:suggested_line]) - expect(specs).to receive(:markdown).with(comment, file: filename, line: test_case[:number]) + expect(specs.changed_specs_files(ee: :only)) + .to match_array(%w[ee/spec/foo_spec.rb ee/spec/bar_spec.rb ee/spec/zab_spec.rb]) end - - specs.add_suggestions_for_feature_category(filename) end end end diff --git a/spec/tooling/danger/stable_branch_spec.rb b/spec/tooling/danger/stable_branch_spec.rb index b0a8ab3c132..fc644413a5a 100644 --- a/spec/tooling/danger/stable_branch_spec.rb +++ b/spec/tooling/danger/stable_branch_spec.rb @@ -351,4 +351,26 @@ RSpec.describe Tooling::Danger::StableBranch, feature_category: :delivery do it { is_expected.to eq(result) } end end + + describe '#valid_stable_branch?' 
do + it "returns false when on the default branch" do + allow(fake_helper).to receive(:mr_target_branch).and_return('main') + + expect(stable_branch.valid_stable_branch?).to be(false) + end + + it "returns true when on a stable branch" do + allow(fake_helper).to receive(:mr_target_branch).and_return('15-1-stable-ee') + allow(fake_helper).to receive(:security_mr?).and_return(false) + + expect(stable_branch.valid_stable_branch?).to be(true) + end + + it "returns false when on a stable branch on a security MR" do + allow(fake_helper).to receive(:mr_target_branch).and_return('15-1-stable-ee') + allow(fake_helper).to receive(:security_mr?).and_return(true) + + expect(stable_branch.valid_stable_branch?).to be(false) + end + end end diff --git a/spec/tooling/docs/deprecation_handling_spec.rb b/spec/tooling/docs/deprecation_handling_spec.rb index 94c93d99b94..78e613c37c7 100644 --- a/spec/tooling/docs/deprecation_handling_spec.rb +++ b/spec/tooling/docs/deprecation_handling_spec.rb @@ -17,7 +17,7 @@ RSpec.describe Docs::DeprecationHandling do allow(YAML).to receive(:load_file) do |file_name| { 'title' => file_name[/[a-z]*\.yml/], - 'announcement_milestone' => file_name[/\d+-\d+/].tr('-', '.') + 'removal_milestone' => file_name[/\d+-\d+/].tr('-', '.') } end end diff --git a/spec/tooling/graphql/docs/renderer_spec.rb b/spec/tooling/graphql/docs/renderer_spec.rb index bf2383507aa..911dab09701 100644 --- a/spec/tooling/graphql/docs/renderer_spec.rb +++ b/spec/tooling/graphql/docs/renderer_spec.rb @@ -377,7 +377,7 @@ RSpec.describe Tooling::Graphql::Docs::Renderer do | Name | Type | Description | | ---- | ---- | ----------- | - | `fooArg` **{warning-solid}** | [`String`](#string) | **Introduced** in 101.2. This feature is in Alpha. It can be changed or removed at any time. Argument description. | + | `fooArg` **{warning-solid}** | [`String`](#string) | **Introduced** in 101.2. This feature is an Experiment. It can be changed or removed at any time. Argument description. 
| DOC end @@ -415,7 +415,7 @@ RSpec.describe Tooling::Graphql::Docs::Renderer do | Name | Type | Description | | ---- | ---- | ----------- | - | `foo` **{warning-solid}** | [`String!`](#string) | **Introduced** in 1.10. This feature is in Alpha. It can be changed or removed at any time. A description. | + | `foo` **{warning-solid}** | [`String!`](#string) | **Introduced** in 1.10. This feature is an Experiment. It can be changed or removed at any time. A description. | #### Fields with arguments @@ -425,7 +425,7 @@ RSpec.describe Tooling::Graphql::Docs::Renderer do WARNING: **Introduced** in 1.10. - This feature is in Alpha. It can be changed or removed at any time. + This feature is an Experiment. It can be changed or removed at any time. Returns [`String!`](#string). @@ -460,7 +460,7 @@ RSpec.describe Tooling::Graphql::Docs::Renderer do WARNING: **Introduced** in 10.11. - This feature is in Alpha. It can be changed or removed at any time. + This feature is an Experiment. It can be changed or removed at any time. Returns [`Int`](#int). 
DOC diff --git a/spec/tooling/lib/tooling/find_changes_spec.rb b/spec/tooling/lib/tooling/find_changes_spec.rb new file mode 100644 index 00000000000..37e590858cf --- /dev/null +++ b/spec/tooling/lib/tooling/find_changes_spec.rb @@ -0,0 +1,281 @@ +# frozen_string_literal: true + +require_relative '../../../../tooling/lib/tooling/find_changes' +require_relative '../../../support/helpers/stub_env' +require 'json' +require 'tempfile' + +RSpec.describe Tooling::FindChanges, feature_category: :tooling do + include StubENV + + attr_accessor :changed_files_file, :predictive_tests_file, :frontend_fixtures_mapping_file + + let(:instance) do + described_class.new( + changed_files_pathname: changed_files_pathname, + predictive_tests_pathname: predictive_tests_pathname, + frontend_fixtures_mapping_pathname: frontend_fixtures_mapping_pathname, + from: from) + end + + let(:changed_files_pathname) { changed_files_file.path } + let(:predictive_tests_pathname) { predictive_tests_file.path } + let(:frontend_fixtures_mapping_pathname) { frontend_fixtures_mapping_file.path } + let(:from) { :api } + let(:gitlab_client) { double('GitLab') } # rubocop:disable RSpec/VerifiedDoubles + + around do |example| + self.changed_files_file = Tempfile.new('changed_files_file') + self.predictive_tests_file = Tempfile.new('predictive_tests_file') + self.frontend_fixtures_mapping_file = Tempfile.new('frontend_fixtures_mapping_file') + + # See https://ruby-doc.org/stdlib-1.9.3/libdoc/tempfile/rdoc/ + # Tempfile.html#class-Tempfile-label-Explicit+close + begin + example.run + ensure + frontend_fixtures_mapping_file.close + frontend_fixtures_mapping_file.unlink + predictive_tests_file.close + predictive_tests_file.unlink + changed_files_file.close + changed_files_file.unlink + end + end + + before do + stub_env( + 'CI_API_V4_URL' => 'gitlab_api_url', + 'CI_MERGE_REQUEST_IID' => '1234', + 'CI_MERGE_REQUEST_PROJECT_PATH' => 'dummy-project', + 'PROJECT_TOKEN_FOR_CI_SCRIPTS_API_USAGE' => 'dummy-token' + ) + 
end + + describe '#initialize' do + context 'when fetching changes from unknown' do + let(:from) { :unknown } + + it 'raises an ArgumentError' do + expect { instance }.to raise_error( + ArgumentError, ":from can only be :api or :changed_files" + ) + end + end + end + + describe '#execute' do + subject { instance.execute } + + before do + allow(instance).to receive(:gitlab).and_return(gitlab_client) + end + + context 'when there is no changed files file' do + let(:changed_files_pathname) { nil } + + it 'raises an ArgumentError' do + expect { subject }.to raise_error( + ArgumentError, "A path to the changed files file must be given as :changed_files_pathname" + ) + end + end + + context 'when fetching changes from API' do + let(:from) { :api } + + it 'calls GitLab API to retrieve the MR diff' do + expect(gitlab_client).to receive_message_chain(:merge_request_changes, :changes).and_return([]) + + subject + end + end + + context 'when fetching changes from changed files' do + let(:from) { :changed_files } + + it 'does not call GitLab API to retrieve the MR diff' do + expect(gitlab_client).not_to receive(:merge_request_changes) + + subject + end + + context 'when there are no file changes' do + it 'writes an empty string to changed files file' do + expect { subject }.not_to change { File.read(changed_files_pathname) } + end + end + + context 'when there are file changes' do + before do + File.write(changed_files_pathname, changed_files_file_content) + end + + let(:changed_files_file_content) { 'first_file_changed second_file_changed' } + + # This is because we don't have frontend fixture mappings: we will just write the same data that we read. 
+ it 'does not change the changed files file' do + expect { subject }.not_to change { File.read(changed_files_pathname) } + end + end + + context 'when there is no matched tests file' do + let(:predictive_tests_pathname) { nil } + + it 'does not add frontend fixtures mapping to the changed files file' do + expect { subject }.not_to change { File.read(changed_files_pathname) } + end + end + + context 'when there is no frontend fixture files' do + let(:frontend_fixtures_mapping_pathname) { nil } + + it 'does not add frontend fixtures mapping to the changed files file' do + expect { subject }.not_to change { File.read(changed_files_pathname) } + end + end + + context 'when the matched tests file and frontend fixture files are provided' do + before do + File.write(predictive_tests_pathname, matched_tests) + File.write(frontend_fixtures_mapping_pathname, frontend_fixtures_mapping_json) + File.write(changed_files_pathname, changed_files_file_content) + end + + let(:changed_files_file_content) { '' } + + context 'when there are no mappings for the matched tests' do + let(:matched_tests) { 'match_spec1 match_spec_2' } + let(:frontend_fixtures_mapping_json) do + { other_spec: ['other_mapping'] }.to_json + end + + it 'does not change the changed files file' do + expect { subject }.not_to change { File.read(changed_files_pathname) } + end + end + + context 'when there are available mappings for the matched tests' do + let(:matched_tests) { 'match_spec1 match_spec_2' } + let(:spec_mappings) { %w[spec1_mapping1 spec1_mapping2] } + let(:frontend_fixtures_mapping_json) do + { match_spec1: spec_mappings }.to_json + end + + context 'when the changed files file is initially empty' do + it 'adds the frontend fixtures mappings to the changed files file' do + expect { subject }.to change { File.read(changed_files_pathname) }.from('').to(spec_mappings.join(' ')) + end + end + + context 'when the changed files file is initially not empty' do + let(:changed_files_file_content) { 
'initial_content1 initial_content2' } + + it 'adds the frontend fixtures mappings to the changed files file' do + expect { subject }.to change { File.read(changed_files_pathname) } + .from(changed_files_file_content) + .to("#{changed_files_file_content} #{spec_mappings.join(' ')}") + end + end + end + end + end + end + + describe '#only_js_files_changed' do + subject { instance.only_js_files_changed } + + context 'when fetching changes from changed files' do + let(:from) { :changed_files } + + before do + File.write(changed_files_pathname, changed_files_file_content) + end + + context 'when changed files contain only *.js changes' do + let(:changed_files_file_content) { 'a.js b.js' } + + it 'returns true' do + expect(subject).to be true + end + end + + context 'when changed files contain not only *.js changes' do + let(:changed_files_file_content) { 'a.js b.rb' } + + it 'returns false' do + expect(subject).to be false + end + end + end + + context 'when fetching changes from API' do + let(:from) { :api } + + let(:mr_changes_array) { [] } + + before do + allow(instance).to receive(:gitlab).and_return(gitlab_client) + + # The class from the GitLab gem isn't public, so we cannot use verified doubles for it. 
+ # + # rubocop:disable RSpec/VerifiedDoubles + allow(gitlab_client).to receive(:merge_request_changes) + .with('dummy-project', '1234') + .and_return(double(changes: mr_changes_array)) + # rubocop:enable RSpec/VerifiedDoubles + end + + context 'when a file is passed as an argument' do + it 'calls GitLab API' do + expect(gitlab_client).to receive(:merge_request_changes) + .with('dummy-project', '1234') + + subject + end + end + + context 'when there are no file changes' do + let(:mr_changes_array) { [] } + + it 'returns false' do + expect(subject).to be false + end + end + + context 'when there are changes to files other than JS files' do + let(:mr_changes_array) do + [ + { + "new_path" => "scripts/gitlab_component_helpers.sh", + "old_path" => "scripts/gitlab_component_helpers.sh" + }, + { + "new_path" => "scripts/test.js", + "old_path" => "scripts/test.js" + } + ] + end + + it 'returns false' do + expect(subject).to be false + end + end + + context 'when there are changes only to JS files' do + let(:mr_changes_array) do + [ + { + "new_path" => "scripts/test.js", + "old_path" => "scripts/test.js" + } + ] + end + + it 'returns true' do + expect(subject).to be true + end + end + end + end +end diff --git a/spec/tooling/lib/tooling/find_tests_spec.rb b/spec/tooling/lib/tooling/find_tests_spec.rb new file mode 100644 index 00000000000..905f81c4bbd --- /dev/null +++ b/spec/tooling/lib/tooling/find_tests_spec.rb @@ -0,0 +1,159 @@ +# frozen_string_literal: true + +require 'tempfile' +require_relative '../../../../tooling/lib/tooling/find_tests' +require_relative '../../../support/helpers/stub_env' + +RSpec.describe Tooling::FindTests, feature_category: :tooling do + include StubENV + + attr_accessor :changed_files_file, :predictive_tests_file + + let(:instance) { described_class.new(changed_files_pathname, predictive_tests_pathname) } + let(:mock_test_file_finder) { instance_double(TestFileFinder::FileFinder) } + let(:new_matching_tests) { ["new_matching_spec.rb"] } + 
let(:changed_files_pathname) { changed_files_file.path } + let(:predictive_tests_pathname) { predictive_tests_file.path } + let(:changed_files_content) { "changed_file1 changed_file2" } + let(:predictive_tests_content) { "previously_matching_spec.rb" } + + around do |example| + self.changed_files_file = Tempfile.new('changed_files_file') + self.predictive_tests_file = Tempfile.new('predictive_tests_file') + + # See https://ruby-doc.org/stdlib-1.9.3/libdoc/tempfile/rdoc/ + # Tempfile.html#class-Tempfile-label-Explicit+close + begin + example.run + ensure + changed_files_file.close + predictive_tests_file.close + changed_files_file.unlink + predictive_tests_file.unlink + end + end + + before do + allow(mock_test_file_finder).to receive(:use) + allow(mock_test_file_finder).to receive(:test_files).and_return(new_matching_tests) + allow(TestFileFinder::FileFinder).to receive(:new).and_return(mock_test_file_finder) + + stub_env( + 'RSPEC_TESTS_MAPPING_ENABLED' => nil, + 'RSPEC_TESTS_MAPPING_PATH' => '/tmp/does-not-exist.out' + ) + + # We write into the temp files initially, to later check how the code modified those files + File.write(changed_files_pathname, changed_files_content) + File.write(predictive_tests_pathname, predictive_tests_content) + end + + describe '#execute' do + subject { instance.execute } + + context 'when the predictive_tests_pathname file does not exist' do + let(:instance) { described_class.new(non_existing_output_pathname, predictive_tests_pathname) } + let(:non_existing_output_pathname) { 'tmp/another_file.out' } + + around do |example| + example.run + ensure + FileUtils.rm_rf(non_existing_output_pathname) + end + + it 'creates the file' do + expect { subject }.to change { File.exist?(non_existing_output_pathname) }.from(false).to(true) + end + end + + context 'when the predictive_tests_pathname file already exists' do + it 'does not create an empty file' do + expect(File).not_to receive(:write).with(predictive_tests_pathname, '') + + subject + 
end + end + + it 'does not modify the content of the input file' do + expect { subject }.not_to change { File.read(changed_files_pathname) } + end + + it 'does not overwrite the output file' do + expect { subject }.to change { File.read(predictive_tests_pathname) } + .from(predictive_tests_content) + .to("#{predictive_tests_content} #{new_matching_tests.uniq.join(' ')}") + end + + it 'loads the tests.yml file with a pattern matching mapping' do + expect(TestFileFinder::MappingStrategies::PatternMatching).to receive(:load).with('tests.yml') + + subject + end + + context 'when RSPEC_TESTS_MAPPING_ENABLED env variable is set' do + before do + stub_env( + 'RSPEC_TESTS_MAPPING_ENABLED' => 'true', + 'RSPEC_TESTS_MAPPING_PATH' => 'crystalball-test/mapping.json' + ) + end + + it 'loads the direct matching pattern file' do + expect(TestFileFinder::MappingStrategies::DirectMatching) + .to receive(:load_json) + .with('crystalball-test/mapping.json') + + subject + end + end + + context 'when RSPEC_TESTS_MAPPING_ENABLED env variable is not set' do + let(:rspec_tests_mapping_enabled) { '' } + + before do + stub_env( + 'RSPEC_TESTS_MAPPING_ENABLED' => rspec_tests_mapping_enabled, + 'RSPEC_TESTS_MAPPING_PATH' => rspec_tests_mapping_path + ) + end + + context 'when RSPEC_TESTS_MAPPING_PATH is set' do + let(:rspec_tests_mapping_path) { 'crystalball-test/mapping.json' } + + it 'does not load the direct matching pattern file' do + expect(TestFileFinder::MappingStrategies::DirectMatching).not_to receive(:load_json) + + subject + end + end + + context 'when RSPEC_TESTS_MAPPING_PATH is not set' do + let(:rspec_tests_mapping_path) { nil } + + it 'does not load the direct matching pattern file' do + expect(TestFileFinder::MappingStrategies::DirectMatching).not_to receive(:load_json) + + subject + end + end + end + + context 'when the same spec is matching multiple times' do + let(:new_matching_tests) do + [ + "new_matching_spec.rb", + "duplicate_spec.rb", + "duplicate_spec.rb" + ] + end + 
+ it 'writes uniquely matching specs to the output' do + subject + + expect(File.read(predictive_tests_pathname).split(' ')).to match_array( + predictive_tests_content.split(' ') + new_matching_tests.uniq + ) + end + end + end +end diff --git a/spec/tooling/lib/tooling/gettext_extractor_spec.rb b/spec/tooling/lib/tooling/gettext_extractor_spec.rb new file mode 100644 index 00000000000..47a808f12df --- /dev/null +++ b/spec/tooling/lib/tooling/gettext_extractor_spec.rb @@ -0,0 +1,254 @@ +# frozen_string_literal: true + +require 'rspec/parameterized' + +require_relative '../../../../tooling/lib/tooling/gettext_extractor' +require_relative '../../../support/helpers/stub_env' +require_relative '../../../support/tmpdir' + +RSpec.describe Tooling::GettextExtractor, feature_category: :tooling do + include StubENV + include TmpdirHelper + + let(:base_dir) { mktmpdir } + let(:instance) { described_class.new(backend_glob: '*.{rb,haml,erb}', glob_base: base_dir) } + let(:frontend_status) { true } + + let(:files) do + { + rb_file: File.join(base_dir, 'ruby.rb'), + haml_file: File.join(base_dir, 'template.haml'), + erb_file: File.join(base_dir, 'template.erb') + } + end + + before do + # Disable parallelism in specs in order to suppress some confusing stack traces + stub_env( + 'PARALLEL_PROCESSOR_COUNT' => 0 + ) + # Mock Backend files + File.write(files[:rb_file], '[_("RB"), _("All"), n_("Apple", "Apples", size), s_("Context|A"), N_("All2") ]') + File.write( + files[:erb_file], + '

    <%= _("ERB") + _("All") + n_("Pear", "Pears", size) + s_("Context|B") + N_("All2") %>

    ' + ) + File.write( + files[:haml_file], + '%h1= _("HAML") + _("All") + n_("Cabbage", "Cabbages", size) + s_("Context|C") + N_("All2")' + ) + # Stub out Frontend file parsing + status = {} + allow(status).to receive(:success?).and_return(frontend_status) + allow(Open3).to receive(:capture2) + .with("node scripts/frontend/extract_gettext_all.js --all") + .and_return([ + '{"example.js": [ ["JS"], ["All"], ["Mango\u0000Mangoes"], ["Context|D"], ["All2"] ] }', + status + ]) + end + + describe '::HamlParser' do + it 'overwrites libraries in order to prefer hamlit' do + expect(described_class::HamlParser.libraries).to match_array(['hamlit']) + end + end + + describe '#parse' do + it 'collects and merges translatable strings from frontend and backend' do + expect(instance.parse([]).to_h { |entry| [entry.msgid, entry.msgid_plural] }).to eq({ + 'All' => nil, + 'All2' => nil, + 'Context|A' => nil, + 'Context|B' => nil, + 'Context|C' => nil, + 'Context|D' => nil, + 'ERB' => nil, + 'HAML' => nil, + 'JS' => nil, + 'RB' => nil, + 'Apple' => 'Apples', + 'Cabbage' => 'Cabbages', + 'Mango' => 'Mangoes', + 'Pear' => 'Pears' + }) + end + + it 're-raises error from backend extraction' do + allow(instance).to receive(:parse_backend_file).and_raise(StandardError) + + expect { instance.parse([]) }.to raise_error(StandardError) + end + + context 'when frontend extraction raises an error' do + let(:frontend_status) { false } + + it 'is re-raised' do + expect { instance.parse([]) }.to raise_error(StandardError, 'Could not parse frontend files') + end + end + end + + describe '#generate_pot' do + subject { instance.generate_pot } + + it 'produces pot without date headers' do + expect(subject).not_to include('POT-Creation-Date:') + expect(subject).not_to include('PO-Revision-Date:') + end + + it 'produces pot file with all translated strings, sorted by msg id' do + expect(subject).to eql <<~POT_FILE + # SOME DESCRIPTIVE TITLE. 
+ # Copyright (C) YEAR THE PACKAGE'S COPYRIGHT HOLDER + # This file is distributed under the same license as the gitlab package. + # FIRST AUTHOR , YEAR. + # + #, fuzzy + msgid "" + msgstr "" + "Project-Id-Version: gitlab 1.0.0\\n" + "Report-Msgid-Bugs-To: \\n" + "Last-Translator: FULL NAME \\n" + "Language-Team: LANGUAGE \\n" + "Language: \\n" + "MIME-Version: 1.0\\n" + "Content-Type: text/plain; charset=UTF-8\\n" + "Content-Transfer-Encoding: 8bit\\n" + "Plural-Forms: nplurals=INTEGER; plural=EXPRESSION;\\n" + + msgid "All" + msgstr "" + + msgid "All2" + msgstr "" + + msgid "Apple" + msgid_plural "Apples" + msgstr[0] "" + msgstr[1] "" + + msgid "Cabbage" + msgid_plural "Cabbages" + msgstr[0] "" + msgstr[1] "" + + msgid "Context|A" + msgstr "" + + msgid "Context|B" + msgstr "" + + msgid "Context|C" + msgstr "" + + msgid "Context|D" + msgstr "" + + msgid "ERB" + msgstr "" + + msgid "HAML" + msgstr "" + + msgid "JS" + msgstr "" + + msgid "Mango" + msgid_plural "Mangoes" + msgstr[0] "" + msgstr[1] "" + + msgid "Pear" + msgid_plural "Pears" + msgstr[0] "" + msgstr[1] "" + + msgid "RB" + msgstr "" + POT_FILE + end + end + + # This private methods is tested directly, because unfortunately it is called + # with the "Parallel" gem. 
As the parallel gem executes this function in a different + # thread, our coverage reporting is confused + # + # On the other hand, the tests are also more readable, so maybe a win-win + describe '#parse_backend_file' do + subject { instance.send(:parse_backend_file, curr_file) } + + where do + { + 'with ruby file' => { + invalid_syntax: 'x = {id: _("RB")', + file: :rb_file, + result: { + 'All' => nil, + 'All2' => nil, + 'Context|A' => nil, + 'RB' => nil, 'Apple' => 'Apples' + } + }, + 'with haml file' => { + invalid_syntax: " %a\n- content = _('HAML')", + file: :haml_file, + result: { + 'All' => nil, + 'All2' => nil, + 'Context|C' => nil, + 'HAML' => nil, + 'Cabbage' => 'Cabbages' + } + }, + 'with erb file' => { + invalid_syntax: "<% x = {id: _('ERB') %>", + file: :erb_file, + result: { + 'All' => nil, + 'All2' => nil, + 'Context|B' => nil, + 'ERB' => nil, + 'Pear' => 'Pears' + } + } + } + end + + with_them do + let(:curr_file) { files[file] } + + context 'when file has valid syntax' do + it 'parses file and returns extracted strings as POEntries' do + expect(subject.map(&:class).uniq).to match_array([GetText::POEntry]) + expect(subject.to_h { |entry| [entry.msgid, entry.msgid_plural] }).to eq(result) + end + end + + # We do not worry about syntax errors in these file types, as it is _not_ the job of + # gettext extractor to ensure correctness of the files. 
These errors should raise + # in other places + context 'when file has invalid syntax' do + before do + File.write(curr_file, invalid_syntax) + end + + it 'does not raise error' do + expect { subject }.not_to raise_error + end + end + end + + context 'with unsupported file' do + let(:curr_file) { File.join(base_dir, 'foo.unsupported') } + + before do + File.write(curr_file, '') + end + + it 'raises error' do + expect { subject }.to raise_error(NotImplementedError) + end + end + end +end diff --git a/spec/tooling/lib/tooling/helpers/file_handler_spec.rb b/spec/tooling/lib/tooling/helpers/file_handler_spec.rb new file mode 100644 index 00000000000..b78f0a3bb6b --- /dev/null +++ b/spec/tooling/lib/tooling/helpers/file_handler_spec.rb @@ -0,0 +1,127 @@ +# frozen_string_literal: true + +require 'tempfile' +require_relative '../../../../../tooling/lib/tooling/helpers/file_handler' + +class MockClass # rubocop:disable Gitlab/NamespacedClass + include Tooling::Helpers::FileHandler +end + +RSpec.describe Tooling::Helpers::FileHandler, feature_category: :tooling do + attr_accessor :input_file_path, :output_file_path + + around do |example| + input_file = Tempfile.new('input') + output_file = Tempfile.new('output') + + self.input_file_path = input_file.path + self.output_file_path = output_file.path + + # See https://ruby-doc.org/stdlib-1.9.3/libdoc/tempfile/rdoc/ + # Tempfile.html#class-Tempfile-label-Explicit+close + begin + example.run + ensure + output_file.close + input_file.close + output_file.unlink + input_file.unlink + end + end + + let(:instance) { MockClass.new } + let(:initial_content) { 'previous_content1 previous_content2' } + + before do + # We write into the temp files initially, to later check how the code modified those files + File.write(input_file_path, initial_content) + File.write(output_file_path, initial_content) + end + + describe '#read_array_from_file' do + subject { instance.read_array_from_file(input_file_path) } + + context 'when the input file 
does not exist' do + let(:non_existing_input_pathname) { 'tmp/another_file.out' } + + subject { instance.read_array_from_file(non_existing_input_pathname) } + + around do |example| + example.run + ensure + FileUtils.rm_rf(non_existing_input_pathname) + end + + it 'creates the file' do + expect { subject }.to change { File.exist?(non_existing_input_pathname) }.from(false).to(true) + end + end + + context 'when the input file is not empty' do + let(:initial_content) { 'previous_content1 previous_content2' } + + it 'returns the content of the file in an array' do + expect(subject).to eq(initial_content.split(' ')) + end + end + end + + describe '#write_array_to_file' do + let(:content_array) { %w[new_entry] } + let(:append_flag) { true } + + subject { instance.write_array_to_file(output_file_path, content_array, append: append_flag) } + + context 'when the output file does not exist' do + let(:non_existing_output_file) { 'tmp/another_file.out' } + + subject { instance.write_array_to_file(non_existing_output_file, content_array) } + + around do |example| + example.run + ensure + FileUtils.rm_rf(non_existing_output_file) + end + + it 'creates the file' do + expect { subject }.to change { File.exist?(non_existing_output_file) }.from(false).to(true) + end + end + + context 'when the output file is empty' do + let(:initial_content) { '' } + + it 'writes the correct content to the file' do + expect { subject }.to change { File.read(output_file_path) }.from('').to(content_array.join(' ')) + end + + context 'when the content array is not sorted' do + let(:content_array) { %w[new_entry a_new_entry] } + + it 'sorts the array before writing it to file' do + expect { subject }.to change { File.read(output_file_path) }.from('').to(content_array.sort.join(' ')) + end + end + end + + context 'when the output file is not empty' do + let(:initial_content) { 'previous_content1 previous_content2' } + + it 'appends the correct content to the file' do + expect { subject }.to change { 
File.read(output_file_path) } + .from(initial_content) + .to((initial_content.split(' ') + content_array).join(' ')) + end + + context 'when the append flag is set to false' do + let(:append_flag) { false } + + it 'overwrites the previous content' do + expect { subject }.to change { File.read(output_file_path) } + .from(initial_content) + .to(content_array.join(' ')) + end + end + end + end +end diff --git a/spec/tooling/lib/tooling/kubernetes_client_spec.rb b/spec/tooling/lib/tooling/kubernetes_client_spec.rb index 50d33182a42..8d127f1345b 100644 --- a/spec/tooling/lib/tooling/kubernetes_client_spec.rb +++ b/spec/tooling/lib/tooling/kubernetes_client_spec.rb @@ -1,286 +1,200 @@ # frozen_string_literal: true +require 'time' require_relative '../../../../tooling/lib/tooling/kubernetes_client' RSpec.describe Tooling::KubernetesClient do - let(:namespace) { 'review-apps' } - let(:release_name) { 'my-release' } - let(:pod_for_release) { "pod-my-release-abcd" } - let(:raw_resource_names_str) { "NAME\nfoo\n#{pod_for_release}\nbar" } - let(:raw_resource_names) { raw_resource_names_str.lines.map(&:strip) } - - subject { described_class.new(namespace: namespace) } - - describe 'RESOURCE_LIST' do - it 'returns the correct list of resources separated by commas' do - expect(described_class::RESOURCE_LIST).to eq('ingress,svc,pdb,hpa,deploy,statefulset,job,pod,secret,configmap,pvc,secret,clusterrole,clusterrolebinding,role,rolebinding,sa,crd') - end + let(:instance) { described_class.new } + let(:one_day_ago) { Time.now - 3600 * 24 * 1 } + let(:two_days_ago) { Time.now - 3600 * 24 * 2 } + let(:three_days_ago) { Time.now - 3600 * 24 * 3 } + + before do + # Global mock to ensure that no kubectl commands are run by accident in a test. 
+ allow(instance).to receive(:run_command) end - describe '#cleanup_by_release' do - before do - allow(subject).to receive(:raw_resource_names).and_return(raw_resource_names) - end - - shared_examples 'a kubectl command to delete resources' do - let(:wait) { true } - let(:release_names_in_command) { release_name.respond_to?(:join) ? %(-l 'release in (#{release_name.join(', ')})') : %(-l release="#{release_name}") } - - specify do - expect(Gitlab::Popen).to receive(:popen_with_detail) - .with(["kubectl delete #{described_class::RESOURCE_LIST} " + - %(--namespace "#{namespace}" --now --ignore-not-found --wait=#{wait} #{release_names_in_command})]) - .and_return(Gitlab::Popen::Result.new([], '', '', double(success?: true))) - - expect(Gitlab::Popen).to receive(:popen_with_detail) - .with([%(kubectl delete --namespace "#{namespace}" --ignore-not-found #{pod_for_release})]) - .and_return(Gitlab::Popen::Result.new([], '', '', double(success?: true))) - - # We're not verifying the output here, just silencing it - expect { subject.cleanup_by_release(release_name: release_name) }.to output.to_stdout - end - end - - it 'raises an error if the Kubernetes command fails' do - expect(Gitlab::Popen).to receive(:popen_with_detail) - .with(["kubectl delete #{described_class::RESOURCE_LIST} " + - %(--namespace "#{namespace}" --now --ignore-not-found --wait=true -l release="#{release_name}")]) - .and_return(Gitlab::Popen::Result.new([], '', '', double(success?: false))) - - expect { subject.cleanup_by_release(release_name: release_name) }.to raise_error(described_class::CommandFailedError) - end - - it_behaves_like 'a kubectl command to delete resources' - - context 'with multiple releases' do - let(:release_name) { %w[my-release my-release-2] } - - it_behaves_like 'a kubectl command to delete resources' - end - - context 'with `wait: false`' do - let(:wait) { false } - - it_behaves_like 'a kubectl command to delete resources' - end - end - - describe '#cleanup_by_created_at' do - 
let(:two_days_ago) { Time.now - 3600 * 24 * 2 } - let(:resource_type) { 'pvc' } - let(:resource_names) { [pod_for_release] } + describe '#cleanup_namespaces_by_created_at' do + let(:namespace_1_created_at) { three_days_ago } + let(:namespace_2_created_at) { three_days_ago } + let(:namespace_1_name) { 'review-first-review-app' } + let(:namespace_2_name) { 'review-second-review-app' } + let(:kubectl_namespaces_json) do + <<~JSON + { + "apiVersion": "v1", + "items": [ + { + "apiVersion": "v1", + "kind": "namespace", + "metadata": { + "creationTimestamp": "#{namespace_1_created_at.utc.iso8601}", + "name": "#{namespace_1_name}" + } + }, + { + "apiVersion": "v1", + "kind": "namespace", + "metadata": { + "creationTimestamp": "#{namespace_2_created_at.utc.iso8601}", + "name": "#{namespace_2_name}" + } + } + ] + } + JSON + end + + subject { instance.cleanup_namespaces_by_created_at(created_before: two_days_ago) } before do - allow(subject).to receive(:resource_names_created_before).with(resource_type: resource_type, created_before: two_days_ago).and_return(resource_names) + allow(instance).to receive(:run_command).with( + "kubectl get namespace --all-namespaces --sort-by='{.metadata.creationTimestamp}' -o json" + ).and_return(kubectl_namespaces_json) end - shared_examples 'a kubectl command to delete resources by older than given creation time' do - let(:wait) { true } - let(:release_names_in_command) { resource_names.join(' ') } + context 'when no namespaces are stale' do + let(:namespace_1_created_at) { one_day_ago } + let(:namespace_2_created_at) { one_day_ago } - specify do - expect(Gitlab::Popen).to receive(:popen_with_detail) - .with(["kubectl delete #{resource_type} ".squeeze(' ') + - %(--namespace "#{namespace}" --now --ignore-not-found --wait=#{wait} #{release_names_in_command})]) - .and_return(Gitlab::Popen::Result.new([], '', '', double(success?: true))) + it 'does not delete any namespace' do + expect(instance).not_to receive(:run_command).with(/kubectl delete 
namespace/) - # We're not verifying the output here, just silencing it - expect { subject.cleanup_by_created_at(resource_type: resource_type, created_before: two_days_ago) }.to output.to_stdout + subject end end - it 'raises an error if the Kubernetes command fails' do - expect(Gitlab::Popen).to receive(:popen_with_detail) - .with(["kubectl delete #{resource_type} " + - %(--namespace "#{namespace}" --now --ignore-not-found --wait=true #{pod_for_release})]) - .and_return(Gitlab::Popen::Result.new([], '', '', double(success?: false))) - - expect { subject.cleanup_by_created_at(resource_type: resource_type, created_before: two_days_ago) }.to raise_error(described_class::CommandFailedError) - end + context 'when some namespaces are stale' do + let(:namespace_1_created_at) { three_days_ago } + let(:namespace_2_created_at) { three_days_ago } - it_behaves_like 'a kubectl command to delete resources by older than given creation time' + context 'when some namespaces are not review app namespaces' do + let(:namespace_1_name) { 'review-my-review-app' } + let(:namespace_2_name) { 'review-apps' } # This is not a review apps namespace, so we should not try to delete it - context 'with multiple resource names' do - let(:resource_names) { %w[pod-1 pod-2] } + it 'only deletes the review app namespaces' do + expect(instance).to receive(:run_command).with("kubectl delete namespace --now --ignore-not-found #{namespace_1_name}") - it_behaves_like 'a kubectl command to delete resources by older than given creation time' - end - - context 'with `wait: false`' do - let(:wait) { false } - - it_behaves_like 'a kubectl command to delete resources by older than given creation time' - end - - context 'with no resource_type given' do - let(:resource_type) { nil } - - it_behaves_like 'a kubectl command to delete resources by older than given creation time' - end - - context 'with multiple resource_type given' do - let(:resource_type) { 'pvc,service' } - - it_behaves_like 'a kubectl command to 
delete resources by older than given creation time' - end + subject + end + end - context 'with no resources found' do - let(:resource_names) { [] } + context 'when all namespaces are review app namespaces' do + let(:namespace_1_name) { 'review-my-review-app' } + let(:namespace_2_name) { 'review-another-review-app' } - it 'does not call #delete_by_exact_names' do - expect(subject).not_to receive(:delete_by_exact_names) + it 'deletes all of the stale namespaces' do + expect(instance).to receive(:run_command).with("kubectl delete namespace --now --ignore-not-found #{namespace_1_name} #{namespace_2_name}") - subject.cleanup_by_created_at(resource_type: resource_type, created_before: two_days_ago) + subject + end end end end - describe '#cleanup_review_app_namespaces' do - let(:two_days_ago) { Time.now - 3600 * 24 * 2 } - let(:namespaces) { %w[review-abc-123 review-xyz-789] } + describe '#delete_namespaces' do + subject { instance.delete_namespaces(namespaces) } - subject { described_class.new(namespace: nil) } + context 'when at least one namespace is not a review app namespace' do + let(:namespaces) { %w[review-ns-1 default] } - before do - allow(subject).to receive(:review_app_namespaces_created_before).with(created_before: two_days_ago).and_return(namespaces) - end - - shared_examples 'a kubectl command to delete namespaces older than given creation time' do - let(:wait) { true } - - specify do - expect(Gitlab::Popen).to receive(:popen_with_detail) - .with(["kubectl delete namespace " + - %(--now --ignore-not-found --wait=#{wait} #{namespaces.join(' ')})]) - .and_return(Gitlab::Popen::Result.new([], '', '', double(success?: true))) + it 'does not delete any namespace' do + expect(instance).not_to receive(:run_command).with(/kubectl delete namespace/) - # We're not verifying the output here, just silencing it - expect { subject.cleanup_review_app_namespaces(created_before: two_days_ago) }.to output.to_stdout + subject end end - it_behaves_like 'a kubectl command to 
delete namespaces older than given creation time' - - it 'raises an error if the Kubernetes command fails' do - expect(Gitlab::Popen).to receive(:popen_with_detail) - .with(["kubectl delete namespace " + - %(--now --ignore-not-found --wait=true #{namespaces.join(' ')})]) - .and_return(Gitlab::Popen::Result.new([], '', '', double(success?: false))) - - expect { subject.cleanup_review_app_namespaces(created_before: two_days_ago) }.to raise_error(described_class::CommandFailedError) - end - - context 'with no namespaces found' do - let(:namespaces) { [] } + context 'when all namespaces are review app namespaces' do + let(:namespaces) { %w[review-ns-1 review-ns-2] } - it 'does not call #delete_namespaces_by_exact_names' do - expect(subject).not_to receive(:delete_namespaces_by_exact_names) + it 'deletes the namespaces' do + expect(instance).to receive(:run_command).with("kubectl delete namespace --now --ignore-not-found #{namespaces.join(' ')}") - subject.cleanup_review_app_namespaces(created_before: two_days_ago) + subject end end end - describe '#raw_resource_names' do - it 'calls kubectl to retrieve the resource names' do - expect(Gitlab::Popen).to receive(:popen_with_detail) - .with(["kubectl get #{described_class::RESOURCE_LIST} " + - %(--namespace "#{namespace}" -o name)]) - .and_return(Gitlab::Popen::Result.new([], raw_resource_names_str, '', double(success?: true))) - - expect(subject.__send__(:raw_resource_names)).to eq(raw_resource_names) + describe '#namespaces_created_before' do + subject { instance.namespaces_created_before(created_before: two_days_ago) } + + let(:namespace_1_created_at) { three_days_ago } + let(:namespace_2_created_at) { one_day_ago } + let(:namespace_1_name) { 'review-first-review-app' } + let(:namespace_2_name) { 'review-second-review-app' } + let(:kubectl_namespaces_json) do + <<~JSON + { + "apiVersion": "v1", + "items": [ + { + "apiVersion": "v1", + "kind": "namespace", + "metadata": { + "creationTimestamp": 
"#{namespace_1_created_at.utc.iso8601}", + "name": "#{namespace_1_name}" + } + }, + { + "apiVersion": "v1", + "kind": "namespace", + "metadata": { + "creationTimestamp": "#{namespace_2_created_at.utc.iso8601}", + "name": "#{namespace_2_name}" + } + } + ] + } + JSON + end + + it 'returns an array of namespaces' do + allow(instance).to receive(:run_command).with( + "kubectl get namespace --all-namespaces --sort-by='{.metadata.creationTimestamp}' -o json" + ).and_return(kubectl_namespaces_json) + + expect(subject).to match_array(%w[review-first-review-app]) end end - describe '#resource_names_created_before' do - let(:three_days_ago) { Time.now - 3600 * 24 * 3 } - let(:two_days_ago) { Time.now - 3600 * 24 * 2 } - let(:pvc_created_three_days_ago) { 'pvc-created-three-days-ago' } - let(:resource_type) { 'pvc' } - let(:raw_resources) do - { - items: [ - { - apiVersion: "v1", - kind: "PersistentVolumeClaim", - metadata: { - creationTimestamp: three_days_ago, - name: pvc_created_three_days_ago - } - }, - { - apiVersion: "v1", - kind: "PersistentVolumeClaim", - metadata: { - creationTimestamp: Time.now, - name: 'another-pvc' - } - } - ] - }.to_json - end + describe '#run_command' do + subject { instance.run_command(command) } - shared_examples 'a kubectl command to retrieve resource names sorted by creationTimestamp' do - specify do - expect(Gitlab::Popen).to receive(:popen_with_detail) - .with(["kubectl get #{resource_type} ".squeeze(' ') + - %(--namespace "#{namespace}" ) + - "--sort-by='{.metadata.creationTimestamp}' -o json"]) - .and_return(Gitlab::Popen::Result.new([], raw_resources, '', double(success?: true))) + before do + # We undo the global mock just for this method + allow(instance).to receive(:run_command).and_call_original - expect(subject.__send__(:resource_names_created_before, resource_type: resource_type, created_before: two_days_ago)).to contain_exactly(pvc_created_three_days_ago) - end + # Mock stdout + allow(instance).to receive(:puts) end - 
it_behaves_like 'a kubectl command to retrieve resource names sorted by creationTimestamp' - - context 'with no resource_type given' do - let(:resource_type) { nil } + context 'when executing a successful command' do + let(:command) { 'true' } # https://linux.die.net/man/1/true - it_behaves_like 'a kubectl command to retrieve resource names sorted by creationTimestamp' - end + it 'displays the name of the command to stdout' do + expect(instance).to receive(:puts).with("Running command: `#{command}`") - context 'with multiple resource_type given' do - let(:resource_type) { 'pvc,service' } + subject + end - it_behaves_like 'a kubectl command to retrieve resource names sorted by creationTimestamp' + it 'does not raise an error' do + expect { subject }.not_to raise_error + end end - end - describe '#review_app_namespaces_created_before' do - let(:three_days_ago) { Time.now - 3600 * 24 * 3 } - let(:two_days_ago) { Time.now - 3600 * 24 * 2 } - let(:namespace_created_three_days_ago) { 'review-ns-created-three-days-ago' } - let(:resource_type) { 'namespace' } - let(:raw_resources) do - { - items: [ - { - apiVersion: "v1", - kind: "Namespace", - metadata: { - creationTimestamp: three_days_ago, - name: namespace_created_three_days_ago - } - }, - { - apiVersion: "v1", - kind: "Namespace", - metadata: { - creationTimestamp: Time.now, - name: 'another-namespace' - } - } - ] - }.to_json - end + context 'when executing an unsuccessful command' do + let(:command) { 'false' } # https://linux.die.net/man/1/false - specify do - expect(Gitlab::Popen).to receive(:popen_with_detail) - .with(["kubectl get namespace --sort-by='{.metadata.creationTimestamp}' -o json"]) - .and_return(Gitlab::Popen::Result.new([], raw_resources, '', double(success?: true))) + it 'displays the name of the command to stdout' do + expect(instance).to receive(:puts).with("Running command: `#{command}`") - expect(subject.__send__(:review_app_namespaces_created_before, created_before: two_days_ago)).to 
eq([namespace_created_three_days_ago]) + expect { subject }.to raise_error(described_class::CommandFailedError) + end + + it 'raises an error' do + expect { subject }.to raise_error(described_class::CommandFailedError) + end end end end diff --git a/spec/tooling/lib/tooling/mappings/graphql_base_type_mappings_spec.rb b/spec/tooling/lib/tooling/mappings/graphql_base_type_mappings_spec.rb new file mode 100644 index 00000000000..b6459428214 --- /dev/null +++ b/spec/tooling/lib/tooling/mappings/graphql_base_type_mappings_spec.rb @@ -0,0 +1,251 @@ +# frozen_string_literal: true + +require 'tempfile' +require_relative '../../../../../tooling/lib/tooling/mappings/graphql_base_type_mappings' + +RSpec.describe Tooling::Mappings::GraphqlBaseTypeMappings, feature_category: :tooling do + # We set temporary folders, and those readers give access to those folder paths + attr_accessor :foss_folder, :ee_folder, :jh_folder + attr_accessor :changed_files_file, :predictive_tests_file + + let(:changed_files_pathname) { changed_files_file.path } + let(:predictive_tests_pathname) { predictive_tests_file.path } + let(:instance) { described_class.new(changed_files_pathname, predictive_tests_pathname) } + let(:changed_files_content) { "changed_file1 changed_file2" } + let(:predictive_tests_initial_content) { "previously_matching_spec.rb" } + + around do |example| + self.changed_files_file = Tempfile.new('changed_files_file') + self.predictive_tests_file = Tempfile.new('predictive_tests_file') + + Dir.mktmpdir('FOSS') do |foss_folder| + Dir.mktmpdir('EE') do |ee_folder| + Dir.mktmpdir('JH') do |jh_folder| + self.foss_folder = foss_folder + self.ee_folder = ee_folder + self.jh_folder = jh_folder + + # See https://ruby-doc.org/stdlib-1.9.3/libdoc/tempfile/rdoc/ + # Tempfile.html#class-Tempfile-label-Explicit+close + begin + example.run + ensure + changed_files_file.close + predictive_tests_file.close + changed_files_file.unlink + predictive_tests_file.unlink + end + end + end + end + end + + 
before do + stub_const("Tooling::Mappings::GraphqlBaseTypeMappings::GRAPHQL_TYPES_FOLDERS", { + nil => [foss_folder], + 'ee' => [foss_folder, ee_folder], + 'jh' => [foss_folder, ee_folder, jh_folder] + }) + + # We write into the temp files initially, to later check how the code modified those files + File.write(changed_files_pathname, changed_files_content) + File.write(predictive_tests_pathname, predictive_tests_initial_content) + end + + describe '#execute' do + subject { instance.execute } + + context 'when no GraphQL files were changed' do + let(:changed_files_content) { '' } + + it 'does not change the output file' do + expect { subject }.not_to change { File.read(predictive_tests_pathname) } + end + end + + context 'when some GraphQL files were changed' do + let(:changed_files_content) do + [ + "#{foss_folder}/my_graphql_file.rb", + "#{foss_folder}/my_other_graphql_file.rb" + ].join(' ') + end + + context 'when none of those GraphQL types are included in other GraphQL types' do + before do + File.write("#{foss_folder}/my_graphql_file.rb", "some graphQL code; implements-test MyOtherGraphqlFile") + File.write("#{foss_folder}/my_other_graphql_file.rb", "some graphQL code") + end + + it 'does not change the output file' do + expect { subject }.not_to change { File.read(predictive_tests_pathname) } + end + end + + context 'when the GraphQL types are included in other GraphQL types' do + before do + File.write("#{foss_folder}/my_graphql_file.rb", "some graphQL code; implements MyOtherGraphqlFile") + File.write("#{foss_folder}/my_other_graphql_file.rb", "some graphQL code") + + # We mock this because we are using temp directories, so we cannot rely on just replacing `app`` with `spec` + allow(instance).to receive(:filename_to_spec_filename) + .with("#{foss_folder}/my_graphql_file.rb") + .and_return('spec/my_graphql_file_spec.rb') + end + + it 'writes the correct specs in the output' do + expect { subject }.to change { File.read(predictive_tests_pathname) } + 
.from(predictive_tests_initial_content) + .to("#{predictive_tests_initial_content} spec/my_graphql_file_spec.rb") + end + end + end + end + + describe '#filter_files' do + subject { instance.filter_files } + + before do + File.write("#{foss_folder}/my_graphql_file.rb", "my_graphql_file.rb") + File.write("#{foss_folder}/my_other_graphql_file.rb", "my_other_graphql_file.rb") + File.write("#{foss_folder}/another_file.erb", "another_file.erb") + end + + context 'when no files were changed' do + let(:changed_files_content) { '' } + + it 'returns an empty array' do + expect(subject).to match_array([]) + end + end + + context 'when GraphQL files were changed' do + let(:changed_files_content) do + [ + "#{foss_folder}/my_graphql_file.rb", + "#{foss_folder}/my_other_graphql_file.rb", + "#{foss_folder}/another_file.erb" + ].join(' ') + end + + it 'returns the path to the GraphQL files' do + expect(subject).to match_array([ + "#{foss_folder}/my_graphql_file.rb", + "#{foss_folder}/my_other_graphql_file.rb" + ]) + end + end + + context 'when files are deleted' do + let(:changed_files_content) { "#{foss_folder}/deleted.rb" } + + it 'returns an empty array' do + expect(subject).to match_array([]) + end + end + end + + describe '#types_hierarchies' do + subject { instance.types_hierarchies } + + context 'when no types are implementing other types' do + before do + File.write("#{foss_folder}/foss_file.rb", "some graphQL code") + File.write("#{ee_folder}/ee_file.rb", "some graphQL code") + File.write("#{jh_folder}/jh_file.rb", "some graphQL code") + end + + it 'returns nothing' do + expect(subject).to eq( + nil => {}, + 'ee' => {}, + 'jh' => {} + ) + end + end + + context 'when types are implementing other types' do + before do + File.write("#{foss_folder}/foss_file.rb", "some graphQL code; implements NoteableInterface") + File.write("#{ee_folder}/ee_file.rb", "some graphQL code; implements NoteableInterface") + File.write("#{jh_folder}/jh_file.rb", "some graphQL code; implements 
NoteableInterface") + end + + context 'when FOSS' do + it 'returns only FOSS types' do + expect(subject).to include( + nil => { + 'NoteableInterface' => [ + "#{foss_folder}/foss_file.rb" + ] + } + ) + end + end + + context 'when EE' do + it 'returns the correct children types' do + expect(subject).to include( + 'ee' => { + 'NoteableInterface' => [ + "#{foss_folder}/foss_file.rb", + "#{ee_folder}/ee_file.rb" + ] + } + ) + end + end + + context 'when JH' do + it 'returns the correct children types' do + expect(subject).to include( + 'jh' => { + 'NoteableInterface' => [ + "#{foss_folder}/foss_file.rb", + "#{ee_folder}/ee_file.rb", + "#{jh_folder}/jh_file.rb" + ] + } + ) + end + end + end + end + + describe '#filename_to_class_name' do + let(:filename) { 'app/graphql/types/user_merge_request_interaction_type.rb' } + + subject { instance.filename_to_class_name(filename) } + + it 'returns the correct class name' do + expect(subject).to eq('UserMergeRequestInteractionType') + end + end + + describe '#filename_to_spec_filename' do + let(:filename) { 'ee/app/graphql/ee/types/application_type.rb' } + let(:expected_spec_filename) { 'ee/spec/graphql/ee/types/application_type_spec.rb' } + + subject { instance.filename_to_spec_filename(filename) } + + context 'when the spec file exists' do + before do + allow(File).to receive(:exist?).with(expected_spec_filename).and_return(true) + end + + it 'returns the correct spec filename' do + expect(subject).to eq(expected_spec_filename) + end + end + + context 'when the spec file does not exist' do + before do + allow(File).to receive(:exist?).with(expected_spec_filename).and_return(false) + end + + it 'returns nil' do + expect(subject).to eq(nil) + end + end + end +end diff --git a/spec/tooling/lib/tooling/mappings/js_to_system_specs_mappings_spec.rb b/spec/tooling/lib/tooling/mappings/js_to_system_specs_mappings_spec.rb index 12a73480440..e1f35bedebb 100644 --- a/spec/tooling/lib/tooling/mappings/js_to_system_specs_mappings_spec.rb +++ 
b/spec/tooling/lib/tooling/mappings/js_to_system_specs_mappings_spec.rb @@ -6,33 +6,63 @@ require_relative '../../../../../tooling/lib/tooling/mappings/js_to_system_specs RSpec.describe Tooling::Mappings::JsToSystemSpecsMappings, feature_category: :tooling do # We set temporary folders, and those readers give access to those folder paths attr_accessor :js_base_folder, :system_specs_base_folder + attr_accessor :changed_files_file, :predictive_tests_file + + let(:changed_files_pathname) { changed_files_file.path } + let(:predictive_tests_pathname) { predictive_tests_file.path } + let(:changed_files_content) { "changed_file1 changed_file2" } + let(:predictive_tests_content) { "previously_matching_spec.rb" } + + let(:instance) do + described_class.new( + changed_files_pathname, + predictive_tests_pathname, + system_specs_base_folder: system_specs_base_folder, + js_base_folder: js_base_folder + ) + end around do |example| + self.changed_files_file = Tempfile.new('changed_files_file') + self.predictive_tests_file = Tempfile.new('predictive_tests_file') + Dir.mktmpdir do |tmp_js_base_folder| Dir.mktmpdir do |tmp_system_specs_base_folder| self.system_specs_base_folder = tmp_system_specs_base_folder self.js_base_folder = tmp_js_base_folder - example.run + # See https://ruby-doc.org/stdlib-1.9.3/libdoc/tempfile/rdoc/ + # Tempfile.html#class-Tempfile-label-Explicit+close + begin + example.run + ensure + changed_files_file.close + predictive_tests_file.close + changed_files_file.unlink + predictive_tests_file.unlink + end end end end + before do + # We write into the temp files initially, to later check how the code modified those files + File.write(changed_files_pathname, changed_files_content) + File.write(predictive_tests_pathname, predictive_tests_content) + end + describe '#execute' do - let(:instance) do - described_class.new( - system_specs_base_folder: system_specs_base_folder, - js_base_folder: js_base_folder - ) - end + subject { instance.execute } - subject { 
instance.execute(changed_files) } + before do + File.write(changed_files_pathname, changed_files.join(' ')) + end context 'when no JS files were changed' do let(:changed_files) { [] } - it 'returns nothing' do - expect(subject).to match_array([]) + it 'does not change the output file' do + expect { subject }.not_to change { File.read(predictive_tests_pathname) } end end @@ -40,8 +70,8 @@ RSpec.describe Tooling::Mappings::JsToSystemSpecsMappings, feature_category: :to let(:changed_files) { ["#{js_base_folder}/issues/secret_values.js"] } context 'when the JS files are not present on disk' do - it 'returns nothing' do - expect(subject).to match_array([]) + it 'does not change the output file' do + expect { subject }.not_to change { File.read(predictive_tests_pathname) } end end @@ -52,8 +82,8 @@ RSpec.describe Tooling::Mappings::JsToSystemSpecsMappings, feature_category: :to end context 'when no system specs match the JS keyword' do - it 'returns nothing' do - expect(subject).to match_array([]) + it 'does not change the output file' do + expect { subject }.not_to change { File.read(predictive_tests_pathname) } end end @@ -63,8 +93,10 @@ RSpec.describe Tooling::Mappings::JsToSystemSpecsMappings, feature_category: :to File.write("#{system_specs_base_folder}/confidential_issues/issues_spec.rb", "a test") end - it 'returns something' do - expect(subject).to match_array(["#{system_specs_base_folder}/confidential_issues/issues_spec.rb"]) + it 'adds the new specs to the output file' do + expect { subject }.to change { File.read(predictive_tests_pathname) } + .from(predictive_tests_content) + .to("#{predictive_tests_content} #{system_specs_base_folder}/confidential_issues/issues_spec.rb") end end end @@ -72,12 +104,13 @@ RSpec.describe Tooling::Mappings::JsToSystemSpecsMappings, feature_category: :to end describe '#filter_files' do - subject { described_class.new(js_base_folder: js_base_folder).filter_files(changed_files) } + subject { instance.filter_files } before do 
File.write("#{js_base_folder}/index.js", "index.js") File.write("#{js_base_folder}/index-with-ee-in-it.js", "index-with-ee-in-it.js") File.write("#{js_base_folder}/index-with-jh-in-it.js", "index-with-jh-in-it.js") + File.write(changed_files_pathname, changed_files.join(' ')) end context 'when no files were changed' do @@ -117,7 +150,7 @@ RSpec.describe Tooling::Mappings::JsToSystemSpecsMappings, feature_category: :to end describe '#construct_js_keywords' do - subject { described_class.new.construct_js_keywords(js_files) } + subject { described_class.new(changed_files_file, predictive_tests_file).construct_js_keywords(js_files) } let(:js_files) do %w[ @@ -147,7 +180,31 @@ RSpec.describe Tooling::Mappings::JsToSystemSpecsMappings, feature_category: :to describe '#system_specs_for_edition' do subject do - described_class.new(system_specs_base_folder: system_specs_base_folder).system_specs_for_edition(edition) + instance.system_specs_for_edition(edition) + end + + let(:edition) { nil } + + context 'when a file is not a ruby spec' do + before do + File.write("#{system_specs_base_folder}/issues_spec.tar.gz", "a test") + end + + it 'does not return that file' do + expect(subject).to be_empty + end + end + + context 'when a file is a ruby spec' do + let(:spec_pathname) { "#{system_specs_base_folder}/issues_spec.rb" } + + before do + File.write(spec_pathname, "a test") + end + + it 'returns that file' do + expect(subject).to match_array(spec_pathname) + end end context 'when FOSS' do diff --git a/spec/tooling/lib/tooling/mappings/partial_to_views_mappings_spec.rb b/spec/tooling/lib/tooling/mappings/partial_to_views_mappings_spec.rb new file mode 100644 index 00000000000..75ddee18985 --- /dev/null +++ b/spec/tooling/lib/tooling/mappings/partial_to_views_mappings_spec.rb @@ -0,0 +1,280 @@ +# frozen_string_literal: true + +require 'tempfile' +require 'fileutils' +require_relative '../../../../../tooling/lib/tooling/mappings/partial_to_views_mappings' + +RSpec.describe 
Tooling::Mappings::PartialToViewsMappings, feature_category: :tooling do + attr_accessor :view_base_folder, :changed_files_file, :views_with_partials_file + + let(:instance) do + described_class.new(changed_files_pathname, views_with_partials_pathname, view_base_folder: view_base_folder) + end + + let(:changed_files_pathname) { changed_files_file.path } + let(:views_with_partials_pathname) { views_with_partials_file.path } + let(:changed_files_content) { "changed_file1 changed_file2" } + let(:views_with_partials_content) { "previously_added_view.html.haml" } + + around do |example| + self.changed_files_file = Tempfile.new('changed_files_file') + self.views_with_partials_file = Tempfile.new('views_with_partials_file') + + # See https://ruby-doc.org/stdlib-1.9.3/libdoc/tempfile/rdoc/ + # Tempfile.html#class-Tempfile-label-Explicit+close + begin + Dir.mktmpdir do |tmp_views_base_folder| + self.view_base_folder = tmp_views_base_folder + example.run + end + ensure + changed_files_file.close + views_with_partials_file.close + changed_files_file.unlink + views_with_partials_file.unlink + end + end + + before do + # We write into the temp files initially, to check how the code modified those files + File.write(changed_files_pathname, changed_files_content) + File.write(views_with_partials_pathname, views_with_partials_content) + end + + describe '#execute' do + subject { instance.execute } + + let(:changed_files) { ["#{view_base_folder}/my_view.html.haml"] } + let(:changed_files_content) { changed_files.join(" ") } + + before do + # We create all of the changed_files, so that they are part of the filtered files + changed_files.each { |changed_file| FileUtils.touch(changed_file) } + end + + it 'does not modify the content of the input file' do + expect { subject }.not_to change { File.read(changed_files_pathname) } + end + + context 'when no partials were modified' do + it 'does not change the output file' do + expect { subject }.not_to change { 
File.read(views_with_partials_pathname) } + end + end + + context 'when some partials were modified' do + let(:changed_files) do + [ + "#{view_base_folder}/my_view.html.haml", + "#{view_base_folder}/_my_partial.html.haml", + "#{view_base_folder}/_my_other_partial.html.haml" + ] + end + + before do + # We create a red-herring partial to have a more convincing test suite + FileUtils.touch("#{view_base_folder}/_another_partial.html.haml") + end + + context 'when the partials are not included in any views' do + before do + File.write("#{view_base_folder}/my_view.html.haml", "render 'another_partial'") + end + + it 'does not change the output file' do + expect { subject }.not_to change { File.read(views_with_partials_pathname) } + end + end + + context 'when the partials are included in views' do + before do + File.write("#{view_base_folder}/my_view.html.haml", "render 'my_partial'") + end + + it 'writes the view including the partial to the output' do + expect { subject }.to change { File.read(views_with_partials_pathname) } + .from(views_with_partials_content) + .to(views_with_partials_content + " #{view_base_folder}/my_view.html.haml") + end + end + end + end + + describe '#filter_files' do + subject { instance.filter_files } + + let(:changed_files_content) { file_path } + + context 'when the file does not exist on disk' do + let(:file_path) { "#{view_base_folder}/_index.html.erb" } + + it 'returns an empty array' do + expect(subject).to be_empty + end + end + + context 'when the file exists on disk' do + before do + File.write(file_path, "I am a partial!") + end + + context 'when the file is not in the view base folders' do + let(:file_path) { "/tmp/_index.html.haml" } + + it 'returns an empty array' do + expect(subject).to be_empty + end + end + + context 'when the filename does not start with an underscore' do + let(:file_path) { "#{view_base_folder}/index.html.haml" } + + it 'returns an empty array' do + expect(subject).to be_empty + end + end + + context 'when 
the filename does not have the correct extension' do + let(:file_path) { "#{view_base_folder}/_index.html.erb" } + + it 'returns an empty array' do + expect(subject).to be_empty + end + end + + context 'when the file is a partial' do + let(:file_path) { "#{view_base_folder}/_index.html.haml" } + + it 'returns the file' do + expect(subject).to match_array(file_path) + end + end + end + end + + describe '#extract_partial_keyword' do + subject { instance.extract_partial_keyword('ee/app/views/shared/_new_project_item_vue_select.html.haml') } + + it 'returns the correct partial keyword' do + expect(subject).to eq('new_project_item_vue_select') + end + end + + describe '#view_includes_modified_partial?' do + subject { instance.view_includes_modified_partial?(view_file, included_partial_name) } + + context 'when the included partial name is relative to the view file' do + let(:view_file) { "#{view_base_folder}/components/my_view.html.haml" } + let(:included_partial_name) { 'subfolder/relative_partial' } + + before do + FileUtils.mkdir_p("#{view_base_folder}/components/subfolder") + File.write(changed_files_content, "I am a partial!") + end + + context 'when the partial is not part of the changed files' do + let(:changed_files_content) { "#{view_base_folder}/components/subfolder/_not_the_partial.html.haml" } + + it 'returns false' do + expect(subject).to be_falsey + end + end + + context 'when the partial is part of the changed files' do + let(:changed_files_content) { "#{view_base_folder}/components/subfolder/_relative_partial.html.haml" } + + it 'returns true' do + expect(subject).to be_truthy + end + end + end + + context 'when the included partial name is relative to the base views folder' do + let(:view_file) { "#{view_base_folder}/components/my_view.html.haml" } + let(:included_partial_name) { 'shared/absolute_partial' } + + before do + FileUtils.mkdir_p("#{view_base_folder}/components") + FileUtils.mkdir_p("#{view_base_folder}/shared") + 
File.write(changed_files_content, "I am a partial!") + end + + context 'when the partial is not part of the changed files' do + let(:changed_files_content) { "#{view_base_folder}/shared/not_the_partial" } + + it 'returns false' do + expect(subject).to be_falsey + end + end + + context 'when the partial is part of the changed files' do + let(:changed_files_content) { "#{view_base_folder}/shared/_absolute_partial.html.haml" } + + it 'returns true' do + expect(subject).to be_truthy + end + end + end + end + + describe '#reconstruct_partial_filename' do + subject { instance.reconstruct_partial_filename(partial_name) } + + context 'when the partial does not contain a path' do + let(:partial_name) { 'sidebar' } + + it 'returns the correct filename' do + expect(subject).to eq('_sidebar.html.haml') + end + end + + context 'when the partial contains a path' do + let(:partial_name) { 'shared/components/sidebar' } + + it 'returns the correct filename' do + expect(subject).to eq('shared/components/_sidebar.html.haml') + end + end + end + + describe '#find_pattern_in_file' do + let(:subject) { instance.find_pattern_in_file(file.path, /pattern/) } + let(:file) { Tempfile.new('find_pattern_in_file') } + + before do + file.write(file_content) + file.close + end + + context 'when the file contains the pattern' do + let(:file_content) do + <<~FILE + Beginning of file + + pattern + pattern + pattern + + End of file + FILE + end + + it 'returns the pattern once' do + expect(subject).to match_array(%w[pattern]) + end + end + + context 'when the file does not contain the pattern' do + let(:file_content) do + <<~FILE + Beginning of file + End of file + FILE + end + + it 'returns an empty array' do + expect(subject).to match_array([]) + end + end + end +end diff --git a/spec/tooling/lib/tooling/mappings/view_to_js_mappings_spec.rb b/spec/tooling/lib/tooling/mappings/view_to_js_mappings_spec.rb index eaa0124370d..6d007843716 100644 --- 
a/spec/tooling/lib/tooling/mappings/view_to_js_mappings_spec.rb +++ b/spec/tooling/lib/tooling/mappings/view_to_js_mappings_spec.rb @@ -6,37 +6,67 @@ require_relative '../../../../../tooling/lib/tooling/mappings/view_to_js_mapping RSpec.describe Tooling::Mappings::ViewToJsMappings, feature_category: :tooling do # We set temporary folders, and those readers give access to those folder paths attr_accessor :view_base_folder, :js_base_folder + attr_accessor :changed_files_file, :predictive_tests_file + + let(:changed_files_pathname) { changed_files_file.path } + let(:predictive_tests_pathname) { predictive_tests_file.path } + let(:changed_files_content) { "changed_file1 changed_file2" } + let(:predictive_tests_content) { "previously_matching_spec.rb" } + + let(:instance) do + described_class.new( + changed_files_pathname, + predictive_tests_pathname, + view_base_folder: view_base_folder, + js_base_folder: js_base_folder + ) + end around do |example| + self.changed_files_file = Tempfile.new('changed_files_file') + self.predictive_tests_file = Tempfile.new('matching_tests') + Dir.mktmpdir do |tmp_js_base_folder| Dir.mktmpdir do |tmp_views_base_folder| self.js_base_folder = tmp_js_base_folder self.view_base_folder = tmp_views_base_folder - example.run + # See https://ruby-doc.org/stdlib-1.9.3/libdoc/tempfile/rdoc/ + # Tempfile.html#class-Tempfile-label-Explicit+close + begin + example.run + ensure + changed_files_file.close + predictive_tests_file.close + changed_files_file.unlink + predictive_tests_file.unlink + end end end end - describe '#execute' do - let(:instance) do - described_class.new( - view_base_folder: view_base_folder, - js_base_folder: js_base_folder - ) - end + before do + # We write into the temp files initially, to later check how the code modified those files + File.write(changed_files_pathname, changed_files_content) + File.write(predictive_tests_pathname, predictive_tests_content) + end + describe '#execute' do let(:changed_files) { 
%W[#{view_base_folder}/index.html] } - subject { instance.execute(changed_files) } + subject { instance.execute } + + before do + File.write(changed_files_pathname, changed_files.join(' ')) + end context 'when no view files have been changed' do before do allow(instance).to receive(:filter_files).and_return([]) end - it 'returns nothing' do - expect(subject).to match_array([]) + it 'does not change the output file' do + expect { subject }.not_to change { File.read(predictive_tests_pathname) } end end @@ -53,8 +83,8 @@ RSpec.describe Tooling::Mappings::ViewToJsMappings, feature_category: :tooling d FILE end - it 'returns nothing' do - expect(subject).to match_array([]) + it 'does not change the output file' do + expect { subject }.not_to change { File.read(predictive_tests_pathname) } end end @@ -70,8 +100,8 @@ RSpec.describe Tooling::Mappings::ViewToJsMappings, feature_category: :tooling d end context 'when no matching JS files are found' do - it 'returns nothing' do - expect(subject).to match_array([]) + it 'does not change the output file' do + expect { subject }.not_to change { File.read(predictive_tests_pathname) } end end @@ -90,8 +120,10 @@ RSpec.describe Tooling::Mappings::ViewToJsMappings, feature_category: :tooling d File.write("#{js_base_folder}/index.js", index_js_content) end - it 'returns the matching JS files' do - expect(subject).to match_array(["#{js_base_folder}/index.js"]) + it 'adds the matching JS files to the output' do + expect { subject }.to change { File.read(predictive_tests_pathname) } + .from(predictive_tests_content) + .to("#{predictive_tests_content} #{js_base_folder}/index.js") end end end @@ -135,17 +167,20 @@ RSpec.describe Tooling::Mappings::ViewToJsMappings, feature_category: :tooling d end it 'scans those partials for the HTML attribute value' do - expect(subject).to match_array(["#{js_base_folder}/index.js"]) + expect { subject }.to change { File.read(predictive_tests_pathname) } + .from(predictive_tests_content) + 
.to("#{predictive_tests_content} #{js_base_folder}/index.js") end end end describe '#filter_files' do - subject { described_class.new(view_base_folder: view_base_folder).filter_files(changed_files) } + subject { instance.filter_files } before do File.write("#{js_base_folder}/index.js", "index.js") File.write("#{view_base_folder}/index.html", "index.html") + File.write(changed_files_pathname, changed_files.join(' ')) end context 'when no files were changed' do @@ -182,7 +217,7 @@ RSpec.describe Tooling::Mappings::ViewToJsMappings, feature_category: :tooling d end describe '#find_partials' do - subject { described_class.new(view_base_folder: view_base_folder).find_partials(file_path) } + subject { instance.find_partials(file_path) } let(:file_path) { "#{view_base_folder}/my_html_file.html" } @@ -230,12 +265,12 @@ RSpec.describe Tooling::Mappings::ViewToJsMappings, feature_category: :tooling d = render partial: "subfolder/my-partial4" = render(partial:"subfolder/my-partial5", path: 'else') = render partial:"subfolder/my-partial6" - = render_if_exist("subfolder/my-partial7", path: 'else') - = render_if_exist "subfolder/my-partial8" - = render_if_exist(partial: "subfolder/my-partial9", path: 'else') - = render_if_exist partial: "subfolder/my-partial10" - = render_if_exist(partial:"subfolder/my-partial11", path: 'else') - = render_if_exist partial:"subfolder/my-partial12" + = render_if_exists("subfolder/my-partial7", path: 'else') + = render_if_exists "subfolder/my-partial8" + = render_if_exists(partial: "subfolder/my-partial9", path: 'else') + = render_if_exists partial: "subfolder/my-partial10" + = render_if_exists(partial:"subfolder/my-partial11", path: 'else') + = render_if_exists partial:"subfolder/my-partial12" End of file FILE @@ -275,7 +310,7 @@ RSpec.describe Tooling::Mappings::ViewToJsMappings, feature_category: :tooling d end describe '#find_pattern_in_file' do - let(:subject) { described_class.new.find_pattern_in_file(file.path, /pattern/) } + let(:subject) { 
instance.find_pattern_in_file(file.path, /pattern/) } let(:file) { Tempfile.new('find_pattern_in_file') } before do diff --git a/spec/tooling/lib/tooling/mappings/view_to_system_specs_mappings_spec.rb b/spec/tooling/lib/tooling/mappings/view_to_system_specs_mappings_spec.rb new file mode 100644 index 00000000000..b8a13c50c9b --- /dev/null +++ b/spec/tooling/lib/tooling/mappings/view_to_system_specs_mappings_spec.rb @@ -0,0 +1,127 @@ +# frozen_string_literal: true + +require 'tempfile' +require 'fileutils' +require_relative '../../../../../tooling/lib/tooling/mappings/view_to_system_specs_mappings' + +RSpec.describe Tooling::Mappings::ViewToSystemSpecsMappings, feature_category: :tooling do + attr_accessor :view_base_folder, :changed_files_file, :predictive_tests_file + + let(:instance) do + described_class.new(changed_files_pathname, predictive_tests_pathname, view_base_folder: view_base_folder) + end + + let(:changed_files_pathname) { changed_files_file.path } + let(:predictive_tests_pathname) { predictive_tests_file.path } + let(:changed_files_content) { "changed_file1 changed_file2" } + let(:predictive_tests_initial_content) { "previously_added_spec.rb" } + + around do |example| + self.changed_files_file = Tempfile.new('changed_files_file') + self.predictive_tests_file = Tempfile.new('predictive_tests_file') + + # See https://ruby-doc.org/stdlib-1.9.3/libdoc/tempfile/rdoc/ + # Tempfile.html#class-Tempfile-label-Explicit+close + begin + Dir.mktmpdir do |tmp_views_base_folder| + self.view_base_folder = tmp_views_base_folder + example.run + end + ensure + changed_files_file.close + predictive_tests_file.close + changed_files_file.unlink + predictive_tests_file.unlink + end + end + + before do + FileUtils.mkdir_p("#{view_base_folder}/app/views/dashboard") + + # We write into the temp files initially, to check how the code modified those files + File.write(changed_files_pathname, changed_files_content) + File.write(predictive_tests_pathname, 
predictive_tests_initial_content) + end + + shared_examples 'writes nothing to the output file' do + it 'writes nothing to the output file' do + expect { subject }.not_to change { File.read(changed_files_pathname) } + end + end + + describe '#execute' do + subject { instance.execute } + + let(:changed_files) { ["#{view_base_folder}/app/views/dashboard/my_view.html.haml"] } + let(:changed_files_content) { changed_files.join(" ") } + + before do + # We create all of the changed_files, so that they are part of the filtered files + changed_files.each { |changed_file| FileUtils.touch(changed_file) } + end + + context 'when the changed files are not view files' do + let(:changed_files) { ["#{view_base_folder}/app/views/dashboard/my_helper.rb"] } + + it_behaves_like 'writes nothing to the output file' + end + + context 'when the changed files are view files' do + let(:changed_files) { ["#{view_base_folder}/app/views/dashboard/my_view.html.haml"] } + + context 'when the view files do not exist on disk' do + before do + allow(File).to receive(:exist?).with(changed_files.first).and_return(false) + end + + it_behaves_like 'writes nothing to the output file' + end + + context 'when the view files exist on disk' do + context 'when no feature match the view' do + # Nothing in this context, because the spec corresponding to `changed_files` doesn't exist + + it_behaves_like 'writes nothing to the output file' + end + + context 'when there is a feature spec that exactly matches the view' do + let(:expected_feature_spec) { "#{view_base_folder}/spec/features/dashboard/my_view_spec.rb" } + + before do + allow(File).to receive(:exist?).and_call_original + allow(File).to receive(:exist?).with(expected_feature_spec).and_return(true) + end + + it 'writes that feature spec to the output file' do + expect { subject }.to change { File.read(predictive_tests_pathname) } + .from(predictive_tests_initial_content) + .to("#{predictive_tests_initial_content} #{expected_feature_spec}") + end + end + 
+ context 'when there is a feature spec that matches the parent folder of the view' do + let(:expected_feature_specs) do + [ + "#{view_base_folder}/spec/features/dashboard/another_feature_spec.rb", + "#{view_base_folder}/spec/features/dashboard/other_feature_spec.rb" + ] + end + + before do + FileUtils.mkdir_p("#{view_base_folder}/spec/features/dashboard") + + expected_feature_specs.each do |expected_feature_spec| + FileUtils.touch(expected_feature_spec) + end + end + + it 'writes all of the feature specs for the parent folder to the output file' do + expect { subject }.to change { File.read(predictive_tests_pathname) } + .from(predictive_tests_initial_content) + .to("#{predictive_tests_initial_content} #{expected_feature_specs.join(' ')}") + end + end + end + end + end +end diff --git a/spec/tooling/lib/tooling/predictive_tests_spec.rb b/spec/tooling/lib/tooling/predictive_tests_spec.rb new file mode 100644 index 00000000000..b82364fe6f6 --- /dev/null +++ b/spec/tooling/lib/tooling/predictive_tests_spec.rb @@ -0,0 +1,134 @@ +# frozen_string_literal: true + +require 'tempfile' +require 'fileutils' +require_relative '../../../../tooling/lib/tooling/predictive_tests' +require_relative '../../../support/helpers/stub_env' + +RSpec.describe Tooling::PredictiveTests, feature_category: :tooling do + include StubENV + + let(:instance) { described_class.new } + let(:matching_tests_initial_content) { 'initial_matching_spec' } + let(:fixtures_mapping_content) { '{}' } + + attr_accessor :changed_files, :changed_files_path, :fixtures_mapping, + :matching_js_files, :matching_tests, :views_with_partials + + around do |example| + self.changed_files = Tempfile.new('test-folder/changed_files.txt') + self.changed_files_path = changed_files.path + self.fixtures_mapping = Tempfile.new('test-folder/fixtures_mapping.txt') + self.matching_js_files = Tempfile.new('test-folder/matching_js_files.txt') + self.matching_tests = Tempfile.new('test-folder/matching_tests.txt') + 
self.views_with_partials = Tempfile.new('test-folder/views_with_partials.txt') + + # See https://ruby-doc.org/stdlib-1.9.3/libdoc/tempfile/rdoc/ + # Tempfile.html#class-Tempfile-label-Explicit+close + begin + # In practice, we let PredictiveTests create the file, and we just + # use its file name. + changed_files.close + changed_files.unlink + + example.run + ensure + # Since example.run can create the file again, let's remove it again + FileUtils.rm_f(changed_files_path) + fixtures_mapping.close + fixtures_mapping.unlink + matching_js_files.close + matching_js_files.unlink + matching_tests.close + matching_tests.unlink + views_with_partials.close + views_with_partials.unlink + end + end + + before do + stub_env( + 'RSPEC_CHANGED_FILES_PATH' => changed_files_path, + 'RSPEC_MATCHING_TESTS_PATH' => matching_tests.path, + 'RSPEC_VIEWS_INCLUDING_PARTIALS_PATH' => views_with_partials.path, + 'FRONTEND_FIXTURES_MAPPING_PATH' => fixtures_mapping.path, + 'RSPEC_MATCHING_JS_FILES_PATH' => matching_js_files.path, + 'RSPEC_TESTS_MAPPING_ENABLED' => "false", + 'RSPEC_TESTS_MAPPING_PATH' => '/tmp/does-not-exist.out' + ) + + # We write some data to later on verify that we only append to this file. + File.write(matching_tests.path, matching_tests_initial_content) + File.write(fixtures_mapping.path, fixtures_mapping_content) + + allow(Gitlab).to receive(:configure) + end + + describe '#execute' do + subject { instance.execute } + + context 'when ENV variables are missing' do + before do + stub_env( + 'RSPEC_CHANGED_FILES_PATH' => '', + 'FRONTEND_FIXTURES_MAPPING_PATH' => '' + ) + end + + it 'raises an error' do + expect { subject }.to raise_error( + '[predictive tests] Missing ENV variable(s): RSPEC_CHANGED_FILES_PATH,FRONTEND_FIXTURES_MAPPING_PATH.' 
+ ) + end + end + + context 'when all ENV variables are provided' do + before do + change = double('GitLab::Change') # rubocop:disable RSpec/VerifiedDoubles + allow(change).to receive_message_chain(:to_h, :values_at) + .and_return([changed_files_content, changed_files_content]) + + allow(Gitlab).to receive_message_chain(:merge_request_changes, :changes) + .and_return([change]) + end + + context 'when no files were changed' do + let(:changed_files_content) { '' } + + it 'does not change files other than RSPEC_CHANGED_FILES_PATH' do + expect { subject }.not_to change { File.read(matching_tests.path) } + expect { subject }.not_to change { File.read(views_with_partials.path) } + expect { subject }.not_to change { File.read(fixtures_mapping.path) } + expect { subject }.not_to change { File.read(matching_js_files.path) } + end + end + + context 'when some files used for frontend fixtures were changed' do + let(:changed_files_content) { 'app/models/todo.rb' } + let(:changed_files_matching_test) { 'spec/models/todo_spec.rb' } + let(:matching_frontend_fixture) { 'tmp/tests/frontend/fixtures-ee/todos/todos.html' } + let(:fixtures_mapping_content) do + JSON.dump(changed_files_matching_test => [matching_frontend_fixture]) # rubocop:disable Gitlab/Json + end + + it 'writes to RSPEC_CHANGED_FILES_PATH with API contents and appends with matching fixtures' do + subject + + expect(File.read(changed_files_path)).to eq("#{changed_files_content} #{matching_frontend_fixture}") + end + + it 'appends the spec file to RSPEC_MATCHING_TESTS_PATH' do + expect { subject }.to change { File.read(matching_tests.path) } + .from(matching_tests_initial_content) + .to("#{matching_tests_initial_content} #{changed_files_matching_test}") + end + + it 'does not change files other than RSPEC_CHANGED_FILES_PATH nor RSPEC_MATCHING_TESTS_PATH' do + expect { subject }.not_to change { File.read(views_with_partials.path) } + expect { subject }.not_to change { File.read(fixtures_mapping.path) } + expect { 
subject }.not_to change { File.read(matching_js_files.path) } + end + end + end + end +end diff --git a/spec/tooling/quality/test_level_spec.rb b/spec/tooling/quality/test_level_spec.rb index aac7d19c079..a7e4e42206a 100644 --- a/spec/tooling/quality/test_level_spec.rb +++ b/spec/tooling/quality/test_level_spec.rb @@ -46,7 +46,7 @@ RSpec.describe Quality::TestLevel, feature_category: :tooling do context 'when level is unit' do it 'returns a pattern' do expect(subject.pattern(:unit)) - .to eq("spec/{bin,channels,config,contracts,db,dependencies,elastic,elastic_integration,experiments,factories,finders,frontend,graphql,haml_lint,helpers,initializers,lib,metrics_server,models,policies,presenters,rack_servers,replicators,routing,rubocop,scripts,serializers,services,sidekiq,sidekiq_cluster,spam,support_specs,tasks,uploaders,validators,views,workers,tooling,components}{,/**/}*_spec.rb") + .to eq("spec/{bin,channels,components,config,contracts,db,dependencies,elastic,elastic_integration,experiments,factories,finders,frontend,graphql,haml_lint,helpers,initializers,lib,metrics_server,models,policies,presenters,rack_servers,replicators,routing,rubocop,scripts,serializers,services,sidekiq,sidekiq_cluster,spam,support_specs,tasks,uploaders,validators,views,workers,tooling}{,/**/}*_spec.rb") end end @@ -121,7 +121,7 @@ RSpec.describe Quality::TestLevel, feature_category: :tooling do context 'when level is unit' do it 'returns a regexp' do expect(subject.regexp(:unit)) - .to eq(%r{spec/(bin|channels|config|contracts|db|dependencies|elastic|elastic_integration|experiments|factories|finders|frontend|graphql|haml_lint|helpers|initializers|lib|metrics_server|models|policies|presenters|rack_servers|replicators|routing|rubocop|scripts|serializers|services|sidekiq|sidekiq_cluster|spam|support_specs|tasks|uploaders|validators|views|workers|tooling|components)/}) + .to 
eq(%r{spec/(bin|channels|components|config|contracts|db|dependencies|elastic|elastic_integration|experiments|factories|finders|frontend|graphql|haml_lint|helpers|initializers|lib|metrics_server|models|policies|presenters|rack_servers|replicators|routing|rubocop|scripts|serializers|services|sidekiq|sidekiq_cluster|spam|support_specs|tasks|uploaders|validators|views|workers|tooling)/}) end end @@ -167,6 +167,13 @@ RSpec.describe Quality::TestLevel, feature_category: :tooling do end end + context 'when start_with == true' do + it 'returns a regexp' do + expect(described_class.new(['ee/']).regexp(:system, true)) + .to eq(%r{^(ee/)spec/(features)/}) + end + end + describe 'performance' do it 'memoizes the regexp for a given level' do expect(subject.regexp(:system).object_id).to eq(subject.regexp(:system).object_id) diff --git a/spec/uploaders/attachment_uploader_spec.rb b/spec/uploaders/attachment_uploader_spec.rb index 05cffff1f1a..a035402e207 100644 --- a/spec/uploaders/attachment_uploader_spec.rb +++ b/spec/uploaders/attachment_uploader_spec.rb @@ -10,9 +10,9 @@ RSpec.describe AttachmentUploader do subject { uploader } it_behaves_like 'builds correct paths', - store_dir: %r[uploads/-/system/note/attachment/], - upload_path: %r[uploads/-/system/note/attachment/], - absolute_path: %r[#{CarrierWave.root}/uploads/-/system/note/attachment/] + store_dir: %r[uploads/-/system/note/attachment/], + upload_path: %r[uploads/-/system/note/attachment/], + absolute_path: %r[#{CarrierWave.root}/uploads/-/system/note/attachment/] context "object_store is REMOTE" do before do @@ -22,8 +22,8 @@ RSpec.describe AttachmentUploader do include_context 'with storage', described_class::Store::REMOTE it_behaves_like 'builds correct paths', - store_dir: %r[note/attachment/], - upload_path: %r[note/attachment/] + store_dir: %r[note/attachment/], + upload_path: %r[note/attachment/] end describe "#migrate!" 
do diff --git a/spec/uploaders/avatar_uploader_spec.rb b/spec/uploaders/avatar_uploader_spec.rb index a55e5c23fe8..e472ac46e66 100644 --- a/spec/uploaders/avatar_uploader_spec.rb +++ b/spec/uploaders/avatar_uploader_spec.rb @@ -10,9 +10,9 @@ RSpec.describe AvatarUploader do subject { uploader } it_behaves_like 'builds correct paths', - store_dir: %r[uploads/-/system/user/avatar/], - upload_path: %r[uploads/-/system/user/avatar/], - absolute_path: %r[#{CarrierWave.root}/uploads/-/system/user/avatar/] + store_dir: %r[uploads/-/system/user/avatar/], + upload_path: %r[uploads/-/system/user/avatar/], + absolute_path: %r[#{CarrierWave.root}/uploads/-/system/user/avatar/] context "object_store is REMOTE" do before do @@ -22,8 +22,8 @@ RSpec.describe AvatarUploader do include_context 'with storage', described_class::Store::REMOTE it_behaves_like 'builds correct paths', - store_dir: %r[user/avatar/], - upload_path: %r[user/avatar/] + store_dir: %r[user/avatar/], + upload_path: %r[user/avatar/] end context "with a file" do diff --git a/spec/uploaders/ci/pipeline_artifact_uploader_spec.rb b/spec/uploaders/ci/pipeline_artifact_uploader_spec.rb index 0630e9f6546..3935f081372 100644 --- a/spec/uploaders/ci/pipeline_artifact_uploader_spec.rb +++ b/spec/uploaders/ci/pipeline_artifact_uploader_spec.rb @@ -9,9 +9,9 @@ RSpec.describe Ci::PipelineArtifactUploader do subject { uploader } it_behaves_like "builds correct paths", - store_dir: %r[\h{2}/\h{2}/\h{64}/pipelines/\d+/artifacts/\d+], - cache_dir: %r[artifacts/tmp/cache], - work_dir: %r[artifacts/tmp/work] + store_dir: %r[\h{2}/\h{2}/\h{64}/pipelines/\d+/artifacts/\d+], + cache_dir: %r[artifacts/tmp/cache], + work_dir: %r[artifacts/tmp/work] context 'when object store is REMOTE' do before do diff --git a/spec/uploaders/dependency_proxy/file_uploader_spec.rb b/spec/uploaders/dependency_proxy/file_uploader_spec.rb index eb12e7dffa5..3cb2d1ea0f0 100644 --- a/spec/uploaders/dependency_proxy/file_uploader_spec.rb +++ 
b/spec/uploaders/dependency_proxy/file_uploader_spec.rb @@ -11,9 +11,9 @@ RSpec.describe DependencyProxy::FileUploader do subject { uploader } it_behaves_like "builds correct paths", - store_dir: %r[\h{2}/\h{2}], - cache_dir: %r[/dependency_proxy/tmp/cache], - work_dir: %r[/dependency_proxy/tmp/work] + store_dir: %r[\h{2}/\h{2}], + cache_dir: %r[/dependency_proxy/tmp/cache], + work_dir: %r[/dependency_proxy/tmp/work] context 'object store is remote' do before do @@ -22,8 +22,7 @@ RSpec.describe DependencyProxy::FileUploader do include_context 'with storage', described_class::Store::REMOTE - it_behaves_like "builds correct paths", - store_dir: %r[\h{2}/\h{2}] + it_behaves_like "builds correct paths", store_dir: %r[\h{2}/\h{2}] end end diff --git a/spec/uploaders/design_management/design_v432x230_uploader_spec.rb b/spec/uploaders/design_management/design_v432x230_uploader_spec.rb index a18a37e73da..f3dd77d67a0 100644 --- a/spec/uploaders/design_management/design_v432x230_uploader_spec.rb +++ b/spec/uploaders/design_management/design_v432x230_uploader_spec.rb @@ -11,10 +11,10 @@ RSpec.describe DesignManagement::DesignV432x230Uploader do subject(:uploader) { described_class.new(model, :image_v432x230) } it_behaves_like 'builds correct paths', - store_dir: %r[uploads/-/system/design_management/action/image_v432x230/], - upload_path: %r[uploads/-/system/design_management/action/image_v432x230/], - relative_path: %r[uploads/-/system/design_management/action/image_v432x230/], - absolute_path: %r[#{CarrierWave.root}/uploads/-/system/design_management/action/image_v432x230/] + store_dir: %r[uploads/-/system/design_management/action/image_v432x230/], + upload_path: %r[uploads/-/system/design_management/action/image_v432x230/], + relative_path: %r[uploads/-/system/design_management/action/image_v432x230/], + absolute_path: %r[#{CarrierWave.root}/uploads/-/system/design_management/action/image_v432x230/] context 'object_store is REMOTE' do before do @@ -24,9 +24,9 @@ 
RSpec.describe DesignManagement::DesignV432x230Uploader do include_context 'with storage', described_class::Store::REMOTE it_behaves_like 'builds correct paths', - store_dir: %r[design_management/action/image_v432x230/], - upload_path: %r[design_management/action/image_v432x230/], - relative_path: %r[design_management/action/image_v432x230/] + store_dir: %r[design_management/action/image_v432x230/], + upload_path: %r[design_management/action/image_v432x230/], + relative_path: %r[design_management/action/image_v432x230/] end describe "#migrate!" do diff --git a/spec/uploaders/external_diff_uploader_spec.rb b/spec/uploaders/external_diff_uploader_spec.rb index a889181b72c..2121e9cbc29 100644 --- a/spec/uploaders/external_diff_uploader_spec.rb +++ b/spec/uploaders/external_diff_uploader_spec.rb @@ -9,9 +9,9 @@ RSpec.describe ExternalDiffUploader do subject(:uploader) { described_class.new(diff, :external_diff) } it_behaves_like "builds correct paths", - store_dir: %r[merge_request_diffs/mr-\d+], - cache_dir: %r[/external-diffs/tmp/cache], - work_dir: %r[/external-diffs/tmp/work] + store_dir: %r[merge_request_diffs/mr-\d+], + cache_dir: %r[/external-diffs/tmp/cache], + work_dir: %r[/external-diffs/tmp/work] context "object store is REMOTE" do before do @@ -21,7 +21,7 @@ RSpec.describe ExternalDiffUploader do include_context 'with storage', described_class::Store::REMOTE it_behaves_like "builds correct paths", - store_dir: %r[merge_request_diffs/mr-\d+] + store_dir: %r[merge_request_diffs/mr-\d+] end describe 'remote file' do diff --git a/spec/uploaders/file_uploader_spec.rb b/spec/uploaders/file_uploader_spec.rb index 1287b809223..76519545e24 100644 --- a/spec/uploaders/file_uploader_spec.rb +++ b/spec/uploaders/file_uploader_spec.rb @@ -13,9 +13,9 @@ RSpec.describe FileUploader do shared_examples 'builds correct legacy storage paths' do include_examples 'builds correct paths', - store_dir: %r{awesome/project/\h+}, - upload_path: %r{\h+/}, - absolute_path: 
%r{#{described_class.root}/awesome/project/55dc16aa0edd05693fd98b5051e83321/foo.jpg} + store_dir: %r{awesome/project/\h+}, + upload_path: %r{\h+/}, + absolute_path: %r{#{described_class.root}/awesome/project/55dc16aa0edd05693fd98b5051e83321/foo.jpg} end context 'legacy storage' do @@ -26,8 +26,8 @@ RSpec.describe FileUploader do let(:project) { build_stubbed(:project, namespace: group, name: 'project') } include_examples 'builds correct paths', - store_dir: %r{@hashed/\h{2}/\h{2}/\h+}, - upload_path: %r{\h+/} + store_dir: %r{@hashed/\h{2}/\h{2}/\h+}, + upload_path: %r{\h+/} end context 'when only repositories are rolled out' do @@ -47,8 +47,8 @@ RSpec.describe FileUploader do # always use hashed storage path for remote uploads it_behaves_like 'builds correct paths', - store_dir: %r{@hashed/\h{2}/\h{2}/\h+}, - upload_path: %r{@hashed/\h{2}/\h{2}/\h+/\h+/} + store_dir: %r{@hashed/\h{2}/\h{2}/\h+}, + upload_path: %r{@hashed/\h{2}/\h{2}/\h+/\h+/} end describe 'initialize' do diff --git a/spec/uploaders/job_artifact_uploader_spec.rb b/spec/uploaders/job_artifact_uploader_spec.rb index d7c9ef7e0d5..dac9e97641d 100644 --- a/spec/uploaders/job_artifact_uploader_spec.rb +++ b/spec/uploaders/job_artifact_uploader_spec.rb @@ -10,9 +10,9 @@ RSpec.describe JobArtifactUploader do subject { uploader } it_behaves_like "builds correct paths", - store_dir: %r[\h{2}/\h{2}/\h{64}/\d{4}_\d{1,2}_\d{1,2}/\d+/\d+\z], - cache_dir: %r[artifacts/tmp/cache], - work_dir: %r[artifacts/tmp/work] + store_dir: %r[\h{2}/\h{2}/\h{64}/\d{4}_\d{1,2}_\d{1,2}/\d+/\d+\z], + cache_dir: %r[artifacts/tmp/cache], + work_dir: %r[artifacts/tmp/work] context "object store is REMOTE" do before do @@ -22,7 +22,7 @@ RSpec.describe JobArtifactUploader do include_context 'with storage', described_class::Store::REMOTE it_behaves_like "builds correct paths", - store_dir: %r[\h{2}/\h{2}/\h{64}/\d{4}_\d{1,2}_\d{1,2}/\d+/\d+\z] + store_dir: %r[\h{2}/\h{2}/\h{64}/\d{4}_\d{1,2}_\d{1,2}/\d+/\d+\z] describe 
'#cdn_enabled_url' do it 'returns URL and false' do diff --git a/spec/uploaders/lfs_object_uploader_spec.rb b/spec/uploaders/lfs_object_uploader_spec.rb index b85892a42b5..9bbfd910ada 100644 --- a/spec/uploaders/lfs_object_uploader_spec.rb +++ b/spec/uploaders/lfs_object_uploader_spec.rb @@ -10,9 +10,9 @@ RSpec.describe LfsObjectUploader do subject { uploader } it_behaves_like "builds correct paths", - store_dir: %r[\h{2}/\h{2}], - cache_dir: %r[/lfs-objects/tmp/cache], - work_dir: %r[/lfs-objects/tmp/work] + store_dir: %r[\h{2}/\h{2}], + cache_dir: %r[/lfs-objects/tmp/cache], + work_dir: %r[/lfs-objects/tmp/work] context "object store is REMOTE" do before do @@ -22,7 +22,7 @@ RSpec.describe LfsObjectUploader do include_context 'with storage', described_class::Store::REMOTE it_behaves_like "builds correct paths", - store_dir: %r[\h{2}/\h{2}] + store_dir: %r[\h{2}/\h{2}] end describe 'remote file' do diff --git a/spec/uploaders/object_storage/cdn/google_cdn_spec.rb b/spec/uploaders/object_storage/cdn/google_cdn_spec.rb index 184c664f6dc..96413f622e8 100644 --- a/spec/uploaders/object_storage/cdn/google_cdn_spec.rb +++ b/spec/uploaders/object_storage/cdn/google_cdn_spec.rb @@ -99,9 +99,10 @@ RSpec.describe ObjectStorage::CDN::GoogleCDN, let(:path) { '/path/to/file.txt' } let(:expiration) { (Time.current + 10.minutes).utc.to_i } let(:cdn_query_params) { "Expires=#{expiration}&KeyName=#{key_name}" } + let(:encoded_path) { Addressable::URI.encode_component(path, Addressable::URI::CharacterClasses::PATH) } def verify_signature(url, unsigned_url) - expect(url).to start_with("#{options[:url]}#{path}") + expect(url).to start_with("#{options[:url]}#{encoded_path}") uri = Addressable::URI.parse(url) query = uri.query_values @@ -116,6 +117,16 @@ RSpec.describe ObjectStorage::CDN::GoogleCDN, end end + context 'with UTF-8 characters in path' do + let(:path) { "/path/to/©️job🧪" } + let(:url) { subject.signed_url(path) } + let(:unsigned_url) { 
"#{options[:url]}#{encoded_path}?#{cdn_query_params}" } + + it 'returns a valid signed URL' do + verify_signature(url, unsigned_url) + end + end + context 'with default query parameters' do let(:url) { subject.signed_url(path) } let(:unsigned_url) { "#{options[:url]}#{path}?#{cdn_query_params}" } diff --git a/spec/uploaders/packages/composer/cache_uploader_spec.rb b/spec/uploaders/packages/composer/cache_uploader_spec.rb index 7ceaa24f463..7eea4a839ab 100644 --- a/spec/uploaders/packages/composer/cache_uploader_spec.rb +++ b/spec/uploaders/packages/composer/cache_uploader_spec.rb @@ -9,9 +9,9 @@ RSpec.describe Packages::Composer::CacheUploader do subject { uploader } it_behaves_like "builds correct paths", - store_dir: %r[^\h{2}/\h{2}/\h{64}/packages/composer_cache/\d+$], - cache_dir: %r[/packages/tmp/cache], - work_dir: %r[/packages/tmp/work] + store_dir: %r[^\h{2}/\h{2}/\h{64}/packages/composer_cache/\d+$], + cache_dir: %r[/packages/tmp/cache], + work_dir: %r[/packages/tmp/work] context 'object store is remote' do before do @@ -21,7 +21,7 @@ RSpec.describe Packages::Composer::CacheUploader do include_context 'with storage', described_class::Store::REMOTE it_behaves_like "builds correct paths", - store_dir: %r[^\h{2}/\h{2}/\h{64}/packages/composer_cache/\d+$] + store_dir: %r[^\h{2}/\h{2}/\h{64}/packages/composer_cache/\d+$] end describe 'remote file' do diff --git a/spec/uploaders/packages/debian/component_file_uploader_spec.rb b/spec/uploaders/packages/debian/component_file_uploader_spec.rb index bee82fb2715..84ba751c737 100644 --- a/spec/uploaders/packages/debian/component_file_uploader_spec.rb +++ b/spec/uploaders/packages/debian/component_file_uploader_spec.rb @@ -12,9 +12,9 @@ RSpec.describe Packages::Debian::ComponentFileUploader do subject { uploader } it_behaves_like "builds correct paths", - store_dir: %r[^\h{2}/\h{2}/\h{64}/debian_#{container_type}_component_file/\d+$], - cache_dir: %r[/packages/tmp/cache$], - work_dir: %r[/packages/tmp/work$] + 
store_dir: %r[^\h{2}/\h{2}/\h{64}/debian_#{container_type}_component_file/\d+$], + cache_dir: %r[/packages/tmp/cache$], + work_dir: %r[/packages/tmp/work$] context 'object store is remote' do before do @@ -24,9 +24,9 @@ RSpec.describe Packages::Debian::ComponentFileUploader do include_context 'with storage', described_class::Store::REMOTE it_behaves_like "builds correct paths", - store_dir: %r[^\h{2}/\h{2}/\h{64}/debian_#{container_type}_component_file/\d+$], - cache_dir: %r[/packages/tmp/cache$], - work_dir: %r[/packages/tmp/work$] + store_dir: %r[^\h{2}/\h{2}/\h{64}/debian_#{container_type}_component_file/\d+$], + cache_dir: %r[/packages/tmp/cache$], + work_dir: %r[/packages/tmp/work$] end describe 'remote file' do diff --git a/spec/uploaders/packages/debian/distribution_release_file_uploader_spec.rb b/spec/uploaders/packages/debian/distribution_release_file_uploader_spec.rb index 96655edb186..df630569856 100644 --- a/spec/uploaders/packages/debian/distribution_release_file_uploader_spec.rb +++ b/spec/uploaders/packages/debian/distribution_release_file_uploader_spec.rb @@ -12,9 +12,9 @@ RSpec.describe Packages::Debian::DistributionReleaseFileUploader do subject { uploader } it_behaves_like "builds correct paths", - store_dir: %r[^\h{2}/\h{2}/\h{64}/debian_#{container_type}_distribution/\d+$], - cache_dir: %r[/packages/tmp/cache$], - work_dir: %r[/packages/tmp/work$] + store_dir: %r[^\h{2}/\h{2}/\h{64}/debian_#{container_type}_distribution/\d+$], + cache_dir: %r[/packages/tmp/cache$], + work_dir: %r[/packages/tmp/work$] context 'object store is remote' do before do @@ -24,9 +24,9 @@ RSpec.describe Packages::Debian::DistributionReleaseFileUploader do include_context 'with storage', described_class::Store::REMOTE it_behaves_like "builds correct paths", - store_dir: %r[^\h{2}/\h{2}/\h{64}/debian_#{container_type}_distribution/\d+$], - cache_dir: %r[/packages/tmp/cache$], - work_dir: %r[/packages/tmp/work$] + store_dir: 
%r[^\h{2}/\h{2}/\h{64}/debian_#{container_type}_distribution/\d+$], + cache_dir: %r[/packages/tmp/cache$], + work_dir: %r[/packages/tmp/work$] end describe 'remote file' do diff --git a/spec/uploaders/packages/package_file_uploader_spec.rb b/spec/uploaders/packages/package_file_uploader_spec.rb index 7d270ad03c9..ddd9823d55c 100644 --- a/spec/uploaders/packages/package_file_uploader_spec.rb +++ b/spec/uploaders/packages/package_file_uploader_spec.rb @@ -9,9 +9,9 @@ RSpec.describe Packages::PackageFileUploader do subject { uploader } it_behaves_like "builds correct paths", - store_dir: %r[^\h{2}/\h{2}/\h{64}/packages/\d+/files/\d+$], - cache_dir: %r[/packages/tmp/cache], - work_dir: %r[/packages/tmp/work] + store_dir: %r[^\h{2}/\h{2}/\h{64}/packages/\d+/files/\d+$], + cache_dir: %r[/packages/tmp/cache], + work_dir: %r[/packages/tmp/work] context 'object store is remote' do before do @@ -21,7 +21,7 @@ RSpec.describe Packages::PackageFileUploader do include_context 'with storage', described_class::Store::REMOTE it_behaves_like "builds correct paths", - store_dir: %r[^\h{2}/\h{2}/\h{64}/packages/\d+/files/\d+$] + store_dir: %r[^\h{2}/\h{2}/\h{64}/packages/\d+/files/\d+$] end describe 'remote file' do diff --git a/spec/uploaders/packages/rpm/repository_file_uploader_spec.rb b/spec/uploaders/packages/rpm/repository_file_uploader_spec.rb index b3767ae179a..a36a035fde3 100644 --- a/spec/uploaders/packages/rpm/repository_file_uploader_spec.rb +++ b/spec/uploaders/packages/rpm/repository_file_uploader_spec.rb @@ -9,9 +9,9 @@ RSpec.describe Packages::Rpm::RepositoryFileUploader do subject { uploader } it_behaves_like 'builds correct paths', - store_dir: %r[^\h{2}/\h{2}/\h{64}/projects/\d+/rpm/repository_files/\d+$], - cache_dir: %r{/packages/tmp/cache}, - work_dir: %r{/packages/tmp/work} + store_dir: %r[^\h{2}/\h{2}/\h{64}/projects/\d+/rpm/repository_files/\d+$], + cache_dir: %r{/packages/tmp/cache}, + work_dir: %r{/packages/tmp/work} context 'when object store is remote' do 
before do @@ -21,7 +21,7 @@ RSpec.describe Packages::Rpm::RepositoryFileUploader do include_context 'with storage', described_class::Store::REMOTE it_behaves_like 'builds correct paths', - store_dir: %r[^\h{2}/\h{2}/\h{64}/projects/\d+/rpm/repository_files/\d+$] + store_dir: %r[^\h{2}/\h{2}/\h{64}/projects/\d+/rpm/repository_files/\d+$] end describe 'remote file' do diff --git a/spec/uploaders/pages/deployment_uploader_spec.rb b/spec/uploaders/pages/deployment_uploader_spec.rb index 1832f73bd67..7686efd4fe4 100644 --- a/spec/uploaders/pages/deployment_uploader_spec.rb +++ b/spec/uploaders/pages/deployment_uploader_spec.rb @@ -13,9 +13,9 @@ RSpec.describe Pages::DeploymentUploader do subject { uploader } it_behaves_like "builds correct paths", - store_dir: %r[/\h{2}/\h{2}/\h{64}/pages_deployments/\d+], - cache_dir: %r[pages/@hashed/tmp/cache], - work_dir: %r[pages/@hashed/tmp/work] + store_dir: %r[/\h{2}/\h{2}/\h{64}/pages_deployments/\d+], + cache_dir: %r[pages/@hashed/tmp/cache], + work_dir: %r[pages/@hashed/tmp/work] context 'when object store is REMOTE' do before do diff --git a/spec/uploaders/personal_file_uploader_spec.rb b/spec/uploaders/personal_file_uploader_spec.rb index 1373ccac23d..58edf3f093d 100644 --- a/spec/uploaders/personal_file_uploader_spec.rb +++ b/spec/uploaders/personal_file_uploader_spec.rb @@ -50,9 +50,9 @@ RSpec.describe PersonalFileUploader do context 'object_store is LOCAL' do it_behaves_like 'builds correct paths', - store_dir: %r[uploads/-/system/personal_snippet/\d+/\h+], - upload_path: %r[\h+/\S+], - absolute_path: %r[#{CarrierWave.root}/uploads/-/system/personal_snippet/\d+/\h+/\S+$] + store_dir: %r[uploads/-/system/personal_snippet/\d+/\h+], + upload_path: %r[\h+/\S+], + absolute_path: %r[#{CarrierWave.root}/uploads/-/system/personal_snippet/\d+/\h+/\S+$] it_behaves_like '#base_dir' it_behaves_like '#to_h' @@ -66,8 +66,8 @@ RSpec.describe PersonalFileUploader do include_context 'with storage', described_class::Store::REMOTE 
it_behaves_like 'builds correct paths', - store_dir: %r[\d+/\h+], - upload_path: %r[^personal_snippet/\d+/\h+/] + store_dir: %r[\d+/\h+], + upload_path: %r[^personal_snippet/\d+/\h+/] it_behaves_like '#base_dir' it_behaves_like '#to_h' diff --git a/spec/views/admin/application_settings/_ci_cd.html.haml_spec.rb b/spec/views/admin/application_settings/_ci_cd.html.haml_spec.rb index d5aa7139e2b..dc65063c97b 100644 --- a/spec/views/admin/application_settings/_ci_cd.html.haml_spec.rb +++ b/spec/views/admin/application_settings/_ci_cd.html.haml_spec.rb @@ -60,8 +60,9 @@ RSpec.describe 'admin/application_settings/_ci_cd' do expect(rendered).to have_field('Maximum number of runners registered per project', type: 'number') expect(page.find_field('Maximum number of runners registered per project').value).to eq('70') - expect(rendered).to have_field("Maximum number of downstream pipelines in a pipeline's hierarchy tree", -type: 'number') + expect(rendered).to have_field( + "Maximum number of downstream pipelines in a pipeline's hierarchy tree", type: 'number' + ) expect(page.find_field("Maximum number of downstream pipelines in a pipeline's hierarchy tree").value) .to eq('300') end diff --git a/spec/views/admin/application_settings/_repository_check.html.haml_spec.rb b/spec/views/admin/application_settings/_repository_check.html.haml_spec.rb index 011f05eac21..f10ee35060b 100644 --- a/spec/views/admin/application_settings/_repository_check.html.haml_spec.rb +++ b/spec/views/admin/application_settings/_repository_check.html.haml_spec.rb @@ -46,12 +46,13 @@ RSpec.describe 'admin/application_settings/_repository_check.html.haml', feature describe 'inactive project deletion' do let_it_be(:application_setting) do - build(:application_setting, - delete_inactive_projects: true, - inactive_projects_delete_after_months: 2, - inactive_projects_min_size_mb: 250, - inactive_projects_send_warning_email_after_months: 1 - ) + build( + :application_setting, + delete_inactive_projects: true, 
+ inactive_projects_delete_after_months: 2, + inactive_projects_min_size_mb: 250, + inactive_projects_send_warning_email_after_months: 1 + ) end it 'has the setting subsection' do diff --git a/spec/views/admin/projects/_form.html.haml_spec.rb b/spec/views/admin/projects/_form.html.haml_spec.rb new file mode 100644 index 00000000000..d36b32a1cbc --- /dev/null +++ b/spec/views/admin/projects/_form.html.haml_spec.rb @@ -0,0 +1,41 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe 'admin/projects/_form', feature_category: :projects do + using RSpec::Parameterized::TableSyntax + + let_it_be(:admin) { build_stubbed(:admin) } + let_it_be(:project) { build_stubbed(:project) } + + before do + allow(view).to receive(:current_user).and_return(:admin) + assign(:project, project) + end + + describe 'project runner registration setting' do + where(:runner_registration_enabled, :valid_runner_registrars, :checked, :disabled) do + true | ['project'] | true | false + false | ['project'] | false | false + false | ['group'] | false | true + end + + with_them do + before do + allow(project).to receive(:runner_registration_enabled).and_return(runner_registration_enabled) + stub_application_setting(valid_runner_registrars: valid_runner_registrars) + end + + it 'renders the checkbox correctly' do + render + + expect(rendered).to have_field( + 'New project runners can be registered', + type: 'checkbox', + checked: checked, + disabled: disabled + ) + end + end + end +end diff --git a/spec/views/ci/status/_badge.html.haml_spec.rb b/spec/views/ci/status/_badge.html.haml_spec.rb index 6cbd9a61e98..ff8cfe2cca0 100644 --- a/spec/views/ci/status/_badge.html.haml_spec.rb +++ b/spec/views/ci/status/_badge.html.haml_spec.rb @@ -49,10 +49,12 @@ RSpec.describe 'ci/status/_badge' do context 'status has external target url' do before do - external_job = create(:generic_commit_status, - status: :running, - pipeline: pipeline, - target_url: 'http://gitlab.com') + external_job = 
create( + :generic_commit_status, + status: :running, + pipeline: pipeline, + target_url: 'http://gitlab.com' + ) render_status(external_job) end diff --git a/spec/views/ci/status/_icon.html.haml_spec.rb b/spec/views/ci/status/_icon.html.haml_spec.rb index d0579734451..78b19957cf0 100644 --- a/spec/views/ci/status/_icon.html.haml_spec.rb +++ b/spec/views/ci/status/_icon.html.haml_spec.rb @@ -48,10 +48,12 @@ RSpec.describe 'ci/status/_icon' do context 'status has external target url' do before do - external_job = create(:generic_commit_status, - status: :running, - pipeline: pipeline, - target_url: 'http://gitlab.com') + external_job = create( + :generic_commit_status, + status: :running, + pipeline: pipeline, + target_url: 'http://gitlab.com' + ) render_status(external_job) end diff --git a/spec/views/devise/sessions/new.html.haml_spec.rb b/spec/views/devise/sessions/new.html.haml_spec.rb index bad01ec2c3d..8de2eab36e9 100644 --- a/spec/views/devise/sessions/new.html.haml_spec.rb +++ b/spec/views/devise/sessions/new.html.haml_spec.rb @@ -31,71 +31,73 @@ RSpec.describe 'devise/sessions/new' do flag_values = [true, false] flag_values.each do |val| - before do - stub_feature_flags(restyle_login_page: val) - end + context "with #{val}" do + before do + stub_feature_flags(restyle_login_page: val) + end - describe 'ldap' do - include LdapHelpers + describe 'ldap' do + include LdapHelpers - let(:server) { { provider_name: 'ldapmain', label: 'LDAP' }.with_indifferent_access } + let(:server) { { provider_name: 'ldapmain', label: 'LDAP' }.with_indifferent_access } - before do - enable_ldap - stub_devise - disable_captcha - disable_sign_up - disable_other_signin_methods + before do + enable_ldap + stub_devise + disable_captcha + disable_sign_up + disable_other_signin_methods - allow(view).to receive(:experiment_enabled?).and_return(false) - end + allow(view).to receive(:experiment_enabled?).and_return(false) + end - it 'is shown when enabled' do - render + it 'is shown when 
enabled' do + render - expect(rendered).to have_selector('.new-session-tabs') - expect(rendered).to have_selector('[data-testid="ldap-tab"]') - expect(rendered).to have_field('LDAP Username') - end + expect(rendered).to have_selector('.new-session-tabs') + expect(rendered).to have_selector('[data-testid="ldap-tab"]') + expect(rendered).to have_field('LDAP Username') + end - it 'is not shown when LDAP sign in is disabled' do - disable_ldap_sign_in + it 'is not shown when LDAP sign in is disabled' do + disable_ldap_sign_in - render + render - expect(rendered).to have_content('No authentication methods configured') - expect(rendered).not_to have_selector('[data-testid="ldap-tab"]') - expect(rendered).not_to have_field('LDAP Username') + expect(rendered).to have_content('No authentication methods configured') + expect(rendered).not_to have_selector('[data-testid="ldap-tab"]') + expect(rendered).not_to have_field('LDAP Username') + end end - end - - describe 'Google Tag Manager' do - let!(:gtm_id) { 'GTM-WWKMTWS' } - subject { rendered } + describe 'Google Tag Manager' do + let!(:gtm_id) { 'GTM-WWKMTWS' } - before do - stub_devise - disable_captcha - stub_config(extra: { google_tag_manager_id: gtm_id, google_tag_manager_nonce_id: gtm_id }) - end + subject { rendered } - describe 'when Google Tag Manager is enabled' do before do - enable_gtm - render + stub_devise + disable_captcha + stub_config(extra: { google_tag_manager_id: gtm_id, google_tag_manager_nonce_id: gtm_id }) end - it { is_expected.to match /www.googletagmanager.com/ } - end + describe 'when Google Tag Manager is enabled' do + before do + enable_gtm + render + end - describe 'when Google Tag Manager is disabled' do - before do - disable_gtm - render + it { is_expected.to match /www.googletagmanager.com/ } end - it { is_expected.not_to match /www.googletagmanager.com/ } + describe 'when Google Tag Manager is disabled' do + before do + disable_gtm + render + end + + it { is_expected.not_to match 
/www.googletagmanager.com/ } + end end end end diff --git a/spec/views/devise/shared/_signup_box.html.haml_spec.rb b/spec/views/devise/shared/_signup_box.html.haml_spec.rb index 94a5871cb97..377e29e18e7 100644 --- a/spec/views/devise/shared/_signup_box.html.haml_spec.rb +++ b/spec/views/devise/shared/_signup_box.html.haml_spec.rb @@ -30,10 +30,12 @@ RSpec.describe 'devise/shared/_signup_box' do end def text(translation) - format(translation, - button_text: button_text, - link_start: "", - link_end: '') + format( + translation, + button_text: button_text, + link_start: "", + link_end: '' + ) end context 'when terms are enforced' do diff --git a/spec/views/groups/edit.html.haml_spec.rb b/spec/views/groups/edit.html.haml_spec.rb index fda93ebab51..1400791f12b 100644 --- a/spec/views/groups/edit.html.haml_spec.rb +++ b/spec/views/groups/edit.html.haml_spec.rb @@ -2,9 +2,13 @@ require 'spec_helper' -RSpec.describe 'groups/edit.html.haml' do +RSpec.describe 'groups/edit.html.haml', feature_category: :subgroups do include Devise::Test::ControllerHelpers + before do + stub_template 'groups/settings/_code_suggestions' => '' + end + describe '"Share with group lock" setting' do let(:root_owner) { create(:user) } let(:root_group) { create(:group) } diff --git a/spec/views/groups/packages/index.html.haml_spec.rb b/spec/views/groups/packages/index.html.haml_spec.rb new file mode 100644 index 00000000000..26f6268a224 --- /dev/null +++ b/spec/views/groups/packages/index.html.haml_spec.rb @@ -0,0 +1,39 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe 'groups/packages/index.html.haml', feature_category: :package_registry do + let_it_be(:group) { build(:group) } + + subject { rendered } + + before do + assign(:group, group) + end + + it 'renders vue entrypoint' do + render + + expect(rendered).to have_selector('#js-vue-packages-list') + end + + describe 'settings path' do + it 'without permission sets empty settings path' do + allow(view).to 
receive(:show_group_package_registry_settings).and_return(false) + + render + + expect(rendered).to have_selector('[data-settings-path=""]') + end + + it 'with permission sets group settings path' do + allow(view).to receive(:show_group_package_registry_settings).and_return(true) + + render + + expect(rendered).to have_selector( + "[data-settings-path=\"#{group_settings_packages_and_registries_path(group)}\"]" + ) + end + end +end diff --git a/spec/views/groups/settings/_general.html.haml_spec.rb b/spec/views/groups/settings/_general.html.haml_spec.rb new file mode 100644 index 00000000000..9f16e43be13 --- /dev/null +++ b/spec/views/groups/settings/_general.html.haml_spec.rb @@ -0,0 +1,21 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe 'groups/settings/_general.html.haml', feature_category: :subgroups do + describe 'Group Settings README' do + let_it_be(:group) { build_stubbed(:group) } + let_it_be(:user) { build_stubbed(:admin) } + + before do + assign(:group, group) + allow(view).to receive(:current_user).and_return(user) + end + + it 'renders #js-group-settings-readme' do + render + + expect(rendered).to have_selector('#js-group-settings-readme') + end + end +end diff --git a/spec/views/groups/show.html.haml_spec.rb b/spec/views/groups/show.html.haml_spec.rb new file mode 100644 index 00000000000..ac687f68ef6 --- /dev/null +++ b/spec/views/groups/show.html.haml_spec.rb @@ -0,0 +1,38 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe 'groups/show', feature_category: :subgroups do + describe 'group README' do + let_it_be(:group) { build_stubbed(:group) } + let_it_be(:readme_project) { build_stubbed(:project, :readme) } + + before do + assign(:group, group) + end + + context 'with readme project' do + before do + allow(group).to receive(:group_readme).and_return(readme_project) + end + + it 'renders #js-group-readme' do + render + + expect(rendered).to have_selector('#js-group-readme') + end + end + + context 
'without readme project' do + before do + allow(group).to receive(:group_readme).and_return(nil) + end + + it 'does not render #js-group-readme' do + render + + expect(rendered).not_to have_selector('#js-group-readme') + end + end + end +end diff --git a/spec/views/layouts/_head.html.haml_spec.rb b/spec/views/layouts/_head.html.haml_spec.rb index f9725c73d05..0ceef3fbd0e 100644 --- a/spec/views/layouts/_head.html.haml_spec.rb +++ b/spec/views/layouts/_head.html.haml_spec.rb @@ -59,7 +59,7 @@ RSpec.describe 'layouts/_head' do render - expect(rendered).to match('') + expect(rendered).to match('') end context 'when an asset_host is set and snowplow url is set', quarantine: 'https://gitlab.com/gitlab-org/gitlab/-/issues/346542' do diff --git a/spec/views/layouts/_search.html.haml_spec.rb b/spec/views/layouts/_search.html.haml_spec.rb deleted file mode 100644 index ceb82e3640e..00000000000 --- a/spec/views/layouts/_search.html.haml_spec.rb +++ /dev/null @@ -1,77 +0,0 @@ -# frozen_string_literal: true - -require 'spec_helper' - -RSpec.describe 'layouts/_search' do - let(:group) { nil } - let(:project) { nil } - let(:scope) { 'issues' } - let(:search_context) do - instance_double(Gitlab::SearchContext, - project: project, - group: group, - scope: scope, - ref: nil, - snippets: [], - search_url: '/search', - project_metadata: {}, - group_metadata: {}) - end - - before do - allow(view).to receive(:search_context).and_return(search_context) - allow(search_context).to receive(:code_search?).and_return(false) - allow(search_context).to receive(:for_snippets?).and_return(false) - end - - shared_examples 'search context scope is set' do - context 'when rendering' do - it 'sets the placeholder' do - render - - expect(rendered).to include('placeholder="Search GitLab"') - expect(rendered).to include('aria-label="Search GitLab"') - end - end - - context 'when on issues' do - it 'sets scope to issues' do - render - - expect(rendered).to have_css("input[name='scope'][value='issues']", 
count: 1, visible: false) - end - end - - context 'when on merge requests' do - let(:scope) { 'merge_requests' } - - it 'sets scope to merge_requests' do - render - - expect(rendered).to have_css("input[name='scope'][value='merge_requests']", count: 1, visible: false) - end - end - end - - context 'when doing project level search' do - let(:project) { create(:project) } - - before do - allow(search_context).to receive(:for_project?).and_return(true) - allow(search_context).to receive(:for_group?).and_return(false) - end - - it_behaves_like 'search context scope is set' - end - - context 'when doing group level search' do - let(:group) { create(:group) } - - before do - allow(search_context).to receive(:for_project?).and_return(false) - allow(search_context).to receive(:for_group?).and_return(true) - end - - it_behaves_like 'search context scope is set' - end -end diff --git a/spec/views/layouts/application.html.haml_spec.rb b/spec/views/layouts/application.html.haml_spec.rb index 527ba1498b9..d4d40a9ade9 100644 --- a/spec/views/layouts/application.html.haml_spec.rb +++ b/spec/views/layouts/application.html.haml_spec.rb @@ -6,10 +6,6 @@ RSpec.describe 'layouts/application' do let(:user) { create(:user) } before do - allow(view).to receive(:current_application_settings).and_return(Gitlab::CurrentSettings.current_application_settings) - allow(view).to receive(:experiment_enabled?).and_return(false) - allow(view).to receive(:session).and_return({}) - allow(view).to receive(:user_signed_in?).and_return(true) allow(view).to receive(:current_user).and_return(user) allow(view).to receive(:current_user_mode).and_return(Gitlab::Auth::CurrentUserMode.new(user)) end diff --git a/spec/views/layouts/nav/sidebar/_admin.html.haml_spec.rb b/spec/views/layouts/nav/sidebar/_admin.html.haml_spec.rb index 163f39568e5..3097598aaca 100644 --- a/spec/views/layouts/nav/sidebar/_admin.html.haml_spec.rb +++ b/spec/views/layouts/nav/sidebar/_admin.html.haml_spec.rb @@ -2,7 +2,14 @@ require 
'spec_helper' -RSpec.describe 'layouts/nav/sidebar/_admin' do +RSpec.describe 'layouts/nav/sidebar/_admin', feature_category: :navigation do + let(:user) { build(:admin) } + + before do + allow(user).to receive(:can_admin_all_resources?).and_return(true) + allow(view).to receive(:current_user).and_return(user) + end + shared_examples 'page has active tab' do |title| it "activates #{title} tab" do render @@ -32,7 +39,7 @@ RSpec.describe 'layouts/nav/sidebar/_admin' do context 'on projects' do before do - allow(controller).to receive(:controller_name).and_return('projects') + allow(controller).to receive(:controller_name).and_return('admin/projects') allow(controller).to receive(:controller_path).and_return('admin/projects') end diff --git a/spec/views/layouts/nav/sidebar/_project.html.haml_spec.rb b/spec/views/layouts/nav/sidebar/_project.html.haml_spec.rb index 0df490f9b41..c85534b73ee 100644 --- a/spec/views/layouts/nav/sidebar/_project.html.haml_spec.rb +++ b/spec/views/layouts/nav/sidebar/_project.html.haml_spec.rb @@ -334,12 +334,12 @@ RSpec.describe 'layouts/nav/sidebar/_project', feature_category: :navigation do describe 'Deployments' do let(:page) { Nokogiri::HTML.parse(rendered) } - describe 'Feature Flags' do + describe 'Feature flags' do it 'has a link to the feature flags page' do render - expect(page.at_css('.shortcuts-deployments').parent.css('[aria-label="Feature Flags"]')).not_to be_empty - expect(rendered).to have_link('Feature Flags', href: project_feature_flags_path(project)) + expect(page.at_css('.shortcuts-deployments').parent.css('[aria-label="Feature flags"]')).not_to be_empty + expect(rendered).to have_link('Feature flags', href: project_feature_flags_path(project)) end describe 'when the user does not have access' do @@ -348,7 +348,7 @@ RSpec.describe 'layouts/nav/sidebar/_project', feature_category: :navigation do it 'does not have a link to the feature flags page' do render - expect(rendered).not_to have_link('Feature Flags') + 
expect(rendered).not_to have_link('Feature flags') end end end @@ -567,11 +567,11 @@ RSpec.describe 'layouts/nav/sidebar/_project', feature_category: :navigation do end end - describe 'Infrastructure Registry' do - it 'shows link to infrastructure registry page' do + describe 'Terraform modules' do + it 'shows link to terraform modules page' do render - expect(rendered).to have_link('Infrastructure Registry', href: project_infrastructure_registry_index_path(project)) + expect(rendered).to have_link('Terraform modules', href: project_infrastructure_registry_index_path(project)) end context 'when package registry config is disabled' do @@ -580,7 +580,7 @@ RSpec.describe 'layouts/nav/sidebar/_project', feature_category: :navigation do render - expect(rendered).not_to have_link('Infrastructure Registry', href: project_infrastructure_registry_index_path(project)) + expect(rendered).not_to have_link('Terraform modules', href: project_infrastructure_registry_index_path(project)) end end end diff --git a/spec/views/notify/autodevops_disabled_email.text.erb_spec.rb b/spec/views/notify/autodevops_disabled_email.text.erb_spec.rb index d8299d637e1..4e053711dcf 100644 --- a/spec/views/notify/autodevops_disabled_email.text.erb_spec.rb +++ b/spec/views/notify/autodevops_disabled_email.text.erb_spec.rb @@ -9,12 +9,14 @@ RSpec.describe 'notify/autodevops_disabled_email.text.erb' do let(:project) { create(:project, :repository) } let(:pipeline) do - create(:ci_pipeline, - :failed, - project: project, - user: user, - ref: project.default_branch, - sha: project.commit.sha) + create( + :ci_pipeline, + :failed, + project: project, + user: user, + ref: project.default_branch, + sha: project.commit.sha + ) end before do diff --git a/spec/views/notify/new_achievement_email.html.haml_spec.rb b/spec/views/notify/new_achievement_email.html.haml_spec.rb new file mode 100644 index 00000000000..9f577e6c043 --- /dev/null +++ b/spec/views/notify/new_achievement_email.html.haml_spec.rb @@ -0,0 
+1,26 @@ +# frozen_string_literal: true + +require 'spec_helper' +require 'email_spec' + +RSpec.describe 'notify/new_achievement_email.html.haml', feature_category: :user_profile do + let(:user) { build(:user) } + let(:achievement) { build(:achievement) } + + before do + allow(view).to receive(:message) { instance_double(Mail::Message, subject: 'Subject') } + assign(:user, user) + assign(:achievement, achievement) + end + + it 'contains achievement information' do + render + + expect(rendered).to have_content(achievement.namespace.full_path) + expect(rendered).to have_content(" awarded you the ") + expect(rendered).to have_content(achievement.name) + expect(rendered).to have_content(" achievement!") + + expect(rendered).to have_content("View your achievements on your profile") + end +end diff --git a/spec/views/notify/pipeline_failed_email.text.erb_spec.rb b/spec/views/notify/pipeline_failed_email.text.erb_spec.rb index dd637af5137..9bd5722954f 100644 --- a/spec/views/notify/pipeline_failed_email.text.erb_spec.rb +++ b/spec/views/notify/pipeline_failed_email.text.erb_spec.rb @@ -9,12 +9,14 @@ RSpec.describe 'notify/pipeline_failed_email.text.erb' do let(:merge_request) { create(:merge_request, :simple, source_project: project) } let(:pipeline) do - create(:ci_pipeline, - :failed, - project: project, - user: user, - ref: project.default_branch, - sha: project.commit.sha) + create( + :ci_pipeline, + :failed, + project: project, + user: user, + ref: project.default_branch, + sha: project.commit.sha + ) end before do diff --git a/spec/views/profiles/keys/_key.html.haml_spec.rb b/spec/views/profiles/keys/_key.html.haml_spec.rb index d51bfd19c37..4d14ce7c909 100644 --- a/spec/views/profiles/keys/_key.html.haml_spec.rb +++ b/spec/views/profiles/keys/_key.html.haml_spec.rb @@ -12,10 +12,12 @@ RSpec.describe 'profiles/keys/_key.html.haml', feature_category: :system_access context 'when the key partial is used' do let_it_be(:key) do - create(:personal_key, - user: user, - 
last_used_at: 7.days.ago, - expires_at: 2.days.from_now) + create( + :personal_key, + user: user, + last_used_at: 7.days.ago, + expires_at: 2.days.from_now + ) end it 'displays the correct values', :aggregate_failures do @@ -54,9 +56,7 @@ RSpec.describe 'profiles/keys/_key.html.haml', feature_category: :system_access context 'when the key has not been used' do let_it_be(:key) do - create(:personal_key, - user: user, - last_used_at: nil) + create(:personal_key, user: user, last_used_at: nil) end it 'renders "Never" for last used' do @@ -97,9 +97,7 @@ RSpec.describe 'profiles/keys/_key.html.haml', feature_category: :system_access context 'when the key does not have an expiration date' do let_it_be(:key) do - create(:personal_key, - user: user, - expires_at: nil) + create(:personal_key, user: user, expires_at: nil) end it 'renders "Never" for expires' do diff --git a/spec/views/profiles/preferences/show.html.haml_spec.rb b/spec/views/profiles/preferences/show.html.haml_spec.rb index 6e0c6d67d85..9a177ba0394 100644 --- a/spec/views/profiles/preferences/show.html.haml_spec.rb +++ b/spec/views/profiles/preferences/show.html.haml_spec.rb @@ -54,9 +54,9 @@ RSpec.describe 'profiles/preferences/show' do end it 'has helpful homepage setup guidance' do - expect(rendered).to have_selector('[data-label="Dashboard"]') + expect(rendered).to have_selector('[data-label="Homepage"]') expect(rendered).to have_selector("[data-description=" \ - "'Choose what content you want to see by default on your dashboard.']") + "'Choose what content you want to see by default on your homepage.']") end end diff --git a/spec/views/projects/_home_panel.html.haml_spec.rb b/spec/views/projects/_home_panel.html.haml_spec.rb index 6f6a2d9a04d..e5081df4c22 100644 --- a/spec/views/projects/_home_panel.html.haml_spec.rb +++ b/spec/views/projects/_home_panel.html.haml_spec.rb @@ -199,18 +199,6 @@ RSpec.describe 'projects/_home_panel' do expect(rendered).not_to have_content("Forked from 
#{source_project.full_name}") end - - context 'when fork_divergence_counts is disabled' do - before do - stub_feature_flags(fork_divergence_counts: false) - end - - it 'shows the forked-from project' do - render - - expect(rendered).to have_content("Forked from #{source_project.full_name}") - end - end end context 'user cannot read fork source' do @@ -223,18 +211,6 @@ RSpec.describe 'projects/_home_panel' do expect(rendered).not_to have_content("Forked from an inaccessible project") end - - context 'when fork_divergence_counts is disabled' do - before do - stub_feature_flags(fork_divergence_counts: false) - end - - it 'shows the message that forked project is inaccessible' do - render - - expect(rendered).to have_content("Forked from an inaccessible project") - end - end end end end diff --git a/spec/views/projects/commit/_commit_box.html.haml_spec.rb b/spec/views/projects/commit/_commit_box.html.haml_spec.rb index 4335a0901ae..ee76560ac3b 100644 --- a/spec/views/projects/commit/_commit_box.html.haml_spec.rb +++ b/spec/views/projects/commit/_commit_box.html.haml_spec.rb @@ -52,8 +52,7 @@ RSpec.describe 'projects/commit/_commit_box.html.haml' do context 'when pipeline for the commit is blocked' do let!(:pipeline) do - create(:ci_pipeline, :blocked, project: project, - sha: project.commit.id) + create(:ci_pipeline, :blocked, project: project, sha: project.commit.id) end it 'shows correct pipeline description' do diff --git a/spec/views/projects/commit/show.html.haml_spec.rb b/spec/views/projects/commit/show.html.haml_spec.rb index eba54628215..6d2237e773e 100644 --- a/spec/views/projects/commit/show.html.haml_spec.rb +++ b/spec/views/projects/commit/show.html.haml_spec.rb @@ -28,19 +28,6 @@ RSpec.describe 'projects/commit/show.html.haml', feature_category: :source_code_ allow(view).to receive(:pagination_params).and_return({}) end - context 'inline diff view' do - before do - allow(view).to receive(:diff_view).and_return(:inline) - allow(view).to 
receive(:diff_view).and_return(:inline) - - render - end - - it 'has limited width' do - expect(rendered).to have_selector('.limit-container-width') - end - end - context 'parallel diff view' do before do allow(view).to receive(:diff_view).and_return(:parallel) diff --git a/spec/views/projects/edit.html.haml_spec.rb b/spec/views/projects/edit.html.haml_spec.rb index aeb12abd240..77336aa7d86 100644 --- a/spec/views/projects/edit.html.haml_spec.rb +++ b/spec/views/projects/edit.html.haml_spec.rb @@ -13,9 +13,11 @@ RSpec.describe 'projects/edit' do assign(:project, project) allow(controller).to receive(:current_user).and_return(user) - allow(view).to receive_messages(current_user: user, - can?: true, - current_application_settings: Gitlab::CurrentSettings.current_application_settings) + allow(view).to receive_messages( + current_user: user, + can?: true, + current_application_settings: Gitlab::CurrentSettings.current_application_settings + ) end context 'project export disabled' do diff --git a/spec/views/projects/merge_requests/edit.html.haml_spec.rb b/spec/views/projects/merge_requests/edit.html.haml_spec.rb index 75956160c0a..8774623d07e 100644 --- a/spec/views/projects/merge_requests/edit.html.haml_spec.rb +++ b/spec/views/projects/merge_requests/edit.html.haml_spec.rb @@ -43,7 +43,7 @@ RSpec.describe 'projects/merge_requests/edit.html.haml' do render expect(rendered).to have_field('merge_request[title]') - expect(rendered).to have_field('merge_request[description]') + expect(rendered).to have_selector('input[name="merge_request[description]"]', visible: false) expect(rendered).to have_selector('input[name="merge_request[label_ids][]"]', visible: false) expect(rendered).to have_selector('.js-milestone-dropdown-root') expect(rendered).not_to have_selector('#merge_request_target_branch', visible: false) @@ -55,7 +55,7 @@ RSpec.describe 'projects/merge_requests/edit.html.haml' do render expect(rendered).to have_field('merge_request[title]') - expect(rendered).to 
have_field('merge_request[description]') + expect(rendered).to have_selector('input[name="merge_request[description]"]', visible: false) expect(rendered).to have_selector('input[name="merge_request[label_ids][]"]', visible: false) expect(rendered).to have_selector('.js-milestone-dropdown-root') expect(rendered).to have_selector('#merge_request_target_branch', visible: false) diff --git a/spec/views/projects/packages/index.html.haml_spec.rb b/spec/views/projects/packages/index.html.haml_spec.rb new file mode 100644 index 00000000000..2557ceb70b3 --- /dev/null +++ b/spec/views/projects/packages/index.html.haml_spec.rb @@ -0,0 +1,39 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe 'projects/packages/packages/index.html.haml', feature_category: :package_registry do + let_it_be(:project) { build(:project) } + + subject { rendered } + + before do + assign(:project, project) + end + + it 'renders vue entrypoint' do + render + + expect(rendered).to have_selector('#js-vue-packages-list') + end + + describe 'settings path' do + it 'without permission sets empty settings path' do + allow(view).to receive(:show_package_registry_settings).and_return(false) + + render + + expect(rendered).to have_selector('[data-settings-path=""]') + end + + it 'with permission sets project settings path' do + allow(view).to receive(:show_package_registry_settings).and_return(true) + + render + + expect(rendered).to have_selector( + "[data-settings-path=\"#{project_settings_packages_and_registries_path(project)}\"]" + ) + end + end +end diff --git a/spec/views/projects/settings/merge_requests/show.html.haml_spec.rb b/spec/views/projects/settings/merge_requests/show.html.haml_spec.rb index 821f430eb10..1a7bfc5b5cd 100644 --- a/spec/views/projects/settings/merge_requests/show.html.haml_spec.rb +++ b/spec/views/projects/settings/merge_requests/show.html.haml_spec.rb @@ -13,9 +13,11 @@ RSpec.describe 'projects/settings/merge_requests/show' do assign(:project, project) 
allow(controller).to receive(:current_user).and_return(user) - allow(view).to receive_messages(current_user: user, - can?: true, - current_application_settings: Gitlab::CurrentSettings.current_application_settings) + allow(view).to receive_messages( + current_user: user, + can?: true, + current_application_settings: Gitlab::CurrentSettings.current_application_settings + ) end describe 'merge suggestions settings' do diff --git a/spec/views/projects/tags/index.html.haml_spec.rb b/spec/views/projects/tags/index.html.haml_spec.rb index 99db5d9e2a8..dfa27afb72f 100644 --- a/spec/views/projects/tags/index.html.haml_spec.rb +++ b/spec/views/projects/tags/index.html.haml_spec.rb @@ -6,9 +6,7 @@ RSpec.describe 'projects/tags/index.html.haml' do let_it_be(:project) { create(:project, :repository) } let_it_be(:git_tag) { project.repository.tags.last } let_it_be(:release) do - create(:release, project: project, - sha: git_tag.target_commit.sha, - tag: 'v1.1.0') + create(:release, project: project, sha: git_tag.target_commit.sha, tag: 'v1.1.0') end let(:pipeline) { create(:ci_pipeline, :success, project: project, ref: git_tag.name, sha: release.sha) } diff --git a/spec/views/search/_results.html.haml_spec.rb b/spec/views/search/_results.html.haml_spec.rb index ed71a03c7e0..832cc5b7cf3 100644 --- a/spec/views/search/_results.html.haml_spec.rb +++ b/spec/views/search/_results.html.haml_spec.rb @@ -97,12 +97,6 @@ RSpec.describe 'search/_results', feature_category: :global_search do expect(rendered).not_to have_selector('[data-track-property=search_result]') end end - - it 'does render the sidebar' do - render - - expect(rendered).to have_selector('#js-search-sidebar') - end end end diff --git a/spec/views/search/show.html.haml_spec.rb b/spec/views/search/show.html.haml_spec.rb index db06adfeb6b..0158a9049b9 100644 --- a/spec/views/search/show.html.haml_spec.rb +++ b/spec/views/search/show.html.haml_spec.rb @@ -41,6 +41,12 @@ RSpec.describe 'search/show', feature_category: 
:global_search do expect(rendered).not_to render_template('search/_results') end + + it 'does render the sidebar' do + render + + expect(rendered).to have_selector('#js-search-sidebar') + end end context 'unfurling support' do diff --git a/spec/views/shared/milestones/_issuables.html.haml_spec.rb b/spec/views/shared/milestones/_issuables.html.haml_spec.rb index 5eed2c96a45..cd11c028bd7 100644 --- a/spec/views/shared/milestones/_issuables.html.haml_spec.rb +++ b/spec/views/shared/milestones/_issuables.html.haml_spec.rb @@ -6,8 +6,13 @@ RSpec.describe 'shared/milestones/_issuables.html.haml' do let(:issuables_size) { 100 } before do - allow(view).to receive_messages(title: nil, id: nil, show_project_name: nil, dom_class: '', - issuables: double(length: issuables_size).as_null_object) + allow(view).to receive_messages( + title: nil, + id: nil, + show_project_name: nil, + dom_class: '', + issuables: double(length: issuables_size).as_null_object + ) stub_template 'shared/milestones/_issuable.html.haml' => '' end diff --git a/spec/views/shared/runners/_runner_details.html.haml_spec.rb b/spec/views/shared/runners/_runner_details.html.haml_spec.rb index 6e95f6e8075..9776d29de44 100644 --- a/spec/views/shared/runners/_runner_details.html.haml_spec.rb +++ b/spec/views/shared/runners/_runner_details.html.haml_spec.rb @@ -6,11 +6,14 @@ RSpec.describe 'shared/runners/_runner_details.html.haml' do include PageLayoutHelper let(:runner) do - create(:ci_runner, name: 'test runner', - version: '11.4.0', - ip_address: '127.1.2.3', - revision: 'abcd1234', - architecture: 'amd64' ) + create( + :ci_runner, + name: 'test runner', + version: '11.4.0', + ip_address: '127.1.2.3', + revision: 'abcd1234', + architecture: 'amd64' + ) end before do diff --git a/spec/workers/authorized_project_update/user_refresh_over_user_range_worker_spec.rb b/spec/workers/authorized_project_update/user_refresh_over_user_range_worker_spec.rb index 081bece09e9..fcc157f9998 100644 --- 
a/spec/workers/authorized_project_update/user_refresh_over_user_range_worker_spec.rb +++ b/spec/workers/authorized_project_update/user_refresh_over_user_range_worker_spec.rb @@ -10,9 +10,7 @@ RSpec.describe AuthorizedProjectUpdate::UserRefreshOverUserRangeWorker, feature_ let(:end_user_id) { start_user_id } let(:execute_worker) { subject.perform(start_user_id, end_user_id) } - it_behaves_like 'worker with data consistency', - described_class, - data_consistency: :delayed + it_behaves_like 'worker with data consistency', described_class, data_consistency: :delayed describe '#perform' do context 'checks if project authorization update is required' do diff --git a/spec/workers/background_migration/ci_database_worker_spec.rb b/spec/workers/background_migration/ci_database_worker_spec.rb index 1048a06bb12..3f2977a0aaa 100644 --- a/spec/workers/background_migration/ci_database_worker_spec.rb +++ b/spec/workers/background_migration/ci_database_worker_spec.rb @@ -2,6 +2,10 @@ require 'spec_helper' -RSpec.describe BackgroundMigration::CiDatabaseWorker, :clean_gitlab_redis_shared_state, if: Gitlab::Database.has_config?(:ci), feature_category: :database do +RSpec.describe BackgroundMigration::CiDatabaseWorker, :clean_gitlab_redis_shared_state, feature_category: :database do + before do + skip_if_shared_database(:ci) + end + it_behaves_like 'it runs background migration jobs', 'ci' end diff --git a/spec/workers/build_hooks_worker_spec.rb b/spec/workers/build_hooks_worker_spec.rb index f8efc9c455d..adae0417a9a 100644 --- a/spec/workers/build_hooks_worker_spec.rb +++ b/spec/workers/build_hooks_worker_spec.rb @@ -42,7 +42,5 @@ RSpec.describe BuildHooksWorker, feature_category: :continuous_integration do end end - it_behaves_like 'worker with data consistency', - described_class, - data_consistency: :delayed + it_behaves_like 'worker with data consistency', described_class, data_consistency: :delayed end diff --git a/spec/workers/build_queue_worker_spec.rb 
b/spec/workers/build_queue_worker_spec.rb index 1f3640e7496..079e11acde3 100644 --- a/spec/workers/build_queue_worker_spec.rb +++ b/spec/workers/build_queue_worker_spec.rb @@ -24,7 +24,5 @@ RSpec.describe BuildQueueWorker, feature_category: :continuous_integration do end end - it_behaves_like 'worker with data consistency', - described_class, - data_consistency: :sticky + it_behaves_like 'worker with data consistency', described_class, data_consistency: :sticky end diff --git a/spec/workers/bulk_imports/export_request_worker_spec.rb b/spec/workers/bulk_imports/export_request_worker_spec.rb index 7260e0c0f67..2faa28ba489 100644 --- a/spec/workers/bulk_imports/export_request_worker_spec.rb +++ b/spec/workers/bulk_imports/export_request_worker_spec.rb @@ -80,7 +80,7 @@ RSpec.describe BulkImports::ExportRequestWorker, feature_category: :importers do 'source_full_path' => entity.source_full_path, 'exception.backtrace' => anything, 'exception.class' => 'NoMethodError', - 'exception.message' => "undefined method `model_id' for nil:NilClass", + 'exception.message' => /^undefined method `model_id' for nil:NilClass/, 'message' => 'Failed to fetch source entity id', 'importer' => 'gitlab_migration', 'source_version' => entity.bulk_import.source_version_info.to_s diff --git a/spec/workers/ci/job_artifacts/track_artifact_report_worker_spec.rb b/spec/workers/ci/job_artifacts/track_artifact_report_worker_spec.rb index bbc2bcf9ac9..9c3d249c6aa 100644 --- a/spec/workers/ci/job_artifacts/track_artifact_report_worker_spec.rb +++ b/spec/workers/ci/job_artifacts/track_artifact_report_worker_spec.rb @@ -9,8 +9,7 @@ RSpec.describe Ci::JobArtifacts::TrackArtifactReportWorker, feature_category: :c let_it_be(:user) { create(:user) } let_it_be(:pipeline) do - create(:ci_pipeline, :with_test_reports, :with_coverage_reports, - project: project, user: user) + create(:ci_pipeline, :with_test_reports, :with_coverage_reports, project: project, user: user) end subject(:perform) { 
described_class.new.perform(pipeline_id) } diff --git a/spec/workers/ci/runners/stale_machines_cleanup_cron_worker_spec.rb b/spec/workers/ci/runners/stale_machines_cleanup_cron_worker_spec.rb index d8f620bc024..619012eaa6e 100644 --- a/spec/workers/ci/runners/stale_machines_cleanup_cron_worker_spec.rb +++ b/spec/workers/ci/runners/stale_machines_cleanup_cron_worker_spec.rb @@ -8,16 +8,16 @@ RSpec.describe Ci::Runners::StaleMachinesCleanupCronWorker, feature_category: :r describe '#perform', :freeze_time do subject(:perform) { worker.perform } - let!(:runner_machine1) do + let!(:runner_manager1) do create(:ci_runner_machine, created_at: 7.days.ago, contacted_at: 7.days.ago) end - let!(:runner_machine2) { create(:ci_runner_machine) } - let!(:runner_machine3) { create(:ci_runner_machine, created_at: 6.days.ago) } + let!(:runner_manager2) { create(:ci_runner_machine) } + let!(:runner_manager3) { create(:ci_runner_machine, created_at: 6.days.ago) } it_behaves_like 'an idempotent worker' do it 'delegates to Ci::Runners::StaleMachinesCleanupService' do - expect_next_instance_of(Ci::Runners::StaleMachinesCleanupService) do |service| + expect_next_instance_of(Ci::Runners::StaleManagersCleanupService) do |service| expect(service) .to receive(:execute).and_call_original end @@ -26,16 +26,16 @@ RSpec.describe Ci::Runners::StaleMachinesCleanupCronWorker, feature_category: :r expect(worker.logging_extras).to eq({ "extra.ci_runners_stale_machines_cleanup_cron_worker.status" => :success, - "extra.ci_runners_stale_machines_cleanup_cron_worker.deleted_machines" => true + "extra.ci_runners_stale_machines_cleanup_cron_worker.deleted_managers" => true }) end - it 'cleans up stale runner machines', :aggregate_failures do - expect(Ci::RunnerMachine.stale.count).to eq 1 + it 'cleans up stale runner managers', :aggregate_failures do + expect(Ci::RunnerManager.stale.count).to eq 1 - expect { perform }.to change { Ci::RunnerMachine.count }.from(3).to(2) + expect { perform }.to change { 
Ci::RunnerManager.count }.from(3).to(2) - expect(Ci::RunnerMachine.all).to match_array [runner_machine2, runner_machine3] + expect(Ci::RunnerManager.all).to match_array [runner_manager2, runner_manager3] end end end diff --git a/spec/workers/clusters/agents/delete_expired_events_worker_spec.rb b/spec/workers/clusters/agents/delete_expired_events_worker_spec.rb index b439df4e119..8f2bd189d5c 100644 --- a/spec/workers/clusters/agents/delete_expired_events_worker_spec.rb +++ b/spec/workers/clusters/agents/delete_expired_events_worker_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe Clusters::Agents::DeleteExpiredEventsWorker, feature_category: :kubernetes_management do +RSpec.describe Clusters::Agents::DeleteExpiredEventsWorker, feature_category: :deployment_management do let(:agent) { create(:cluster_agent) } describe '#perform' do diff --git a/spec/workers/clusters/applications/activate_integration_worker_spec.rb b/spec/workers/clusters/applications/activate_integration_worker_spec.rb index 40a774e1818..58b133aa6de 100644 --- a/spec/workers/clusters/applications/activate_integration_worker_spec.rb +++ b/spec/workers/clusters/applications/activate_integration_worker_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe Clusters::Applications::ActivateIntegrationWorker, '#perform', feature_category: :kubernetes_management do +RSpec.describe Clusters::Applications::ActivateIntegrationWorker, '#perform', feature_category: :deployment_management do context 'when cluster exists' do describe 'prometheus integration' do let(:integration_name) { 'prometheus' } diff --git a/spec/workers/clusters/applications/deactivate_integration_worker_spec.rb b/spec/workers/clusters/applications/deactivate_integration_worker_spec.rb index f02ad18c7cc..5f7cd786ea3 100644 --- a/spec/workers/clusters/applications/deactivate_integration_worker_spec.rb +++ b/spec/workers/clusters/applications/deactivate_integration_worker_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' 
-RSpec.describe Clusters::Applications::DeactivateIntegrationWorker, '#perform', feature_category: :kubernetes_management do +RSpec.describe Clusters::Applications::DeactivateIntegrationWorker, '#perform', feature_category: :deployment_management do context 'when cluster exists' do describe 'prometheus integration' do let(:integration_name) { 'prometheus' } diff --git a/spec/workers/clusters/cleanup/project_namespace_worker_spec.rb b/spec/workers/clusters/cleanup/project_namespace_worker_spec.rb index 15fc9e8678e..7119664d706 100644 --- a/spec/workers/clusters/cleanup/project_namespace_worker_spec.rb +++ b/spec/workers/clusters/cleanup/project_namespace_worker_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe Clusters::Cleanup::ProjectNamespaceWorker, feature_category: :kubernetes_management do +RSpec.describe Clusters::Cleanup::ProjectNamespaceWorker, feature_category: :deployment_management do describe '#perform' do context 'when cluster.cleanup_status is cleanup_removing_project_namespaces' do let!(:cluster) { create(:cluster, :with_environments, :cleanup_removing_project_namespaces) } @@ -27,7 +27,6 @@ RSpec.describe Clusters::Cleanup::ProjectNamespaceWorker, feature_category: :kub exception: 'ClusterCleanupMethods::ExceededExecutionLimitError', cluster_id: kind_of(Integer), class_name: described_class.name, - applications: "", cleanup_status: cluster.cleanup_status_name, event: :failed_to_remove_cluster_and_resources, message: "exceeded execution limit of 10 tries" diff --git a/spec/workers/clusters/cleanup/service_account_worker_spec.rb b/spec/workers/clusters/cleanup/service_account_worker_spec.rb index 0d4df795278..cc388841c91 100644 --- a/spec/workers/clusters/cleanup/service_account_worker_spec.rb +++ b/spec/workers/clusters/cleanup/service_account_worker_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe Clusters::Cleanup::ServiceAccountWorker, feature_category: :kubernetes_management do +RSpec.describe 
Clusters::Cleanup::ServiceAccountWorker, feature_category: :deployment_management do describe '#perform' do let!(:cluster) { create(:cluster, :cleanup_removing_service_account) } diff --git a/spec/workers/concerns/cluster_agent_queue_spec.rb b/spec/workers/concerns/cluster_agent_queue_spec.rb index c30616d04e1..77417601748 100644 --- a/spec/workers/concerns/cluster_agent_queue_spec.rb +++ b/spec/workers/concerns/cluster_agent_queue_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe ClusterAgentQueue, feature_category: :kubernetes_management do +RSpec.describe ClusterAgentQueue, feature_category: :deployment_management do let(:worker) do Class.new do def self.name @@ -14,5 +14,5 @@ RSpec.describe ClusterAgentQueue, feature_category: :kubernetes_management do end end - it { expect(worker.get_feature_category).to eq(:kubernetes_management) } + it { expect(worker.get_feature_category).to eq(:deployment_management) } end diff --git a/spec/workers/concerns/cronjob_queue_spec.rb b/spec/workers/concerns/cronjob_queue_spec.rb index 7e00093b686..26680fcc870 100644 --- a/spec/workers/concerns/cronjob_queue_spec.rb +++ b/spec/workers/concerns/cronjob_queue_spec.rb @@ -44,7 +44,7 @@ RSpec.describe CronjobQueue, feature_category: :shared do expect(worker.sidekiq_options['retry']).to eq(false) end - it 'automatically clears project, user and namespace from the context', :aggregate_failues do + it 'automatically clears project, user and namespace from the context', :aggregate_failures do worker_context = worker.get_worker_context.to_lazy_hash.transform_values { |v| v.try(:call) } expect(worker_context[:user]).to be_nil diff --git a/spec/workers/concerns/gitlab/github_import/object_importer_spec.rb b/spec/workers/concerns/gitlab/github_import/object_importer_spec.rb index f72caf3a8c2..18a3e3c2c5b 100644 --- a/spec/workers/concerns/gitlab/github_import/object_importer_spec.rb +++ b/spec/workers/concerns/gitlab/github_import/object_importer_spec.rb @@ -30,7 +30,8 @@ 
RSpec.describe Gitlab::GithubImport::ObjectImporter, :aggregate_failures, featur let(:github_identifiers) do { some_id: 1, - some_type: '_some_type_' + some_type: '_some_type_', + object_type: 'dummy' } end @@ -52,7 +53,8 @@ RSpec.describe Gitlab::GithubImport::ObjectImporter, :aggregate_failures, featur def github_identifiers { some_id: 1, - some_type: '_some_type_' + some_type: '_some_type_', + object_type: 'dummy' } end end diff --git a/spec/workers/concerns/worker_context_spec.rb b/spec/workers/concerns/worker_context_spec.rb index 0bbe14842bb..700d9e37a55 100644 --- a/spec/workers/concerns/worker_context_spec.rb +++ b/spec/workers/concerns/worker_context_spec.rb @@ -73,9 +73,11 @@ RSpec.describe WorkerContext, feature_category: :shared do describe '.bulk_perform_async_with_contexts' do subject do - worker.bulk_perform_async_with_contexts(%w(hello world), - context_proc: -> (_) { { user: build_stubbed(:user) } }, - arguments_proc: -> (word) { word }) + worker.bulk_perform_async_with_contexts( + %w(hello world), + context_proc: -> (_) { { user: build_stubbed(:user) } }, + arguments_proc: -> (word) { word } + ) end it 'calls bulk_perform_async with the arguments' do @@ -89,10 +91,12 @@ RSpec.describe WorkerContext, feature_category: :shared do describe '.bulk_perform_in_with_contexts' do subject do - worker.bulk_perform_in_with_contexts(10.minutes, - %w(hello world), - context_proc: -> (_) { { user: build_stubbed(:user) } }, - arguments_proc: -> (word) { word }) + worker.bulk_perform_in_with_contexts( + 10.minutes, + %w(hello world), + context_proc: -> (_) { { user: build_stubbed(:user) } }, + arguments_proc: -> (word) { word } + ) end it 'calls bulk_perform_in with the arguments and delay' do diff --git a/spec/workers/database/batched_background_migration/ci_database_worker_spec.rb b/spec/workers/database/batched_background_migration/ci_database_worker_spec.rb index 91ba6e5a20a..782f949eacf 100644 --- 
a/spec/workers/database/batched_background_migration/ci_database_worker_spec.rb +++ b/spec/workers/database/batched_background_migration/ci_database_worker_spec.rb @@ -2,6 +2,7 @@ require 'spec_helper' -RSpec.describe Database::BatchedBackgroundMigration::CiDatabaseWorker, :clean_gitlab_redis_shared_state, feature_category: :database do +RSpec.describe Database::BatchedBackgroundMigration::CiDatabaseWorker, :clean_gitlab_redis_shared_state, + feature_category: :database do it_behaves_like 'it runs batched background migration jobs', :ci, :ci_builds end diff --git a/spec/workers/database/ci_namespace_mirrors_consistency_check_worker_spec.rb b/spec/workers/database/ci_namespace_mirrors_consistency_check_worker_spec.rb index 6b6723a468f..84ea5db4bab 100644 --- a/spec/workers/database/ci_namespace_mirrors_consistency_check_worker_spec.rb +++ b/spec/workers/database/ci_namespace_mirrors_consistency_check_worker_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe Database::CiNamespaceMirrorsConsistencyCheckWorker, feature_category: :pods do +RSpec.describe Database::CiNamespaceMirrorsConsistencyCheckWorker, feature_category: :cell do let(:worker) { described_class.new } describe '#perform' do diff --git a/spec/workers/database/ci_project_mirrors_consistency_check_worker_spec.rb b/spec/workers/database/ci_project_mirrors_consistency_check_worker_spec.rb index 613d40b57d8..0895f3d0559 100644 --- a/spec/workers/database/ci_project_mirrors_consistency_check_worker_spec.rb +++ b/spec/workers/database/ci_project_mirrors_consistency_check_worker_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe Database::CiProjectMirrorsConsistencyCheckWorker, feature_category: :pods do +RSpec.describe Database::CiProjectMirrorsConsistencyCheckWorker, feature_category: :cell do let(:worker) { described_class.new } describe '#perform' do diff --git a/spec/workers/deployments/hooks_worker_spec.rb b/spec/workers/deployments/hooks_worker_spec.rb index 51614f8b0cb..e620ed6e05c 
100644 --- a/spec/workers/deployments/hooks_worker_spec.rb +++ b/spec/workers/deployments/hooks_worker_spec.rb @@ -60,8 +60,6 @@ RSpec.describe Deployments::HooksWorker, feature_category: :continuous_delivery worker.perform(deployment_id: deployment.id, status_changed_at: status_changed_at) end - it_behaves_like 'worker with data consistency', - described_class, - data_consistency: :delayed + it_behaves_like 'worker with data consistency', described_class, data_consistency: :delayed end end diff --git a/spec/workers/design_management/new_version_worker_spec.rb b/spec/workers/design_management/new_version_worker_spec.rb index afc908d925a..baf6409a64f 100644 --- a/spec/workers/design_management/new_version_worker_spec.rb +++ b/spec/workers/design_management/new_version_worker_spec.rb @@ -57,9 +57,11 @@ RSpec.describe DesignManagement::NewVersionWorker, feature_category: :design_man context 'the version includes multiple types of action' do let_it_be(:version) do - create(:design_version, :with_lfs_file, - created_designs: create_list(:design, 1, :with_lfs_file), - modified_designs: create_list(:design, 1)) + create( + :design_version, :with_lfs_file, + created_designs: create_list(:design, 1, :with_lfs_file), + modified_designs: create_list(:design, 1) + ) end it 'creates two system notes' do diff --git a/spec/workers/email_receiver_worker_spec.rb b/spec/workers/email_receiver_worker_spec.rb index 4c464c797e4..51a77a09e16 100644 --- a/spec/workers/email_receiver_worker_spec.rb +++ b/spec/workers/email_receiver_worker_spec.rb @@ -7,7 +7,7 @@ RSpec.describe EmailReceiverWorker, :mailer, feature_category: :team_planning do context "when reply by email is enabled" do before do - allow(Gitlab::IncomingEmail).to receive(:enabled?).and_return(true) + allow(Gitlab::Email::IncomingEmail).to receive(:enabled?).and_return(true) end it "calls the email receiver" do @@ -67,7 +67,7 @@ RSpec.describe EmailReceiverWorker, :mailer, feature_category: :team_planning do context "when 
reply by email is disabled" do before do - allow(Gitlab::IncomingEmail).to receive(:enabled?).and_return(false) + allow(Gitlab::Email::IncomingEmail).to receive(:enabled?).and_return(false) end it "doesn't call the email receiver" do diff --git a/spec/workers/every_sidekiq_worker_spec.rb b/spec/workers/every_sidekiq_worker_spec.rb index f080e2ef1c3..78691f70866 100644 --- a/spec/workers/every_sidekiq_worker_spec.rb +++ b/spec/workers/every_sidekiq_worker_spec.rb @@ -3,6 +3,8 @@ require 'spec_helper' RSpec.describe 'Every Sidekiq worker', feature_category: :shared do + include EverySidekiqWorkerTestHelper + let(:workers_without_defaults) do Gitlab::SidekiqConfig.workers - Gitlab::SidekiqConfig::DEFAULT_WORKERS.values end @@ -241,7 +243,6 @@ RSpec.describe 'Every Sidekiq worker', feature_category: :shared do 'Geo::DesignRepositorySyncWorker' => 1, 'Geo::DestroyWorker' => 3, 'Geo::EventWorker' => 3, - 'Geo::FileRegistryRemovalWorker' => 3, 'Geo::FileRemovalWorker' => 3, 'Geo::ProjectSyncWorker' => 1, 'Geo::RenameRepositoryWorker' => 3, @@ -370,6 +371,7 @@ RSpec.describe 'Every Sidekiq worker', feature_category: :shared do 'Namespaces::RefreshRootStatisticsWorker' => 3, 'Namespaces::RootStatisticsWorker' => 3, 'Namespaces::ScheduleAggregationWorker' => 3, + 'Namespaces::FreeUserCap::NotificationClearingWorker' => false, 'NewEpicWorker' => 3, 'NewIssueWorker' => 3, 'NewMergeRequestWorker' => 3, @@ -407,6 +409,7 @@ RSpec.describe 'Every Sidekiq worker', feature_category: :shared do 'ProjectScheduleBulkRepositoryShardMovesWorker' => 3, 'ProjectTemplateExportWorker' => false, 'ProjectUpdateRepositoryStorageWorker' => 3, + 'Projects::DeregisterSuggestedReviewersProjectWorker' => 3, 'Projects::DisableLegacyOpenSourceLicenseForInactiveProjectsWorker' => 3, 'Projects::GitGarbageCollectWorker' => false, 'Projects::InactiveProjectsDeletionNotificationWorker' => 3, @@ -477,9 +480,10 @@ RSpec.describe 'Every Sidekiq worker', feature_category: :shared do 'WebHooks::DestroyWorker' 
=> 3, 'WebHooks::LogExecutionWorker' => 3, 'Wikis::GitGarbageCollectWorker' => false, + 'WorkItems::ImportWorkItemsCsvWorker' => 3, 'X509CertificateRevokeWorker' => 3, 'ComplianceManagement::MergeRequests::ComplianceViolationsWorker' => 3 - } + }.merge(extra_retry_exceptions) end it 'uses the default number of retries for new jobs' do @@ -493,7 +497,7 @@ RSpec.describe 'Every Sidekiq worker', feature_category: :shared do it 'uses specified numbers of retries for workers with exceptions encoded here', :aggregate_failures do retry_exception_workers.each do |worker| expect(worker.retries).to eq(retry_exceptions[worker.klass.to_s]), - "#{worker.klass} has #{worker.retries} retries, expected #{retry_exceptions[worker.klass]}" + "#{worker.klass} has #{worker.retries} retries, expected #{retry_exceptions[worker.klass]}" end end end diff --git a/spec/workers/gitlab/github_gists_import/import_gist_worker_spec.rb b/spec/workers/gitlab/github_gists_import/import_gist_worker_spec.rb index 1c24cdcccae..fcbe1b2cf99 100644 --- a/spec/workers/gitlab/github_gists_import/import_gist_worker_spec.rb +++ b/spec/workers/gitlab/github_gists_import/import_gist_worker_spec.rb @@ -74,20 +74,68 @@ RSpec.describe Gitlab::GithubGistsImport::ImportGistWorker, feature_category: :i .with(log_attributes.merge('message' => 'importer finished')) subject.perform(user.id, gist_hash, 'some_key') + + expect_snowplow_event( + category: 'Gitlab::GithubGistsImport::ImportGistWorker', + label: 'github_gist_import', + action: 'create', + user: user, + status: 'success' + ) end end - context 'when importer raised an error' do - it 'raises an error' do - exception = StandardError.new('_some_error_') + context 'when failure' do + context 'when importer raised an error' do + it 'raises an error' do + exception = StandardError.new('_some_error_') - expect(importer).to receive(:execute).and_raise(exception) - expect(Gitlab::GithubImport::Logger) - .to receive(:error) - .with(log_attributes.merge('message' => 
'importer failed', 'error.message' => '_some_error_')) - expect(Gitlab::ErrorTracking).to receive(:track_exception) + expect(importer).to receive(:execute).and_raise(exception) + expect(Gitlab::GithubImport::Logger) + .to receive(:error) + .with(log_attributes.merge('message' => 'importer failed', 'error.message' => '_some_error_')) + expect(Gitlab::ErrorTracking).to receive(:track_exception) + + expect { subject.perform(user.id, gist_hash, 'some_key') }.to raise_error(StandardError) + end + end + + context 'when importer returns error' do + let(:importer_result) { instance_double('ServiceResponse', errors: 'error_message', success?: false) } + + it 'tracks and logs error' do + expect(importer).to receive(:execute).and_return(importer_result) + expect(Gitlab::GithubImport::Logger) + .to receive(:error) + .with(log_attributes.merge('message' => 'importer failed', 'error.message' => 'error_message')) + expect(Gitlab::JobWaiter).to receive(:notify).with('some_key', subject.jid) + + subject.perform(user.id, gist_hash, 'some_key') + + expect_snowplow_event( + category: 'Gitlab::GithubGistsImport::ImportGistWorker', + label: 'github_gist_import', + action: 'create', + user: user, + status: 'failed' + ) + end + end + end + + describe '.sidekiq_retries_exhausted' do + it 'sends snowplow event' do + job = { 'args' => [user.id, 'some_key', '1'], 'jid' => '123' } + + described_class.sidekiq_retries_exhausted_block.call(job) - expect { subject.perform(user.id, gist_hash, 'some_key') }.to raise_error(StandardError) + expect_snowplow_event( + category: 'Gitlab::GithubGistsImport::ImportGistWorker', + label: 'github_gist_import', + action: 'create', + user: user, + status: 'failed' + ) end end end diff --git a/spec/workers/gitlab/github_import/attachments/import_issue_worker_spec.rb b/spec/workers/gitlab/github_import/attachments/import_issue_worker_spec.rb index fc03e14c20e..e0db440232c 100644 --- a/spec/workers/gitlab/github_import/attachments/import_issue_worker_spec.rb +++ 
b/spec/workers/gitlab/github_import/attachments/import_issue_worker_spec.rb @@ -14,6 +14,19 @@ RSpec.describe Gitlab::GithubImport::Attachments::ImportIssueWorker, feature_cat let(:client) { instance_double('Gitlab::GithubImport::Client') } + let(:issue_hash) do + { + 'record_db_id' => rand(100), + 'record_type' => 'Issue', + 'iid' => 2, + 'text' => <<-TEXT + Some text... + + ![special-image](https://user-images.githubusercontent.com...) + TEXT + } + end + it 'imports an issue attachments' do expect_next_instance_of( Gitlab::GithubImport::Importer::NoteAttachmentsImporter, @@ -28,7 +41,7 @@ RSpec.describe Gitlab::GithubImport::Attachments::ImportIssueWorker, feature_cat .to receive(:increment) .and_call_original - worker.import(project, client, {}) + worker.import(project, client, issue_hash) end end end diff --git a/spec/workers/gitlab/github_import/attachments/import_merge_request_worker_spec.rb b/spec/workers/gitlab/github_import/attachments/import_merge_request_worker_spec.rb index bd90cee567e..b4be229af2a 100644 --- a/spec/workers/gitlab/github_import/attachments/import_merge_request_worker_spec.rb +++ b/spec/workers/gitlab/github_import/attachments/import_merge_request_worker_spec.rb @@ -14,6 +14,19 @@ RSpec.describe Gitlab::GithubImport::Attachments::ImportMergeRequestWorker, feat let(:client) { instance_double('Gitlab::GithubImport::Client') } + let(:mr_hash) do + { + 'record_db_id' => rand(100), + 'record_type' => 'MergeRequest', + 'iid' => 2, + 'text' => <<-TEXT + Some text... + + ![special-image](https://user-images.githubusercontent.com...) 
+ TEXT + } + end + it 'imports an merge request attachments' do expect_next_instance_of( Gitlab::GithubImport::Importer::NoteAttachmentsImporter, @@ -28,7 +41,7 @@ RSpec.describe Gitlab::GithubImport::Attachments::ImportMergeRequestWorker, feat .to receive(:increment) .and_call_original - worker.import(project, client, {}) + worker.import(project, client, mr_hash) end end end diff --git a/spec/workers/gitlab/github_import/attachments/import_note_worker_spec.rb b/spec/workers/gitlab/github_import/attachments/import_note_worker_spec.rb index 7d8fb9bc788..60b49901fd9 100644 --- a/spec/workers/gitlab/github_import/attachments/import_note_worker_spec.rb +++ b/spec/workers/gitlab/github_import/attachments/import_note_worker_spec.rb @@ -19,6 +19,7 @@ RSpec.describe Gitlab::GithubImport::Attachments::ImportNoteWorker, feature_cate { 'record_db_id' => rand(100), 'record_type' => 'Note', + 'noteable_type' => 'Issue', 'text' => <<-TEXT Some text... diff --git a/spec/workers/gitlab/github_import/attachments/import_release_worker_spec.rb b/spec/workers/gitlab/github_import/attachments/import_release_worker_spec.rb index 50eebc6ce8c..83cb7b9fecf 100644 --- a/spec/workers/gitlab/github_import/attachments/import_release_worker_spec.rb +++ b/spec/workers/gitlab/github_import/attachments/import_release_worker_spec.rb @@ -19,6 +19,7 @@ RSpec.describe Gitlab::GithubImport::Attachments::ImportReleaseWorker, feature_c { 'record_db_id' => rand(100), 'record_type' => 'Release', + 'tag' => 'v1.0', 'text' => <<-TEXT Some text... 
diff --git a/spec/workers/gitlab/github_import/import_release_attachments_worker_spec.rb b/spec/workers/gitlab/github_import/import_release_attachments_worker_spec.rb index f4f5353a9cf..62a9e3446f8 100644 --- a/spec/workers/gitlab/github_import/import_release_attachments_worker_spec.rb +++ b/spec/workers/gitlab/github_import/import_release_attachments_worker_spec.rb @@ -17,8 +17,10 @@ RSpec.describe Gitlab::GithubImport::ImportReleaseAttachmentsWorker, feature_cat let(:release_hash) do { - 'release_db_id' => rand(100), - 'description' => <<-TEXT + 'record_db_id' => rand(100), + 'record_type' => 'Release', + 'tag' => 'v1.0', + 'text' => <<-TEXT Some text... ![special-image](https://user-images.githubusercontent.com...) diff --git a/spec/workers/incident_management/close_incident_worker_spec.rb b/spec/workers/incident_management/close_incident_worker_spec.rb index bf967a42ceb..3c2e69a4675 100644 --- a/spec/workers/incident_management/close_incident_worker_spec.rb +++ b/spec/workers/incident_management/close_incident_worker_spec.rb @@ -36,7 +36,7 @@ RSpec.describe IncidentManagement::CloseIncidentWorker, feature_category: :incid context 'when issue type is not incident' do before do - issue.update!(issue_type: :issue) + issue.update!(issue_type: :issue, work_item_type: WorkItems::Type.default_by_type(:issue)) end it_behaves_like 'does not call the close issue service' diff --git a/spec/workers/integrations/irker_worker_spec.rb b/spec/workers/integrations/irker_worker_spec.rb index 257a6f72709..7a51e9cfe47 100644 --- a/spec/workers/integrations/irker_worker_spec.rb +++ b/spec/workers/integrations/irker_worker_spec.rb @@ -88,10 +88,11 @@ RSpec.describe Integrations::IrkerWorker, '#perform', feature_category: :integra context 'with new commits to existing branch' do it 'sends a correct message with a compare url' do - compare_url = Gitlab::Routing.url_helpers - .project_compare_url(project, - from: Commit.truncate_sha(push_data[:before]), - to: 
Commit.truncate_sha(push_data[:after])) + compare_url = Gitlab::Routing.url_helpers.project_compare_url( + project, + from: Commit.truncate_sha(push_data[:before]), + to: Commit.truncate_sha(push_data[:after]) + ) message = "pushed #{push_data['total_commits_count']} " \ "new commits to master: #{compare_url}" diff --git a/spec/workers/issuable_export_csv_worker_spec.rb b/spec/workers/issuable_export_csv_worker_spec.rb index 66198157edb..e54466b3641 100644 --- a/spec/workers/issuable_export_csv_worker_spec.rb +++ b/spec/workers/issuable_export_csv_worker_spec.rb @@ -22,6 +22,10 @@ RSpec.describe IssuableExportCsvWorker, feature_category: :team_planning do subject end + it 'defines the loggable_arguments' do + expect(described_class.loggable_arguments).to match_array([0, 1, 2, 3]) + end + it 'removes sort parameter' do expect(IssuesFinder).to receive(:new).with(anything, hash_not_including(:sort)).and_call_original diff --git a/spec/workers/jira_connect/sync_branch_worker_spec.rb b/spec/workers/jira_connect/sync_branch_worker_spec.rb index 54b1915b253..1c2661ad0e5 100644 --- a/spec/workers/jira_connect/sync_branch_worker_spec.rb +++ b/spec/workers/jira_connect/sync_branch_worker_spec.rb @@ -5,9 +5,7 @@ require 'spec_helper' RSpec.describe JiraConnect::SyncBranchWorker, feature_category: :integrations do include AfterNextHelpers - it_behaves_like 'worker with data consistency', - described_class, - data_consistency: :delayed + it_behaves_like 'worker with data consistency', described_class, data_consistency: :delayed describe '#perform' do let_it_be(:group) { create(:group) } diff --git a/spec/workers/jira_connect/sync_builds_worker_spec.rb b/spec/workers/jira_connect/sync_builds_worker_spec.rb index 6ef15b084a3..8c694fe33bd 100644 --- a/spec/workers/jira_connect/sync_builds_worker_spec.rb +++ b/spec/workers/jira_connect/sync_builds_worker_spec.rb @@ -5,9 +5,7 @@ require 'spec_helper' RSpec.describe ::JiraConnect::SyncBuildsWorker, feature_category: :integrations do 
include AfterNextHelpers - it_behaves_like 'worker with data consistency', - described_class, - data_consistency: :delayed + it_behaves_like 'worker with data consistency', described_class, data_consistency: :delayed describe '#perform' do let_it_be(:pipeline) { create(:ci_pipeline) } diff --git a/spec/workers/jira_connect/sync_deployments_worker_spec.rb b/spec/workers/jira_connect/sync_deployments_worker_spec.rb index 2e72a94bc1e..39609f331d0 100644 --- a/spec/workers/jira_connect/sync_deployments_worker_spec.rb +++ b/spec/workers/jira_connect/sync_deployments_worker_spec.rb @@ -5,9 +5,7 @@ require 'spec_helper' RSpec.describe ::JiraConnect::SyncDeploymentsWorker, feature_category: :integrations do include AfterNextHelpers - it_behaves_like 'worker with data consistency', - described_class, - data_consistency: :delayed + it_behaves_like 'worker with data consistency', described_class, data_consistency: :delayed describe '#perform' do let_it_be(:deployment) { create(:deployment) } diff --git a/spec/workers/jira_connect/sync_feature_flags_worker_spec.rb b/spec/workers/jira_connect/sync_feature_flags_worker_spec.rb index c2dbd52398f..cc3867d26c1 100644 --- a/spec/workers/jira_connect/sync_feature_flags_worker_spec.rb +++ b/spec/workers/jira_connect/sync_feature_flags_worker_spec.rb @@ -5,9 +5,7 @@ require 'spec_helper' RSpec.describe ::JiraConnect::SyncFeatureFlagsWorker, feature_category: :integrations do include AfterNextHelpers - it_behaves_like 'worker with data consistency', - described_class, - data_consistency: :delayed + it_behaves_like 'worker with data consistency', described_class, data_consistency: :delayed describe '#perform' do let_it_be(:feature_flag) { create(:operations_feature_flag) } diff --git a/spec/workers/jira_connect/sync_merge_request_worker_spec.rb b/spec/workers/jira_connect/sync_merge_request_worker_spec.rb index 23abb915d68..6b6f7610f07 100644 --- a/spec/workers/jira_connect/sync_merge_request_worker_spec.rb +++ 
b/spec/workers/jira_connect/sync_merge_request_worker_spec.rb @@ -5,9 +5,7 @@ require 'spec_helper' RSpec.describe JiraConnect::SyncMergeRequestWorker, feature_category: :integrations do include AfterNextHelpers - it_behaves_like 'worker with data consistency', - described_class, - data_consistency: :delayed + it_behaves_like 'worker with data consistency', described_class, data_consistency: :delayed describe '#perform' do let_it_be(:group) { create(:group) } @@ -24,7 +22,7 @@ RSpec.describe JiraConnect::SyncMergeRequestWorker, feature_category: :integrati it 'calls JiraConnect::SyncService#execute' do expect_next(JiraConnect::SyncService).to receive(:execute) - .with(merge_requests: [merge_request], update_sequence_id: update_sequence_id) + .with(merge_requests: [merge_request], branches: [have_attributes(name: 'master')], update_sequence_id: update_sequence_id) perform end @@ -38,5 +36,32 @@ RSpec.describe JiraConnect::SyncMergeRequestWorker, feature_category: :integrati perform end end + + shared_examples 'does not send any branch data' do + it 'calls JiraConnect::SyncService correctly with nil branches' do + expect_next(JiraConnect::SyncService).to receive(:execute) + .with(merge_requests: [merge_request], branches: nil, update_sequence_id: update_sequence_id) + + perform + end + end + + context 'when the merge request is closed' do + before do + merge_request.close! 
+ end + + it_behaves_like 'does not send any branch data' + end + + context 'when source branch cannot be found' do + before do + allow_next_found_instance_of(MergeRequest) do |mr| + allow(mr).to receive(:source_branch).and_return('non-existant-branch') + end + end + + it_behaves_like 'does not send any branch data' + end end end diff --git a/spec/workers/jira_connect/sync_project_worker_spec.rb b/spec/workers/jira_connect/sync_project_worker_spec.rb index afd56a3b5c1..7a23aabfd0f 100644 --- a/spec/workers/jira_connect/sync_project_worker_spec.rb +++ b/spec/workers/jira_connect/sync_project_worker_spec.rb @@ -5,17 +5,16 @@ require 'spec_helper' RSpec.describe JiraConnect::SyncProjectWorker, factory_default: :keep, feature_category: :integrations do include AfterNextHelpers - it_behaves_like 'worker with data consistency', - described_class, - data_consistency: :delayed + it_behaves_like 'worker with data consistency', described_class, data_consistency: :delayed describe '#perform' do - let_it_be(:project) { create_default(:project).freeze } + let_it_be(:project) { create_default(:project, :repository).freeze } let!(:mr_with_jira_title) { create(:merge_request, :unique_branches, title: 'TEST-123') } let!(:mr_with_jira_description) { create(:merge_request, :unique_branches, description: 'TEST-323') } let!(:mr_with_other_title) { create(:merge_request, :unique_branches) } let!(:jira_subscription) { create(:jira_connect_subscription, namespace: project.namespace) } + let(:jira_referencing_branch_name) { 'TEST-123_my-feature-branch' } let(:jira_connect_sync_service) { JiraConnect::SyncService.new(project) } let(:job_args) { [project.id, update_sequence_id] } @@ -27,6 +26,7 @@ RSpec.describe JiraConnect::SyncProjectWorker, factory_default: :keep, feature_c Atlassian::JiraConnect::Serializers::RepositoryEntity.represent( project, merge_requests: [mr_with_jira_description, mr_with_jira_title], + branches: [project.repository.find_branch(jira_referencing_branch_name)], 
update_sequence_id: update_sequence_id ) ] @@ -58,21 +58,71 @@ RSpec.describe JiraConnect::SyncProjectWorker, factory_default: :keep, feature_c expect { perform(project.id, update_sequence_id) }.not_to exceed_query_limit(control_count) end - it 'sends the request with custom update_sequence_id' do - allow_next(Atlassian::JiraConnect::Client).to receive(:post) - .with(request_path, request_body) + context 'with branches to sync' do + context 'on a single branch' do + it 'sends the request with custom update_sequence_id' do + project.repository.create_branch(jira_referencing_branch_name) - perform(project.id, update_sequence_id) + allow_next(Atlassian::JiraConnect::Client).to receive(:post) + .with(request_path, request_body) + + perform(project.id, update_sequence_id) + end + end + + context 'on multiple branches' do + after do + project.repository.rm_branch(project.owner, 'TEST-2_my-feature-branch') + project.repository.rm_branch(project.owner, 'TEST-3_my-feature-branch') + project.repository.rm_branch(project.owner, 'TEST-4_my-feature-branch') + end + + it 'does not requests a lot from Gitaly', :request_store do + # NOTE: Gitaly N+1 calls when processing stats and diffs on commits. + # This should be reduced as we work on reducing Gitaly calls. 
+ # See https://gitlab.com/gitlab-org/gitlab/-/issues/354370 + described_class.new.perform(project.id, update_sequence_id) + + project.repository.create_branch('TEST-2_my-feature-branch') + project.repository.create_branch('TEST-3_my-feature-branch') + project.repository.create_branch('TEST-4_my-feature-branch') + + expect { described_class.new.perform(project.id, update_sequence_id) } + .to change { Gitlab::GitalyClient.get_request_count }.by(13) + end + end + end + + context 'when the number of items to sync is higher than the limit' do + let!(:most_recent_merge_request) { create(:merge_request, :unique_branches, description: 'TEST-323', title: 'TEST-123') } + + before do + stub_const("#{described_class}::MAX_RECORDS_LIMIT", 1) + + project.repository.create_branch('TEST-321_new-branch') + end + + it 'syncs only the most recent merge requests and branches within the limit' do + expect(jira_connect_sync_service).to receive(:execute) + .with( + merge_requests: [most_recent_merge_request], + branches: [have_attributes(name: jira_referencing_branch_name)], + update_sequence_id: update_sequence_id + ) + + perform(project.id, update_sequence_id) + end end - context 'when the number of merge requests to sync is higher than the limit' do + context 'when the feature flag is disabled' do let!(:most_recent_merge_request) { create(:merge_request, :unique_branches, description: 'TEST-323', title: 'TEST-123') } before do - stub_const("#{described_class}::MERGE_REQUEST_LIMIT", 1) + stub_feature_flags(jira_connect_sync_branches: false) + stub_const("#{described_class}::MAX_RECORDS_LIMIT", 1) end - it 'syncs only the most recent merge requests within the limit' do + it 'does not sync branches' do expect(jira_connect_sync_service).to receive(:execute) .with(merge_requests: [most_recent_merge_request], update_sequence_id: update_sequence_id) diff --git a/spec/workers/loose_foreign_keys/cleanup_worker_spec.rb b/spec/workers/loose_foreign_keys/cleanup_worker_spec.rb index 
19860f32b29..e49b4707eb3 100644 --- a/spec/workers/loose_foreign_keys/cleanup_worker_spec.rb +++ b/spec/workers/loose_foreign_keys/cleanup_worker_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe LooseForeignKeys::CleanupWorker, feature_category: :pods do +RSpec.describe LooseForeignKeys::CleanupWorker, feature_category: :cell do include MigrationsHelpers using RSpec::Parameterized::TableSyntax diff --git a/spec/workers/merge_requests/delete_source_branch_worker_spec.rb b/spec/workers/merge_requests/delete_source_branch_worker_spec.rb index d8e49f444a9..57f8cfbfb83 100644 --- a/spec/workers/merge_requests/delete_source_branch_worker_spec.rb +++ b/spec/workers/merge_requests/delete_source_branch_worker_spec.rb @@ -12,8 +12,11 @@ RSpec.describe MergeRequests::DeleteSourceBranchWorker, feature_category: :sourc describe '#perform' do before do allow_next_instance_of(::Projects::DeleteBranchWorker) do |instance| - allow(instance).to receive(:perform).with(merge_request.source_project.id, user.id, - merge_request.source_branch) + allow(instance).to receive(:perform).with( + merge_request.source_project.id, + user.id, + merge_request.source_branch + ) end end @@ -36,8 +39,11 @@ RSpec.describe MergeRequests::DeleteSourceBranchWorker, feature_category: :sourc context 'with existing user and merge request' do it 'calls delete branch worker' do expect_next_instance_of(::Projects::DeleteBranchWorker) do |instance| - expect(instance).to receive(:perform).with(merge_request.source_project.id, user.id, - merge_request.source_branch) + expect(instance).to receive(:perform).with( + merge_request.source_project.id, + user.id, + merge_request.source_branch + ) end worker.perform(merge_request.id, sha, user.id) diff --git a/spec/workers/merge_requests/update_head_pipeline_worker_spec.rb b/spec/workers/merge_requests/update_head_pipeline_worker_spec.rb index 912afb59412..b65bd4eb1db 100644 --- a/spec/workers/merge_requests/update_head_pipeline_worker_spec.rb +++ 
b/spec/workers/merge_requests/update_head_pipeline_worker_spec.rb @@ -74,9 +74,12 @@ RSpec.describe MergeRequests::UpdateHeadPipelineWorker, feature_category: :code_ context 'when there is no pipeline for source branch' do it "does not update merge request head pipeline" do - merge_request = create(:merge_request, source_branch: 'feature', - target_branch: "branch_1", - source_project: project) + merge_request = create( + :merge_request, + source_branch: 'feature', + target_branch: "branch_1", + source_project: project + ) subject @@ -96,10 +99,13 @@ RSpec.describe MergeRequests::UpdateHeadPipelineWorker, feature_category: :code_ end it 'updates head pipeline for merge request' do - merge_request = create(:merge_request, source_branch: 'feature', - target_branch: "master", - source_project: project, - target_project: target_project) + merge_request = create( + :merge_request, + source_branch: 'feature', + target_branch: "master", + source_project: project, + target_project: target_project + ) subject @@ -109,9 +115,12 @@ RSpec.describe MergeRequests::UpdateHeadPipelineWorker, feature_category: :code_ context 'when the pipeline is not the latest for the branch' do it 'does not update merge request head pipeline' do - merge_request = create(:merge_request, source_branch: 'master', - target_branch: "branch_1", - source_project: project) + merge_request = create( + :merge_request, + source_branch: 'master', + target_branch: "branch_1", + source_project: project + ) create(:ci_pipeline, project: pipeline.project, ref: pipeline.ref) @@ -127,9 +136,12 @@ RSpec.describe MergeRequests::UpdateHeadPipelineWorker, feature_category: :code_ end it 'updates merge request head pipeline reference' do - merge_request = create(:merge_request, source_branch: 'master', - target_branch: 'feature', - source_project: project) + merge_request = create( + :merge_request, + source_branch: 'master', + target_branch: 'feature', + source_project: project + ) subject diff --git 
a/spec/workers/metrics/global_metrics_update_worker_spec.rb b/spec/workers/metrics/global_metrics_update_worker_spec.rb new file mode 100644 index 00000000000..d5bfbcc928a --- /dev/null +++ b/spec/workers/metrics/global_metrics_update_worker_spec.rb @@ -0,0 +1,30 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe ::Metrics::GlobalMetricsUpdateWorker, feature_category: :metrics do + subject { described_class.new } + + describe '#perform' do + let(:service) { ::Metrics::GlobalMetricsUpdateService.new } + + it 'delegates to ::Metrics::GlobalMetricsUpdateService' do + expect(::Metrics::GlobalMetricsUpdateService).to receive(:new).and_return(service) + expect(service).to receive(:execute) + + subject.perform + end + + context 'for an idempotent worker' do + include_examples 'an idempotent worker' do + it 'exports metrics' do + allow(Gitlab).to receive(:maintenance_mode?).and_return(true).at_least(1).time + + perform_multiple + + expect(service.maintenance_mode_metric.get).to eq(1) + end + end + end + end +end diff --git a/spec/workers/ml/experiment_tracking/associate_ml_candidate_to_package_worker_spec.rb b/spec/workers/ml/experiment_tracking/associate_ml_candidate_to_package_worker_spec.rb new file mode 100644 index 00000000000..5e1742b3298 --- /dev/null +++ b/spec/workers/ml/experiment_tracking/associate_ml_candidate_to_package_worker_spec.rb @@ -0,0 +1,105 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe Ml::ExperimentTracking::AssociateMlCandidateToPackageWorker, feature_category: :mlops do + describe '.handle_event' do + let_it_be(:candidate) { create(:ml_candidates) } + let_it_be(:package) do + create( + :generic_package, + project: candidate.project, + name: candidate.package_name, + version: candidate.package_version + ) + end + + let(:package_version) { package.version } + let(:project_id) { package.project_id } + let(:data) do + { + project_id: project_id, + id: package.id, + name: package.name, + version: 
package_version, + package_type: package.package_type + } + end + + let(:package_created_event) { Packages::PackageCreatedEvent.new(data: data) } + + it_behaves_like 'subscribes to event' do + let(:event) { package_created_event } + end + + context 'when package name matches ml_experiment_{id}' do + before do + consume_event(subscriber: described_class, event: package_created_event) + end + + context 'when candidate with iid exists' do + it 'associates candidate to package' do + expect(candidate.reload.package).to eq(package) + end + end + + context 'when no candidate with iid exists' do + let(:package_version) { non_existing_record_iid.to_s } + + it 'does not associate candidate' do + expect(candidate.reload.package).to be_nil + end + end + + context 'when candidate with iid exists but in a different project' do + let(:project_id) { non_existing_record_id } + + it 'does not associate candidate' do + expect(candidate.reload.package).to be_nil + end + end + end + + context 'when package is deleted before event is called' do + before do + package.delete + end + + it 'does not associate candidate' do + consume_event(subscriber: described_class, event: package_created_event) + + expect(candidate.reload.package_id).to be_nil + end + end + end + + describe '#handles_event?' 
do + using RSpec::Parameterized::TableSyntax + + let(:event) do + Packages::PackageCreatedEvent.new( + data: { + project_id: 1, + id: 1, + name: package_name, + version: '', + package_type: package_type + } + ) + end + + subject { described_class.handles_event?(event) } + + where(:package_name, :package_type, :handles_event) do + 'ml_experiment_1234' | 'generic' | true + 'ml_experiment_1234' | 'maven' | false + '1234' | 'generic' | false + 'ml_experiment_' | 'generic' | false + 'blah' | 'generic' | false + end + + with_them do + it { is_expected.to eq(handles_event) } + end + end +end diff --git a/spec/workers/namespaces/process_sync_events_worker_spec.rb b/spec/workers/namespaces/process_sync_events_worker_spec.rb index efa0053c145..c11cd32cfc7 100644 --- a/spec/workers/namespaces/process_sync_events_worker_spec.rb +++ b/spec/workers/namespaces/process_sync_events_worker_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe Namespaces::ProcessSyncEventsWorker, feature_category: :pods do +RSpec.describe Namespaces::ProcessSyncEventsWorker, feature_category: :cell do let!(:group1) { create(:group) } let!(:group2) { create(:group) } let!(:group3) { create(:group) } diff --git a/spec/workers/namespaces/root_statistics_worker_spec.rb b/spec/workers/namespaces/root_statistics_worker_spec.rb index 8409fffca26..bc2eca86711 100644 --- a/spec/workers/namespaces/root_statistics_worker_spec.rb +++ b/spec/workers/namespaces/root_statistics_worker_spec.rb @@ -3,48 +3,88 @@ require 'spec_helper' RSpec.describe Namespaces::RootStatisticsWorker, '#perform', feature_category: :source_code_management do - let(:group) { create(:group, :with_aggregation_schedule) } + let_it_be(:group) { create(:group, :with_aggregation_schedule) } subject(:worker) { described_class.new } - context 'with a namespace' do + RSpec.shared_examples 'bypasses aggregation schedule' do it 'executes refresher service' do expect_any_instance_of(Namespaces::StatisticsRefresherService) .to 
receive(:execute).and_call_original + expect(group).not_to receive(:aggregation_scheduled?) worker.perform(group.id) end - it 'deletes namespace aggregated schedule row' do - worker.perform(group.id) + it 'does not change AggregationSchedule count' do + expect do + worker.perform(group.id) + end.not_to change { Namespace::AggregationSchedule.count } + end + end + + context 'with a namespace' do + context 'with remove_aggregation_schedule_lease feature flag enabled' do + it_behaves_like 'bypasses aggregation schedule' + + context 'when something goes wrong when updating' do + before do + allow_any_instance_of(Namespaces::StatisticsRefresherService) + .to receive(:execute) + .and_raise(Namespaces::StatisticsRefresherService::RefresherError, 'error') + end + + it 'logs the error' do + expect(Gitlab::ErrorTracking).to receive(:track_exception).once - expect(group.reload.aggregation_schedule).to be_nil + worker.perform(group.id) + end + end end - context 'when something goes wrong when updating' do + context 'with remove_aggregation_schedule_lease feature flag disabled' do before do - allow_any_instance_of(Namespaces::StatisticsRefresherService) - .to receive(:execute) - .and_raise(Namespaces::StatisticsRefresherService::RefresherError, 'error') + stub_feature_flags(remove_aggregation_schedule_lease: false) + end + + it 'executes refresher service' do + expect_any_instance_of(Namespaces::StatisticsRefresherService) + .to receive(:execute).and_call_original + + worker.perform(group.id) end - it 'does not delete the aggregation schedule' do + it 'deletes namespace aggregated schedule row' do worker.perform(group.id) - expect(group.reload.aggregation_schedule).to be_present + expect(group.reload.aggregation_schedule).to be_nil end - it 'logs the error' do - # A Namespace::RootStatisticsWorker is scheduled when - # a Namespace::AggregationSchedule is created, so having - # create(:group, :with_aggregation_schedule), will execute - # another worker - 
allow_any_instance_of(Namespace::AggregationSchedule) - .to receive(:schedule_root_storage_statistics).and_return(nil) + context 'when something goes wrong when updating' do + before do + allow_any_instance_of(Namespaces::StatisticsRefresherService) + .to receive(:execute) + .and_raise(Namespaces::StatisticsRefresherService::RefresherError, 'error') + end - expect(Gitlab::ErrorTracking).to receive(:track_exception).once + it 'does not delete the aggregation schedule' do + worker.perform(group.id) - worker.perform(group.id) + expect(group.reload.aggregation_schedule).to be_present + end + + it 'logs the error' do + # A Namespace::RootStatisticsWorker is scheduled when + # a Namespace::AggregationSchedule is created, so having + # create(:group, :with_aggregation_schedule), will execute + # another worker + allow_any_instance_of(Namespace::AggregationSchedule) + .to receive(:schedule_root_storage_statistics).and_return(nil) + + expect(Gitlab::ErrorTracking).to receive(:track_exception).once + + worker.perform(group.id) + end end end end @@ -67,32 +107,46 @@ RSpec.describe Namespaces::RootStatisticsWorker, '#perform', feature_category: : group.aggregation_schedule.destroy! 
end - it 'does not execute the refresher service' do - expect_any_instance_of(Namespaces::StatisticsRefresherService) - .not_to receive(:execute) + context 'with remove_aggregation_schedule_lease feature flag disabled' do + before do + stub_feature_flags(remove_aggregation_schedule_lease: false) + end - worker.perform(group.id) + it 'does not execute the refresher service' do + expect_any_instance_of(Namespaces::StatisticsRefresherService) + .not_to receive(:execute) + + worker.perform(group.id) + end + end + + context 'with remove_aggregation_schedule_lease feature flag enabled' do + it_behaves_like 'bypasses aggregation schedule' end end it_behaves_like 'an idempotent worker' do let(:job_args) { [group.id] } - it 'deletes one aggregation schedule' do - # Make sure the group and it's aggregation schedule are created before - # counting - group + context 'with remove_aggregation_schedule_lease feature flag disabled' do + before do + stub_feature_flags(remove_aggregation_schedule_lease: false) + end - expect { worker.perform(*job_args) } - .to change { Namespace::AggregationSchedule.count }.by(-1) - expect { worker.perform(*job_args) } - .not_to change { Namespace::AggregationSchedule.count } + it 'deletes one aggregation schedule' do + # Make sure the group and it's aggregation schedule are created before + # counting + group + + expect { worker.perform(*job_args) } + .to change { Namespace::AggregationSchedule.count }.by(-1) + expect { worker.perform(*job_args) } + .not_to change { Namespace::AggregationSchedule.count } + end end end - it_behaves_like 'worker with data consistency', - described_class, - data_consistency: :sticky + it_behaves_like 'worker with data consistency', described_class, data_consistency: :sticky it 'has the `until_executed` deduplicate strategy' do expect(described_class.get_deduplicate_strategy).to eq(:until_executed) diff --git a/spec/workers/namespaces/schedule_aggregation_worker_spec.rb 
b/spec/workers/namespaces/schedule_aggregation_worker_spec.rb index 69bd0f1ce47..249c143606f 100644 --- a/spec/workers/namespaces/schedule_aggregation_worker_spec.rb +++ b/spec/workers/namespaces/schedule_aggregation_worker_spec.rb @@ -3,32 +3,68 @@ require 'spec_helper' RSpec.describe Namespaces::ScheduleAggregationWorker, '#perform', :clean_gitlab_redis_shared_state, feature_category: :source_code_management do - let(:group) { create(:group) } + let_it_be(:group) { create(:group) } subject(:worker) { described_class.new } + RSpec.shared_examples 'schedule root statistic worker' do + it 'enqueues only RootStatisticsWorker' do + expect(Namespaces::RootStatisticsWorker).to receive(:perform_async).with(group.root_ancestor.id) + expect(Namespace::AggregationSchedule).not_to receive(:safe_find_or_create_by!) + .with(namespace_id: group.root_ancestor.id) + + worker.perform(group.id) + end + + it 'does not change AggregationSchedule count' do + expect do + worker.perform(group.root_ancestor.id) + end.not_to change { Namespace::AggregationSchedule.count } + end + end + context 'when group is the root ancestor' do - context 'when aggregation schedule exists' do - it 'does not create a new one' do - stub_aggregation_schedule_statistics + context 'with remove_aggregation_schedule_lease feature flag enabled' do + context 'when aggregation schedule does not exist' do + it_behaves_like "schedule root statistic worker" + end - Namespace::AggregationSchedule.safe_find_or_create_by!(namespace_id: group.id) + context 'when aggregation schedule does exist' do + before do + Namespace::AggregationSchedule.safe_find_or_create_by!(namespace_id: group.id) + end - expect do - worker.perform(group.id) - end.not_to change { Namespace::AggregationSchedule.count } + it_behaves_like "schedule root statistic worker" end end - context 'when aggregation schedule does not exist' do - it 'creates one' do - stub_aggregation_schedule_statistics + context 'with remove_aggregation_schedule_lease 
feature flag disabled' do + before do + stub_feature_flags(remove_aggregation_schedule_lease: false) + end + + context 'when aggregation schedule exists' do + it 'does not create a new one' do + stub_aggregation_schedule_statistics + + Namespace::AggregationSchedule.safe_find_or_create_by!(namespace_id: group.id) - expect do - worker.perform(group.id) - end.to change { Namespace::AggregationSchedule.count }.by(1) + expect do + worker.perform(group.id) + end.not_to change { Namespace::AggregationSchedule.count } + end + end + + context 'when aggregation schedule does not exist' do + it 'creates one' do + stub_aggregation_schedule_statistics - expect(group.aggregation_schedule).to be_present + expect do + worker.perform(group.id) + end.to change { Namespace::AggregationSchedule.count }.by(1) + + expect(group.aggregation_schedule).to be_present + end end end end @@ -37,12 +73,22 @@ RSpec.describe Namespaces::ScheduleAggregationWorker, '#perform', :clean_gitlab_ let(:parent_group) { create(:group) } let(:group) { create(:group, parent: parent_group) } - it 'creates an aggregation schedule for the root' do - stub_aggregation_schedule_statistics + context 'with remove_aggregation_schedule_lease feature flag enabled' do + it_behaves_like "schedule root statistic worker" + end - worker.perform(group.id) + context 'with remove_aggregation_schedule_lease feature flag disabled' do + before do + stub_feature_flags(remove_aggregation_schedule_lease: false) + end - expect(parent_group.aggregation_schedule).to be_present + it 'creates an aggregation schedule for the root' do + stub_aggregation_schedule_statistics + + worker.perform(group.id) + + expect(parent_group.aggregation_schedule).to be_present + end end end @@ -57,11 +103,17 @@ RSpec.describe Namespaces::ScheduleAggregationWorker, '#perform', :clean_gitlab_ it_behaves_like 'an idempotent worker' do let(:job_args) { [group.id] } - it 'creates a single aggregation schedule' do - expect { worker.perform(*job_args) } - .to 
change { Namespace::AggregationSchedule.count }.by(1) - expect { worker.perform(*job_args) } - .not_to change { Namespace::AggregationSchedule.count } + context 'with remove_aggregation_schedule_lease feature flag disabled' do + before do + stub_feature_flags(remove_aggregation_schedule_lease: false) + end + + it 'creates a single aggregation schedule' do + expect { worker.perform(*job_args) } + .to change { Namespace::AggregationSchedule.count }.by(1) + expect { worker.perform(*job_args) } + .not_to change { Namespace::AggregationSchedule.count } + end end end diff --git a/spec/workers/object_pool/destroy_worker_spec.rb b/spec/workers/object_pool/destroy_worker_spec.rb index f83d3814c63..7db3404ed36 100644 --- a/spec/workers/object_pool/destroy_worker_spec.rb +++ b/spec/workers/object_pool/destroy_worker_spec.rb @@ -16,9 +16,13 @@ RSpec.describe ObjectPool::DestroyWorker, feature_category: :shared do subject { described_class.new } it 'requests Gitaly to remove the object pool' do - expect(Gitlab::GitalyClient).to receive(:call) - .with(pool.shard_name, :object_pool_service, :delete_object_pool, - Object, timeout: Gitlab::GitalyClient.long_timeout) + expect(Gitlab::GitalyClient).to receive(:call).with( + pool.shard_name, + :object_pool_service, + :delete_object_pool, + Object, + timeout: Gitlab::GitalyClient.long_timeout + ) subject.perform(pool.id) end diff --git a/spec/workers/packages/debian/cleanup_dangling_package_files_worker_spec.rb b/spec/workers/packages/debian/cleanup_dangling_package_files_worker_spec.rb new file mode 100644 index 00000000000..b6373dbda95 --- /dev/null +++ b/spec/workers/packages/debian/cleanup_dangling_package_files_worker_spec.rb @@ -0,0 +1,85 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe Packages::Debian::CleanupDanglingPackageFilesWorker, type: :worker, + feature_category: :package_registry do + describe '#perform' do + let_it_be_with_reload(:distribution) { create(:debian_project_distribution, 
:with_file, codename: 'unstable') } + let_it_be(:incoming) { create(:debian_incoming, project: distribution.project) } + let_it_be(:package) { create(:debian_package, project: distribution.project) } + + subject { described_class.new.perform } + + context 'when debian_packages flag is disabled' do + before do + stub_feature_flags(debian_packages: false) + end + + it 'does nothing' do + expect(::Packages::MarkPackageFilesForDestructionService).not_to receive(:new) + + subject + end + end + + context 'with mocked service returning success' do + it 'calls MarkPackageFilesForDestructionService' do + expect(Gitlab::ErrorTracking).not_to receive(:log_exception) + expect_next_instance_of(::Packages::MarkPackageFilesForDestructionService) do |service| + expect(service).to receive(:execute) + .with(batch_deadline: an_instance_of(ActiveSupport::TimeWithZone)) + .and_return(ServiceResponse.success) + end + + subject + end + end + + context 'with mocked service returning error' do + it 'ignore error' do + expect(Gitlab::ErrorTracking).not_to receive(:log_exception) + expect_next_instance_of(::Packages::MarkPackageFilesForDestructionService) do |service| + expect(service).to receive(:execute) + .with(batch_deadline: an_instance_of(ActiveSupport::TimeWithZone)) + .and_return(ServiceResponse.error(message: 'Custom error')) + end + + subject + end + end + + context 'when the service raises an error' do + it 'logs exception' do + expect(Gitlab::ErrorTracking).to receive(:log_exception).with( + instance_of(ArgumentError) + ) + expect_next_instance_of(::Packages::MarkPackageFilesForDestructionService) do |service| + expect(service).to receive(:execute) + .and_raise(ArgumentError, 'foobar') + end + + subject + end + end + + context 'with valid parameters' do + it_behaves_like 'an idempotent worker' do + before do + incoming.package_files.first.debian_file_metadatum.update! updated_at: 1.day.ago + incoming.package_files.second.update! 
updated_at: 1.day.ago, status: :error + end + + it 'mark dangling package files as pending destruction', :aggregate_failures do + expect(Gitlab::ErrorTracking).not_to receive(:log_exception) + + # Using subject inside this block will process the job multiple times + expect { subject } + .to not_change { distribution.project.package_files.count } + .and change { distribution.project.package_files.pending_destruction.count }.from(0).to(1) + .and not_change { distribution.project.packages.count } + end + end + end + end +end diff --git a/spec/workers/packages/debian/process_package_file_worker_spec.rb b/spec/workers/packages/debian/process_package_file_worker_spec.rb index 44769ec6a14..6010f4eac27 100644 --- a/spec/workers/packages/debian/process_package_file_worker_spec.rb +++ b/spec/workers/packages/debian/process_package_file_worker_spec.rb @@ -64,6 +64,7 @@ RSpec.describe Packages::Debian::ProcessPackageFileWorker, type: :worker, featur .to not_change(Packages::Package, :count) .and not_change { Packages::PackageFile.count } .and not_change { package.package_files.count } + .and change { package_file.reload.status }.to('error') .and change { package.reload.status }.from('processing').to('error') end end diff --git a/spec/workers/packages/npm/deprecate_package_worker_spec.rb b/spec/workers/packages/npm/deprecate_package_worker_spec.rb new file mode 100644 index 00000000000..100a8a3af73 --- /dev/null +++ b/spec/workers/packages/npm/deprecate_package_worker_spec.rb @@ -0,0 +1,35 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe Packages::Npm::DeprecatePackageWorker, feature_category: :package_registry do + describe '#perform' do + let_it_be(:project) { create(:project) } + let(:worker) { described_class.new } + let(:params) do + { + package_name: 'package_name', + versions: { + '1.0.1' => { + name: 'package_name', + deprecated: 'This version is deprecated' + } + } + } + end + + include_examples 'an idempotent worker' do + let(:job_args) { 
[project.id, params] } + + it 'calls the deprecation service' do + expect(::Packages::Npm::DeprecatePackageService).to receive(:new).with(project, params) do + double.tap do |service| + expect(service).to receive(:execute) + end + end + + worker.perform(*job_args) + end + end + end +end diff --git a/spec/workers/pipeline_hooks_worker_spec.rb b/spec/workers/pipeline_hooks_worker_spec.rb index a8b0f91bf7d..7a85038d946 100644 --- a/spec/workers/pipeline_hooks_worker_spec.rb +++ b/spec/workers/pipeline_hooks_worker_spec.rb @@ -37,7 +37,5 @@ RSpec.describe PipelineHooksWorker, feature_category: :continuous_integration do end end - it_behaves_like 'worker with data consistency', - described_class, - data_consistency: :delayed + it_behaves_like 'worker with data consistency', described_class, data_consistency: :delayed end diff --git a/spec/workers/pipeline_metrics_worker_spec.rb b/spec/workers/pipeline_metrics_worker_spec.rb index f7b397d91a6..7bd98f8f55d 100644 --- a/spec/workers/pipeline_metrics_worker_spec.rb +++ b/spec/workers/pipeline_metrics_worker_spec.rb @@ -6,19 +6,19 @@ RSpec.describe PipelineMetricsWorker, feature_category: :continuous_integration let(:project) { create(:project, :repository) } let!(:merge_request) do - create(:merge_request, source_project: project, - source_branch: pipeline.ref, - head_pipeline: pipeline) + create(:merge_request, source_project: project, source_branch: pipeline.ref, head_pipeline: pipeline) end let(:pipeline) do - create(:ci_empty_pipeline, - status: status, - project: project, - ref: 'master', - sha: project.repository.commit('master').id, - started_at: 1.hour.ago, - finished_at: Time.current) + create( + :ci_empty_pipeline, + status: status, + project: project, + ref: 'master', + sha: project.repository.commit('master').id, + started_at: 1.hour.ago, + finished_at: Time.current + ) end let(:status) { 'pending' } diff --git a/spec/workers/process_commit_worker_spec.rb b/spec/workers/process_commit_worker_spec.rb index 
1fc77c42cbc..c95119b0d02 100644 --- a/spec/workers/process_commit_worker_spec.rb +++ b/spec/workers/process_commit_worker_spec.rb @@ -82,11 +82,13 @@ RSpec.describe ProcessCommitWorker, feature_category: :source_code_management do context 'when commit is a merge request merge commit to the default branch' do let(:merge_request) do - create(:merge_request, - description: "Closes #{issue.to_reference}", - source_branch: 'feature-merged', - target_branch: 'master', - source_project: project) + create( + :merge_request, + description: "Closes #{issue.to_reference}", + source_branch: 'feature-merged', + target_branch: 'master', + source_project: project + ) end let(:commit) do diff --git a/spec/workers/projects/inactive_projects_deletion_cron_worker_spec.rb b/spec/workers/projects/inactive_projects_deletion_cron_worker_spec.rb index 15234827efa..68af5e61e3b 100644 --- a/spec/workers/projects/inactive_projects_deletion_cron_worker_spec.rb +++ b/spec/workers/projects/inactive_projects_deletion_cron_worker_spec.rb @@ -92,8 +92,11 @@ RSpec.describe Projects::InactiveProjectsDeletionCronWorker, feature_category: : it 'invokes Projects::InactiveProjectsDeletionNotificationWorker for inactive projects' do Gitlab::Redis::SharedState.with do |redis| - expect(redis).to receive(:hset).with('inactive_projects_deletion_warning_email_notified', - "project:#{inactive_large_project.id}", Date.current) + expect(redis).to receive(:hset).with( + 'inactive_projects_deletion_warning_email_notified', + "project:#{inactive_large_project.id}", + Date.current + ) end expect(::Projects::InactiveProjectsDeletionNotificationWorker).to receive(:perform_async).with( inactive_large_project.id, deletion_date).and_call_original @@ -104,8 +107,11 @@ RSpec.describe Projects::InactiveProjectsDeletionCronWorker, feature_category: : it 'does not invoke InactiveProjectsDeletionNotificationWorker for already notified inactive projects' do Gitlab::Redis::SharedState.with do |redis| - 
redis.hset('inactive_projects_deletion_warning_email_notified', "project:#{inactive_large_project.id}", - Date.current.to_s) + redis.hset( + 'inactive_projects_deletion_warning_email_notified', + "project:#{inactive_large_project.id}", + Date.current.to_s + ) end expect(::Projects::InactiveProjectsDeletionNotificationWorker).not_to receive(:perform_async) @@ -116,8 +122,11 @@ RSpec.describe Projects::InactiveProjectsDeletionCronWorker, feature_category: : it 'invokes Projects::DestroyService for projects that are inactive even after being notified' do Gitlab::Redis::SharedState.with do |redis| - redis.hset('inactive_projects_deletion_warning_email_notified', "project:#{inactive_large_project.id}", - 15.months.ago.to_date.to_s) + redis.hset( + 'inactive_projects_deletion_warning_email_notified', + "project:#{inactive_large_project.id}", + 15.months.ago.to_date.to_s + ) end expect(::Projects::InactiveProjectsDeletionNotificationWorker).not_to receive(:perform_async) @@ -129,8 +138,9 @@ RSpec.describe Projects::InactiveProjectsDeletionCronWorker, feature_category: : expect(inactive_large_project.reload.pending_delete).to eq(true) Gitlab::Redis::SharedState.with do |redis| - expect(redis.hget('inactive_projects_deletion_warning_email_notified', - "project:#{inactive_large_project.id}")).to be_nil + expect( + redis.hget('inactive_projects_deletion_warning_email_notified', "project:#{inactive_large_project.id}") + ).to be_nil end end diff --git a/spec/workers/projects/inactive_projects_deletion_notification_worker_spec.rb b/spec/workers/projects/inactive_projects_deletion_notification_worker_spec.rb index 28668188497..2ac2b5d0795 100644 --- a/spec/workers/projects/inactive_projects_deletion_notification_worker_spec.rb +++ b/spec/workers/projects/inactive_projects_deletion_notification_worker_spec.rb @@ -22,14 +22,15 @@ RSpec.describe Projects::InactiveProjectsDeletionNotificationWorker, feature_cat worker.perform(project.id, deletion_date) Gitlab::Redis::SharedState.with 
do |redis| - expect(redis.hget('inactive_projects_deletion_warning_email_notified', - "project:#{project.id}")).to eq(Date.current.to_s) + expect( + redis.hget('inactive_projects_deletion_warning_email_notified', "project:#{project.id}") + ).to eq(Date.current.to_s) end end it 'rescues and logs the exception if project does not exist' do - expect(Gitlab::ErrorTracking).to receive(:log_exception).with(instance_of(ActiveRecord::RecordNotFound), - { project_id: non_existing_project_id }) + expect(Gitlab::ErrorTracking).to receive(:log_exception) + .with(instance_of(ActiveRecord::RecordNotFound), { project_id: non_existing_project_id }) worker.perform(non_existing_project_id, deletion_date) end diff --git a/spec/workers/projects/process_sync_events_worker_spec.rb b/spec/workers/projects/process_sync_events_worker_spec.rb index 77ccf14a32b..fe53b6d6d8c 100644 --- a/spec/workers/projects/process_sync_events_worker_spec.rb +++ b/spec/workers/projects/process_sync_events_worker_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe Projects::ProcessSyncEventsWorker, feature_category: :pods do +RSpec.describe Projects::ProcessSyncEventsWorker, feature_category: :cell do let!(:group) { create(:group) } let!(:project) { create(:project) } diff --git a/spec/workers/rebase_worker_spec.rb b/spec/workers/rebase_worker_spec.rb index eec221094e6..ee8fd8b7461 100644 --- a/spec/workers/rebase_worker_spec.rb +++ b/spec/workers/rebase_worker_spec.rb @@ -10,11 +10,13 @@ RSpec.describe RebaseWorker, '#perform', feature_category: :source_code_manageme let(:forked_project) { fork_project(upstream_project, nil, repository: true) } let(:merge_request) do - create(:merge_request, - source_project: forked_project, - source_branch: 'feature_conflict', - target_project: upstream_project, - target_branch: 'master') + create( + :merge_request, + source_project: forked_project, + source_branch: 'feature_conflict', + target_project: upstream_project, + target_branch: 'master' + ) end it 'sets 
the correct project for running hooks' do diff --git a/spec/workers/remote_mirror_notification_worker_spec.rb b/spec/workers/remote_mirror_notification_worker_spec.rb index 46f44d0047b..e7c32d79457 100644 --- a/spec/workers/remote_mirror_notification_worker_spec.rb +++ b/spec/workers/remote_mirror_notification_worker_spec.rb @@ -30,8 +30,10 @@ RSpec.describe RemoteMirrorNotificationWorker, :mailer, feature_category: :sourc end it 'does nothing when a notification has already been sent' do - mirror.update_columns(last_error: "There was a problem fetching", - error_notification_sent: true) + mirror.update_columns( + last_error: "There was a problem fetching", + error_notification_sent: true + ) expect(NotificationService).not_to receive(:new) diff --git a/spec/workers/remove_expired_members_worker_spec.rb b/spec/workers/remove_expired_members_worker_spec.rb index 354ce3fc9b4..f77a078750d 100644 --- a/spec/workers/remove_expired_members_worker_spec.rb +++ b/spec/workers/remove_expired_members_worker_spec.rb @@ -35,8 +35,10 @@ RSpec.describe RemoveExpiredMembersWorker, feature_category: :system_access do new_job = Sidekiq::Worker.jobs.last - expect(new_job).to include('meta.project' => expired_project_member.project.full_path, - 'meta.user' => expired_project_member.user.username) + expect(new_job).to include( + 'meta.project' => expired_project_member.project.full_path, + 'meta.user' => expired_project_member.user.username + ) end end @@ -60,8 +62,7 @@ RSpec.describe RemoveExpiredMembersWorker, feature_category: :system_access do worker.perform expect( - Users::GhostUserMigration.where(user: expired_project_bot, - initiator_user: nil) + Users::GhostUserMigration.where(user: expired_project_bot, initiator_user: nil) ).to be_exists end end @@ -116,8 +117,10 @@ RSpec.describe RemoveExpiredMembersWorker, feature_category: :system_access do new_job = Sidekiq::Worker.jobs.last - expect(new_job).to include('meta.root_namespace' => expired_group_member.group.full_path, - 
'meta.user' => expired_group_member.user.username) + expect(new_job).to include( + 'meta.root_namespace' => expired_group_member.group.full_path, + 'meta.user' => expired_group_member.user.username + ) end end diff --git a/spec/workers/remove_unaccepted_member_invites_worker_spec.rb b/spec/workers/remove_unaccepted_member_invites_worker_spec.rb index 97ddf9223b3..5173967c57a 100644 --- a/spec/workers/remove_unaccepted_member_invites_worker_spec.rb +++ b/spec/workers/remove_unaccepted_member_invites_worker_spec.rb @@ -13,15 +13,19 @@ RSpec.describe RemoveUnacceptedMemberInvitesWorker, feature_category: :system_ac it 'removes unaccepted members', :aggregate_failures do unaccepted_group_invitee = create( - :group_member, invite_token: 't0ken', - invite_email: 'group_invitee@example.com', - user: nil, - created_at: Time.current - 5.days) + :group_member, + invite_token: 't0ken', + invite_email: 'group_invitee@example.com', + user: nil, + created_at: Time.current - 5.days + ) unaccepted_project_invitee = create( - :project_member, invite_token: 't0ken', - invite_email: 'project_invitee@example.com', - user: nil, - created_at: Time.current - 5.days) + :project_member, + invite_token: 't0ken', + invite_email: 'project_invitee@example.com', + user: nil, + created_at: Time.current - 5.days + ) expect { worker.perform }.to change { Member.count }.by(-2) @@ -33,13 +37,17 @@ RSpec.describe RemoveUnacceptedMemberInvitesWorker, feature_category: :system_ac context 'invited members still within expiration threshold' do it 'leaves invited members', :aggregate_failures do group_invitee = create( - :group_member, invite_token: 't0ken', - invite_email: 'group_invitee@example.com', - user: nil) + :group_member, + invite_token: 't0ken', + invite_email: 'group_invitee@example.com', + user: nil + ) project_invitee = create( - :project_member, invite_token: 't0ken', - invite_email: 'project_invitee@example.com', - user: nil) + :project_member, + invite_token: 't0ken', + invite_email: 
'project_invitee@example.com', + user: nil + ) expect { worker.perform }.not_to change { Member.count } @@ -56,15 +64,19 @@ RSpec.describe RemoveUnacceptedMemberInvitesWorker, feature_category: :system_ac it 'leaves accepted members', :aggregate_failures do user = create(:user) accepted_group_invitee = create( - :group_member, invite_token: 't0ken', - invite_email: 'group_invitee@example.com', - user: user, - created_at: Time.current - 5.days) + :group_member, + invite_token: 't0ken', + invite_email: 'group_invitee@example.com', + user: user, + created_at: Time.current - 5.days + ) accepted_project_invitee = create( - :project_member, invite_token: nil, - invite_email: 'project_invitee@example.com', - user: user, - created_at: Time.current - 5.days) + :project_member, + invite_token: nil, + invite_email: 'project_invitee@example.com', + user: user, + created_at: Time.current - 5.days + ) expect { worker.perform }.not_to change { Member.count } diff --git a/spec/workers/remove_unreferenced_lfs_objects_worker_spec.rb b/spec/workers/remove_unreferenced_lfs_objects_worker_spec.rb index 56dc3511cfc..e5564834443 100644 --- a/spec/workers/remove_unreferenced_lfs_objects_worker_spec.rb +++ b/spec/workers/remove_unreferenced_lfs_objects_worker_spec.rb @@ -13,24 +13,16 @@ RSpec.describe RemoveUnreferencedLfsObjectsWorker, feature_category: :source_cod let!(:referenced_lfs_object1) { create(:lfs_object, oid: '3' * 64) } let!(:referenced_lfs_object2) { create(:lfs_object, oid: '4' * 64) } let!(:lfs_objects_project1_1) do - create(:lfs_objects_project, - project: project1, - lfs_object: referenced_lfs_object1 + create(:lfs_objects_project, project: project1, lfs_object: referenced_lfs_object1 ) end let!(:lfs_objects_project2_1) do - create(:lfs_objects_project, - project: project2, - lfs_object: referenced_lfs_object1 - ) + create(:lfs_objects_project, project: project2, lfs_object: referenced_lfs_object1) end let!(:lfs_objects_project1_2) do - create(:lfs_objects_project, - 
project: project1, - lfs_object: referenced_lfs_object2 - ) + create(:lfs_objects_project, project: project1, lfs_object: referenced_lfs_object2) end it 'removes unreferenced lfs objects' do diff --git a/spec/workers/repository_update_remote_mirror_worker_spec.rb b/spec/workers/repository_update_remote_mirror_worker_spec.rb index c1987658b0d..61b9441ec27 100644 --- a/spec/workers/repository_update_remote_mirror_worker_spec.rb +++ b/spec/workers/repository_update_remote_mirror_worker_spec.rb @@ -57,14 +57,16 @@ RSpec.describe RepositoryUpdateRemoteMirrorWorker, :clean_gitlab_redis_shared_st end it 'retries 3 times for the worker to finish before rescheduling' do - expect(subject).to receive(:in_lock) - .with("#{described_class.name}:#{remote_mirror.id}", - retries: 3, - ttl: remote_mirror.max_runtime, - sleep_sec: described_class::LOCK_WAIT_TIME) - .and_raise(Gitlab::ExclusiveLeaseHelpers::FailedToObtainLockError) - expect(described_class).to receive(:perform_in) - .with(remote_mirror.backoff_delay, remote_mirror.id, scheduled_time, 0) + expect(subject).to receive(:in_lock).with( + "#{described_class.name}:#{remote_mirror.id}", + retries: 3, + ttl: remote_mirror.max_runtime, + sleep_sec: described_class::LOCK_WAIT_TIME + ).and_raise(Gitlab::ExclusiveLeaseHelpers::FailedToObtainLockError) + + expect(described_class) + .to receive(:perform_in) + .with(remote_mirror.backoff_delay, remote_mirror.id, scheduled_time, 0) subject.perform(remote_mirror.id, scheduled_time) end diff --git a/spec/workers/run_pipeline_schedule_worker_spec.rb b/spec/workers/run_pipeline_schedule_worker_spec.rb index 75938d3b793..d0e4de1aa98 100644 --- a/spec/workers/run_pipeline_schedule_worker_spec.rb +++ b/spec/workers/run_pipeline_schedule_worker_spec.rb @@ -137,9 +137,11 @@ RSpec.describe RunPipelineScheduleWorker, feature_category: :continuous_integrat expect(Gitlab::ErrorTracking) .to receive(:track_and_raise_for_dev_exception) - .with(ActiveRecord::StatementInvalid, - issue_url: 
'https://gitlab.com/gitlab-org/gitlab-foss/issues/41231', - schedule_id: pipeline_schedule.id).once + .with( + ActiveRecord::StatementInvalid, + issue_url: 'https://gitlab.com/gitlab-org/gitlab-foss/issues/41231', + schedule_id: pipeline_schedule.id + ).once end it 'increments Prometheus counter' do diff --git a/spec/workers/update_head_pipeline_for_merge_request_worker_spec.rb b/spec/workers/update_head_pipeline_for_merge_request_worker_spec.rb index 5eae275be36..c64a597833d 100644 --- a/spec/workers/update_head_pipeline_for_merge_request_worker_spec.rb +++ b/spec/workers/update_head_pipeline_for_merge_request_worker_spec.rb @@ -66,11 +66,13 @@ RSpec.describe UpdateHeadPipelineForMergeRequestWorker, feature_category: :conti context 'when a merge request pipeline exists' do let_it_be(:merge_request_pipeline) do - create(:ci_pipeline, - project: project, - source: :merge_request_event, - sha: latest_sha, - merge_request: merge_request) + create( + :ci_pipeline, + project: project, + source: :merge_request_event, + sha: latest_sha, + merge_request: merge_request + ) end it 'sets the merge request pipeline as the head pipeline' do diff --git a/spec/workers/update_highest_role_worker_spec.rb b/spec/workers/update_highest_role_worker_spec.rb index 94811260f0e..3e4a2f6be36 100644 --- a/spec/workers/update_highest_role_worker_spec.rb +++ b/spec/workers/update_highest_role_worker_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe UpdateHighestRoleWorker, :clean_gitlab_redis_shared_state, feature_category: :subscription_cost_management do +RSpec.describe UpdateHighestRoleWorker, :clean_gitlab_redis_shared_state, feature_category: :seat_cost_management do include ExclusiveLeaseHelpers let(:worker) { described_class.new } diff --git a/spec/workers/users/deactivate_dormant_users_worker_spec.rb b/spec/workers/users/deactivate_dormant_users_worker_spec.rb index 1fb936b1fc2..fdcbb624562 100644 --- a/spec/workers/users/deactivate_dormant_users_worker_spec.rb +++ 
b/spec/workers/users/deactivate_dormant_users_worker_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe Users::DeactivateDormantUsersWorker, feature_category: :subscription_cost_management do +RSpec.describe Users::DeactivateDormantUsersWorker, feature_category: :seat_cost_management do using RSpec::Parameterized::TableSyntax describe '#perform' do @@ -35,6 +35,7 @@ RSpec.describe Users::DeactivateDormantUsersWorker, feature_category: :subscript where(:user_type, :expected_state) do :human | 'deactivated' + :human_deprecated | 'deactivated' :support_bot | 'active' :alert_bot | 'active' :visual_review_bot | 'active' @@ -57,11 +58,13 @@ RSpec.describe Users::DeactivateDormantUsersWorker, feature_category: :subscript it 'does not deactivate non-active users' do human_user = create(:user, user_type: :human, state: :blocked, last_activity_on: Gitlab::CurrentSettings.deactivate_dormant_users_period.days.ago.to_date) + human_user2 = create(:user, user_type: :human_deprecated, state: :blocked, last_activity_on: Gitlab::CurrentSettings.deactivate_dormant_users_period.days.ago.to_date) service_user = create(:user, user_type: :service_user, state: :blocked, last_activity_on: Gitlab::CurrentSettings.deactivate_dormant_users_period.days.ago.to_date) worker.perform expect(human_user.reload.state).to eq('blocked') + expect(human_user2.reload.state).to eq('blocked') expect(service_user.reload.state).to eq('blocked') end diff --git a/spec/workers/users/migrate_records_to_ghost_user_in_batches_worker_spec.rb b/spec/workers/users/migrate_records_to_ghost_user_in_batches_worker_spec.rb index 73faffb5387..38ea7c43267 100644 --- a/spec/workers/users/migrate_records_to_ghost_user_in_batches_worker_spec.rb +++ b/spec/workers/users/migrate_records_to_ghost_user_in_batches_worker_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe Users::MigrateRecordsToGhostUserInBatchesWorker, feature_category: :subscription_cost_management do +RSpec.describe 
Users::MigrateRecordsToGhostUserInBatchesWorker, feature_category: :seat_cost_management do include ExclusiveLeaseHelpers let(:worker) { described_class.new } diff --git a/spec/workers/web_hook_worker_spec.rb b/spec/workers/web_hook_worker_spec.rb index e39017c4ccf..be43b83ec0a 100644 --- a/spec/workers/web_hook_worker_spec.rb +++ b/spec/workers/web_hook_worker_spec.rb @@ -28,8 +28,6 @@ RSpec.describe WebHookWorker, feature_category: :integrations do .to change { Gitlab::WebHooks::RecursionDetection::UUID.instance.request_uuid }.to(uuid) end - it_behaves_like 'worker with data consistency', - described_class, - data_consistency: :delayed + it_behaves_like 'worker with data consistency', described_class, data_consistency: :delayed end end diff --git a/spec/workers/work_items/import_work_items_csv_worker_spec.rb b/spec/workers/work_items/import_work_items_csv_worker_spec.rb new file mode 100644 index 00000000000..056960fbcf2 --- /dev/null +++ b/spec/workers/work_items/import_work_items_csv_worker_spec.rb @@ -0,0 +1,44 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe WorkItems::ImportWorkItemsCsvWorker, feature_category: :team_planning do + let_it_be(:project) { create(:project) } + let_it_be(:user) { create(:user) } + + let(:upload) { create(:upload, :with_file) } + + before_all do + project.add_reporter(user) + end + + subject { described_class.new.perform(user.id, project.id, upload.id) } + + describe '#perform' do + it 'calls #execute on WorkItems::ImportCsvService and destroys upload' do + expect_next_instance_of(WorkItems::ImportCsvService) do |instance| + expect(instance).to receive(:execute).and_return({ success: 5, error_lines: [], parse_error: false }) + end + + subject + + expect { upload.reload }.to raise_error ActiveRecord::RecordNotFound + end + + it_behaves_like 'an idempotent worker' do + let(:job_args) { [user.id, project.id, upload.id] } + end + end + + describe '.sidekiq_retries_exhausted' do + let_it_be(:job) { { 'args' 
=> [user.id, project.id, create(:upload, :with_file).id] } } + + subject(:sidekiq_retries_exhausted) do + described_class.sidekiq_retries_exhausted_block.call(job) + end + + it 'destroys upload' do + expect { sidekiq_retries_exhausted }.to change { Upload.count }.by(-1) + end + end +end -- cgit v1.2.3