Welcome to mirror list, hosted at ThFree Co, Russian Federation.

gitlab.com/gitlab-org/gitlab-foss.git - Unnamed repository; edit this file 'description' to name the repository.
summaryrefslogtreecommitdiff
path: root/spec
diff options
context:
space:
mode:
Diffstat (limited to 'spec')
-rw-r--r--spec/bin/feature_flag_spec.rb273
-rw-r--r--spec/components/pajamas/avatar_component_spec.rb59
-rw-r--r--spec/components/previews/pajamas/banner_component_preview.rb2
-rw-r--r--spec/components/projects/ml/models_index_component_spec.rb87
-rw-r--r--spec/components/projects/ml/show_ml_model_component_spec.rb1
-rw-r--r--spec/controllers/admin/projects_controller_spec.rb4
-rw-r--r--spec/controllers/admin/runners_controller_spec.rb4
-rw-r--r--spec/controllers/application_controller_spec.rb28
-rw-r--r--spec/controllers/concerns/issuable_collections_spec.rb1
-rw-r--r--spec/controllers/concerns/renders_commits_spec.rb8
-rw-r--r--spec/controllers/groups/labels_controller_spec.rb4
-rw-r--r--spec/controllers/groups/releases_controller_spec.rb4
-rw-r--r--spec/controllers/import/bitbucket_server_controller_spec.rb12
-rw-r--r--spec/controllers/import/bulk_imports_controller_spec.rb24
-rw-r--r--spec/controllers/import/fogbugz_controller_spec.rb35
-rw-r--r--spec/controllers/import/github_controller_spec.rb13
-rw-r--r--spec/controllers/import/manifest_controller_spec.rb30
-rw-r--r--spec/controllers/oauth/authorizations_controller_spec.rb18
-rw-r--r--spec/controllers/omniauth_callbacks_controller_spec.rb111
-rw-r--r--spec/controllers/projects/issues_controller_spec.rb12
-rw-r--r--spec/controllers/projects/labels_controller_spec.rb2
-rw-r--r--spec/controllers/projects/merge_requests/diffs_controller_spec.rb89
-rw-r--r--spec/controllers/projects/merge_requests_controller_spec.rb18
-rw-r--r--spec/controllers/projects/notes_controller_spec.rb6
-rw-r--r--spec/controllers/projects/packages/infrastructure_registry_controller_spec.rb2
-rw-r--r--spec/controllers/projects/packages/packages_controller_spec.rb2
-rw-r--r--spec/controllers/projects/pipeline_schedules_controller_spec.rb4
-rw-r--r--spec/controllers/projects/pipelines_controller_spec.rb10
-rw-r--r--spec/controllers/projects/refs_controller_spec.rb17
-rw-r--r--spec/controllers/projects/security/configuration_controller_spec.rb13
-rw-r--r--spec/controllers/projects/starrers_controller_spec.rb4
-rw-r--r--spec/controllers/projects_controller_spec.rb60
-rw-r--r--spec/controllers/uploads_controller_spec.rb39
-rw-r--r--spec/db/docs_spec.rb3
-rw-r--r--spec/db/schema_spec.rb8
-rw-r--r--spec/experiments/in_product_guidance_environments_webide_experiment_spec.rb22
-rw-r--r--spec/factories/ci/job_artifacts.rb9
-rw-r--r--spec/factories/ci/pipeline_chat_data.rb9
-rw-r--r--spec/factories/ci/pipeline_config.rb8
-rw-r--r--spec/factories/ci/reports/security/findings.rb1
-rw-r--r--spec/factories/ci/runners.rb3
-rw-r--r--spec/factories/groups.rb4
-rw-r--r--spec/factories/integrations.rb8
-rw-r--r--spec/factories/ml/model_version_metadata.rb11
-rw-r--r--spec/factories/namespace_package_settings.rb3
-rw-r--r--spec/factories/namespaces/descendants.rb12
-rw-r--r--spec/factories/organizations/organization_users.rb4
-rw-r--r--spec/factories/projects.rb33
-rw-r--r--spec/factories/users.rb13
-rw-r--r--spec/factories/users/in_product_marketing_email.rb10
-rw-r--r--spec/factories/work_items/dates_sources.rb7
-rw-r--r--spec/features/admin/admin_abuse_reports_spec.rb2
-rw-r--r--spec/features/admin/admin_appearance_spec.rb287
-rw-r--r--spec/features/admin/admin_browse_spam_logs_spec.rb2
-rw-r--r--spec/features/admin/admin_deploy_keys_spec.rb2
-rw-r--r--spec/features/admin/admin_dev_ops_reports_spec.rb2
-rw-r--r--spec/features/admin/admin_disables_git_access_protocol_spec.rb2
-rw-r--r--spec/features/admin/admin_disables_two_factor_spec.rb4
-rw-r--r--spec/features/admin/admin_groups_spec.rb2
-rw-r--r--spec/features/admin/admin_health_check_spec.rb2
-rw-r--r--spec/features/admin/admin_hook_logs_spec.rb2
-rw-r--r--spec/features/admin/admin_hooks_spec.rb2
-rw-r--r--spec/features/admin/admin_jobs_spec.rb2
-rw-r--r--spec/features/admin/admin_labels_spec.rb2
-rw-r--r--spec/features/admin/admin_manage_applications_spec.rb2
-rw-r--r--spec/features/admin/admin_mode/login_spec.rb366
-rw-r--r--spec/features/admin/admin_mode/logout_spec.rb6
-rw-r--r--spec/features/admin/admin_mode/workers_spec.rb4
-rw-r--r--spec/features/admin/admin_mode_spec.rb53
-rw-r--r--spec/features/admin/admin_projects_spec.rb2
-rw-r--r--spec/features/admin/admin_runners_spec.rb2
-rw-r--r--spec/features/admin/admin_search_settings_spec.rb2
-rw-r--r--spec/features/admin/admin_sees_background_migrations_spec.rb2
-rw-r--r--spec/features/admin/admin_sees_project_statistics_spec.rb2
-rw-r--r--spec/features/admin/admin_sees_projects_statistics_spec.rb2
-rw-r--r--spec/features/admin/admin_settings_spec.rb2
-rw-r--r--spec/features/admin/admin_system_info_spec.rb2
-rw-r--r--spec/features/admin/admin_users_impersonation_tokens_spec.rb2
-rw-r--r--spec/features/admin/admin_users_spec.rb2
-rw-r--r--spec/features/admin/admin_uses_repository_checks_spec.rb2
-rw-r--r--spec/features/admin/broadcast_messages_spec.rb2
-rw-r--r--spec/features/admin/dashboard_spec.rb2
-rw-r--r--spec/features/admin/users/admin_impersonates_user_spec.rb208
-rw-r--r--spec/features/admin/users/admin_manages_user_identities_spec.rb56
-rw-r--r--spec/features/admin/users/admin_sees_unconfirmed_user_spec.rb52
-rw-r--r--spec/features/admin/users/admin_sees_user_spec.rb248
-rw-r--r--spec/features/admin/users/user_spec.rb537
-rw-r--r--spec/features/admin/users/users_spec.rb20
-rw-r--r--spec/features/admin_variables_spec.rb2
-rw-r--r--spec/features/boards/board_filters_spec.rb2
-rw-r--r--spec/features/boards/keyboard_shortcut_spec.rb4
-rw-r--r--spec/features/broadcast_messages_spec.rb2
-rw-r--r--spec/features/clusters/cluster_detail_page_spec.rb2
-rw-r--r--spec/features/dashboard/todos/todos_spec.rb4
-rw-r--r--spec/features/expand_collapse_diffs_spec.rb2
-rw-r--r--spec/features/explore/catalog/catalog_details_spec.rb48
-rw-r--r--spec/features/explore/catalog/catalog_releases_spec.rb111
-rw-r--r--spec/features/explore/catalog/catalog_settings_spec.rb93
-rw-r--r--spec/features/explore/catalog/catalog_spec.rb182
-rw-r--r--spec/features/gitlab_experiments_spec.rb2
-rw-r--r--spec/features/groups/clusters/user_spec.rb2
-rw-r--r--spec/features/groups/members/list_members_spec.rb2
-rw-r--r--spec/features/groups/settings/user_enables_namespace_hierarchy_cache_spec.rb58
-rw-r--r--spec/features/groups/show_spec.rb1
-rw-r--r--spec/features/groups_spec.rb19
-rw-r--r--spec/features/help_dropdown_spec.rb2
-rw-r--r--spec/features/issuables/issuable_list_spec.rb4
-rw-r--r--spec/features/issues/gfm_autocomplete_spec.rb12
-rw-r--r--spec/features/issues/issue_sidebar_spec.rb3
-rw-r--r--spec/features/merge_request/user_creates_mr_spec.rb3
-rw-r--r--spec/features/merge_request/user_selects_branches_for_new_mr_spec.rb3
-rw-r--r--spec/features/merge_requests/admin_views_hidden_merge_requests_spec.rb2
-rw-r--r--spec/features/profiles/two_factor_auths_spec.rb29
-rw-r--r--spec/features/profiles/user_visits_profile_preferences_page_spec.rb2
-rw-r--r--spec/features/projects/branches_spec.rb4
-rw-r--r--spec/features/projects/clusters/gcp_spec.rb2
-rw-r--r--spec/features/projects/clusters/user_spec.rb2
-rw-r--r--spec/features/projects/environments/environment_spec.rb2
-rw-r--r--spec/features/projects/features_visibility_spec.rb2
-rw-r--r--spec/features/projects/members/manage_members_spec.rb5
-rw-r--r--spec/features/projects/navbar_spec.rb13
-rw-r--r--spec/features/projects/new_project_spec.rb14
-rw-r--r--spec/features/projects/settings/registry_settings_spec.rb2
-rw-r--r--spec/features/projects/settings/user_interacts_with_deploy_keys_spec.rb89
-rw-r--r--spec/features/projects/show/clone_button_spec.rb4
-rw-r--r--spec/features/projects/show/redirects_spec.rb2
-rw-r--r--spec/features/projects/show/user_sees_git_instructions_spec.rb2
-rw-r--r--spec/features/projects/user_creates_project_spec.rb4
-rw-r--r--spec/features/projects/user_sorts_projects_spec.rb16
-rw-r--r--spec/features/projects/user_views_empty_project_spec.rb2
-rw-r--r--spec/features/projects/work_items/linked_work_items_spec.rb85
-rw-r--r--spec/features/protected_branches_spec.rb2
-rw-r--r--spec/features/search/user_searches_for_code_spec.rb14
-rw-r--r--spec/features/search/user_searches_for_comments_spec.rb8
-rw-r--r--spec/features/search/user_searches_for_commits_spec.rb8
-rw-r--r--spec/features/search/user_searches_for_issues_spec.rb8
-rw-r--r--spec/features/search/user_searches_for_merge_requests_spec.rb8
-rw-r--r--spec/features/search/user_searches_for_milestones_spec.rb8
-rw-r--r--spec/features/search/user_searches_for_projects_spec.rb8
-rw-r--r--spec/features/search/user_searches_for_users_spec.rb8
-rw-r--r--spec/features/search/user_searches_for_wiki_pages_spec.rb8
-rw-r--r--spec/features/tags/developer_views_tags_spec.rb4
-rw-r--r--spec/features/usage_stats_consent_spec.rb2
-rw-r--r--spec/features/user_settings/active_sessions_spec.rb2
-rw-r--r--spec/features/users/login_spec.rb2
-rw-r--r--spec/features/users/signup_spec.rb501
-rw-r--r--spec/finders/ci/catalog/resources/versions_finder_spec.rb22
-rw-r--r--spec/finders/ci/runner_jobs_finder_spec.rb88
-rw-r--r--spec/finders/ci/runner_managers_finder_spec.rb77
-rw-r--r--spec/finders/ci/runners_finder_spec.rb19
-rw-r--r--spec/finders/deployments_finder_spec.rb4
-rw-r--r--spec/finders/members_finder_spec.rb6
-rw-r--r--spec/finders/packages/terraform_module/packages_finder_spec.rb65
-rw-r--r--spec/finders/projects/ml/experiment_finder_spec.rb51
-rw-r--r--spec/finders/projects_finder_spec.rb4
-rw-r--r--spec/finders/releases/group_releases_finder_spec.rb6
-rw-r--r--spec/finders/resource_milestone_event_finder_spec.rb6
-rw-r--r--spec/finders/users_finder_spec.rb25
-rw-r--r--spec/fixtures/api/schemas/ml/get_latest_versions.json4
-rw-r--r--spec/fixtures/api/schemas/ml/get_model_version.json17
-rw-r--r--spec/fixtures/api/schemas/ml/search_experiments.json39
-rw-r--r--spec/fixtures/api/schemas/public_api/v4/job.json6
-rw-r--r--spec/fixtures/api/schemas/variable.json8
-rw-r--r--spec/fixtures/lib/gitlab/import_export/with_invalid_issues_and_milestones/tree/project.json11
-rw-r--r--spec/fixtures/lib/gitlab/import_export/with_invalid_issues_and_milestones/tree/project/issues.ndjson2
-rw-r--r--spec/fixtures/lib/gitlab/import_export/with_invalid_issues_and_milestones/tree/project/milestones.ndjson2
-rw-r--r--spec/fixtures/scripts/internal_events/metrics/ee_total_28d_single_event.yml1
-rw-r--r--spec/fixtures/scripts/internal_events/metrics/ee_total_7d_single_event.yml1
-rw-r--r--spec/fixtures/scripts/internal_events/metrics/ee_total_single_event.yml1
-rw-r--r--spec/fixtures/scripts/internal_events/metrics/keyboard_smashed_metric_28d.yml1
-rw-r--r--spec/fixtures/scripts/internal_events/metrics/keyboard_smashed_metric_7d.yml1
-rw-r--r--spec/fixtures/scripts/internal_events/metrics/project_id_28d_multiple_events.yml1
-rw-r--r--spec/fixtures/scripts/internal_events/metrics/project_id_7d_multiple_events.yml1
-rw-r--r--spec/fixtures/scripts/internal_events/metrics/total_single_event.yml1
-rw-r--r--spec/fixtures/scripts/internal_events/metrics/user_id_28d_single_event.yml1
-rw-r--r--spec/fixtures/scripts/internal_events/metrics/user_id_7d_single_event.yml1
-rw-r--r--spec/fixtures/security_reports/master/gl-common-scanning-report-with-unicode-null-character.json79
-rw-r--r--spec/frontend/admin/abuse_report/components/user_details_spec.js67
-rw-r--r--spec/frontend/admin/abuse_report/mock_data.js6
-rw-r--r--spec/frontend/alerts_settings/components/alerts_settings_form_spec.js250
-rw-r--r--spec/frontend/behaviors/secret_values_spec.js230
-rw-r--r--spec/frontend/blob/openapi/index_spec.js13
-rw-r--r--spec/frontend/boards/board_list_helper.js1
-rw-r--r--spec/frontend/boards/board_list_spec.js83
-rw-r--r--spec/frontend/boards/components/board_add_new_column_form_spec.js22
-rw-r--r--spec/frontend/boards/components/board_add_new_column_trigger_spec.js7
-rw-r--r--spec/frontend/boards/components/board_app_spec.js8
-rw-r--r--spec/frontend/boards/components/board_card_move_to_position_spec.js12
-rw-r--r--spec/frontend/boards/components/board_card_spec.js1
-rw-r--r--spec/frontend/boards/components/board_content_spec.js2
-rw-r--r--spec/frontend/boards/components/board_top_bar_spec.js1
-rw-r--r--spec/frontend/boards/components/boards_selector_spec.js1
-rw-r--r--spec/frontend/boards/components/config_toggle_spec.js7
-rw-r--r--spec/frontend/boards/mock_data.js78
-rw-r--r--spec/frontend/boards/project_select_spec.js6
-rw-r--r--spec/frontend/boards/stores/actions_spec.js2098
-rw-r--r--spec/frontend/boards/stores/getters_spec.js203
-rw-r--r--spec/frontend/boards/stores/state_spec.js11
-rw-r--r--spec/frontend/captcha/captcha_modal_spec.js63
-rw-r--r--spec/frontend/ci/catalog/components/details/ci_resource_about_spec.js4
-rw-r--r--spec/frontend/ci/catalog/components/details/ci_resource_components_spec.js4
-rw-r--r--spec/frontend/ci/catalog/components/details/ci_resource_header_spec.js2
-rw-r--r--spec/frontend/ci/catalog/components/list/catalog_search_spec.js13
-rw-r--r--spec/frontend/ci/catalog/components/list/catalog_tabs_spec.js71
-rw-r--r--spec/frontend/ci/catalog/components/list/ci_resources_list_item_spec.js4
-rw-r--r--spec/frontend/ci/catalog/components/list/ci_resources_list_spec.js97
-rw-r--r--spec/frontend/ci/catalog/components/pages/ci_resources_page_spec.js68
-rw-r--r--spec/frontend/ci/catalog/mock.js34
-rw-r--r--spec/frontend/ci/ci_environments_dropdown/ci_environments_dropdown_spec.js (renamed from spec/frontend/ci/ci_variable_list/components/ci_environments_dropdown_spec.js)65
-rw-r--r--spec/frontend/ci/ci_environments_dropdown/utils_spec.js (renamed from spec/frontend/ci/ci_variable_list/utils_spec.js)14
-rw-r--r--spec/frontend/ci/ci_variable_list/components/ci_group_variables_spec.js2
-rw-r--r--spec/frontend/ci/ci_variable_list/components/ci_project_variables_spec.js2
-rw-r--r--spec/frontend/ci/ci_variable_list/components/ci_variable_drawer_spec.js8
-rw-r--r--spec/frontend/ci/ci_variable_list/components/ci_variable_settings_spec.js4
-rw-r--r--spec/frontend/ci/ci_variable_list/components/ci_variable_shared_spec.js6
-rw-r--r--spec/frontend/ci/ci_variable_list/mocks.js2
-rw-r--r--spec/frontend/ci/pipeline_details/test_reports/mock_data.js8
-rw-r--r--spec/frontend/ci/pipeline_details/test_reports/test_reports_spec.js44
-rw-r--r--spec/frontend/ci/pipeline_details/test_reports/test_suite_table_spec.js10
-rw-r--r--spec/frontend/ci/pipeline_editor/components/popovers/walkthrough_popover_spec.js23
-rw-r--r--spec/frontend/ci/pipelines_page/components/empty_state/pipelines_ci_templates_spec.js3
-rw-r--r--spec/frontend/ci/runner/components/runner_job_status_badge_spec.js3
-rw-r--r--spec/frontend/clusters/agents/components/show_spec.js20
-rw-r--r--spec/frontend/comment_templates/components/form_spec.js14
-rw-r--r--spec/frontend/commit/components/signature_badge_spec.js1
-rw-r--r--spec/frontend/content_editor/components/wrappers/table_cell_base_spec.js42
-rw-r--r--spec/frontend/content_editor/extensions/copy_paste_spec.js13
-rw-r--r--spec/frontend/content_editor/extensions/task_item_spec.js115
-rw-r--r--spec/frontend/content_editor/services/markdown_serializer_spec.js50
-rw-r--r--spec/frontend/content_editor/services/markdown_sourcemap_spec.js33
-rw-r--r--spec/frontend/custom_emoji/components/__snapshots__/list_spec.js.snap2
-rw-r--r--spec/frontend/custom_emoji/components/list_spec.js3
-rw-r--r--spec/frontend/deploy_keys/components/action_btn_spec.js43
-rw-r--r--spec/frontend/deploy_keys/components/app_spec.js244
-rw-r--r--spec/frontend/deploy_keys/components/key_spec.js154
-rw-r--r--spec/frontend/deploy_keys/components/keys_panel_spec.js13
-rw-r--r--spec/frontend/deploy_keys/graphql/resolvers_spec.js7
-rw-r--r--spec/frontend/diffs/components/__snapshots__/tree_list_spec.js.snap160
-rw-r--r--spec/frontend/diffs/components/app_spec.js39
-rw-r--r--spec/frontend/diffs/components/diff_file_header_spec.js27
-rw-r--r--spec/frontend/diffs/components/diff_file_spec.js146
-rw-r--r--spec/frontend/diffs/components/diff_row_utils_spec.js39
-rw-r--r--spec/frontend/diffs/components/tree_list_spec.js103
-rw-r--r--spec/frontend/diffs/store/actions_spec.js137
-rw-r--r--spec/frontend/diffs/store/getters_spec.js32
-rw-r--r--spec/frontend/diffs/store/mutations_spec.js57
-rw-r--r--spec/frontend/diffs/store/utils_spec.js11
-rw-r--r--spec/frontend/editor/schema/ci/ci_schema_spec.js22
-rw-r--r--spec/frontend/editor/schema/ci/yaml_tests/negative_tests/image.yml11
-rw-r--r--spec/frontend/editor/schema/ci/yaml_tests/negative_tests/secrets.yml18
-rw-r--r--spec/frontend/editor/schema/ci/yaml_tests/negative_tests/services.yml14
-rw-r--r--spec/frontend/editor/schema/ci/yaml_tests/negative_tests/workflow/auto_cancel/on_job_failure.yml (renamed from spec/frontend/editor/schema/ci/yaml_tests/negative_tests/auto_cancel_pipeline.yml)1
-rw-r--r--spec/frontend/editor/schema/ci/yaml_tests/negative_tests/workflow/auto_cancel/on_new_commit.yml3
-rw-r--r--spec/frontend/editor/schema/ci/yaml_tests/negative_tests/workflow/rules/auto_cancel/on_job_failure.yml7
-rw-r--r--spec/frontend/editor/schema/ci/yaml_tests/negative_tests/workflow/rules/auto_cancel/on_new_commit.yml7
-rw-r--r--spec/frontend/editor/schema/ci/yaml_tests/positive_tests/auto_cancel_pipeline/on_job_failure/none.yml4
-rw-r--r--spec/frontend/editor/schema/ci/yaml_tests/positive_tests/image.yml13
-rw-r--r--spec/frontend/editor/schema/ci/yaml_tests/positive_tests/secrets.yml29
-rw-r--r--spec/frontend/editor/schema/ci/yaml_tests/positive_tests/services.yml15
-rw-r--r--spec/frontend/editor/schema/ci/yaml_tests/positive_tests/workflow/auto_cancel/on_job_failure.yml (renamed from spec/frontend/editor/schema/ci/yaml_tests/positive_tests/auto_cancel_pipeline/on_job_failure/all.yml)1
-rw-r--r--spec/frontend/editor/schema/ci/yaml_tests/positive_tests/workflow/auto_cancel/on_new_commit.yml3
-rw-r--r--spec/frontend/editor/schema/ci/yaml_tests/positive_tests/workflow/rules/auto_cancel/on_job_failure.yml7
-rw-r--r--spec/frontend/editor/schema/ci/yaml_tests/positive_tests/workflow/rules/auto_cancel/on_new_commit.yml7
-rw-r--r--spec/frontend/emoji/components/emoji_group_spec.js1
-rw-r--r--spec/frontend/environments/helpers/k8s_integration_helper_spec.js30
-rw-r--r--spec/frontend/environments/kubernetes_status_bar_spec.js53
-rw-r--r--spec/frontend/error_tracking/components/error_details_info_spec.js7
-rw-r--r--spec/frontend/error_tracking/components/error_tracking_list_spec.js22
-rw-r--r--spec/frontend/fixtures/static/oauth_remember_me.html8
-rw-r--r--spec/frontend/groups/components/app_spec.js3
-rw-r--r--spec/frontend/groups/components/group_item_spec.js61
-rw-r--r--spec/frontend/groups/components/group_name_and_path_spec.js17
-rw-r--r--spec/frontend/groups/components/overview_tabs_spec.js16
-rw-r--r--spec/frontend/groups_projects/components/more_actions_dropdown_spec.js30
-rw-r--r--spec/frontend/ide/lib/alerts/environment_spec.js21
-rw-r--r--spec/frontend/ide/services/index_spec.js33
-rw-r--r--spec/frontend/ide/stores/actions/alert_spec.js46
-rw-r--r--spec/frontend/ide/stores/getters/alert_spec.js46
-rw-r--r--spec/frontend/ide/stores/mutations/alert_spec.js26
-rw-r--r--spec/frontend/import_entities/import_groups/components/import_status_spec.js1
-rw-r--r--spec/frontend/invite_members/components/invite_modal_base_spec.js17
-rw-r--r--spec/frontend/invite_members/utils/member_utils_spec.js16
-rw-r--r--spec/frontend/jira_connect/branches/components/project_dropdown_spec.js78
-rw-r--r--spec/frontend/jira_connect/branches/mock_data.js30
-rw-r--r--spec/frontend/kubernetes_dashboard/components/workload_table_spec.js11
-rw-r--r--spec/frontend/kubernetes_dashboard/graphql/mock_data.js246
-rw-r--r--spec/frontend/kubernetes_dashboard/graphql/resolvers/kubernetes_spec.js254
-rw-r--r--spec/frontend/kubernetes_dashboard/helpers/k8s_integration_helper_spec.js80
-rw-r--r--spec/frontend/kubernetes_dashboard/pages/cron_jobs_page_spec.js102
-rw-r--r--spec/frontend/kubernetes_dashboard/pages/jobs_page_spec.js102
-rw-r--r--spec/frontend/kubernetes_dashboard/pages/services_page_spec.js104
-rw-r--r--spec/frontend/lib/utils/number_utils_spec.js (renamed from spec/frontend/lib/utils/number_utility_spec.js)22
-rw-r--r--spec/frontend/lib/utils/secret_detection_spec.js5
-rw-r--r--spec/frontend/lib/utils/text_utility_spec.js8
-rw-r--r--spec/frontend/logo_spec.js8
-rw-r--r--spec/frontend/ml/model_registry/apps/index_ml_models_spec.js45
-rw-r--r--spec/frontend/ml/model_registry/apps/new_ml_model_spec.js119
-rw-r--r--spec/frontend/ml/model_registry/apps/show_ml_model_spec.js11
-rw-r--r--spec/frontend/ml/model_registry/components/actions_dropdown_spec.js39
-rw-r--r--spec/frontend/ml/model_registry/components/candidate_list_spec.js94
-rw-r--r--spec/frontend/ml/model_registry/components/model_version_list_spec.js90
-rw-r--r--spec/frontend/ml/model_registry/components/searchable_list_spec.js170
-rw-r--r--spec/frontend/ml/model_registry/graphql_mock_data.js24
-rw-r--r--spec/frontend/ml/model_registry/mock_data.js1
-rw-r--r--spec/frontend/oauth_remember_me_spec.js8
-rw-r--r--spec/frontend/observability/client_spec.js32
-rw-r--r--spec/frontend/organizations/new/components/app_spec.js27
-rw-r--r--spec/frontend/organizations/settings/general/components/organization_settings_spec.js71
-rw-r--r--spec/frontend/organizations/shared/components/groups_view_spec.js10
-rw-r--r--spec/frontend/organizations/shared/components/new_edit_form_spec.js71
-rw-r--r--spec/frontend/organizations/shared/components/projects_view_spec.js10
-rw-r--r--spec/frontend/organizations/show/components/app_spec.js7
-rw-r--r--spec/frontend/organizations/show/components/organization_description_spec.js46
-rw-r--r--spec/frontend/packages_and_registries/container_registry/explorer/components/details_page/details_header_spec.js1
-rw-r--r--spec/frontend/packages_and_registries/package_registry/components/list/__snapshots__/package_list_row_spec.js.snap8
-rw-r--r--spec/frontend/packages_and_registries/package_registry/components/list/package_list_row_spec.js2
-rw-r--r--spec/frontend/packages_and_registries/settings/group/components/group_settings_app_spec.js14
-rw-r--r--spec/frontend/packages_and_registries/settings/project/settings/components/packages_protection_rules_spec.js97
-rw-r--r--spec/frontend/packages_and_registries/settings/project/settings/components/registry_settings_app_spec.js19
-rw-r--r--spec/frontend/packages_and_registries/settings/project/settings/mock_data.js33
-rw-r--r--spec/frontend/pages/import/bulk_imports/history/components/bulk_imports_history_app_spec.js112
-rw-r--r--spec/frontend/pages/projects/shared/permissions/components/ci_catalog_settings_spec.js6
-rw-r--r--spec/frontend/pages/sessions/new/preserve_url_fragment_spec.js6
-rw-r--r--spec/frontend/performance_bar/components/performance_bar_app_spec.js3
-rw-r--r--spec/frontend/performance_bar/components/request_warning_spec.js23
-rw-r--r--spec/frontend/profile/preferences/components/profile_preferences_spec.js15
-rw-r--r--spec/frontend/projects/commit/components/commit_comments_button_spec.js42
-rw-r--r--spec/frontend/projects/new/components/new_project_url_select_spec.js69
-rw-r--r--spec/frontend/projects/settings/components/new_access_dropdown_spec.js23
-rw-r--r--spec/frontend/projects/settings/repository/branch_rules/app_spec.js123
-rw-r--r--spec/frontend/projects/settings/repository/branch_rules/mock_data.js14
-rw-r--r--spec/frontend/releases/__snapshots__/util_spec.js.snap8
-rw-r--r--spec/frontend/releases/components/app_edit_new_spec.js19
-rw-r--r--spec/frontend/releases/components/app_index_spec.js44
-rw-r--r--spec/frontend/releases/mock_data.js11
-rw-r--r--spec/frontend/releases/stores/modules/detail/actions_spec.js52
-rw-r--r--spec/frontend/releases/stores/modules/detail/getters_spec.js15
-rw-r--r--spec/frontend/search/store/actions_spec.js25
-rw-r--r--spec/frontend/security_configuration/components/feature_card_spec.js2
-rw-r--r--spec/frontend/security_configuration/mock_data.js79
-rw-r--r--spec/frontend/security_configuration/utils_spec.js109
-rw-r--r--spec/frontend/sidebar/components/labels/labels_select_widget/dropdown_contents_create_view_spec.js45
-rw-r--r--spec/frontend/sidebar/components/labels/labels_select_widget/mock_data.js24
-rw-r--r--spec/frontend/sidebar/components/mock_data.js24
-rw-r--r--spec/frontend/sidebar/components/sidebar_color_picker_spec.js58
-rw-r--r--spec/frontend/super_sidebar/components/create_menu_spec.js1
-rw-r--r--spec/frontend/super_sidebar/components/user_menu_spec.js58
-rw-r--r--spec/frontend/super_sidebar/mock_data.js101
-rw-r--r--spec/frontend/usage_quotas/storage/components/namespace_storage_app_spec.js51
-rw-r--r--spec/frontend/usage_quotas/storage/components/storage_usage_overview_card_spec.js44
-rw-r--r--spec/frontend/usage_quotas/storage/components/storage_usage_statistics_spec.js43
-rw-r--r--spec/frontend/usage_quotas/storage/mock_data.js2
-rw-r--r--spec/frontend/vue_merge_request_widget/components/mr_widget_rebase_spec.js16
-rw-r--r--spec/frontend/vue_merge_request_widget/components/states/mr_widget_ready_to_merge_spec.js49
-rw-r--r--spec/frontend/vue_merge_request_widget/mr_widget_options_spec.js4
-rw-r--r--spec/frontend/vue_shared/components/file_row_spec.js13
-rw-r--r--spec/frontend/vue_shared/components/filtered_search_bar/tokens/daterange_token_spec.js170
-rw-r--r--spec/frontend/vue_shared/components/gl_countdown_spec.js4
-rw-r--r--spec/frontend/vue_shared/components/groups_list/groups_list_item_spec.js5
-rw-r--r--spec/frontend/vue_shared/components/groups_list/groups_list_spec.js7
-rw-r--r--spec/frontend/vue_shared/components/help_page_link/help_page_link_spec.js51
-rw-r--r--spec/frontend/vue_shared/components/markdown/comment_templates_dropdown_spec.js6
-rw-r--r--spec/frontend/vue_shared/components/markdown/markdown_editor_spec.js17
-rw-r--r--spec/frontend/vue_shared/components/projects_list/projects_list_item_spec.js3
-rw-r--r--spec/frontend/vue_shared/components/projects_list/projects_list_spec.js7
-rw-r--r--spec/frontend/vue_shared/components/runner_instructions/runner_instructions_spec.js33
-rw-r--r--spec/frontend/vue_shared/components/segmented_control_button_group_spec.js1
-rw-r--r--spec/frontend/vue_shared/components/source_viewer/source_viewer_new_spec.js18
-rw-r--r--spec/frontend/vue_shared/components/upload_dropzone/avatar_upload_dropzone_spec.js116
-rw-r--r--spec/frontend/vue_shared/components/user_select_spec.js45
-rw-r--r--spec/frontend/vue_shared/issuable/list/components/issuable_item_spec.js79
-rw-r--r--spec/frontend/work_items/components/notes/work_item_comment_form_spec.js25
-rw-r--r--spec/frontend/work_items/components/shared/work_item_sidebar_dropdown_widget_with_edit_spec.js161
-rw-r--r--spec/frontend/work_items/components/shared/work_item_token_input_spec.js238
-rw-r--r--spec/frontend/work_items/components/work_item_assignees_spec.js3
-rw-r--r--spec/frontend/work_items/components/work_item_attributes_wrapper_spec.js26
-rw-r--r--spec/frontend/work_items/components/work_item_description_rendered_spec.js15
-rw-r--r--spec/frontend/work_items/components/work_item_description_spec.js36
-rw-r--r--spec/frontend/work_items/components/work_item_detail_spec.js84
-rw-r--r--spec/frontend/work_items/components/work_item_links/work_item_links_form_spec.js26
-rw-r--r--spec/frontend/work_items/components/work_item_milestone_inline_spec.js (renamed from spec/frontend/work_items/components/work_item_milestone_spec.js)20
-rw-r--r--spec/frontend/work_items/components/work_item_milestone_with_edit_spec.js209
-rw-r--r--spec/frontend/work_items/components/work_item_parent_inline_spec.js6
-rw-r--r--spec/frontend/work_items/components/work_item_parent_with_edit_spec.js4
-rw-r--r--spec/frontend/work_items/components/work_item_state_toggle_spec.js28
-rw-r--r--spec/frontend/work_items/components/work_item_title_with_edit_spec.js59
-rw-r--r--spec/frontend/work_items/mock_data.js72
-rw-r--r--spec/frontend/work_items/utils_spec.js24
-rw-r--r--spec/graphql/graphql_triggers_spec.rb14
-rw-r--r--spec/graphql/mutations/namespace/package_settings/update_spec.rb12
-rw-r--r--spec/graphql/resolvers/ci/catalog/resources/versions_resolver_spec.rb20
-rw-r--r--spec/graphql/resolvers/ci/group_runners_resolver_spec.rb2
-rw-r--r--spec/graphql/resolvers/ci/project_runners_resolver_spec.rb2
-rw-r--r--spec/graphql/resolvers/ci/runners_resolver_spec.rb10
-rw-r--r--spec/graphql/resolvers/concerns/resolves_groups_spec.rb11
-rw-r--r--spec/graphql/resolvers/container_repository_tags_resolver_spec.rb14
-rw-r--r--spec/graphql/resolvers/design_management/versions_resolver_spec.rb6
-rw-r--r--spec/graphql/resolvers/group_labels_resolver_spec.rb2
-rw-r--r--spec/graphql/resolvers/labels_resolver_spec.rb2
-rw-r--r--spec/graphql/resolvers/ml/find_models_resolver_spec.rb47
-rw-r--r--spec/graphql/resolvers/users_resolver_spec.rb36
-rw-r--r--spec/graphql/types/ci/catalog/resources/component_type_spec.rb2
-rw-r--r--spec/graphql/types/ci/catalog/resources/version_type_spec.rb5
-rw-r--r--spec/graphql/types/ci/inherited_ci_variable_type_spec.rb9
-rw-r--r--spec/graphql/types/ci/instance_variable_type_spec.rb4
-rw-r--r--spec/graphql/types/commit_signatures/verification_status_enum_spec.rb8
-rw-r--r--spec/graphql/types/container_repository_referrer_type_spec.rb17
-rw-r--r--spec/graphql/types/container_repository_tag_type_spec.rb3
-rw-r--r--spec/graphql/types/current_user_todos_type_spec.rb6
-rw-r--r--spec/graphql/types/group_type_spec.rb3
-rw-r--r--spec/graphql/types/merge_request_type_spec.rb1
-rw-r--r--spec/graphql/types/ml/model_links_type_spec.rb11
-rw-r--r--spec/graphql/types/ml/model_type_spec.rb2
-rw-r--r--spec/graphql/types/ml/model_version_links_type_spec.rb2
-rw-r--r--spec/graphql/types/ml/models_order_by_enum_spec.rb11
-rw-r--r--spec/graphql/types/namespace/package_settings_type_spec.rb2
-rw-r--r--spec/graphql/types/namespace_type_spec.rb2
-rw-r--r--spec/graphql/types/organizations/organization_type_spec.rb4
-rw-r--r--spec/graphql/types/permission_types/issue_spec.rb2
-rw-r--r--spec/graphql/types/project_type_spec.rb10
-rw-r--r--spec/graphql/types/subscription_type_spec.rb1
-rw-r--r--spec/graphql/types/work_items/widgets/notes_type_spec.rb2
-rw-r--r--spec/helpers/application_settings_helper_spec.rb1
-rw-r--r--spec/helpers/avatars_helper_spec.rb15
-rw-r--r--spec/helpers/ci/builds_helper_spec.rb22
-rw-r--r--spec/helpers/ci/catalog/resources_helper_spec.rb12
-rw-r--r--spec/helpers/ci/status_helper_spec.rb13
-rw-r--r--spec/helpers/ci/variables_helper_spec.rb17
-rw-r--r--spec/helpers/environments_helper_spec.rb24
-rw-r--r--spec/helpers/groups_helper_spec.rb5
-rw-r--r--spec/helpers/ide_helper_spec.rb33
-rw-r--r--spec/helpers/listbox_helper_spec.rb1
-rw-r--r--spec/helpers/markup_helper_spec.rb6
-rw-r--r--spec/helpers/nav_helper_spec.rb64
-rw-r--r--spec/helpers/organizations/organization_helper_spec.rb24
-rw-r--r--spec/helpers/search_helper_spec.rb4
-rw-r--r--spec/helpers/sessions_helper_spec.rb36
-rw-r--r--spec/helpers/sidebars_helper_spec.rb22
-rw-r--r--spec/helpers/time_zone_helper_spec.rb23
-rw-r--r--spec/helpers/webpack_helper_spec.rb5
-rw-r--r--spec/initializers/session_store_spec.rb4
-rw-r--r--spec/lib/api/entities/bulk_imports/entity_failure_spec.rb14
-rw-r--r--spec/lib/api/entities/diff_spec.rb3
-rw-r--r--spec/lib/api/entities/group_spec.rb24
-rw-r--r--spec/lib/api/entities/merge_request_basic_spec.rb2
-rw-r--r--spec/lib/api/helpers_spec.rb90
-rw-r--r--spec/lib/api/ml/mlflow/api_helpers_spec.rb52
-rw-r--r--spec/lib/atlassian/jira_connect/client_spec.rb6
-rw-r--r--spec/lib/atlassian/jira_connect/serializers/pull_request_entity_spec.rb6
-rw-r--r--spec/lib/backup/database_model_spec.rb185
-rw-r--r--spec/lib/backup/repositories_spec.rb8
-rw-r--r--spec/lib/banzai/filter/custom_emoji_filter_spec.rb4
-rw-r--r--spec/lib/banzai/filter/issuable_reference_expansion_filter_spec.rb16
-rw-r--r--spec/lib/banzai/filter/markdown_engines/base_spec.rb6
-rw-r--r--spec/lib/banzai/filter/markdown_engines/glfm_markdown_spec.rb17
-rw-r--r--spec/lib/banzai/filter/markdown_filter_spec.rb24
-rw-r--r--spec/lib/banzai/filter/references/alert_reference_filter_spec.rb10
-rw-r--r--spec/lib/banzai/filter/references/commit_reference_filter_spec.rb10
-rw-r--r--spec/lib/banzai/filter/references/design_reference_filter_spec.rb4
-rw-r--r--spec/lib/banzai/filter/references/external_issue_reference_filter_spec.rb4
-rw-r--r--spec/lib/banzai/filter/references/issue_reference_filter_spec.rb4
-rw-r--r--spec/lib/banzai/filter/references/label_reference_filter_spec.rb4
-rw-r--r--spec/lib/banzai/filter/references/merge_request_reference_filter_spec.rb4
-rw-r--r--spec/lib/banzai/filter/references/project_reference_filter_spec.rb8
-rw-r--r--spec/lib/banzai/filter/references/reference_cache_spec.rb10
-rw-r--r--spec/lib/banzai/filter/references/snippet_reference_filter_spec.rb10
-rw-r--r--spec/lib/banzai/filter/references/work_item_reference_filter_spec.rb4
-rw-r--r--spec/lib/banzai/issuable_extractor_spec.rb4
-rw-r--r--spec/lib/banzai/pipeline/gfm_pipeline_spec.rb15
-rw-r--r--spec/lib/banzai/reference_parser/snippet_parser_spec.rb4
-rw-r--r--spec/lib/click_house/iterator_spec.rb10
-rw-r--r--spec/lib/container_registry/gitlab_api_client_spec.rb20
-rw-r--r--spec/lib/container_registry/tag_spec.rb25
-rw-r--r--spec/lib/feature/definition_spec.rb6
-rw-r--r--spec/lib/gitlab/application_setting_fetcher_spec.rb224
-rw-r--r--spec/lib/gitlab/auth/two_factor_auth_verifier_spec.rb29
-rw-r--r--spec/lib/gitlab/auth_spec.rb10
-rw-r--r--spec/lib/gitlab/background_migration/backfill_issue_search_data_namespace_id_spec.rb78
-rw-r--r--spec/lib/gitlab/background_migration/backfill_owasp_top_ten_of_vulnerability_reads_spec.rb225
-rw-r--r--spec/lib/gitlab/background_migration/backfill_partition_id_ci_pipeline_artifact_spec.rb94
-rw-r--r--spec/lib/gitlab/background_migration/backfill_partition_id_ci_pipeline_chat_data_spec.rb67
-rw-r--r--spec/lib/gitlab/background_migration/backfill_partition_id_ci_pipeline_config_spec.rb73
-rw-r--r--spec/lib/gitlab/background_migration/backfill_partition_id_ci_pipeline_metadata_spec.rb73
-rw-r--r--spec/lib/gitlab/background_migration/backfill_vs_code_settings_version_spec.rb84
-rw-r--r--spec/lib/gitlab/background_migration/drop_vulnerabilities_without_finding_id_spec.rb124
-rw-r--r--spec/lib/gitlab/bitbucket_server_import/importers/pull_request_importer_spec.rb14
-rw-r--r--spec/lib/gitlab/bitbucket_server_import/importers/pull_request_notes_importer_spec.rb81
-rw-r--r--spec/lib/gitlab/bitbucket_server_import/importers/users_importer_spec.rb7
-rw-r--r--spec/lib/gitlab/bitbucket_server_import/mentions_converter_spec.rb118
-rw-r--r--spec/lib/gitlab/bitbucket_server_import/user_from_mention_spec.rb67
-rw-r--r--spec/lib/gitlab/cache/import/caching_spec.rb52
-rw-r--r--spec/lib/gitlab/checks/changes_access_spec.rb6
-rw-r--r--spec/lib/gitlab/checks/diff_check_spec.rb2
-rw-r--r--spec/lib/gitlab/checks/lfs_check_spec.rb2
-rw-r--r--spec/lib/gitlab/checks/lfs_integrity_spec.rb2
-rw-r--r--spec/lib/gitlab/ci/build/image_spec.rb4
-rw-r--r--spec/lib/gitlab/ci/build/policy/changes_spec.rb2
-rw-r--r--spec/lib/gitlab/ci/build/rules_spec.rb12
-rw-r--r--spec/lib/gitlab/ci/config/entry/auto_cancel_spec.rb2
-rw-r--r--spec/lib/gitlab/ci/config/entry/bridge_spec.rb6
-rw-r--r--spec/lib/gitlab/ci/config/entry/image_spec.rb64
-rw-r--r--spec/lib/gitlab/ci/config/entry/include/rules/rule_spec.rb2
-rw-r--r--spec/lib/gitlab/ci/config/entry/needs_spec.rb21
-rw-r--r--spec/lib/gitlab/ci/config/entry/rules/rule_spec.rb57
-rw-r--r--spec/lib/gitlab/ci/config/entry/rules_spec.rb8
-rw-r--r--spec/lib/gitlab/ci/config/entry/service_spec.rb47
-rw-r--r--spec/lib/gitlab/ci/config/entry/workflow_spec.rb67
-rw-r--r--spec/lib/gitlab/ci/config/external/context_spec.rb85
-rw-r--r--spec/lib/gitlab/ci/config/external/file/component_spec.rb10
-rw-r--r--spec/lib/gitlab/ci/config/external/file/remote_spec.rb34
-rw-r--r--spec/lib/gitlab/ci/parsers/sbom/cyclonedx_properties_spec.rb225
-rw-r--r--spec/lib/gitlab/ci/parsers/sbom/cyclonedx_spec.rb50
-rw-r--r--spec/lib/gitlab/ci/parsers/sbom/source/trivy_spec.rb25
-rw-r--r--spec/lib/gitlab/ci/parsers/security/common_spec.rb28
-rw-r--r--spec/lib/gitlab/ci/pipeline/chain/command_spec.rb2
-rw-r--r--spec/lib/gitlab/ci/pipeline/chain/evaluate_workflow_rules_spec.rb12
-rw-r--r--spec/lib/gitlab/ci/pipeline/chain/helpers_spec.rb49
-rw-r--r--spec/lib/gitlab/ci/pipeline/chain/populate_metadata_spec.rb72
-rw-r--r--spec/lib/gitlab/ci/pipeline/chain/populate_spec.rb15
-rw-r--r--spec/lib/gitlab/ci/reports/security/report_spec.rb4
-rw-r--r--spec/lib/gitlab/ci/variables/builder/pipeline_spec.rb45
-rw-r--r--spec/lib/gitlab/ci/yaml_processor/test_cases/include_spec.rb80
-rw-r--r--spec/lib/gitlab/ci/yaml_processor/test_cases/interruptible_spec.rb5
-rw-r--r--spec/lib/gitlab/ci/yaml_processor_spec.rb32
-rw-r--r--spec/lib/gitlab/cleanup/orphan_job_artifact_final_objects/job_artifact_object_spec.rb90
-rw-r--r--spec/lib/gitlab/cleanup/orphan_job_artifact_final_objects_cleaner_spec.rb263
-rw-r--r--spec/lib/gitlab/current_settings_spec.rb209
-rw-r--r--spec/lib/gitlab/data_builder/pipeline_spec.rb8
-rw-r--r--spec/lib/gitlab/data_builder/push_spec.rb4
-rw-r--r--spec/lib/gitlab/database/click_house_client_spec.rb8
-rw-r--r--spec/lib/gitlab/database/dictionary_spec.rb52
-rw-r--r--spec/lib/gitlab/database/gitlab_schema_spec.rb56
-rw-r--r--spec/lib/gitlab/database/migration_helpers/v2_spec.rb83
-rw-r--r--spec/lib/gitlab/database/namespace_each_batch_spec.rb174
-rw-r--r--spec/lib/gitlab/database/no_new_tables_with_gitlab_main_schema_spec.rb3
-rw-r--r--spec/lib/gitlab/database/partitioning/int_range_partition_spec.rb173
-rw-r--r--spec/lib/gitlab/database/partitioning/int_range_strategy_spec.rb317
-rw-r--r--spec/lib/gitlab/database/partitioning/list/convert_table_spec.rb468
-rw-r--r--spec/lib/gitlab/database/partitioning/sliding_list_strategy_spec.rb4
-rw-r--r--spec/lib/gitlab/database/partitioning_migration_helpers/uniqueness_helpers_spec.rb71
-rw-r--r--spec/lib/gitlab/database/query_analyzers/prevent_cross_database_modification_spec.rb2
-rw-r--r--spec/lib/gitlab/database/sharding_key_spec.rb43
-rw-r--r--spec/lib/gitlab/dependency_linker/package_json_linker_spec.rb16
-rw-r--r--spec/lib/gitlab/dependency_linker_spec.rb13
-rw-r--r--spec/lib/gitlab/diff/file_collection/paginated_merge_request_diff_spec.rb23
-rw-r--r--spec/lib/gitlab/diff/file_spec.rb2
-rw-r--r--spec/lib/gitlab/diff/highlight_spec.rb162
-rw-r--r--spec/lib/gitlab/diff/rendered/notebook/diff_file_helper_spec.rb2
-rw-r--r--spec/lib/gitlab/error_tracking_spec.rb32
-rw-r--r--spec/lib/gitlab/event_store/event_spec.rb7
-rw-r--r--spec/lib/gitlab/git/changed_path_spec.rb31
-rw-r--r--spec/lib/gitlab/git/commit_spec.rb2
-rw-r--r--spec/lib/gitlab/git/compare_spec.rb47
-rw-r--r--spec/lib/gitlab/git/push_spec.rb14
-rw-r--r--spec/lib/gitlab/git/repository_cleaner_spec.rb4
-rw-r--r--spec/lib/gitlab/git/repository_spec.rb156
-rw-r--r--spec/lib/gitlab/git/tag_spec.rb2
-rw-r--r--spec/lib/gitlab/git/tree_spec.rb12
-rw-r--r--spec/lib/gitlab/git_access_project_spec.rb4
-rw-r--r--spec/lib/gitlab/git_access_spec.rb12
-rw-r--r--spec/lib/gitlab/git_spec.rb2
-rw-r--r--spec/lib/gitlab/gitaly_client/analysis_service_spec.rb137
-rw-r--r--spec/lib/gitlab/gitaly_client/commit_service_spec.rb30
-rw-r--r--spec/lib/gitlab/github_gists_import/importer/gists_importer_spec.rb4
-rw-r--r--spec/lib/gitlab/github_import/attachments_downloader_spec.rb46
-rw-r--r--spec/lib/gitlab/github_import/events_cache_spec.rb79
-rw-r--r--spec/lib/gitlab/github_import/importer/attachments/base_importer_spec.rb2
-rw-r--r--spec/lib/gitlab/github_import/importer/attachments/issues_importer_spec.rb67
-rw-r--r--spec/lib/gitlab/github_import/importer/attachments/merge_requests_importer_spec.rb68
-rw-r--r--spec/lib/gitlab/github_import/importer/attachments/notes_importer_spec.rb79
-rw-r--r--spec/lib/gitlab/github_import/importer/attachments/releases_importer_spec.rb67
-rw-r--r--spec/lib/gitlab/github_import/importer/collaborators_importer_spec.rb2
-rw-r--r--spec/lib/gitlab/github_import/importer/diff_notes_importer_spec.rb2
-rw-r--r--spec/lib/gitlab/github_import/importer/events/commented_spec.rb69
-rw-r--r--spec/lib/gitlab/github_import/importer/events/merged_spec.rb27
-rw-r--r--spec/lib/gitlab/github_import/importer/events/reviewed_spec.rb85
-rw-r--r--spec/lib/gitlab/github_import/importer/issue_event_importer_spec.rb12
-rw-r--r--spec/lib/gitlab/github_import/importer/issue_events_importer_spec.rb4
-rw-r--r--spec/lib/gitlab/github_import/importer/issues_importer_spec.rb2
-rw-r--r--spec/lib/gitlab/github_import/importer/lfs_objects_importer_spec.rb2
-rw-r--r--spec/lib/gitlab/github_import/importer/notes_importer_spec.rb2
-rw-r--r--spec/lib/gitlab/github_import/importer/protected_branches_importer_spec.rb4
-rw-r--r--spec/lib/gitlab/github_import/importer/pull_requests/review_importer_spec.rb7
-rw-r--r--spec/lib/gitlab/github_import/importer/pull_requests/review_requests_importer_spec.rb6
-rw-r--r--spec/lib/gitlab/github_import/importer/pull_requests/reviews_importer_spec.rb2
-rw-r--r--spec/lib/gitlab/github_import/importer/pull_requests_importer_spec.rb2
-rw-r--r--spec/lib/gitlab/github_import/importer/replay_events_importer_spec.rb139
-rw-r--r--spec/lib/gitlab/github_import/importer/single_endpoint_diff_notes_importer_spec.rb2
-rw-r--r--spec/lib/gitlab/github_import/importer/single_endpoint_issue_events_importer_spec.rb211
-rw-r--r--spec/lib/gitlab/github_import/importer/single_endpoint_issue_notes_importer_spec.rb2
-rw-r--r--spec/lib/gitlab/github_import/importer/single_endpoint_merge_request_notes_importer_spec.rb2
-rw-r--r--spec/lib/gitlab/github_import/markdown_text_spec.rb6
-rw-r--r--spec/lib/gitlab/github_import/parallel_scheduling_spec.rb85
-rw-r--r--spec/lib/gitlab/github_import/representation/issue_event_spec.rb4
-rw-r--r--spec/lib/gitlab/github_import/representation/note_text_spec.rb34
-rw-r--r--spec/lib/gitlab/github_import/representation/replay_event_spec.rb24
-rw-r--r--spec/lib/gitlab/github_import/settings_spec.rb52
-rw-r--r--spec/lib/gitlab/github_import/user_finder_spec.rb42
-rw-r--r--spec/lib/gitlab/gon_helper_spec.rb1
-rw-r--r--spec/lib/gitlab/highlight_spec.rb29
-rw-r--r--spec/lib/gitlab/http_spec.rb51
-rw-r--r--spec/lib/gitlab/import/page_counter_spec.rb (renamed from spec/lib/gitlab/github_import/page_counter_spec.rb)4
-rw-r--r--spec/lib/gitlab/import_export/all_models.yml9
-rw-r--r--spec/lib/gitlab/import_export/group/relation_tree_restorer_spec.rb37
-rw-r--r--spec/lib/gitlab/import_export/import_failure_service_spec.rb3
-rw-r--r--spec/lib/gitlab/import_export/project/tree_restorer_spec.rb18
-rw-r--r--spec/lib/gitlab/import_export/safe_model_attributes.yml7
-rw-r--r--spec/lib/gitlab/instrumentation/redis_client_middleware_spec.rb224
-rw-r--r--spec/lib/gitlab/legacy_github_import/comment_formatter_spec.rb10
-rw-r--r--spec/lib/gitlab/legacy_github_import/issue_formatter_spec.rb16
-rw-r--r--spec/lib/gitlab/legacy_github_import/pull_request_formatter_spec.rb16
-rw-r--r--spec/lib/gitlab/legacy_github_import/user_formatter_spec.rb8
-rw-r--r--spec/lib/gitlab/legacy_http_spec.rb448
-rw-r--r--spec/lib/gitlab/memory/watchdog/handlers/sidekiq_handler_spec.rb6
-rw-r--r--spec/lib/gitlab/namespaced_session_store_spec.rb25
-rw-r--r--spec/lib/gitlab/pagination/keyset/iterator_spec.rb6
-rw-r--r--spec/lib/gitlab/pagination/keyset/paginator_spec.rb25
-rw-r--r--spec/lib/gitlab/pagination/keyset/simple_order_builder_spec.rb2
-rw-r--r--spec/lib/gitlab/quick_actions/extractor_spec.rb8
-rw-r--r--spec/lib/gitlab/redis/cluster_util_spec.rb13
-rw-r--r--spec/lib/gitlab/redis/cross_slot_spec.rb12
-rw-r--r--spec/lib/gitlab/redis/multi_store_spec.rb210
-rw-r--r--spec/lib/gitlab/redis/sessions_spec.rb21
-rw-r--r--spec/lib/gitlab/redis/shared_state_spec.rb1
-rw-r--r--spec/lib/gitlab/runtime_spec.rb4
-rw-r--r--spec/lib/gitlab/security/scan_configuration_spec.rb101
-rw-r--r--spec/lib/gitlab/sidekiq_config_spec.rb3
-rw-r--r--spec/lib/gitlab/sidekiq_logging/structured_logger_spec.rb2
-rw-r--r--spec/lib/gitlab/sidekiq_middleware/server_metrics_spec.rb4
-rw-r--r--spec/lib/gitlab/sidekiq_migrate_jobs_spec.rb7
-rw-r--r--spec/lib/gitlab/sidekiq_status_spec.rb2
-rw-r--r--spec/lib/gitlab/ssh/commit_spec.rb2
-rw-r--r--spec/lib/gitlab/ssh/signature_spec.rb33
-rw-r--r--spec/lib/gitlab/themes_spec.rb14
-rw-r--r--spec/lib/gitlab/tracking/destinations/database_events_snowplow_spec.rb140
-rw-r--r--spec/lib/gitlab/tracking/event_definition_spec.rb10
-rw-r--r--spec/lib/gitlab/tracking/event_definition_validate_all_spec.rb12
-rw-r--r--spec/lib/gitlab/tracking_spec.rb32
-rw-r--r--spec/lib/gitlab/usage/metric_definition_spec.rb56
-rw-r--r--spec/lib/gitlab/usage/metric_definition_validate_all_spec.rb14
-rw-r--r--spec/lib/gitlab/usage/service_ping_report_spec.rb5
-rw-r--r--spec/lib/gitlab/usage_data_counters/ci_template_unique_counter_spec.rb12
-rw-r--r--spec/lib/gitlab/usage_data_counters/hll_redis_counter_spec.rb58
-rw-r--r--spec/lib/gitlab/usage_data_counters/merge_request_activity_unique_counter_spec.rb8
-rw-r--r--spec/lib/gitlab/usage_data_counters/quick_action_activity_unique_counter_spec.rb8
-rw-r--r--spec/lib/gitlab/usage_data_counters/work_item_activity_unique_counter_spec.rb8
-rw-r--r--spec/lib/integrations/google_cloud_platform/artifact_registry/client_spec.rb11
-rw-r--r--spec/lib/sidebars/organizations/menus/scope_menu_spec.rb5
-rw-r--r--spec/lib/sidebars/projects/menus/packages_registries_menu_spec.rb29
-rw-r--r--spec/lib/sidebars/projects/super_sidebar_menus/deploy_menu_spec.rb3
-rw-r--r--spec/mailers/notify_spec.rb103
-rw-r--r--spec/migrations/20230228142350_add_notifications_work_item_widget_spec.rb4
-rw-r--r--spec/migrations/20230317162059_add_current_user_todos_work_item_widget_spec.rb4
-rw-r--r--spec/migrations/20230323101138_add_award_emoji_work_item_widget_spec.rb4
-rw-r--r--spec/migrations/20230807083334_add_linked_items_work_item_widget_spec.rb6
-rw-r--r--spec/migrations/20231207145335_cleanup_group_level_work_items_spec.rb216
-rw-r--r--spec/migrations/20231211154654_add_work_items_rolledup_dates_widget_spec.rb10
-rw-r--r--spec/migrations/20231212135235_queue_backfill_vs_code_settings_version_spec.rb26
-rw-r--r--spec/migrations/20231214111617_queue_backfill_owasp_top_ten_of_vulnerability_reads_spec.rb26
-rw-r--r--spec/migrations/20231218092401_queue_backfill_partition_id_ci_pipeline_chat_data_spec.rb56
-rw-r--r--spec/migrations/20231221223259_queue_drop_vulnerabilities_without_finding_id_spec.rb26
-rw-r--r--spec/migrations/20240105144908_queue_backfill_issue_search_data_namespace_id_spec.rb26
-rw-r--r--spec/migrations/20240108082419_queue_backfill_partition_id_ci_pipeline_metadata_spec.rb56
-rw-r--r--spec/migrations/20240108121335_copy_internal_ids_for_epics_and_issues_usage_on_groups_spec.rb69
-rw-r--r--spec/migrations/20240108185335_backfill_internal_ids_with_issues_usage_for_epics_spec.rb52
-rw-r--r--spec/migrations/20240109090354_queue_backfill_partition_id_ci_pipeline_artifact_spec.rb56
-rw-r--r--spec/migrations/20240110090352_queue_backfill_partition_id_ci_pipeline_config_spec.rb56
-rw-r--r--spec/migrations/sent_notifications_self_install_id_swap_spec.rb81
-rw-r--r--spec/models/analytics/cycle_analytics/aggregation_spec.rb57
-rw-r--r--spec/models/analytics/cycle_analytics/stage_spec.rb16
-rw-r--r--spec/models/application_setting_spec.rb341
-rw-r--r--spec/models/bulk_imports/entity_spec.rb6
-rw-r--r--spec/models/bulk_imports/failure_spec.rb16
-rw-r--r--spec/models/ci/build_spec.rb143
-rw-r--r--spec/models/ci/catalog/resources/version_spec.rb30
-rw-r--r--spec/models/ci/instance_variable_spec.rb6
-rw-r--r--spec/models/ci/namespace_mirror_spec.rb10
-rw-r--r--spec/models/ci/pipeline_artifact_spec.rb17
-rw-r--r--spec/models/ci/pipeline_chat_data_spec.rb27
-rw-r--r--spec/models/ci/pipeline_config_spec.rb17
-rw-r--r--spec/models/ci/pipeline_metadata_spec.rb17
-rw-r--r--spec/models/ci/pipeline_spec.rb38
-rw-r--r--spec/models/ci/processable_spec.rb2
-rw-r--r--spec/models/ci/project_mirror_spec.rb16
-rw-r--r--spec/models/ci/runner_manager_spec.rb343
-rw-r--r--spec/models/ci/runner_spec.rb171
-rw-r--r--spec/models/clusters/cluster_spec.rb6
-rw-r--r--spec/models/commit_collection_spec.rb2
-rw-r--r--spec/models/commit_status_spec.rb2
-rw-r--r--spec/models/concerns/commit_signature_spec.rb22
-rw-r--r--spec/models/concerns/database_event_tracking_spec.rb85
-rw-r--r--spec/models/concerns/routable_spec.rb26
-rw-r--r--spec/models/container_registry/protection/rule_spec.rb190
-rw-r--r--spec/models/container_repository_spec.rb39
-rw-r--r--spec/models/deployment_spec.rb16
-rw-r--r--spec/models/design_management/design_spec.rb2
-rw-r--r--spec/models/group_spec.rb121
-rw-r--r--spec/models/integrations/chat_message/push_message_spec.rb8
-rw-r--r--spec/models/integrations/diffblue_cover_spec.rb74
-rw-r--r--spec/models/integrations/teamcity_spec.rb2
-rw-r--r--spec/models/issue_email_participant_spec.rb16
-rw-r--r--spec/models/label_spec.rb14
-rw-r--r--spec/models/member_spec.rb288
-rw-r--r--spec/models/members/group_member_spec.rb61
-rw-r--r--spec/models/merge_request/metrics_spec.rb8
-rw-r--r--spec/models/merge_request_diff_spec.rb122
-rw-r--r--spec/models/merge_request_spec.rb60
-rw-r--r--spec/models/ml/candidate_metric_spec.rb2
-rw-r--r--spec/models/ml/candidate_param_spec.rb2
-rw-r--r--spec/models/ml/experiment_spec.rb14
-rw-r--r--spec/models/ml/model_metadata_spec.rb2
-rw-r--r--spec/models/ml/model_version_metadata_spec.rb29
-rw-r--r--spec/models/ml/model_version_spec.rb29
-rw-r--r--spec/models/namespace/package_setting_spec.rb69
-rw-r--r--spec/models/namespace_spec.rb29
-rw-r--r--spec/models/namespaces/descendants_spec.rb68
-rw-r--r--spec/models/namespaces/traversal/cached_spec.rb104
-rw-r--r--spec/models/note_diff_file_spec.rb2
-rw-r--r--spec/models/note_spec.rb4
-rw-r--r--spec/models/onboarding/completion_spec.rb40
-rw-r--r--spec/models/organizations/organization_detail_spec.rb9
-rw-r--r--spec/models/organizations/organization_spec.rb28
-rw-r--r--spec/models/organizations/organization_user_spec.rb37
-rw-r--r--spec/models/packages/protection/rule_spec.rb46
-rw-r--r--spec/models/preloaders/commit_status_preloader_spec.rb4
-rw-r--r--spec/models/project_authorizations/changes_spec.rb100
-rw-r--r--spec/models/project_spec.rb60
-rw-r--r--spec/models/project_statistics_spec.rb18
-rw-r--r--spec/models/project_team_spec.rb54
-rw-r--r--spec/models/projects/project_topic_spec.rb1
-rw-r--r--spec/models/projects/topic_spec.rb15
-rw-r--r--spec/models/release_spec.rb19
-rw-r--r--spec/models/repository_spec.rb6
-rw-r--r--spec/models/resource_label_event_spec.rb62
-rw-r--r--spec/models/route_spec.rb16
-rw-r--r--spec/models/time_tracking/timelog_category_spec.rb3
-rw-r--r--spec/models/timelog_spec.rb1
-rw-r--r--spec/models/user_spec.rb225
-rw-r--r--spec/models/users/in_product_marketing_email_spec.rb137
-rw-r--r--spec/models/users/phone_number_validation_spec.rb165
-rw-r--r--spec/models/work_items/hierarchy_restriction_spec.rb20
-rw-r--r--spec/models/work_items/widget_definition_spec.rb4
-rw-r--r--spec/policies/global_policy_spec.rb16
-rw-r--r--spec/policies/organizations/organization_policy_spec.rb18
-rw-r--r--spec/policies/project_policy_spec.rb10
-rw-r--r--spec/presenters/blob_presenter_spec.rb16
-rw-r--r--spec/presenters/blobs/notebook_presenter_spec.rb2
-rw-r--r--spec/presenters/projects/security/configuration_presenter_spec.rb1
-rw-r--r--spec/requests/api/ci/jobs_spec.rb9
-rw-r--r--spec/requests/api/ci/pipeline_schedules_spec.rb6
-rw-r--r--spec/requests/api/ci/pipelines_spec.rb12
-rw-r--r--spec/requests/api/ci/runner/jobs_request_post_spec.rb6
-rw-r--r--spec/requests/api/ci/runner/yamls/image-executor_opts-user.yml25
-rw-r--r--spec/requests/api/ci/runner/yamls/service-executor_opts-user.yml27
-rw-r--r--spec/requests/api/ci/runners_spec.rb281
-rw-r--r--spec/requests/api/commits_spec.rb10
-rw-r--r--spec/requests/api/deploy_keys_spec.rb4
-rw-r--r--spec/requests/api/deployments_spec.rb4
-rw-r--r--spec/requests/api/draft_notes_spec.rb4
-rw-r--r--spec/requests/api/feature_flags_spec.rb4
-rw-r--r--spec/requests/api/graphql/achievements/user_achievements_query_spec.rb6
-rw-r--r--spec/requests/api/graphql/boards/board_list_issues_query_spec.rb4
-rw-r--r--spec/requests/api/graphql/ci/catalog/resource_spec.rb248
-rw-r--r--spec/requests/api/graphql/ci/catalog/resources_spec.rb8
-rw-r--r--spec/requests/api/graphql/ci/instance_variables_spec.rb2
-rw-r--r--spec/requests/api/graphql/ci/runner_spec.rb158
-rw-r--r--spec/requests/api/graphql/ci/runners_spec.rb202
-rw-r--r--spec/requests/api/graphql/container_repository/container_repository_details_spec.rb79
-rw-r--r--spec/requests/api/graphql/mutations/branch_rules/create_spec.rb68
-rw-r--r--spec/requests/api/graphql/mutations/ml/models/create_spec.rb48
-rw-r--r--spec/requests/api/graphql/mutations/namespace/package_settings/update_spec.rb16
-rw-r--r--spec/requests/api/graphql/mutations/organizations/update_spec.rb2
-rw-r--r--spec/requests/api/graphql/mutations/work_items/create_spec.rb12
-rw-r--r--spec/requests/api/graphql/namespace/projects_spec.rb21
-rw-r--r--spec/requests/api/graphql/namespace/root_storage_statistics_spec.rb2
-rw-r--r--spec/requests/api/graphql/namespace_query_spec.rb48
-rw-r--r--spec/requests/api/graphql/organizations/organization_query_spec.rb193
-rw-r--r--spec/requests/api/graphql/organizations/organizations_query_spec.rb56
-rw-r--r--spec/requests/api/graphql/project/container_repositories_spec.rb4
-rw-r--r--spec/requests/api/graphql/project/merge_request_spec.rb2
-rw-r--r--spec/requests/api/graphql/project/tree/tree_spec.rb2
-rw-r--r--spec/requests/api/graphql/projects/projects_spec.rb6
-rw-r--r--spec/requests/api/graphql/user/user_achievements_query_spec.rb6
-rw-r--r--spec/requests/api/graphql/work_item_spec.rb4
-rw-r--r--spec/requests/api/group_milestones_spec.rb4
-rw-r--r--spec/requests/api/groups_spec.rb77
-rw-r--r--spec/requests/api/import_bitbucket_server_spec.rb2
-rw-r--r--spec/requests/api/import_github_spec.rb7
-rw-r--r--spec/requests/api/internal/base_spec.rb2
-rw-r--r--spec/requests/api/internal/kubernetes_spec.rb8
-rw-r--r--spec/requests/api/invitations_spec.rb10
-rw-r--r--spec/requests/api/issue_links_spec.rb4
-rw-r--r--spec/requests/api/issues/get_project_issues_spec.rb12
-rw-r--r--spec/requests/api/maven_packages_spec.rb11
-rw-r--r--spec/requests/api/members_spec.rb2
-rw-r--r--spec/requests/api/merge_request_approvals_spec.rb12
-rw-r--r--spec/requests/api/merge_requests_spec.rb4
-rw-r--r--spec/requests/api/ml/mlflow/experiments_spec.rb77
-rw-r--r--spec/requests/api/ml/mlflow/model_versions_spec.rb41
-rw-r--r--spec/requests/api/namespaces_spec.rb13
-rw-r--r--spec/requests/api/pages_domains_spec.rb2
-rw-r--r--spec/requests/api/pages_spec.rb91
-rw-r--r--spec/requests/api/project_container_repositories_spec.rb15
-rw-r--r--spec/requests/api/project_import_spec.rb4
-rw-r--r--spec/requests/api/projects_spec.rb4
-rw-r--r--spec/requests/api/releases_spec.rb12
-rw-r--r--spec/requests/api/terraform/modules/v1/namespace_packages_spec.rb (renamed from spec/requests/api/terraform/modules/v1/packages_spec.rb)48
-rw-r--r--spec/requests/api/terraform/modules/v1/project_packages_spec.rb112
-rw-r--r--spec/requests/api/users_spec.rb30
-rw-r--r--spec/requests/application_controller_spec.rb30
-rw-r--r--spec/requests/explore/catalog_controller_spec.rb36
-rw-r--r--spec/requests/groups/milestones_controller_spec.rb10
-rw-r--r--spec/requests/groups/registry/repositories_controller_spec.rb4
-rw-r--r--spec/requests/import/gitlab_projects_controller_spec.rb34
-rw-r--r--spec/requests/jwks_controller_spec.rb14
-rw-r--r--spec/requests/oauth/authorizations_controller_spec.rb27
-rw-r--r--spec/requests/openid_connect_spec.rb2
-rw-r--r--spec/requests/organizations/organizations_controller_spec.rb25
-rw-r--r--spec/requests/organizations/settings_controller_spec.rb19
-rw-r--r--spec/requests/projects/gcp/artifact_registry/docker_images_controller_spec.rb5
-rw-r--r--spec/requests/projects/google_cloud/databases_controller_spec.rb8
-rw-r--r--spec/requests/projects/google_cloud/deployments_controller_spec.rb10
-rw-r--r--spec/requests/projects/ml/models_controller_spec.rb25
-rw-r--r--spec/requests/projects/pipelines_controller_spec.rb18
-rw-r--r--spec/requests/projects/settings/packages_and_registries_controller_spec.rb30
-rw-r--r--spec/requests/search_controller_spec.rb4
-rw-r--r--spec/requests/users_controller_spec.rb40
-rw-r--r--spec/routing/organizations/organizations_controller_routing_spec.rb5
-rw-r--r--spec/routing/uploads_routing_spec.rb13
-rw-r--r--spec/rubocop/cop/gitlab/license_available_usage_spec.rb34
-rw-r--r--spec/rubocop/cop/rails/avoid_time_comparison_spec.rb57
-rw-r--r--spec/rubocop/cop/scalability/file_uploads_spec.rb2
-rw-r--r--spec/scripts/download_downstream_artifact_spec.rb180
-rw-r--r--spec/scripts/lib/glfm/update_example_snapshots_spec.rb12
-rw-r--r--spec/scripts/setup/generate_as_if_foss_env_spec.rb152
-rw-r--r--spec/serializers/activity_pub/activity_serializer_spec.rb138
-rw-r--r--spec/serializers/activity_pub/activity_streams_serializer_spec.rb157
-rw-r--r--spec/serializers/activity_pub/publish_release_activity_serializer_spec.rb13
-rw-r--r--spec/serializers/activity_pub/releases_actor_serializer_spec.rb2
-rw-r--r--spec/serializers/admin/abuse_report_details_entity_spec.rb57
-rw-r--r--spec/serializers/ci/group_variable_entity_spec.rb2
-rw-r--r--spec/serializers/ci/variable_entity_spec.rb2
-rw-r--r--spec/serializers/integrations/field_entity_spec.rb2
-rw-r--r--spec/serializers/profile/event_entity_spec.rb2
-rw-r--r--spec/services/bulk_imports/file_download_service_spec.rb45
-rw-r--r--spec/services/ci/abort_pipelines_service_spec.rb4
-rw-r--r--spec/services/ci/cancel_pipeline_service_spec.rb76
-rw-r--r--spec/services/ci/catalog/resources/create_service_spec.rb4
-rw-r--r--spec/services/ci/catalog/resources/destroy_service_spec.rb4
-rw-r--r--spec/services/ci/catalog/resources/versions/create_service_spec.rb9
-rw-r--r--spec/services/ci/create_pipeline_service/partitioning_spec.rb2
-rw-r--r--spec/services/ci/create_pipeline_service/workflow_auto_cancel_spec.rb86
-rw-r--r--spec/services/ci/expire_pipeline_cache_service_spec.rb2
-rw-r--r--spec/services/ci/job_artifacts/destroy_all_expired_service_spec.rb2
-rw-r--r--spec/services/ci/pipeline_creation/cancel_redundant_pipelines_service_spec.rb209
-rw-r--r--spec/services/ci/retry_job_service_spec.rb4
-rw-r--r--spec/services/ci/runners/unregister_runner_manager_service_spec.rb22
-rw-r--r--spec/services/ci/unlock_pipeline_service_spec.rb22
-rw-r--r--spec/services/ci/update_build_queue_service_spec.rb4
-rw-r--r--spec/services/click_house/sync_strategies/base_sync_strategy_spec.rb169
-rw-r--r--spec/services/click_house/sync_strategies/event_sync_strategy_spec.rb128
-rw-r--r--spec/services/cloud_seed/google_cloud/create_cloudsql_instance_service_spec.rb (renamed from spec/services/google_cloud/create_cloudsql_instance_service_spec.rb)2
-rw-r--r--spec/services/cloud_seed/google_cloud/create_service_accounts_service_spec.rb (renamed from spec/services/google_cloud/create_service_accounts_service_spec.rb)2
-rw-r--r--spec/services/cloud_seed/google_cloud/enable_cloud_run_service_spec.rb (renamed from spec/services/google_cloud/enable_cloud_run_service_spec.rb)2
-rw-r--r--spec/services/cloud_seed/google_cloud/enable_cloudsql_service_spec.rb (renamed from spec/services/google_cloud/enable_cloudsql_service_spec.rb)2
-rw-r--r--spec/services/cloud_seed/google_cloud/enable_vision_ai_service_spec.rb (renamed from spec/services/google_cloud/enable_vision_ai_service_spec.rb)2
-rw-r--r--spec/services/cloud_seed/google_cloud/fetch_google_ip_list_service_spec.rb (renamed from spec/services/google_cloud/fetch_google_ip_list_service_spec.rb)2
-rw-r--r--spec/services/cloud_seed/google_cloud/gcp_region_add_or_replace_service_spec.rb (renamed from spec/services/google_cloud/gcp_region_add_or_replace_service_spec.rb)2
-rw-r--r--spec/services/cloud_seed/google_cloud/generate_pipeline_service_spec.rb (renamed from spec/services/google_cloud/generate_pipeline_service_spec.rb)22
-rw-r--r--spec/services/cloud_seed/google_cloud/get_cloudsql_instances_service_spec.rb (renamed from spec/services/google_cloud/get_cloudsql_instances_service_spec.rb)2
-rw-r--r--spec/services/cloud_seed/google_cloud/service_accounts_service_spec.rb (renamed from spec/services/google_cloud/service_accounts_service_spec.rb)2
-rw-r--r--spec/services/cloud_seed/google_cloud/setup_cloudsql_instance_service_spec.rb (renamed from spec/services/google_cloud/setup_cloudsql_instance_service_spec.rb)2
-rw-r--r--spec/services/design_management/save_designs_service_spec.rb2
-rw-r--r--spec/services/event_create_service_spec.rb34
-rw-r--r--spec/services/git/base_hooks_service_spec.rb2
-rw-r--r--spec/services/git/branch_hooks_service_spec.rb18
-rw-r--r--spec/services/git/branch_push_service_spec.rb2
-rw-r--r--spec/services/git/process_ref_changes_service_spec.rb26
-rw-r--r--spec/services/git/tag_hooks_service_spec.rb2
-rw-r--r--spec/services/git/tag_push_service_spec.rb2
-rw-r--r--spec/services/git/wiki_push_service_spec.rb2
-rw-r--r--spec/services/google_cloud_platform/artifact_registry/list_docker_images_service_spec.rb (renamed from spec/services/integrations/google_cloud_platform/artifact_registry/list_docker_images_service_spec.rb)2
-rw-r--r--spec/services/groups/create_service_spec.rb279
-rw-r--r--spec/services/groups/participants_service_spec.rb16
-rw-r--r--spec/services/groups/update_service_spec.rb54
-rw-r--r--spec/services/import/github_service_spec.rb54
-rw-r--r--spec/services/issuable/common_system_notes_service_spec.rb6
-rw-r--r--spec/services/issue_email_participants/create_service_spec.rb10
-rw-r--r--spec/services/issue_email_participants/destroy_service_spec.rb147
-rw-r--r--spec/services/issue_links/list_service_spec.rb4
-rw-r--r--spec/services/issues/export_csv_service_spec.rb4
-rw-r--r--spec/services/issues/referenced_merge_requests_service_spec.rb16
-rw-r--r--spec/services/issues/update_service_spec.rb12
-rw-r--r--spec/services/labels/available_labels_service_spec.rb8
-rw-r--r--spec/services/members/create_service_spec.rb28
-rw-r--r--spec/services/members/update_service_spec.rb2
-rw-r--r--spec/services/merge_requests/approval_service_spec.rb56
-rw-r--r--spec/services/merge_requests/conflicts/list_service_spec.rb2
-rw-r--r--spec/services/merge_requests/get_urls_service_spec.rb6
-rw-r--r--spec/services/merge_requests/push_options_handler_service_spec.rb8
-rw-r--r--spec/services/merge_requests/pushed_branches_service_spec.rb6
-rw-r--r--spec/services/merge_requests/refresh_service_spec.rb4
-rw-r--r--spec/services/merge_requests/reload_diffs_service_spec.rb6
-rw-r--r--spec/services/merge_requests/remove_approval_service_spec.rb60
-rw-r--r--spec/services/merge_requests/request_review_service_spec.rb8
-rw-r--r--spec/services/milestones/destroy_service_spec.rb4
-rw-r--r--spec/services/milestones/promote_service_spec.rb16
-rw-r--r--spec/services/ml/create_model_service_spec.rb5
-rw-r--r--spec/services/ml/create_model_version_service_spec.rb55
-rw-r--r--spec/services/namespaces/package_settings/update_service_spec.rb12
-rw-r--r--spec/services/notification_recipients/build_service_spec.rb14
-rw-r--r--spec/services/notification_service_spec.rb163
-rw-r--r--spec/services/organizations/create_service_spec.rb2
-rw-r--r--spec/services/organizations/update_service_spec.rb10
-rw-r--r--spec/services/packages/npm/create_package_service_spec.rb66
-rw-r--r--spec/services/packages/terraform_module/create_package_service_spec.rb75
-rw-r--r--spec/services/pages_domains/obtain_lets_encrypt_certificate_service_spec.rb24
-rw-r--r--spec/services/post_receive_service_spec.rb2
-rw-r--r--spec/services/preview_markdown_service_spec.rb2
-rw-r--r--spec/services/projects/cleanup_service_spec.rb2
-rw-r--r--spec/services/projects/destroy_service_spec.rb8
-rw-r--r--spec/services/projects/fork_service_spec.rb698
-rw-r--r--spec/services/projects/participants_service_spec.rb36
-rw-r--r--spec/services/projects/unlink_fork_service_spec.rb8
-rw-r--r--spec/services/projects/update_statistics_service_spec.rb34
-rw-r--r--spec/services/push_event_payload_service_spec.rb12
-rw-r--r--spec/services/quick_actions/interpret_service_spec.rb129
-rw-r--r--spec/services/repositories/changelog_service_spec.rb2
-rw-r--r--spec/services/resource_access_tokens/revoke_service_spec.rb4
-rw-r--r--spec/services/routes/rename_descendants_service_spec.rb208
-rw-r--r--spec/services/security/merge_reports_service_spec.rb56
-rw-r--r--spec/services/spam/spam_verdict_service_spec.rb11
-rw-r--r--spec/services/system_note_service_spec.rb24
-rw-r--r--spec/services/system_notes/issuables_service_spec.rb23
-rw-r--r--spec/services/system_notes/time_tracking_service_spec.rb34
-rw-r--r--spec/services/todo_service_spec.rb18
-rw-r--r--spec/services/todos/destroy/destroyed_issuable_service_spec.rb4
-rw-r--r--spec/services/user_project_access_changed_service_spec.rb4
-rw-r--r--spec/services/users/migrate_records_to_ghost_user_service_spec.rb10
-rw-r--r--spec/services/users/update_todo_count_cache_service_spec.rb4
-rw-r--r--spec/services/work_items/callbacks/assignees_spec.rb (renamed from spec/services/work_items/widgets/assignees_service/update_service_spec.rb)27
-rw-r--r--spec/services/work_items/callbacks/current_user_todos_spec.rb (renamed from spec/services/work_items/widgets/current_user_todos_service/update_service_spec.rb)20
-rw-r--r--spec/services/work_items/callbacks/description_spec.rb (renamed from spec/services/work_items/widgets/description_service/update_service_spec.rb)14
-rw-r--r--spec/services/work_items/callbacks/notifications_spec.rb (renamed from spec/services/work_items/widgets/notifications_service/update_service_spec.rb)8
-rw-r--r--spec/services/work_items/callbacks/start_and_due_date_spec.rb (renamed from spec/services/work_items/widgets/start_and_due_date_service/update_service_spec.rb)20
-rw-r--r--spec/services/work_items/update_service_spec.rb13
-rw-r--r--spec/spec_helper.rb7
-rw-r--r--spec/support/before_all_adapter.rb33
-rw-r--r--spec/support/database/prevent_cross_database_modification.rb7
-rw-r--r--spec/support/db_cleaner.rb2
-rw-r--r--spec/support/factory_bot.rb9
-rw-r--r--spec/support/finder_collection.rb2
-rw-r--r--spec/support/finder_collection_allowlist.yml1
-rw-r--r--spec/support/formatters/json_formatter.rb8
-rw-r--r--spec/support/helpers/admin_mode_helpers.rb27
-rw-r--r--spec/support/helpers/cycle_analytics_helpers.rb12
-rw-r--r--spec/support/helpers/database/duplicate_indexes.yml19
-rw-r--r--spec/support/helpers/database/multiple_databases_helpers.rb2
-rw-r--r--spec/support/helpers/debug_with_puts.rb13
-rw-r--r--spec/support/helpers/dns_helpers.rb24
-rw-r--r--spec/support/helpers/features/invite_members_modal_helpers.rb1
-rw-r--r--spec/support/helpers/login_helpers.rb27
-rw-r--r--spec/support/helpers/models/ci/partitioning_testing/partition_identifiers.rb4
-rw-r--r--spec/support/helpers/navbar_structure_helper.rb16
-rw-r--r--spec/support/helpers/orphan_final_artifacts_cleanup_helpers.rb82
-rw-r--r--spec/support/helpers/stub_configuration.rb7
-rw-r--r--spec/support/helpers/stub_gitlab_calls.rb4
-rw-r--r--spec/support/helpers/stub_requests.rb18
-rw-r--r--spec/support/helpers/user_with_namespace_shim.rb73
-rw-r--r--spec/support/helpers/user_with_namespace_shim.yml1000
-rw-r--r--spec/support/matchers/have_user.rb13
-rw-r--r--spec/support/migration.rb2
-rw-r--r--spec/support/rspec_order_todo.yml18
-rw-r--r--spec/support/rspec_run_time.rb11
-rw-r--r--spec/support/shared_contexts/ci/catalog/resources/version_shared_context.rb45
-rw-r--r--spec/support/shared_contexts/features/integrations/instance_integrations_shared_context.rb2
-rw-r--r--spec/support/shared_contexts/graphql/types/query_type_shared_context.rb2
-rw-r--r--spec/support/shared_contexts/lib/gitlab/database/partitioning/list_partitioning_shared_context.rb2
-rw-r--r--spec/support/shared_contexts/lib/gitlab/sidekiq_logging/structured_logger_shared_context.rb4
-rw-r--r--spec/support/shared_contexts/lib/gitlab/sidekiq_middleware/server_metrics_shared_context.rb1
-rw-r--r--spec/support/shared_examples/analytics/cycle_analytics/parentable_examples.rb13
-rw-r--r--spec/support/shared_examples/ci/runner_with_status_scope_shared_examples.rb29
-rw-r--r--spec/support/shared_examples/controllers/base_action_controller_shared_examples.rb4
-rw-r--r--spec/support/shared_examples/controllers/githubish_import_controller_shared_examples.rb6
-rw-r--r--spec/support/shared_examples/controllers/issuables_list_metadata_shared_examples.rb2
-rw-r--r--spec/support/shared_examples/controllers/snippet_shared_examples.rb4
-rw-r--r--spec/support/shared_examples/controllers/variables_shared_examples.rb3
-rw-r--r--spec/support/shared_examples/features/inviting_groups_shared_examples.rb2
-rw-r--r--spec/support/shared_examples/features/variable_list_drawer_shared_examples.rb2
-rw-r--r--spec/support/shared_examples/features/work_items_shared_examples.rb279
-rw-r--r--spec/support/shared_examples/lib/gitlab/import_export/import_failure_service_shared_examples.rb1
-rw-r--r--spec/support/shared_examples/migrations/add_work_item_widget_shared_examples.rb10
-rw-r--r--spec/support/shared_examples/models/concerns/integrations/base_slack_notification_shared_examples.rb2
-rw-r--r--spec/support/shared_examples/models/concerns/integrations/slack_mattermost_notifier_shared_examples.rb2
-rw-r--r--spec/support/shared_examples/models/concerns/repository_storage_movable_shared_examples.rb2
-rw-r--r--spec/support/shared_examples/models/database_event_tracking_shared_examples.rb49
-rw-r--r--spec/support/shared_examples/models/member_shared_examples.rb96
-rw-r--r--spec/support/shared_examples/models/members_notifications_shared_example.rb42
-rw-r--r--spec/support/shared_examples/models/relative_positioning_shared_examples.rb6
-rw-r--r--spec/support/shared_examples/quick_actions/merge_request/rebase_quick_action_shared_examples.rb6
-rw-r--r--spec/support/shared_examples/redis/multi_store_wrapper_shared_examples.rb74
-rw-r--r--spec/support/shared_examples/redis/redis_new_instance_shared_examples.rb37
-rw-r--r--spec/support/shared_examples/redis/redis_shared_examples.rb67
-rw-r--r--spec/support/shared_examples/requests/api/terraform/modules/v1/packages_shared_examples.rb175
-rw-r--r--spec/support/shared_examples/serializers/environment_serializer_shared_examples.rb4
-rw-r--r--spec/support/shared_examples/services/boards/issues_move_service_shared_examples.rb8
-rw-r--r--spec/support/shared_examples/services/common_system_notes_shared_examples.rb6
-rw-r--r--spec/support/shared_examples/services/count_service_shared_examples.rb4
-rw-r--r--spec/support/shared_examples/services/destroy_label_links_shared_examples.rb4
-rw-r--r--spec/support/shared_examples/services/jira/requests/base_shared_examples.rb1
-rw-r--r--spec/support/shared_examples/services/namespace_package_settings_shared_examples.rb4
-rw-r--r--spec/support/shared_examples/services/packages/debian/generate_distribution_shared_examples.rb4
-rw-r--r--spec/support/shared_examples/services/protected_branches_shared_examples.rb41
-rw-r--r--spec/support/shared_examples/work_item_hierarchy_restrictions_importer.rb17
-rw-r--r--spec/support/shared_examples/work_items/widgetable_service_shared_examples.rb6
-rw-r--r--spec/support/shared_examples/workers/gitlab/github_import/stage_methods_shared_examples.rb181
-rw-r--r--spec/support/sidekiq.rb10
-rw-r--r--spec/support/sidekiq_middleware.rb9
-rw-r--r--spec/tasks/gitlab/cleanup_rake_spec.rb84
-rw-r--r--spec/tooling/danger/bulk_database_actions_spec.rb136
-rw-r--r--spec/tooling/danger/project_helper_spec.rb7
-rw-r--r--spec/tooling/lib/tooling/find_changes_spec.rb35
-rw-r--r--spec/uploaders/object_storage/cdn/google_cdn_spec.rb2
-rw-r--r--spec/validators/ip_cidr_array_validator_spec.rb1
-rw-r--r--spec/views/admin/application_settings/network.html.haml_spec.rb8
-rw-r--r--spec/views/admin/sessions/new.html.haml_spec.rb2
-rw-r--r--spec/views/devise/sessions/new.html.haml_spec.rb73
-rw-r--r--spec/views/devise/shared/_footer.html.haml_spec.rb46
-rw-r--r--spec/views/devise/shared/_signup_box.html.haml_spec.rb16
-rw-r--r--spec/views/devise/shared/_signup_omniauth_provider_list_spec.rb35
-rw-r--r--spec/views/profiles/preferences/show.html.haml_spec.rb8
-rw-r--r--spec/views/projects/commit/show.html.haml_spec.rb3
-rw-r--r--spec/views/projects/merge_requests/creations/new.html.haml_spec.rb67
-rw-r--r--spec/workers/bulk_imports/export_request_worker_spec.rb18
-rw-r--r--spec/workers/ci/unlock_pipelines_in_queue_worker_spec.rb9
-rw-r--r--spec/workers/click_house/event_authors_consistency_cron_worker_spec.rb104
-rw-r--r--spec/workers/click_house/events_sync_worker_spec.rb174
-rw-r--r--spec/workers/concerns/gitlab/github_import/object_importer_spec.rb27
-rw-r--r--spec/workers/concerns/gitlab/github_import/rescheduling_methods_spec.rb11
-rw-r--r--spec/workers/concerns/gitlab/github_import/stage_methods_spec.rb195
-rw-r--r--spec/workers/emails_on_push_worker_spec.rb4
-rw-r--r--spec/workers/every_sidekiq_worker_spec.rb2
-rw-r--r--spec/workers/gitlab/bitbucket_server_import/stage/import_users_worker_spec.rb15
-rw-r--r--spec/workers/gitlab/github_import/import_issue_event_worker_spec.rb65
-rw-r--r--spec/workers/gitlab/github_import/replay_events_worker_spec.rb32
-rw-r--r--spec/workers/gitlab/github_import/stage/finish_import_worker_spec.rb5
-rw-r--r--spec/workers/gitlab/github_import/stage/import_attachments_worker_spec.rb7
-rw-r--r--spec/workers/gitlab/github_import/stage/import_base_data_worker_spec.rb4
-rw-r--r--spec/workers/gitlab/github_import/stage/import_collaborators_worker_spec.rb6
-rw-r--r--spec/workers/gitlab/github_import/stage/import_issue_events_worker_spec.rb24
-rw-r--r--spec/workers/gitlab/github_import/stage/import_issues_and_diff_notes_worker_spec.rb7
-rw-r--r--spec/workers/gitlab/github_import/stage/import_lfs_objects_worker_spec.rb10
-rw-r--r--spec/workers/gitlab/github_import/stage/import_notes_worker_spec.rb7
-rw-r--r--spec/workers/gitlab/github_import/stage/import_protected_branches_worker_spec.rb4
-rw-r--r--spec/workers/gitlab/github_import/stage/import_pull_requests_merged_by_worker_spec.rb6
-rw-r--r--spec/workers/gitlab/github_import/stage/import_pull_requests_review_requests_worker_spec.rb6
-rw-r--r--spec/workers/gitlab/github_import/stage/import_pull_requests_reviews_worker_spec.rb7
-rw-r--r--spec/workers/gitlab/github_import/stage/import_pull_requests_worker_spec.rb6
-rw-r--r--spec/workers/gitlab/github_import/stage/import_repository_worker_spec.rb4
-rw-r--r--spec/workers/google_cloud/create_cloudsql_instance_worker_spec.rb8
-rw-r--r--spec/workers/google_cloud/fetch_google_ip_list_worker_spec.rb2
-rw-r--r--spec/workers/jira_connect/sync_project_worker_spec.rb4
-rw-r--r--spec/workers/new_issue_worker_spec.rb8
-rw-r--r--spec/workers/releases/publish_event_worker_spec.rb45
1064 files changed, 28761 insertions, 12421 deletions
diff --git a/spec/bin/feature_flag_spec.rb b/spec/bin/feature_flag_spec.rb
index d1e4be5be28..f9caa5549ca 100644
--- a/spec/bin/feature_flag_spec.rb
+++ b/spec/bin/feature_flag_spec.rb
@@ -7,12 +7,28 @@ load File.expand_path('../../bin/feature-flag', __dir__)
RSpec.describe 'bin/feature-flag', feature_category: :feature_flags do
using RSpec::Parameterized::TableSyntax
+ let(:groups) do
+ {
+ geo: { label: 'group::geo' }
+ }
+ end
+
+ before do
+ allow(HTTParty)
+ .to receive(:get)
+ .with(FeatureFlagOptionParser::WWW_GITLAB_COM_GROUPS_JSON, format: :plain)
+ .and_return(groups.to_json)
+ end
+
describe FeatureFlagCreator do
- let(:argv) { %w[feature-flag-name -t development -g group::geo -i https://url -m http://url] }
+ let(:argv) { %w[feature-flag-name -t gitlab_com_derisk -g group::geo -a https://url -i https://url -m http://url -u username -M 16.6] }
let(:options) { FeatureFlagOptionParser.parse(argv) }
let(:creator) { described_class.new(options) }
let(:existing_flags) do
- { 'existing_feature_flag' => File.join('config', 'feature_flags', 'development', 'existing_feature_flag.yml') }
+ {
+ 'existing_feature_flag' =>
+ File.join('config', 'feature_flags', 'gitlab_com_derisk', 'existing_feature_flag.yml')
+ }
end
before do
@@ -31,7 +47,7 @@ RSpec.describe 'bin/feature-flag', feature_category: :feature_flags do
it 'properly creates a feature flag' do
expect(File).to receive(:write).with(
- File.join('config', 'feature_flags', 'development', 'feature_flag_name.yml'),
+ File.join('config', 'feature_flags', 'gitlab_com_derisk', 'feature_flag_name.yml'),
anything)
expect do
@@ -108,85 +124,97 @@ RSpec.describe 'bin/feature-flag', feature_category: :feature_flags do
end
describe '.read_type' do
- let(:type) { 'development' }
+ before do
+ stub_const('FeatureFlagOptionParser::TYPES',
+ development: { description: 'short' },
+ deprecated: { description: 'deprecated', deprecated: true },
+ licensed: { description: 'licensed' }
+ )
+ end
- context 'when there is only a single type defined' do
- before do
- stub_const('FeatureFlagOptionParser::TYPES',
- development: { description: 'short' }
- )
- end
+ context 'when valid type is given' do
+ let(:type) { 'development' }
- it 'returns that type' do
- expect(described_class.read_type).to eq(:development)
+ it 'reads type from stdin' do
+ expect(Readline).to receive(:readline).and_return(type)
+ expect do
+ expect(described_class.read_type).to eq(:development)
+ end.to output(/Specify the feature flag type/).to_stdout
end
end
- context 'when there is deprecated feature flag type' do
- before do
- stub_const('FeatureFlagOptionParser::TYPES',
- development: { description: 'short' },
- deprecated: { description: 'deprecated', deprecated: true }
- )
+ context 'when valid index is given' do
+ it 'picks the type successfully' do
+ expect(Readline).to receive(:readline).and_return('3')
+ expect do
+ expect(described_class.read_type).to eq(:licensed)
+ end.to output(/Specify the feature flag type./).to_stdout
end
+ end
- context 'and deprecated type is given' do
- let(:type) { 'deprecated' }
+ context 'when deprecated type is given' do
+ let(:type) { 'deprecated' }
- it 'shows error message and retries' do
- expect(Readline).to receive(:readline).and_return(type)
- expect(Readline).to receive(:readline).and_raise('EOF')
+ it 'shows error message and retries' do
+ expect(Readline).to receive(:readline).and_return(type)
+ expect(Readline).to receive(:readline).and_raise('EOF')
- expect do
- expect { described_class.read_type }.to raise_error(/EOF/)
- end.to output(/Specify the feature flag type/).to_stdout
- .and output(/Invalid type specified/).to_stderr
- end
+ expect do
+ expect { described_class.read_type }.to raise_error(/EOF/)
+ end.to output(/Specify the feature flag type/).to_stdout
+ .and output(/Invalid type specified/).to_stderr
end
end
- context 'when there are many types defined' do
- before do
- stub_const('FeatureFlagOptionParser::TYPES',
- development: { description: 'short' },
- licensed: { description: 'licensed' }
- )
- end
+ context 'when invalid type is given' do
+ let(:type) { 'invalid' }
- it 'reads type from stdin' do
+ it 'shows error message and retries' do
expect(Readline).to receive(:readline).and_return(type)
+ expect(Readline).to receive(:readline).and_raise('EOF')
+
expect do
- expect(described_class.read_type).to eq(:development)
+ expect { described_class.read_type }.to raise_error(/EOF/)
end.to output(/Specify the feature flag type/).to_stdout
+ .and output(/Invalid type specified/).to_stderr
end
+ end
- context 'when invalid type is given' do
- let(:type) { 'invalid' }
-
- it 'shows error message and retries' do
- expect(Readline).to receive(:readline).and_return(type)
- expect(Readline).to receive(:readline).and_raise('EOF')
+ context 'when invalid index is given' do
+ it 'shows error message and retries' do
+ expect(Readline).to receive(:readline).and_return('12')
+ expect(Readline).to receive(:readline).and_raise('EOF')
- expect do
- expect { described_class.read_type }.to raise_error(/EOF/)
- end.to output(/Specify the feature flag type/).to_stdout
- .and output(/Invalid type specified/).to_stderr
- end
+ expect do
+ expect { described_class.read_type }.to raise_error(/EOF/)
+ end.to output(/Specify the feature flag type/).to_stdout
+ .and output(/Invalid type specified/).to_stderr
end
end
end
describe '.read_group' do
- let(:group) { 'group::geo' }
+ context 'when valid group is given' do
+ let(:group) { 'group::geo' }
- it 'reads type from stdin' do
- expect(Readline).to receive(:readline).and_return(group)
- expect do
- expect(described_class.read_group).to eq('group::geo')
- end.to output(/Specify the group introducing the feature flag/).to_stdout
+ it 'reads group from stdin' do
+ expect(Readline).to receive(:readline).and_return(group)
+ expect do
+ expect(described_class.read_group).to eq('group::geo')
+ end.to output(/Specify the group label to which the feature flag belongs, from the following list/).to_stdout
+ end
end
- context 'invalid group given' do
+ context 'when valid index is given' do
+ it 'picks the group successfully' do
+ expect(Readline).to receive(:readline).and_return('1')
+ expect do
+ expect(described_class.read_group).to eq('group::geo')
+ end.to output(/Specify the group label to which the feature flag belongs, from the following list/).to_stdout
+ end
+ end
+
+ context 'with invalid group given' do
let(:type) { 'invalid' }
it 'shows error message and retries' do
@@ -195,78 +223,151 @@ RSpec.describe 'bin/feature-flag', feature_category: :feature_flags do
expect do
expect { described_class.read_group }.to raise_error(/EOF/)
- end.to output(/Specify the group introducing the feature flag/).to_stdout
- .and output(/The group needs to include/).to_stderr
+ end.to output(/Specify the group label to which the feature flag belongs, from the following list/).to_stdout
+ .and output(/The group label isn't in the above labels list/).to_stderr
+ end
+ end
+
+ context 'when invalid index is given' do
+ it 'shows error message and retries' do
+ expect(Readline).to receive(:readline).and_return('12')
+ expect(Readline).to receive(:readline).and_raise('EOF')
+
+ expect do
+ expect { described_class.read_group }.to raise_error(/EOF/)
+ end.to output(/Specify the group label to which the feature flag belongs, from the following list/).to_stdout
+ .and output(/The group label isn't in the above labels list/).to_stderr
end
end
end
- describe '.read_introduced_by_url' do
- let(:url) { 'https://merge-request' }
+ shared_examples 'read_url' do |method, prompt|
+ context 'with valid URL given' do
+ let(:url) { 'https://merge-request' }
- it 'reads type from stdin' do
- expect(Readline).to receive(:readline).and_return(url)
- expect do
- expect(described_class.read_introduced_by_url).to eq('https://merge-request')
- end.to output(/URL of the MR introducing the feature flag/).to_stdout
+ it 'reads URL from stdin' do
+ expect(Readline).to receive(:readline).and_return(url)
+ expect(HTTParty).to receive(:head).with(url).and_return(instance_double(HTTParty::Response, success?: true))
+
+ expect do
+ expect(described_class.public_send(method)).to eq('https://merge-request')
+ end.to output(/#{prompt}/).to_stdout
+ end
end
- context 'empty URL given' do
+ context 'with invalid URL given' do
+ let(:url) { 'https://invalid' }
+
+ it 'shows error message and retries' do
+ expect(Readline).to receive(:readline).and_return(url)
+ expect(HTTParty).to receive(:head).with(url).and_return(instance_double(HTTParty::Response, success?: false))
+ expect(Readline).to receive(:readline).and_raise('EOF')
+
+ expect do
+ expect { described_class.public_send(method) }.to raise_error(/EOF/)
+ end.to output(/#{prompt}/).to_stdout
+ .and output(/URL '#{url}' isn't valid/).to_stderr
+ end
+ end
+
+ context 'with empty URL given' do
let(:url) { '' }
it 'skips entry' do
expect(Readline).to receive(:readline).and_return(url)
+
expect do
- expect(described_class.read_introduced_by_url).to be_nil
- end.to output(/URL of the MR introducing the feature flag/).to_stdout
+ expect(described_class.public_send(method)).to be_nil
+ end.to output(/#{prompt}/).to_stdout
end
end
- context 'invalid URL given' do
- let(:url) { 'invalid' }
+ context 'with a non-URL given' do
+ let(:url) { 'malformed' }
it 'shows error message and retries' do
expect(Readline).to receive(:readline).and_return(url)
expect(Readline).to receive(:readline).and_raise('EOF')
expect do
- expect { described_class.read_introduced_by_url }.to raise_error(/EOF/)
- end.to output(/URL of the MR introducing the feature flag/).to_stdout
+ expect { described_class.public_send(method) }.to raise_error(/EOF/)
+ end.to output(/#{prompt}/).to_stdout
.and output(/URL needs to start with/).to_stderr
end
end
end
+ describe '.read_feature_issue_url' do
+ it_behaves_like 'read_url', :read_feature_issue_url, 'URL of the original feature issue'
+ end
+
+ describe '.read_introduced_by_url' do
+ it_behaves_like 'read_url', :read_introduced_by_url, 'URL of the MR introducing the feature flag'
+ end
+
describe '.read_rollout_issue_url' do
- let(:options) { double('options', name: 'foo', type: :development) }
- let(:url) { 'https://issue' }
+ let(:options) do
+ FeatureFlagOptionParser::Options.new({
+ name: 'foo',
+ username: 'joe',
+ type: :gitlab_com_derisk,
+ introduced_by_url: 'https://introduced_by_url',
+ feature_issue_url: 'https://feature_issue_url',
+ milestone: '16.6',
+ group: 'group::geo'
+ })
+ end
- it 'reads type from stdin' do
- expect(Readline).to receive(:readline).and_return(url)
- expect do
- expect(described_class.read_rollout_issue_url(options)).to eq('https://issue')
- end.to output(/URL of the rollout issue/).to_stdout
+ context 'with valid URL given' do
+ let(:url) { 'https://rollout_issue_url' }
+
+ it 'reads type from stdin' do
+ expect(described_class).to receive(:copy_to_clipboard!).and_return(true)
+ expect(Readline).to receive(:readline).and_return('') # enter to open the new issue url
+ expect(described_class).to receive(:open_url!).and_return(true)
+ expect(Readline).to receive(:readline).and_return(url)
+ expect(HTTParty).to receive(:head).with(url).and_return(instance_double(HTTParty::Response, success?: true))
+
+ expect do
+ expect(described_class.read_rollout_issue_url(options)).to eq(url)
+ end.to output(/URL of the rollout issue/).to_stdout
+ end
end
- context 'invalid URL given' do
- let(:type) { 'invalid' }
+ context 'with invalid URL given' do
+ let(:url) { 'https://invalid' }
it 'shows error message and retries' do
- expect(Readline).to receive(:readline).and_return(type)
+ expect(described_class).to receive(:copy_to_clipboard!).and_return(true)
+ expect(Readline).to receive(:readline).and_return('') # enter to open the new issue url
+ expect(described_class).to receive(:open_url!).and_return(true)
+ expect(Readline).to receive(:readline).and_return(url)
+ expect(HTTParty).to receive(:head).with(url).and_return(instance_double(HTTParty::Response, success?: false))
expect(Readline).to receive(:readline).and_raise('EOF')
expect do
expect { described_class.read_rollout_issue_url(options) }.to raise_error(/EOF/)
end.to output(/URL of the rollout issue/).to_stdout
- .and output(/URL needs to start/).to_stderr
+ .and output(/URL '#{url}' isn't valid/).to_stderr
end
end
- end
- describe '.read_ee_only' do
- let(:options) { double('options', name: 'foo', type: :development) }
+ context 'with a non-URL given' do
+ let(:url) { 'malformed' }
+
+ it 'shows error message and retries' do
+ expect(described_class).to receive(:copy_to_clipboard!).and_return(true)
+ expect(Readline).to receive(:readline).and_return('') # enter to open the new issue url
+ expect(described_class).to receive(:open_url!).and_return(true)
+ expect(Readline).to receive(:readline).and_return(url)
+ expect(Readline).to receive(:readline).and_raise('EOF')
- it { expect(described_class.read_ee_only(options)).to eq(false) }
+ expect do
+ expect { described_class.read_rollout_issue_url(options) }.to raise_error(/EOF/)
+ end.to output(/URL of the rollout issue/).to_stdout
+ .and output(/URL needs to start/).to_stderr
+ end
+ end
end
end
end
diff --git a/spec/components/pajamas/avatar_component_spec.rb b/spec/components/pajamas/avatar_component_spec.rb
index d59ef390fad..9c1a40ad5b5 100644
--- a/spec/components/pajamas/avatar_component_spec.rb
+++ b/spec/components/pajamas/avatar_component_spec.rb
@@ -5,16 +5,25 @@ RSpec.describe Pajamas::AvatarComponent, type: :component do
let_it_be(:user) { create(:user) }
let_it_be(:project) { create(:project) }
let_it_be(:group) { create(:group) }
+ let_it_be(:email) { Pajamas::AvatarEmail.new('kitty@cat.com') }
let(:options) { {} }
before do
- render_inline(described_class.new(record, **options))
+ render_inline(described_class.new(item, **options))
end
describe "avatar shape" do
context "for a User" do
- let(:record) { user }
+ let(:item) { user }
+
+ it "has a circle shape" do
+ expect(page).to have_css ".gl-avatar.gl-avatar-circle"
+ end
+ end
+
+ context "for an Email" do
+ let(:item) { email }
it "has a circle shape" do
expect(page).to have_css ".gl-avatar.gl-avatar-circle"
@@ -22,7 +31,7 @@ RSpec.describe Pajamas::AvatarComponent, type: :component do
end
context "for a Project" do
- let(:record) { project }
+ let(:item) { project }
it "has default shape (rect)" do
expect(page).to have_css ".gl-avatar"
@@ -31,7 +40,7 @@ RSpec.describe Pajamas::AvatarComponent, type: :component do
end
context "for a Group" do
- let(:record) { group }
+ let(:item) { group }
it "has default shape (rect)" do
expect(page).to have_css ".gl-avatar"
@@ -42,11 +51,11 @@ RSpec.describe Pajamas::AvatarComponent, type: :component do
describe "avatar image" do
context "when it has an uploaded image" do
- let(:record) { project }
+ let(:item) { project }
before do
- allow(record).to receive(:avatar_url).and_return "/example.png"
- render_inline(described_class.new(record, **options))
+ allow(item).to receive(:avatar_url).and_return "/example.png"
+ render_inline(described_class.new(item, **options))
end
it "uses the avatar_url as image src" do
@@ -73,14 +82,14 @@ RSpec.describe Pajamas::AvatarComponent, type: :component do
end
context "when a project or group has no uploaded image" do
- let(:record) { project }
+ let(:item) { project }
- it "uses an identicon with the record's initial" do
- expect(page).to have_css "div.gl-avatar.gl-avatar-identicon", text: record.name[0].upcase
+ it "uses an identicon with the item's initial" do
+ expect(page).to have_css "div.gl-avatar.gl-avatar-identicon", text: item.name[0].upcase
end
- context "when the record has no id" do
- let(:record) { build :group }
+ context "when the item has no id" do
+ let(:item) { build :group }
it "uses an identicon with default background color" do
expect(page).to have_css "div.gl-avatar.gl-avatar-identicon-bg1"
@@ -89,16 +98,34 @@ RSpec.describe Pajamas::AvatarComponent, type: :component do
end
context "when a user has no uploaded image" do
- let(:record) { user }
+ let(:item) { user }
it "uses a gravatar" do
expect(rendered_content).to match /gravatar\.com/
end
end
+
+ context "when an email has no linked user" do
+ context "when the email is blank" do
+ let(:item) { Pajamas::AvatarEmail.new('') }
+
+ it "uses the default avatar" do
+ expect(rendered_content).to match /no_avatar/
+ end
+ end
+
+ context "when the email is not blank" do
+ let(:item) { email }
+
+ it "uses a agravatar" do
+ expect(rendered_content).to match /gravatar\.com/
+ end
+ end
+ end
end
describe "options" do
- let(:record) { user }
+ let(:item) { user }
describe "alt" do
context "with a value" do
@@ -110,8 +137,8 @@ RSpec.describe Pajamas::AvatarComponent, type: :component do
end
context "without a value" do
- it "uses the record's name as alt text" do
- expect(page).to have_css ".gl-avatar[alt='#{record.name}']"
+ it "uses the item's name as alt text" do
+ expect(page).to have_css ".gl-avatar[alt='#{item.name}']"
end
end
end
diff --git a/spec/components/previews/pajamas/banner_component_preview.rb b/spec/components/previews/pajamas/banner_component_preview.rb
index 4c1d64e32f6..ae86ca87aee 100644
--- a/spec/components/previews/pajamas/banner_component_preview.rb
+++ b/spec/components/previews/pajamas/banner_component_preview.rb
@@ -18,7 +18,7 @@ module Pajamas
render(Pajamas::BannerComponent.new(
button_text: button_text,
button_link: button_link,
- svg_path: "illustrations/autodevops.svg",
+ svg_path: "illustrations/devops-sm.svg",
variant: variant
)) do |c|
content_tag :p, content
diff --git a/spec/components/projects/ml/models_index_component_spec.rb b/spec/components/projects/ml/models_index_component_spec.rb
index f020ae5bbef..1dab4c3c74a 100644
--- a/spec/components/projects/ml/models_index_component_spec.rb
+++ b/spec/components/projects/ml/models_index_component_spec.rb
@@ -4,6 +4,7 @@ require "spec_helper"
RSpec.describe Projects::Ml::ModelsIndexComponent, type: :component, feature_category: :mlops do
let_it_be(:project) { build_stubbed(:project) }
+ let_it_be(:user) { project.owner }
let_it_be(:model1) { build_stubbed(:ml_models, :with_latest_version_and_package, project: project) }
let_it_be(:model2) { build_stubbed(:ml_models, project: project) }
let_it_be(:models) { [model1, model2] }
@@ -23,48 +24,66 @@ RSpec.describe Projects::Ml::ModelsIndexComponent, type: :component, feature_cat
end
subject(:component) do
- described_class.new(model_count: 5, paginator: paginator)
+ described_class.new(project: project, current_user: user, model_count: 5, paginator: paginator)
end
describe 'rendered' do
let(:element) { page.find("#js-index-ml-models") }
- before do
- allow(model1).to receive(:version_count).and_return(1)
- allow(model2).to receive(:version_count).and_return(0)
- render_inline component
+ context 'when user can write model registry' do
+ before do
+ allow(model1).to receive(:version_count).and_return(1)
+ allow(model2).to receive(:version_count).and_return(0)
+ render_inline component
+ end
+
+ it 'renders element with view_model' do
+ expect(Gitlab::Json.parse(element['data-view-model'])).to eq({
+ 'models' => [
+ {
+ 'name' => model1.name,
+ 'version' => model1.latest_version.version,
+ 'path' => "/#{project.full_path}/-/ml/models/#{model1.id}",
+ 'versionPackagePath' => "/#{project.full_path}/-/packages/#{model1.latest_version.package_id}",
+ 'versionPath' => "/#{project.full_path}/-/ml/models/#{model1.id}/versions/#{model1.latest_version.id}",
+ 'versionCount' => 1
+ },
+ {
+ 'name' => model2.name,
+ 'path' => "/#{project.full_path}/-/ml/models/#{model2.id}",
+ 'version' => nil,
+ 'versionPackagePath' => nil,
+ 'versionPath' => nil,
+ 'versionCount' => 0
+ }
+ ],
+ 'pageInfo' => {
+ 'hasNextPage' => true,
+ 'hasPreviousPage' => false,
+ 'startCursor' => 'abcde',
+ 'endCursor' => 'defgh'
+ },
+ 'modelCount' => 5,
+ 'createModelPath' => "/#{project.full_path}/-/ml/models/new",
+ 'canWriteModelRegistry' => true,
+ 'mlflowTrackingUrl' => "http://localhost/api/v4/projects/#{project.id}/ml/mlflow/api/2.0/mlflow/"
+ })
+ end
end
- it 'renders element with view_model' do
- element = page.find("#js-index-ml-models")
+ context 'when user cannot write model registry' do
+ before do
+ allow(Ability).to receive(:allowed?).and_call_original
+ allow(Ability).to receive(:allowed?)
+ .with(user, :write_model_registry, project)
+ .and_return(false)
- expect(Gitlab::Json.parse(element['data-view-model'])).to eq({
- 'models' => [
- {
- 'name' => model1.name,
- 'version' => model1.latest_version.version,
- 'path' => "/#{project.full_path}/-/ml/models/#{model1.id}",
- 'versionPackagePath' => "/#{project.full_path}/-/packages/#{model1.latest_version.package_id}",
- 'versionPath' => "/#{project.full_path}/-/ml/models/#{model1.id}/versions/#{model1.latest_version.id}",
- 'versionCount' => 1
- },
- {
- 'name' => model2.name,
- 'path' => "/#{project.full_path}/-/ml/models/#{model2.id}",
- 'version' => nil,
- 'versionPackagePath' => nil,
- 'versionPath' => nil,
- 'versionCount' => 0
- }
- ],
- 'pageInfo' => {
- 'hasNextPage' => true,
- 'hasPreviousPage' => false,
- 'startCursor' => 'abcde',
- 'endCursor' => 'defgh'
- },
- 'modelCount' => 5
- })
+ render_inline component
+ end
+
+ it 'canWriteModelRegistry is false' do
+ expect(Gitlab::Json.parse(element['data-view-model'])['canWriteModelRegistry']).to eq(false)
+ end
end
end
end
diff --git a/spec/components/projects/ml/show_ml_model_component_spec.rb b/spec/components/projects/ml/show_ml_model_component_spec.rb
index 34b8cbe96ca..d12692ca792 100644
--- a/spec/components/projects/ml/show_ml_model_component_spec.rb
+++ b/spec/components/projects/ml/show_ml_model_component_spec.rb
@@ -35,6 +35,7 @@ RSpec.describe Projects::Ml::ShowMlModelComponent, type: :component, feature_cat
'latestVersion' => {
'version' => model1.latest_version.version,
'description' => model1.latest_version.description,
+ 'path' => "/#{project.full_path}/-/ml/models/#{model1.id}/versions/#{model1.latest_version.id}",
'projectPath' => "/#{project.full_path}",
'packageId' => model1.latest_version.package_id,
'candidate' => {
diff --git a/spec/controllers/admin/projects_controller_spec.rb b/spec/controllers/admin/projects_controller_spec.rb
index d81b067ffb6..95986b5c034 100644
--- a/spec/controllers/admin/projects_controller_spec.rb
+++ b/spec/controllers/admin/projects_controller_spec.rb
@@ -49,11 +49,11 @@ RSpec.describe Admin::ProjectsController do
it 'does not have N+1 queries', :use_clean_rails_memory_store_caching, :request_store do
get :index
- control_count = ActiveRecord::QueryRecorder.new { get :index }.count
+ control = ActiveRecord::QueryRecorder.new { get :index }
create(:project)
- expect { get :index }.not_to exceed_query_limit(control_count)
+ expect { get :index }.not_to exceed_query_limit(control)
end
end
diff --git a/spec/controllers/admin/runners_controller_spec.rb b/spec/controllers/admin/runners_controller_spec.rb
index d88fe41a869..186e1b13856 100644
--- a/spec/controllers/admin/runners_controller_spec.rb
+++ b/spec/controllers/admin/runners_controller_spec.rb
@@ -89,11 +89,11 @@ RSpec.describe Admin::RunnersController, feature_category: :fleet_visibility do
it 'avoids N+1 queries', :request_store do
get :edit, params: { id: runner.id }
- control_count = ActiveRecord::QueryRecorder.new { get :edit, params: { id: runner.id } }.count
+ control = ActiveRecord::QueryRecorder.new { get :edit, params: { id: runner.id } }
# There is one additional query looking up subject.group in ProjectPolicy for the
# needs_new_sso_session permission
- expect { get :edit, params: { id: runner.id } }.not_to exceed_query_limit(control_count + 1)
+ expect { get :edit, params: { id: runner.id } }.not_to exceed_query_limit(control).with_threshold(1)
expect(response).to have_gitlab_http_status(:ok)
end
diff --git a/spec/controllers/application_controller_spec.rb b/spec/controllers/application_controller_spec.rb
index f4384dbaa69..715ded875fe 100644
--- a/spec/controllers/application_controller_spec.rb
+++ b/spec/controllers/application_controller_spec.rb
@@ -103,32 +103,6 @@ RSpec.describe ApplicationController, feature_category: :shared do
end
end
- describe 'session expiration' do
- controller(described_class) do
- def index
- render html: 'authenticated'
- end
- end
-
- context 'authenticated user' do
- it 'does not set the expire_after option' do
- sign_in(create(:user))
-
- get :index
-
- expect(request.env['rack.session.options'][:expire_after]).to be_nil
- end
- end
-
- context 'unauthenticated user' do
- it 'sets the expire_after option' do
- get :index
-
- expect(request.env['rack.session.options'][:expire_after]).to eq(Settings.gitlab['unauthenticated_session_expire_delay'])
- end
- end
- end
-
describe 'response format' do
controller(described_class) do
def index
@@ -470,7 +444,7 @@ RSpec.describe ApplicationController, feature_category: :shared do
enforce_terms
- expect { get :index }.not_to exceed_query_limit(control.count).with_threshold(1)
+ expect { get :index }.not_to exceed_query_limit(control).with_threshold(1)
end
context 'when terms are enforced' do
diff --git a/spec/controllers/concerns/issuable_collections_spec.rb b/spec/controllers/concerns/issuable_collections_spec.rb
index 9eb0f36cb37..051172ea6da 100644
--- a/spec/controllers/concerns/issuable_collections_spec.rb
+++ b/spec/controllers/concerns/issuable_collections_spec.rb
@@ -74,7 +74,6 @@ RSpec.describe IssuableCollections do
assignee_username: 'user1',
author_id: '2',
author_username: 'user2',
- authorized_only: 'yes',
confidential: true,
due_date: '2017-01-01',
group_id: '3',
diff --git a/spec/controllers/concerns/renders_commits_spec.rb b/spec/controllers/concerns/renders_commits_spec.rb
index 45f194b63e7..754107efee8 100644
--- a/spec/controllers/concerns/renders_commits_spec.rb
+++ b/spec/controllers/concerns/renders_commits_spec.rb
@@ -46,15 +46,15 @@ RSpec.describe RendersCommits do
it 'avoids N + 1', :request_store do
stub_const("MergeRequestDiff::COMMITS_SAFE_SIZE", 5)
- control_count = ActiveRecord::QueryRecorder.new do
+ control = ActiveRecord::QueryRecorder.new do
go
- end.count
+ end
stub_const("MergeRequestDiff::COMMITS_SAFE_SIZE", 15)
expect do
go
- end.not_to exceed_all_query_limit(control_count)
+ end.not_to exceed_all_query_limit(control)
end
end
@@ -73,7 +73,7 @@ RSpec.describe RendersCommits do
expect do
subject.prepare_commits_for_rendering(merge_request.commits)
merge_request.commits.each(&:latest_pipeline)
- end.not_to exceed_all_query_limit(control.count)
+ end.not_to exceed_all_query_limit(control)
end
end
end
diff --git a/spec/controllers/groups/labels_controller_spec.rb b/spec/controllers/groups/labels_controller_spec.rb
index 3dcf41941bb..38e39da2733 100644
--- a/spec/controllers/groups/labels_controller_spec.rb
+++ b/spec/controllers/groups/labels_controller_spec.rb
@@ -62,7 +62,9 @@ RSpec.describe Groups::LabelsController, feature_category: :team_planning do
create_list(:group_label, 3, group: group)
# some n+1 queries still exist
- expect { get :index, params: { group_id: group.to_param } }.not_to exceed_all_query_limit(control.count).with_threshold(10)
+ expect do
+ get :index, params: { group_id: group.to_param }
+ end.not_to exceed_all_query_limit(control).with_threshold(10)
expect(assigns(:labels).count).to eq(4)
end
end
diff --git a/spec/controllers/groups/releases_controller_spec.rb b/spec/controllers/groups/releases_controller_spec.rb
index 4b4333dea0e..1ca540ebb99 100644
--- a/spec/controllers/groups/releases_controller_spec.rb
+++ b/spec/controllers/groups/releases_controller_spec.rb
@@ -62,12 +62,12 @@ RSpec.describe Groups::ReleasesController do
context 'N+1 queries' do
it 'avoids N+1 database queries' do
- control_count = ActiveRecord::QueryRecorder.new { subject }.count
+ control = ActiveRecord::QueryRecorder.new { subject }
create_list(:release, 5, project: project)
create_list(:release, 5, project: private_project)
- expect { subject }.not_to exceed_query_limit(control_count)
+ expect { subject }.not_to exceed_query_limit(control)
end
end
end
diff --git a/spec/controllers/import/bitbucket_server_controller_spec.rb b/spec/controllers/import/bitbucket_server_controller_spec.rb
index 3266c4d4d39..7e036b75e76 100644
--- a/spec/controllers/import/bitbucket_server_controller_spec.rb
+++ b/spec/controllers/import/bitbucket_server_controller_spec.rb
@@ -112,6 +112,7 @@ RSpec.describe Import::BitbucketServerController, feature_category: :importers d
let(:token) { 'token' }
let(:username) { 'bitbucket-user' }
let(:url) { 'http://localhost:7990/bitbucket' }
+ let(:experiment) { instance_double(ApplicationExperiment) }
it 'clears out existing session' do
post :configure
@@ -124,6 +125,17 @@ RSpec.describe Import::BitbucketServerController, feature_category: :importers d
expect(response).to redirect_to(status_import_bitbucket_server_path)
end
+ it 'tracks default_to_import_tab experiment' do
+ allow(controller)
+ .to receive(:experiment)
+ .with(:default_to_import_tab, actor: user)
+ .and_return(experiment)
+
+ expect(experiment).to receive(:track).with(:authentication, property: :bitbucket_server)
+
+ post :configure
+ end
+
it 'sets the session variables' do
allow(controller).to receive(:allow_local_requests?).and_return(true)
diff --git a/spec/controllers/import/bulk_imports_controller_spec.rb b/spec/controllers/import/bulk_imports_controller_spec.rb
index 9b41089f4b8..54192f010ed 100644
--- a/spec/controllers/import/bulk_imports_controller_spec.rb
+++ b/spec/controllers/import/bulk_imports_controller_spec.rb
@@ -303,27 +303,11 @@ RSpec.describe Import::BulkImportsController, feature_category: :importers do
describe 'GET details' do
subject(:request) { get :details }
- context 'when bulk_import_details_page feature flag is enabled' do
- before do
- stub_feature_flags(bulk_import_details_page: true)
- request
- end
-
- it 'responds with a 200 and shows the template', :aggregate_failures do
- expect(response).to have_gitlab_http_status(:ok)
- expect(response).to render_template(:details)
- end
- end
+ it 'responds with a 200 and shows the template', :aggregate_failures do
+ request
- context 'when bulk_import_details_page feature flag is disabled' do
- before do
- stub_feature_flags(bulk_import_details_page: false)
- request
- end
-
- it 'responds with a 404' do
- expect(response).to have_gitlab_http_status(:not_found)
- end
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(response).to render_template(:details)
end
end
diff --git a/spec/controllers/import/fogbugz_controller_spec.rb b/spec/controllers/import/fogbugz_controller_spec.rb
index 273dfd6a9c7..45b959b1b78 100644
--- a/spec/controllers/import/fogbugz_controller_spec.rb
+++ b/spec/controllers/import/fogbugz_controller_spec.rb
@@ -17,6 +17,7 @@ RSpec.describe Import::FogbugzController, feature_category: :importers do
end
describe 'POST #callback' do
+ let(:experiment) { instance_double(ApplicationExperiment) }
let(:xml_response) { %(<?xml version=\"1.0\" encoding=\"UTF-8\"?><response><token><![CDATA[#{token}]]></token></response>) }
before do
@@ -31,6 +32,17 @@ RSpec.describe Import::FogbugzController, feature_category: :importers do
expect(response).to redirect_to(new_user_map_import_fogbugz_path)
end
+ it 'tracks default_to_import_tab experiment' do
+ allow(controller)
+ .to receive(:experiment)
+ .with(:default_to_import_tab, actor: user)
+ .and_return(experiment)
+
+ expect(experiment).to receive(:track).with(:successfully_authenticated, property: :fogbugz)
+
+ post :callback, params: { uri: uri, email: 'test@example.com', password: 'mypassword' }
+ end
+
it 'preserves namespace_id query param on success' do
post :callback, params: { uri: uri, email: 'test@example.com', password: 'mypassword', namespace_id: namespace_id }
@@ -46,12 +58,27 @@ RSpec.describe Import::FogbugzController, feature_category: :importers do
expect(response).to redirect_to(new_import_fogbugz_url(namespace_id: namespace_id))
end
- it 'redirects to new page form when client raises authentication exception' do
- allow(::Gitlab::FogbugzImport::Client).to receive(:new).and_raise(::Fogbugz::AuthenticationException)
+ context 'when client raises authentication exception' do
+ before do
+ allow(::Gitlab::FogbugzImport::Client).to receive(:new).and_raise(::Fogbugz::AuthenticationException)
+ end
- post :callback, params: { uri: uri, email: 'test@example.com', password: 'mypassword' }
+ it 'redirects to new page form' do
+ post :callback, params: { uri: uri, email: 'test@example.com', password: 'mypassword' }
+
+ expect(response).to redirect_to(new_import_fogbugz_url)
+ end
- expect(response).to redirect_to(new_import_fogbugz_url)
+ it 'does not track default_to_import_tab experiment when client raises authentication exception' do
+ allow(controller)
+ .to receive(:experiment)
+ .with(:default_to_import_tab, actor: user)
+ .and_return(experiment)
+
+ expect(experiment).not_to receive(:track)
+
+ post :callback, params: { uri: uri, email: 'test@example.com', password: 'mypassword' }
+ end
end
context 'verify url' do
diff --git a/spec/controllers/import/github_controller_spec.rb b/spec/controllers/import/github_controller_spec.rb
index aafba6e2b9f..3d6b35a5c26 100644
--- a/spec/controllers/import/github_controller_spec.rb
+++ b/spec/controllers/import/github_controller_spec.rb
@@ -100,7 +100,20 @@ RSpec.describe Import::GithubController, feature_category: :importers do
end
describe "POST personal_access_token" do
+ let(:experiment) { instance_double(ApplicationExperiment) }
+
it_behaves_like 'a GitHub-ish import controller: POST personal_access_token'
+
+ it 'tracks default_to_import_tab experiment' do
+ allow(controller)
+ .to receive(:experiment)
+ .with(:default_to_import_tab, actor: user)
+ .and_return(experiment)
+
+ expect(experiment).to receive(:track).with(:authentication, property: :github)
+
+ post :personal_access_token
+ end
end
describe "GET status" do
diff --git a/spec/controllers/import/manifest_controller_spec.rb b/spec/controllers/import/manifest_controller_spec.rb
index c06bd660cd2..17a107dd839 100644
--- a/spec/controllers/import/manifest_controller_spec.rb
+++ b/spec/controllers/import/manifest_controller_spec.rb
@@ -19,6 +19,8 @@ RSpec.describe Import::ManifestController, :clean_gitlab_redis_shared_state, fea
end
describe 'POST upload' do
+ let(:experiment) { instance_double(ApplicationExperiment) }
+
context 'with a valid manifest' do
it 'saves the manifest and redirects to the status page', :aggregate_failures do
post :upload, params: {
@@ -34,6 +36,20 @@ RSpec.describe Import::ManifestController, :clean_gitlab_redis_shared_state, fea
expect(response).to redirect_to(status_import_manifest_path)
end
+
+ it 'tracks default_to_import_tab experiment' do
+ allow(controller)
+ .to receive(:experiment)
+ .with(:default_to_import_tab, actor: user)
+ .and_return(experiment)
+
+ expect(experiment).to receive(:track).with(:successfully_imported, property: :manifest)
+
+ post :upload, params: {
+ group_id: group.id,
+ manifest: fixture_file_upload('spec/fixtures/aosp_manifest.xml')
+ }
+ end
end
context 'with an invalid manifest' do
@@ -45,6 +61,20 @@ RSpec.describe Import::ManifestController, :clean_gitlab_redis_shared_state, fea
expect(assigns(:errors)).to be_present
end
+
+ it 'does not track default_to_import_tab experiment' do
+ allow(controller)
+ .to receive(:experiment)
+ .with(:default_to_import_tab, actor: user)
+ .and_return(experiment)
+
+ expect(experiment).not_to receive(:track)
+
+ post :upload, params: {
+ group_id: group.id,
+ manifest: fixture_file_upload('spec/fixtures/invalid_manifest.xml')
+ }
+ end
end
context 'when the user cannot import projects in the group' do
diff --git a/spec/controllers/oauth/authorizations_controller_spec.rb b/spec/controllers/oauth/authorizations_controller_spec.rb
index cfb512afc91..ce21c278a53 100644
--- a/spec/controllers/oauth/authorizations_controller_spec.rb
+++ b/spec/controllers/oauth/authorizations_controller_spec.rb
@@ -216,24 +216,6 @@ RSpec.describe Oauth::AuthorizationsController do
end
end
- context 'when the user is not signed in' do
- before do
- sign_out(user)
- end
-
- it 'sets a lower session expiry and redirects to the sign in page' do
- subject
-
- expect(request.env['rack.session.options'][:expire_after]).to eq(
- Settings.gitlab['unauthenticated_session_expire_delay']
- )
-
- expect(request.session['user_return_to']).to eq("/oauth/authorize?#{params.to_query}")
- expect(response).to have_gitlab_http_status(:found)
- expect(response).to redirect_to(new_user_session_path)
- end
- end
-
context 'when the user is admin' do
context 'when disable_admin_oauth_scopes is set' do
before do
diff --git a/spec/controllers/omniauth_callbacks_controller_spec.rb b/spec/controllers/omniauth_callbacks_controller_spec.rb
index 5b1fdd6388a..e99d9e949a8 100644
--- a/spec/controllers/omniauth_callbacks_controller_spec.rb
+++ b/spec/controllers/omniauth_callbacks_controller_spec.rb
@@ -31,6 +31,67 @@ RSpec.describe OmniauthCallbacksController, type: :controller, feature_category:
end
end
+ shared_examples 'omniauth sign in that remembers user' do
+ before do
+ stub_omniauth_setting(allow_bypass_two_factor: allow_bypass_two_factor)
+ (request.env['omniauth.params'] ||= {}).deep_merge!('remember_me' => omniauth_params_remember_me)
+ end
+
+ if params[:call_remember_me]
+ it 'calls devise method remember_me' do
+ expect(controller).to receive(:remember_me).with(user).and_call_original
+
+ post_action
+ end
+ else
+ it 'does not call devise method remember_me' do
+ expect(controller).not_to receive(:remember_me)
+
+ post_action
+ end
+ end
+ end
+
+ shared_examples 'omniauth sign in that remembers user with two factor enabled' do
+ using RSpec::Parameterized::TableSyntax
+
+ subject(:post_action) { post provider }
+
+ where(:allow_bypass_two_factor, :omniauth_params_remember_me, :call_remember_me) do
+ true | '1' | true
+ true | '0' | false
+ true | nil | false
+ false | '1' | false
+ false | '0' | false
+ false | nil | false
+ end
+
+ with_them do
+ it_behaves_like 'omniauth sign in that remembers user'
+ end
+ end
+
+ shared_examples 'omniauth sign in that remembers user with two factor disabled' do
+ context "when user selects remember me for omniauth sign in flow" do
+ using RSpec::Parameterized::TableSyntax
+
+ subject(:post_action) { post provider }
+
+ where(:allow_bypass_two_factor, :omniauth_params_remember_me, :call_remember_me) do
+ true | '1' | true
+ true | '0' | false
+ true | nil | false
+ false | '1' | true
+ false | '0' | false
+ false | nil | false
+ end
+
+ with_them do
+ it_behaves_like 'omniauth sign in that remembers user'
+ end
+ end
+ end
+
describe 'omniauth' do
let(:user) { create(:omniauth_user, extern_uid: extern_uid, provider: provider) }
let(:additional_info) { {} }
@@ -190,6 +251,8 @@ RSpec.describe OmniauthCallbacksController, type: :controller, feature_category:
request.env['omniauth.params'] = { 'redirect_fragment' => 'L101' }
end
+ it_behaves_like 'omniauth sign in that remembers user with two factor disabled'
+
context 'when a redirect url is stored' do
it 'redirects with fragment' do
post provider, session: { user_return_to: '/fake/url' }
@@ -214,6 +277,12 @@ RSpec.describe OmniauthCallbacksController, type: :controller, feature_category:
expect(response.location).not_to include('#L101')
end
end
+
+ context 'when a user has 2FA enabled' do
+ let(:user) { create(:omniauth_user, :two_factor, extern_uid: extern_uid, provider: provider) }
+
+ it_behaves_like 'omniauth sign in that remembers user with two factor enabled'
+ end
end
context 'with strategies' do
@@ -271,6 +340,8 @@ RSpec.describe OmniauthCallbacksController, type: :controller, feature_category:
end
end
+ it_behaves_like 'omniauth sign in that remembers user with two factor disabled'
+
context 'when a user has 2FA enabled' do
render_views
@@ -296,6 +367,8 @@ RSpec.describe OmniauthCallbacksController, type: :controller, feature_category:
expect(response).to have_gitlab_http_status(:ok)
end
end
+
+ it_behaves_like 'omniauth sign in that remembers user with two factor enabled'
end
context 'for sign up' do
@@ -357,6 +430,10 @@ RSpec.describe OmniauthCallbacksController, type: :controller, feature_category:
let(:extern_uid) { '' }
let(:provider) { :auth0 }
+ it_behaves_like 'omniauth sign in that remembers user with two factor disabled' do
+ let(:extern_uid) { 'my-uid' }
+ end
+
it 'does not allow sign in without extern_uid' do
post 'auth0'
@@ -364,6 +441,14 @@ RSpec.describe OmniauthCallbacksController, type: :controller, feature_category:
expect(response).to have_gitlab_http_status(:found)
expect(controller).to set_flash[:alert].to('Wrong extern UID provided. Make sure Auth0 is configured correctly.')
end
+
+ context 'when a user has 2FA enabled' do
+ let(:user) { create(:omniauth_user, :two_factor, extern_uid: extern_uid, provider: provider) }
+
+ it_behaves_like 'omniauth sign in that remembers user with two factor enabled' do
+ let(:extern_uid) { 'my-uid' }
+ end
+ end
end
context 'for atlassian_oauth2' do
@@ -373,6 +458,8 @@ RSpec.describe OmniauthCallbacksController, type: :controller, feature_category:
context 'when the user and identity already exist' do
let(:user) { create(:atlassian_user, extern_uid: extern_uid) }
+ it_behaves_like 'omniauth sign in that remembers user with two factor disabled'
+
it 'allows sign-in' do
post :atlassian_oauth2
@@ -391,6 +478,12 @@ RSpec.describe OmniauthCallbacksController, type: :controller, feature_category:
post :atlassian_oauth2
end
+
+ context 'when a user has 2FA enabled' do
+ let(:user) { create(:atlassian_user, :two_factor, extern_uid: extern_uid) }
+
+ it_behaves_like 'omniauth sign in that remembers user with two factor enabled'
+ end
end
context 'for a new user' do
@@ -443,11 +536,21 @@ RSpec.describe OmniauthCallbacksController, type: :controller, feature_category:
include_context 'with sign_up'
let(:additional_info) { { extra: { email_verified: true } } }
+ it_behaves_like 'omniauth sign in that remembers user with two factor disabled' do
+ let(:user) { create(:omniauth_user, extern_uid: extern_uid, provider: provider) }
+ end
+
it 'allows sign in' do
post 'salesforce'
expect(request.env['warden']).to be_authenticated
end
+
+ context 'when a user has 2FA enabled' do
+ let(:user) { create(:omniauth_user, :two_factor, extern_uid: extern_uid, provider: provider) }
+
+ it_behaves_like 'omniauth sign in that remembers user with two factor enabled'
+ end
end
end
end
@@ -497,11 +600,19 @@ RSpec.describe OmniauthCallbacksController, type: :controller, feature_category:
let(:post_action) { post provider }
end
+ it_behaves_like 'omniauth sign in that remembers user with two factor disabled'
+
it 'allows sign in' do
post provider
expect(request.env['warden']).to be_authenticated
end
+
+ context 'when a user has 2FA enabled' do
+ let(:user) { create(:omniauth_user, :two_factor, extern_uid: extern_uid, provider: provider) }
+
+ it_behaves_like 'omniauth sign in that remembers user with two factor enabled'
+ end
end
describe '#saml' do
diff --git a/spec/controllers/projects/issues_controller_spec.rb b/spec/controllers/projects/issues_controller_spec.rb
index b29a172f5b1..721125749a5 100644
--- a/spec/controllers/projects/issues_controller_spec.rb
+++ b/spec/controllers/projects/issues_controller_spec.rb
@@ -987,11 +987,11 @@ RSpec.describe Projects::IssuesController, :request_store, feature_category: :te
labels = create_list(:label, 10, project: project).map(&:to_reference)
issue = create(:issue, project: project, description: 'Test issue')
- control_count = ActiveRecord::QueryRecorder.new { issue.update!(description: [issue.description, label].join(' ')) }.count
+ control = ActiveRecord::QueryRecorder.new { issue.update!(description: [issue.description, label].join(' ')) }
# Follow-up to get rid of this `2 * label.count` requirement: https://gitlab.com/gitlab-org/gitlab-foss/issues/52230
expect { issue.update!(description: [issue.description, labels].join(' ')) }
- .not_to exceed_query_limit(control_count + 2 * labels.count)
+ .not_to exceed_query_limit(control).with_threshold(2 * labels.count)
end
it 'logs the view with Gitlab::Search::RecentIssues' do
@@ -1849,15 +1849,17 @@ RSpec.describe Projects::IssuesController, :request_store, feature_category: :te
RequestStore.clear!
- control_count = ActiveRecord::QueryRecorder.new do
+ control = ActiveRecord::QueryRecorder.new do
get :discussions, params: { namespace_id: project.namespace, project_id: project, id: issue.iid }
- end.count
+ end
RequestStore.clear!
create_list(:discussion_note_on_issue, 2, :system, noteable: issue, project: issue.project, note: cross_reference)
- expect { get :discussions, params: { namespace_id: project.namespace, project_id: project, id: issue.iid } }.not_to exceed_query_limit(control_count)
+ expect do
+ get :discussions, params: { namespace_id: project.namespace, project_id: project, id: issue.iid }
+ end.not_to exceed_query_limit(control)
end
end
diff --git a/spec/controllers/projects/labels_controller_spec.rb b/spec/controllers/projects/labels_controller_spec.rb
index db8cac8bb4a..2333ff0a937 100644
--- a/spec/controllers/projects/labels_controller_spec.rb
+++ b/spec/controllers/projects/labels_controller_spec.rb
@@ -108,7 +108,7 @@ RSpec.describe Projects::LabelsController, feature_category: :team_planning do
# some n+1 queries still exist
# calls to get max project authorization access level
- expect { list_labels }.not_to exceed_all_query_limit(control.count).with_threshold(25)
+ expect { list_labels }.not_to exceed_all_query_limit(control).with_threshold(25)
expect(assigns(:labels).count).to eq(10)
end
end
diff --git a/spec/controllers/projects/merge_requests/diffs_controller_spec.rb b/spec/controllers/projects/merge_requests/diffs_controller_spec.rb
index b2b591d7929..3c975c76337 100644
--- a/spec/controllers/projects/merge_requests/diffs_controller_spec.rb
+++ b/spec/controllers/projects/merge_requests/diffs_controller_spec.rb
@@ -138,10 +138,16 @@ RSpec.describe Projects::MergeRequests::DiffsController, feature_category: :code
end
let(:project) { create(:project, :repository) }
- let(:user) { create(:user) }
let(:maintainer) { true }
let(:merge_request) { create(:merge_request_with_diffs, target_project: project, source_project: project) }
+ let_it_be_with_reload(:user) { create(:user) }
+ let_it_be(:other_project) { create(:project) }
+
+ before_all do
+ other_project.add_maintainer(user)
+ end
+
before do
project.add_maintainer(user) if maintainer
sign_in(user)
@@ -429,10 +435,7 @@ RSpec.describe Projects::MergeRequests::DiffsController, feature_category: :code
end
context 'when the merge request belongs to a different project' do
- let(:other_project) { create(:project) }
-
before do
- other_project.add_maintainer(user)
diff_for_path(old_path: existing_path, new_path: existing_path, project_id: other_project)
end
@@ -442,6 +445,84 @@ RSpec.describe Projects::MergeRequests::DiffsController, feature_category: :code
end
end
+ describe 'GET diff_by_file_hash' do
+ def diff_by_file_hash(extra_params = {})
+ params = {
+ namespace_id: project.namespace.to_param,
+ project_id: project,
+ id: merge_request.iid,
+ format: 'json'
+ }
+
+ get :diff_by_file_hash, params: params.merge(extra_params)
+ end
+
+ let(:file) { merge_request.merge_request_diff.diffs.diff_files.first }
+ let(:file_hash) { file.file_hash }
+
+ context 'when the merge request exists' do
+ context 'when the user can view the merge request' do
+ context 'when the path exists in the diff' do
+ include_examples 'diff tracking' do
+ let(:method_call) { diff_by_file_hash(file_hash: file_hash) }
+ end
+
+ it 'enables diff notes' do
+ diff_by_file_hash(file_hash: file_hash)
+
+ expect(assigns(:diff_notes_disabled)).to be_falsey
+ expect(assigns(:new_diff_note_attrs)).to eq(
+ noteable_type: 'MergeRequest',
+ noteable_id: merge_request.id,
+ commit_id: nil
+ )
+ end
+
+ it 'only renders diff for the hash given' do
+ diff_by_file_hash(file_hash: file_hash)
+
+ diffs = json_response['diff_files']
+
+ expect(diffs.count).to eq(1)
+ expect(diffs.first['file_hash']).to eq(file_hash)
+ end
+ end
+ end
+
+ context 'when the user cannot view the merge request' do
+ let(:maintainer) { false }
+
+ before do
+ diff_by_file_hash(file_hash: file_hash)
+ end
+
+ it 'returns a 404' do
+ expect(response).to have_gitlab_http_status(:not_found)
+ end
+ end
+ end
+
+ context 'when the merge request does not exist' do
+ before do
+ diff_by_file_hash(id: merge_request.iid.succ, file_hash: file_hash)
+ end
+
+ it 'returns a 404' do
+ expect(response).to have_gitlab_http_status(:not_found)
+ end
+ end
+
+ context 'when the merge request belongs to a different project' do
+ before do
+ diff_by_file_hash(project_id: other_project, file_hash: file_hash)
+ end
+
+ it 'returns a 404' do
+ expect(response).to have_gitlab_http_status(:not_found)
+ end
+ end
+ end
+
describe 'GET diffs_batch' do
shared_examples_for 'serializes diffs with expected arguments' do
it 'serializes paginated merge request diff collection' do
diff --git a/spec/controllers/projects/merge_requests_controller_spec.rb b/spec/controllers/projects/merge_requests_controller_spec.rb
index 55741a82862..d04cda240fa 100644
--- a/spec/controllers/projects/merge_requests_controller_spec.rb
+++ b/spec/controllers/projects/merge_requests_controller_spec.rb
@@ -244,6 +244,24 @@ RSpec.describe Projects::MergeRequestsController, feature_category: :code_review
expect(response).to have_gitlab_http_status(:moved_permanently)
end
end
+
+ context 'when has pinned file' do
+ let(:file) { merge_request.merge_request_diff.diffs.diff_files.first }
+ let(:file_hash) { file.file_hash }
+
+ it 'adds pinned file url' do
+ go(pin: file_hash)
+
+ expect(assigns['pinned_file_url']).to eq(
+ diff_by_file_hash_namespace_project_merge_request_path(
+ format: 'json',
+ id: merge_request.iid,
+ namespace_id: project.namespace.to_param,
+ project_id: project.path,
+ file_hash: file_hash
+ ))
+ end
+ end
end
context 'when user is setting notes filters' do
diff --git a/spec/controllers/projects/notes_controller_spec.rb b/spec/controllers/projects/notes_controller_spec.rb
index 678991b91a5..6b440b90f37 100644
--- a/spec/controllers/projects/notes_controller_spec.rb
+++ b/spec/controllers/projects/notes_controller_spec.rb
@@ -249,15 +249,15 @@ RSpec.describe Projects::NotesController, type: :controller, feature_category: :
RequestStore.clear!
- control_count = ActiveRecord::QueryRecorder.new do
+ control = ActiveRecord::QueryRecorder.new do
get :index, params: request_params
- end.count
+ end
RequestStore.clear!
create_list(:discussion_note_on_issue, 2, :system, noteable: issue, project: issue.project, note: cross_reference)
- expect { get :index, params: request_params }.not_to exceed_query_limit(control_count)
+ expect { get :index, params: request_params }.not_to exceed_query_limit(control)
end
end
end
diff --git a/spec/controllers/projects/packages/infrastructure_registry_controller_spec.rb b/spec/controllers/projects/packages/infrastructure_registry_controller_spec.rb
index fc741d0f3f6..292c4017d8e 100644
--- a/spec/controllers/projects/packages/infrastructure_registry_controller_spec.rb
+++ b/spec/controllers/projects/packages/infrastructure_registry_controller_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Projects::Packages::InfrastructureRegistryController do
+RSpec.describe Projects::Packages::InfrastructureRegistryController, feature_category: :package_registry do
let_it_be(:user) { create(:user) }
let_it_be(:project) { create(:project, :private) }
diff --git a/spec/controllers/projects/packages/packages_controller_spec.rb b/spec/controllers/projects/packages/packages_controller_spec.rb
index da9cae47c62..8570af075ad 100644
--- a/spec/controllers/projects/packages/packages_controller_spec.rb
+++ b/spec/controllers/projects/packages/packages_controller_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Projects::Packages::PackagesController do
+RSpec.describe Projects::Packages::PackagesController, feature_category: :package_registry do
let_it_be(:project) { create(:project, :public) }
let(:page) { :index }
diff --git a/spec/controllers/projects/pipeline_schedules_controller_spec.rb b/spec/controllers/projects/pipeline_schedules_controller_spec.rb
index 7cd4f43d4da..9fe2e4c23e0 100644
--- a/spec/controllers/projects/pipeline_schedules_controller_spec.rb
+++ b/spec/controllers/projects/pipeline_schedules_controller_spec.rb
@@ -108,11 +108,11 @@ RSpec.describe Projects::PipelineSchedulesController, feature_category: :continu
end
it 'avoids N + 1 queries', :request_store do
- control_count = ActiveRecord::QueryRecorder.new { visit_pipelines_schedules }.count
+ control = ActiveRecord::QueryRecorder.new { visit_pipelines_schedules }
create_list(:ci_pipeline_schedule, 2, project: project)
- expect { visit_pipelines_schedules }.not_to exceed_query_limit(control_count)
+ expect { visit_pipelines_schedules }.not_to exceed_query_limit(control)
end
context 'when the scope is set to active' do
diff --git a/spec/controllers/projects/pipelines_controller_spec.rb b/spec/controllers/projects/pipelines_controller_spec.rb
index deaed8e1162..82c1aa3e18c 100644
--- a/spec/controllers/projects/pipelines_controller_spec.rb
+++ b/spec/controllers/projects/pipelines_controller_spec.rb
@@ -381,7 +381,7 @@ RSpec.describe Projects::PipelinesController, feature_category: :continuous_inte
# Set up all required variables
get_pipeline_json
- control_count = ActiveRecord::QueryRecorder.new { get_pipeline_json }.count
+ control = ActiveRecord::QueryRecorder.new { get_pipeline_json }
first_build = pipeline.builds.first
first_build.tag_list << [:hello, :world]
@@ -391,9 +391,7 @@ RSpec.describe Projects::PipelinesController, feature_category: :continuous_inte
second_build.tag_list << [:docker, :ruby]
create(:deployment, deployable: second_build)
- new_count = ActiveRecord::QueryRecorder.new { get_pipeline_json }.count
-
- expect(new_count).to be_within(1).of(control_count)
+ expect { get_pipeline_json }.not_to exceed_query_limit(control).with_threshold(1)
end
end
@@ -1074,7 +1072,7 @@ RSpec.describe Projects::PipelinesController, feature_category: :continuous_inte
clear_controller_memoization
- control_count = ActiveRecord::QueryRecorder.new { get_test_report_json }.count
+ control = ActiveRecord::QueryRecorder.new { get_test_report_json }
create(:ci_build, name: 'karma', pipeline: pipeline).tap do |build|
create(:ci_job_artifact, :junit, job: build)
@@ -1082,7 +1080,7 @@ RSpec.describe Projects::PipelinesController, feature_category: :continuous_inte
clear_controller_memoization
- expect { get_test_report_json }.not_to exceed_query_limit(control_count)
+ expect { get_test_report_json }.not_to exceed_query_limit(control)
end
end
diff --git a/spec/controllers/projects/refs_controller_spec.rb b/spec/controllers/projects/refs_controller_spec.rb
index 345e6e2e0de..ab879d9aeb7 100644
--- a/spec/controllers/projects/refs_controller_spec.rb
+++ b/spec/controllers/projects/refs_controller_spec.rb
@@ -78,6 +78,23 @@ RSpec.describe Projects::RefsController, feature_category: :source_code_manageme
expect(response).to have_gitlab_http_status(:bad_request)
end
end
+
+ context 'with an invalid path parameter' do
+ it 'returns 400 bad request' do
+ params = {
+ destination: 'graphs_commits',
+ namespace_id: project.namespace.to_param,
+ project_id: project,
+ id: 'master',
+ ref_type: nil,
+ path: '*'
+ }
+
+ get :switch, params: params
+
+ expect(response).to have_gitlab_http_status(:bad_request)
+ end
+ end
end
describe 'GET #logs_tree' do
diff --git a/spec/controllers/projects/security/configuration_controller_spec.rb b/spec/controllers/projects/security/configuration_controller_spec.rb
index 1ce0fcd85db..bfd269f9398 100644
--- a/spec/controllers/projects/security/configuration_controller_spec.rb
+++ b/spec/controllers/projects/security/configuration_controller_spec.rb
@@ -48,19 +48,6 @@ RSpec.describe Projects::Security::ConfigurationController do
expect(sast_feature['available']).to be_truthy
expect(dast_feature['available']).to be_falsey
end
-
- context 'with feature flag unify_security_configuration turned off' do
- before do
- stub_feature_flags(unify_security_configuration: false)
- end
-
- it 'responds with empty configuration data json' do
- get :show, params: { namespace_id: project.namespace, project_id: project, format: :json }
-
- expect(response).to have_gitlab_http_status(:ok)
- expect(json_response).to be_empty
- end
- end
end
end
end
diff --git a/spec/controllers/projects/starrers_controller_spec.rb b/spec/controllers/projects/starrers_controller_spec.rb
index 2148f495c31..236bb408d32 100644
--- a/spec/controllers/projects/starrers_controller_spec.rb
+++ b/spec/controllers/projects/starrers_controller_spec.rb
@@ -40,11 +40,11 @@ RSpec.describe Projects::StarrersController do
it 'avoids N+1s loading users', :request_store do
get_starrers
- control_count = ActiveRecord::QueryRecorder.new { get_starrers }.count
+ control = ActiveRecord::QueryRecorder.new { get_starrers }
create_list(:user, 5).each { |user| user.toggle_star(project) }
- expect { get_starrers }.not_to exceed_query_limit(control_count)
+ expect { get_starrers }.not_to exceed_query_limit(control)
end
end
diff --git a/spec/controllers/projects_controller_spec.rb b/spec/controllers/projects_controller_spec.rb
index 88d9d1228e3..3ddfb5e7262 100644
--- a/spec/controllers/projects_controller_spec.rb
+++ b/spec/controllers/projects_controller_spec.rb
@@ -43,6 +43,49 @@ RSpec.describe ProjectsController, feature_category: :groups_and_projects do
end
end
end
+
+ context 'with manageable group' do
+ context 'when manageable_group_count is 1' do
+ before do
+ group.add_owner(user)
+ end
+
+ it 'renders the template' do
+ get :new
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(response).to render_template('new')
+ end
+ end
+
+ context 'when manageable_group_count is 0' do
+ context 'when create_projects on personal namespace is allowed' do
+ before do
+ allow(user).to receive(:can_create_project?).and_return(true)
+ end
+
+ it 'renders the template' do
+ get :new
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(response).to render_template('new')
+ end
+ end
+
+ context 'when create_projects on personal namespace is not allowed' do
+ before do
+ stub_application_setting(allow_project_creation_for_guest_and_below: false)
+ end
+
+ it 'responds with status 404' do
+ get :new
+
+ expect(response).to have_gitlab_http_status(:not_found)
+ expect(response).not_to render_template('new')
+ end
+ end
+ end
+ end
end
end
@@ -1101,6 +1144,23 @@ RSpec.describe ProjectsController, feature_category: :groups_and_projects do
it_behaves_like 'feature update success'
end
end
+
+ context 'project topics' do
+ context 'on updates with topics of the same name (case insensitive)' do
+ it 'returns 200, with alert about update failing' do
+ put :update, params: {
+ namespace_id: project.namespace,
+ id: project.path,
+ project: {
+ topics: 'smoketest, SMOKETEST'
+ }
+ }
+
+ expect(response).to be_successful
+ expect(flash[:alert]).to eq('Project could not be updated!')
+ end
+ end
+ end
end
describe '#transfer', :enable_admin_mode do
diff --git a/spec/controllers/uploads_controller_spec.rb b/spec/controllers/uploads_controller_spec.rb
index 8ae78c5ee35..9c246c21104 100644
--- a/spec/controllers/uploads_controller_spec.rb
+++ b/spec/controllers/uploads_controller_spec.rb
@@ -776,6 +776,45 @@ RSpec.describe UploadsController, feature_category: :groups_and_projects do
end
end
end
+
+ context 'when viewing an organization avatar' do
+ let(:organization_detail) { create(:organization_detail) }
+ let(:organization) { organization_detail.organization }
+
+ subject(:request) do
+ get(
+ :show,
+ params: {
+ model: 'organizations/organization_detail',
+ mounted_as: 'avatar',
+ id: organization.id,
+ filename: 'dk.png'
+ }
+ )
+ end
+
+ context 'when signed in' do
+ before do
+ sign_in(user)
+ end
+
+ it 'responds with status 200' do
+ request
+ expect(response).to have_gitlab_http_status(:ok)
+ end
+
+ it_behaves_like 'content publicly cached'
+ end
+
+ context 'when not signed in' do
+ it 'responds with status 200' do
+ request
+ expect(response).to have_gitlab_http_status(:ok)
+ end
+
+ it_behaves_like 'content publicly cached'
+ end
+ end
end
def post_authorize(verified: true)
diff --git a/spec/db/docs_spec.rb b/spec/db/docs_spec.rb
index 03f944735a1..6f7a19fb2b1 100644
--- a/spec/db/docs_spec.rb
+++ b/spec/db/docs_spec.rb
@@ -17,6 +17,9 @@ RSpec.shared_examples 'validate dictionary' do |objects, directory_path, require
schema_inconsistencies
sharding_key
desired_sharding_key
+ allow_cross_joins
+ allow_cross_transactions
+ allow_cross_foreign_keys
]
end
diff --git a/spec/db/schema_spec.rb b/spec/db/schema_spec.rb
index 7e3f2a3b61e..74267874eeb 100644
--- a/spec/db/schema_spec.rb
+++ b/spec/db/schema_spec.rb
@@ -48,6 +48,10 @@ RSpec.describe 'Database schema', feature_category: :database do
chat_teams: %w[team_id],
ci_builds: %w[project_id runner_id user_id erased_by_id trigger_request_id partition_id auto_canceled_by_partition_id],
ci_namespace_monthly_usages: %w[namespace_id],
+ ci_pipeline_artifacts: %w[partition_id],
+ ci_pipeline_chat_data: %w[partition_id],
+ ci_pipelines_config: %w[partition_id],
+ ci_pipeline_metadata: %w[partition_id],
ci_pipeline_variables: %w[partition_id],
ci_pipelines: %w[partition_id],
ci_runner_projects: %w[runner_id],
@@ -86,6 +90,7 @@ RSpec.describe 'Database schema', feature_category: :database do
merge_request_diffs: %w[project_id],
merge_request_diff_commits: %w[commit_author_id committer_id],
namespaces: %w[owner_id parent_id],
+ namespace_descendants: %w[namespace_id],
notes: %w[author_id commit_id noteable_id updated_by_id resolved_by_id confirmed_by_id discussion_id namespace_id],
notification_settings: %w[source_id],
oauth_access_grants: %w[resource_owner_id application_id],
@@ -128,7 +133,8 @@ RSpec.describe 'Database schema', feature_category: :database do
web_hook_logs: %w[web_hook_id],
webauthn_registrations: %w[u2f_registration_id], # this column will be dropped
ml_candidates: %w[internal_id],
- value_stream_dashboard_counts: %w[namespace_id]
+ value_stream_dashboard_counts: %w[namespace_id],
+ zoekt_indices: %w[namespace_id] # needed for cells sharding key
}.with_indifferent_access.freeze
context 'for table' do
diff --git a/spec/experiments/in_product_guidance_environments_webide_experiment_spec.rb b/spec/experiments/in_product_guidance_environments_webide_experiment_spec.rb
deleted file mode 100644
index d616672173e..00000000000
--- a/spec/experiments/in_product_guidance_environments_webide_experiment_spec.rb
+++ /dev/null
@@ -1,22 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe InProductGuidanceEnvironmentsWebideExperiment, :experiment do
- subject { described_class.new(project: project) }
-
- let(:project) { create(:project, :repository) }
-
- before do
- stub_experiments(in_product_guidance_environments_webide: :candidate)
- end
-
- it 'excludes projects with environments' do
- create(:environment, project: project)
- expect(subject).to exclude(project: project)
- end
-
- it 'does not exlude projects without environments' do
- expect(subject).not_to exclude(project: project)
- end
-end
diff --git a/spec/factories/ci/job_artifacts.rb b/spec/factories/ci/job_artifacts.rb
index 1c418f646f6..292138d76db 100644
--- a/spec/factories/ci/job_artifacts.rb
+++ b/spec/factories/ci/job_artifacts.rb
@@ -403,6 +403,15 @@ FactoryBot.define do
end
end
+ trait :common_security_report_with_unicode_null_character do
+ common_security_report
+
+ after(:build) do |artifact, _|
+ artifact.file = fixture_file_upload(
+ Rails.root.join('spec/fixtures/security_reports/master/gl-common-scanning-report-with-unicode-null-character.json'), 'application/json')
+ end
+ end
+
trait :sast_with_corrupted_data do
file_type { :sast }
file_format { :raw }
diff --git a/spec/factories/ci/pipeline_chat_data.rb b/spec/factories/ci/pipeline_chat_data.rb
new file mode 100644
index 00000000000..1f61c98f542
--- /dev/null
+++ b/spec/factories/ci/pipeline_chat_data.rb
@@ -0,0 +1,9 @@
+# frozen_string_literal: true
+
+FactoryBot.define do
+ factory :ci_pipeline_chat_data, class: 'Ci::PipelineChatData' do
+ pipeline factory: :ci_empty_pipeline
+ chat_name
+ response_url { "https://response.com" }
+ end
+end
diff --git a/spec/factories/ci/pipeline_config.rb b/spec/factories/ci/pipeline_config.rb
new file mode 100644
index 00000000000..a415efaf185
--- /dev/null
+++ b/spec/factories/ci/pipeline_config.rb
@@ -0,0 +1,8 @@
+# frozen_string_literal: true
+
+FactoryBot.define do
+ factory :ci_pipeline_config, class: 'Ci::PipelineConfig' do
+ pipeline factory: :ci_empty_pipeline
+ content { "content" }
+ end
+end
diff --git a/spec/factories/ci/reports/security/findings.rb b/spec/factories/ci/reports/security/findings.rb
index 670d833c1f8..2de17115934 100644
--- a/spec/factories/ci/reports/security/findings.rb
+++ b/spec/factories/ci/reports/security/findings.rb
@@ -2,7 +2,6 @@
FactoryBot.define do
factory :ci_reports_security_finding, class: '::Gitlab::Ci::Reports::Security::Finding' do
- compare_key { "#{identifiers.first&.external_type}:#{identifiers.first&.external_id}:#{location.fingerprint}" }
confidence { :medium }
identifiers { Array.new(1) { association(:ci_reports_security_identifier) } }
location factory: :ci_reports_security_locations_sast
diff --git a/spec/factories/ci/runners.rb b/spec/factories/ci/runners.rb
index 2d67a4c0e80..63e8cec82e6 100644
--- a/spec/factories/ci/runners.rb
+++ b/spec/factories/ci/runners.rb
@@ -14,6 +14,7 @@ FactoryBot.define do
groups { [] }
projects { [] }
token_expires_at { nil }
+ creator { nil }
end
after(:build) do |runner, evaluator|
@@ -24,6 +25,8 @@ FactoryBot.define do
evaluator.groups.each do |group|
runner.runner_namespaces << build(:ci_runner_namespace, runner: runner, namespace: group)
end
+
+ runner.creator = evaluator.creator if evaluator.creator
end
after(:create) do |runner, evaluator|
diff --git a/spec/factories/groups.rb b/spec/factories/groups.rb
index 807df94e115..2f55c3ab567 100644
--- a/spec/factories/groups.rb
+++ b/spec/factories/groups.rb
@@ -18,6 +18,10 @@ FactoryBot.define do
create(:namespace_settings, namespace: group) unless group.namespace_settings
end
+ trait :with_organization do
+ association :organization
+ end
+
trait :public do
visibility_level { Gitlab::VisibilityLevel::PUBLIC }
end
diff --git a/spec/factories/integrations.rb b/spec/factories/integrations.rb
index 74dfea585f2..1d698e1b4d8 100644
--- a/spec/factories/integrations.rb
+++ b/spec/factories/integrations.rb
@@ -30,6 +30,14 @@ FactoryBot.define do
api_key { 'secret' }
end
+ factory :diffblue_cover_integration, class: 'Integrations::DiffblueCover' do
+ project
+ active { true }
+ diffblue_license_key { '1234-ABCD-DCBA-4321' }
+ diffblue_access_token_name { 'Diffblue CI' }
+ diffblue_access_token_secret { 'glpat-00112233445566778899' } # gitleaks:allow
+ end
+
factory :emails_on_push_integration, class: 'Integrations::EmailsOnPush' do
project
type { 'Integrations::EmailsOnPush' }
diff --git a/spec/factories/ml/model_version_metadata.rb b/spec/factories/ml/model_version_metadata.rb
new file mode 100644
index 00000000000..de13465e5bf
--- /dev/null
+++ b/spec/factories/ml/model_version_metadata.rb
@@ -0,0 +1,11 @@
+# frozen_string_literal: true
+
+FactoryBot.define do
+ factory :ml_model_version_metadata, class: '::Ml::ModelVersionMetadata' do
+ association :model_version, factory: :ml_model_versions
+ association :project, factory: :project
+
+ sequence(:name) { |n| "metadata_#{n}" }
+ sequence(:value) { |n| "value#{n}" }
+ end
+end
diff --git a/spec/factories/namespace_package_settings.rb b/spec/factories/namespace_package_settings.rb
index 33f290d0a2d..8c5f7193cde 100644
--- a/spec/factories/namespace_package_settings.rb
+++ b/spec/factories/namespace_package_settings.rb
@@ -15,6 +15,9 @@ FactoryBot.define do
nuget_symbol_server_enabled { false }
+ terraform_module_duplicates_allowed { false }
+ terraform_module_duplicate_exception_regex { 'foo' }
+
trait :group do
namespace { association(:group) }
end
diff --git a/spec/factories/namespaces/descendants.rb b/spec/factories/namespaces/descendants.rb
new file mode 100644
index 00000000000..6325481294a
--- /dev/null
+++ b/spec/factories/namespaces/descendants.rb
@@ -0,0 +1,12 @@
+# frozen_string_literal: true
+
+FactoryBot.define do
+ factory :namespace_descendants, class: 'Namespaces::Descendants' do
+ namespace { association(:group) }
+ self_and_descendant_group_ids { namespace.self_and_descendant_ids.pluck(:id).sort }
+ all_project_ids { namespace.all_projects.pluck(:id).sort }
+ traversal_ids { namespace.traversal_ids }
+ outdated_at { nil }
+ calculated_at { Time.current }
+ end
+end
diff --git a/spec/factories/organizations/organization_users.rb b/spec/factories/organizations/organization_users.rb
index 761f260ccb3..d73d2386356 100644
--- a/spec/factories/organizations/organization_users.rb
+++ b/spec/factories/organizations/organization_users.rb
@@ -4,5 +4,9 @@ FactoryBot.define do
factory :organization_user, class: 'Organizations::OrganizationUser' do
user
organization
+
+ trait :owner do
+ access_level { Gitlab::Access::OWNER }
+ end
end
end
diff --git a/spec/factories/projects.rb b/spec/factories/projects.rb
index a2848bd0256..83107e6cc4a 100644
--- a/spec/factories/projects.rb
+++ b/spec/factories/projects.rb
@@ -2,6 +2,10 @@
require_relative '../support/helpers/test_env'
+# TODO: Remove the debug_with_puts statements below! Used for debugging purposes.
+# TODO: https://gitlab.com/gitlab-org/quality/engineering-productivity/team/-/issues/323#note_1688925316
+require_relative '../support/helpers/debug_with_puts'
+
FactoryBot.define do
# Project without repository
#
@@ -66,6 +70,8 @@ FactoryBot.define do
end
after(:build) do |project, evaluator|
+ DebugWithPuts.debug_with_puts "Beginning of after :build of projects factory in spec/factories/projects.rb"
+
# Builds and MRs can't have higher visibility level than repository access level.
builds_access_level = [evaluator.builds_access_level, evaluator.repository_access_level].min
merge_requests_access_level = [evaluator.merge_requests_access_level, evaluator.repository_access_level].min
@@ -87,6 +93,8 @@ FactoryBot.define do
security_and_compliance_access_level: evaluator.security_and_compliance_access_level
}
+ DebugWithPuts.debug_with_puts "During after :build of projects factory in spec/factories/projects.rb:#{__LINE__}"
+
project_namespace_hash = {
name: evaluator.name,
path: evaluator.path,
@@ -97,10 +105,16 @@ FactoryBot.define do
project_namespace_hash[:id] = evaluator.project_namespace_id.presence
+ DebugWithPuts.debug_with_puts "During after :build of projects factory in spec/factories/projects.rb:#{__LINE__}"
+
project.build_project_namespace(project_namespace_hash)
project.build_project_feature(project_feature_hash)
+ DebugWithPuts.debug_with_puts "During after :build of projects factory in spec/factories/projects.rb:#{__LINE__}"
+
project.set_runners_token(evaluator.runners_token) if evaluator.runners_token.present?
+
+ DebugWithPuts.debug_with_puts "End of after :build of projects factory in spec/factories/projects.rb"
end
to_create do |project|
@@ -108,6 +122,7 @@ FactoryBot.define do
end
after(:create) do |project, evaluator|
+ DebugWithPuts.debug_with_puts "Beginning of after :create of projects factory in spec/factories/projects.rb"
# Normally the class Projects::CreateService is used for creating
# projects, and this class takes care of making sure the owner and current
# user have access to the project. Our specs don't use said service class,
@@ -116,12 +131,16 @@ FactoryBot.define do
project.add_owner(project.first_owner)
end
+ DebugWithPuts.debug_with_puts "During after :create of projects factory in spec/factories/projects.rb:#{__LINE__}"
+
if project.group
project.run_after_commit_or_now do
AuthorizedProjectUpdate::ProjectRecalculateService.new(project).execute
end
end
+ DebugWithPuts.debug_with_puts "During after :create of projects factory in spec/factories/projects.rb:#{__LINE__}"
+
# assign the delegated `#ci_cd_settings` attributes after create
project.group_runners_enabled = evaluator.group_runners_enabled unless evaluator.group_runners_enabled.nil?
project.merge_pipelines_enabled = evaluator.merge_pipelines_enabled unless evaluator.merge_pipelines_enabled.nil?
@@ -133,6 +152,8 @@ FactoryBot.define do
project.runner_token_expiration_interval = evaluator.runner_token_expiration_interval unless evaluator.runner_token_expiration_interval.nil?
project.runner_token_expiration_interval_human_readable = evaluator.runner_token_expiration_interval_human_readable unless evaluator.runner_token_expiration_interval_human_readable.nil?
+ DebugWithPuts.debug_with_puts "During after :create of projects factory in spec/factories/projects.rb:#{__LINE__}"
+
if evaluator.import_status
import_state = project.import_state || project.build_import_state
import_state.status = evaluator.import_status
@@ -142,8 +163,12 @@ FactoryBot.define do
import_state.save!
end
+ DebugWithPuts.debug_with_puts "During after :create of projects factory in spec/factories/projects.rb:#{__LINE__}"
+
# simulating ::Projects::ProcessSyncEventsWorker because most tests don't run Sidekiq inline
project.create_ci_project_mirror!(namespace_id: project.namespace_id) unless project.ci_project_mirror
+
+ DebugWithPuts.debug_with_puts "End of after :create of projects factory in spec/factories/projects.rb"
end
trait :public do
@@ -326,6 +351,7 @@ FactoryBot.define do
end
after :create do |project, evaluator|
+ DebugWithPuts.debug_with_puts "Beginning of after :create of trait :repository do in spec/factories/projects.rb"
# Specify `lfs: true` to create the LfsObject for the LFS file in the test repo:
# https://gitlab.com/gitlab-org/gitlab-test/-/blob/master/files/lfs/lfs_object.iso
if evaluator.lfs
@@ -351,6 +377,8 @@ FactoryBot.define do
end
end
+ DebugWithPuts.debug_with_puts "During after :create of trait :repository do in spec/factories/projects.rb:#{__LINE__}"
+
if evaluator.create_templates
templates_path = "#{evaluator.create_templates}_templates"
@@ -380,6 +408,8 @@ FactoryBot.define do
branch_name: 'master')
end
+ DebugWithPuts.debug_with_puts "During after :create of trait :repository do in spec/factories/projects.rb:#{__LINE__}"
+
if evaluator.create_branch
project.repository.create_file(
project.creator,
@@ -389,6 +419,8 @@ FactoryBot.define do
branch_name: evaluator.create_branch)
end
+ DebugWithPuts.debug_with_puts "During after :create of trait :repository do in spec/factories/projects.rb:#{__LINE__}"
+
if evaluator.create_tag
project.repository.add_tag(
project.creator,
@@ -397,6 +429,7 @@ FactoryBot.define do
end
project.track_project_repository
+ DebugWithPuts.debug_with_puts "End of after :create of trait :repository do in spec/factories/projects.rb"
end
end
diff --git a/spec/factories/users.rb b/spec/factories/users.rb
index 15c140954d5..7ade859dcf2 100644
--- a/spec/factories/users.rb
+++ b/spec/factories/users.rb
@@ -13,7 +13,18 @@ FactoryBot.define do
color_scheme_id { 1 }
after(:build) do |user, evaluator|
- user.assign_personal_namespace if Feature.enabled?(:create_personal_ns_outside_model, Feature.current_request)
+ # UserWithNamespaceShim is not defined in gdk reset-data. We assume the shim is enabled in this case.
+ assign_ns = if defined?(UserWithNamespaceShim)
+ UserWithNamespaceShim.enabled?
+ else
+ true
+ end
+
+ user.assign_personal_namespace if assign_ns
+ end
+
+ trait :with_namespace do
+ namespace { assign_personal_namespace }
end
trait :admin do
diff --git a/spec/factories/users/in_product_marketing_email.rb b/spec/factories/users/in_product_marketing_email.rb
deleted file mode 100644
index c86c469ff31..00000000000
--- a/spec/factories/users/in_product_marketing_email.rb
+++ /dev/null
@@ -1,10 +0,0 @@
-# frozen_string_literal: true
-
-FactoryBot.define do
- factory :in_product_marketing_email, class: 'Users::InProductMarketingEmail' do
- user
-
- track { 'create' }
- series { 0 }
- end
-end
diff --git a/spec/factories/work_items/dates_sources.rb b/spec/factories/work_items/dates_sources.rb
new file mode 100644
index 00000000000..ea82eceb402
--- /dev/null
+++ b/spec/factories/work_items/dates_sources.rb
@@ -0,0 +1,7 @@
+# frozen_string_literal: true
+
+FactoryBot.define do
+ factory :work_items_dates_source, class: 'WorkItems::DatesSource' do
+ work_item
+ end
+end
diff --git a/spec/features/admin/admin_abuse_reports_spec.rb b/spec/features/admin/admin_abuse_reports_spec.rb
index 5e98d2ffcf3..9c425f83508 100644
--- a/spec/features/admin/admin_abuse_reports_spec.rb
+++ b/spec/features/admin/admin_abuse_reports_spec.rb
@@ -17,7 +17,7 @@ RSpec.describe "Admin::AbuseReports", :js, feature_category: :insider_threat do
before do
sign_in(admin)
- gitlab_enable_admin_mode_sign_in(admin)
+ enable_admin_mode!(admin)
visit admin_abuse_reports_path
end
diff --git a/spec/features/admin/admin_appearance_spec.rb b/spec/features/admin/admin_appearance_spec.rb
index 71c904b3a19..ec63e43d183 100644
--- a/spec/features/admin/admin_appearance_spec.rb
+++ b/spec/features/admin/admin_appearance_spec.rb
@@ -10,189 +10,180 @@ RSpec.describe 'Admin Appearance', feature_category: :shared do
stub_feature_flags(edit_user_profile_vue: false)
end
- flag_values = [true, false]
- flag_values.each do |val|
- context "with #{val}" do
- before do
- stub_feature_flags(restyle_login_page: val)
- end
+ it 'create new appearance' do
+ sign_in(admin)
+ enable_admin_mode!(admin)
+ visit admin_application_settings_appearances_path
+
+ fill_in 'appearance_title', with: 'MyCompany'
+ fill_in 'appearance_description', with: 'dev server'
+ fill_in 'appearance_pwa_name', with: 'GitLab PWA'
+ fill_in 'appearance_pwa_short_name', with: 'GitLab'
+ fill_in 'appearance_pwa_description', with: 'GitLab as PWA'
+ fill_in 'appearance_new_project_guidelines', with: 'Custom project guidelines'
+ fill_in 'appearance_profile_image_guidelines', with: 'Custom profile image guidelines'
+ click_button 'Update appearance settings'
+
+ expect(page).to have_current_path admin_application_settings_appearances_path, ignore_query: true
+ expect(page).to have_content 'Appearance'
+
+ expect(page).to have_field('appearance_title', with: 'MyCompany')
+ expect(page).to have_field('appearance_description', with: 'dev server')
+ expect(page).to have_field('appearance_pwa_name', with: 'GitLab PWA')
+ expect(page).to have_field('appearance_pwa_short_name', with: 'GitLab')
+ expect(page).to have_field('appearance_pwa_description', with: 'GitLab as PWA')
+ expect(page).to have_field('appearance_new_project_guidelines', with: 'Custom project guidelines')
+ expect(page).to have_field('appearance_profile_image_guidelines', with: 'Custom profile image guidelines')
+ expect(page).to have_content 'Last edit'
+ end
- it 'create new appearance' do
- sign_in(admin)
- gitlab_enable_admin_mode_sign_in(admin)
- visit admin_application_settings_appearances_path
+ it 'preview sign-in page appearance' do
+ sign_in(admin)
+ enable_admin_mode!(admin)
- fill_in 'appearance_title', with: 'MyCompany'
- fill_in 'appearance_description', with: 'dev server'
- fill_in 'appearance_pwa_name', with: 'GitLab PWA'
- fill_in 'appearance_pwa_short_name', with: 'GitLab'
- fill_in 'appearance_pwa_description', with: 'GitLab as PWA'
- fill_in 'appearance_new_project_guidelines', with: 'Custom project guidelines'
- fill_in 'appearance_profile_image_guidelines', with: 'Custom profile image guidelines'
- click_button 'Update appearance settings'
-
- expect(page).to have_current_path admin_application_settings_appearances_path, ignore_query: true
- expect(page).to have_content 'Appearance'
-
- expect(page).to have_field('appearance_title', with: 'MyCompany')
- expect(page).to have_field('appearance_description', with: 'dev server')
- expect(page).to have_field('appearance_pwa_name', with: 'GitLab PWA')
- expect(page).to have_field('appearance_pwa_short_name', with: 'GitLab')
- expect(page).to have_field('appearance_pwa_description', with: 'GitLab as PWA')
- expect(page).to have_field('appearance_new_project_guidelines', with: 'Custom project guidelines')
- expect(page).to have_field('appearance_profile_image_guidelines', with: 'Custom profile image guidelines')
- expect(page).to have_content 'Last edit'
- end
+ visit admin_application_settings_appearances_path
+ click_link "Sign-in page"
- it 'preview sign-in page appearance' do
- sign_in(admin)
- gitlab_enable_admin_mode_sign_in(admin)
+ expect(find('#login')).to be_disabled
+ expect(find('#password')).to be_disabled
+ expect(find('button')).to be_disabled
- visit admin_application_settings_appearances_path
- click_link "Sign-in page"
+ expect_custom_sign_in_appearance(appearance)
+ end
- expect(find('#login')).to be_disabled
- expect(find('#password')).to be_disabled
- expect(find('button')).to be_disabled
+ it 'preview new project page appearance', :js do
+ sign_in(admin)
+ enable_admin_mode!(admin)
- expect_custom_sign_in_appearance(appearance)
- end
+ visit admin_application_settings_appearances_path
+ click_link "New project page"
+
+ expect_custom_new_project_appearance(appearance)
+ end
- it 'preview new project page appearance', :js do
- sign_in(admin)
- gitlab_enable_admin_mode_sign_in(admin)
+ context 'Custom system header and footer' do
+ before do
+ sign_in(admin)
+ enable_admin_mode!(admin)
+ end
+ context 'when system header and footer messages are empty' do
+ it 'shows custom system header and footer fields' do
visit admin_application_settings_appearances_path
- click_link "New project page"
- expect_custom_new_project_appearance(appearance)
+ expect(page).to have_field('appearance_header_message', with: '')
+ expect(page).to have_field('appearance_footer_message', with: '')
+ expect(page).to have_field('appearance_message_background_color')
+ expect(page).to have_field('appearance_message_font_color')
end
+ end
- context 'Custom system header and footer' do
- before do
- sign_in(admin)
- gitlab_enable_admin_mode_sign_in(admin)
- end
-
- context 'when system header and footer messages are empty' do
- it 'shows custom system header and footer fields' do
- visit admin_application_settings_appearances_path
-
- expect(page).to have_field('appearance_header_message', with: '')
- expect(page).to have_field('appearance_footer_message', with: '')
- expect(page).to have_field('appearance_message_background_color')
- expect(page).to have_field('appearance_message_font_color')
- end
- end
-
- context 'when system header and footer messages are not empty' do
- before do
- appearance.update!(header_message: 'Foo', footer_message: 'Bar')
- end
-
- it 'shows custom system header and footer fields' do
- visit admin_application_settings_appearances_path
-
- expect(page).to have_field('appearance_header_message', with: appearance.header_message)
- expect(page).to have_field('appearance_footer_message', with: appearance.footer_message)
- expect(page).to have_field('appearance_message_background_color')
- expect(page).to have_field('appearance_message_font_color')
- end
- end
+ context 'when system header and footer messages are not empty' do
+ before do
+ appearance.update!(header_message: 'Foo', footer_message: 'Bar')
end
- it 'custom sign-in page' do
- visit new_user_session_path
+ it 'shows custom system header and footer fields' do
+ visit admin_application_settings_appearances_path
- expect_custom_sign_in_appearance(appearance)
+ expect(page).to have_field('appearance_header_message', with: appearance.header_message)
+ expect(page).to have_field('appearance_footer_message', with: appearance.footer_message)
+ expect(page).to have_field('appearance_message_background_color')
+ expect(page).to have_field('appearance_message_font_color')
end
+ end
+ end
- it 'custom new project page', :js do
- sign_in(admin)
- gitlab_enable_admin_mode_sign_in(admin)
- visit new_project_path
- click_link 'Create blank project'
+ it 'custom sign-in page' do
+ visit new_user_session_path
- expect_custom_new_project_appearance(appearance)
- end
+ expect_custom_sign_in_appearance(appearance)
+ end
- context 'Profile page with custom profile image guidelines' do
- before do
- sign_in(admin)
- gitlab_enable_admin_mode_sign_in(admin)
- visit admin_application_settings_appearances_path
- fill_in 'appearance_profile_image_guidelines', with: 'Custom profile image guidelines, please :smile:!'
- click_button 'Update appearance settings'
- end
-
- it 'renders guidelines when set' do
- sign_in create(:user)
- visit profile_path
-
- expect(page).to have_content 'Custom profile image guidelines, please 😄!'
- end
- end
+ it 'custom new project page', :js do
+ sign_in(admin)
+ enable_admin_mode!(admin)
+ visit new_project_path
+ click_link 'Create blank project'
- it 'appearance logo' do
- sign_in(admin)
- gitlab_enable_admin_mode_sign_in(admin)
- visit admin_application_settings_appearances_path
+ expect_custom_new_project_appearance(appearance)
+ end
- attach_file(:appearance_logo, logo_fixture)
- click_button 'Update appearance settings'
- expect(page).to have_css(logo_selector)
+ context 'Profile page with custom profile image guidelines' do
+ before do
+ sign_in(admin)
+ enable_admin_mode!(admin)
+ visit admin_application_settings_appearances_path
+ fill_in 'appearance_profile_image_guidelines', with: 'Custom profile image guidelines, please :smile:!'
+ click_button 'Update appearance settings'
+ end
- click_link 'Remove logo'
- expect(page).not_to have_css(logo_selector)
- end
+ it 'renders guidelines when set' do
+ sign_in create(:user)
+ visit profile_path
- it 'appearance pwa icon' do
- sign_in(admin)
- gitlab_enable_admin_mode_sign_in(admin)
- visit admin_application_settings_appearances_path
+ expect(page).to have_content 'Custom profile image guidelines, please 😄!'
+ end
+ end
- attach_file(:appearance_pwa_icon, logo_fixture)
- click_button 'Update appearance settings'
- expect(page).to have_css(pwa_icon_selector)
+ it 'appearance logo' do
+ sign_in(admin)
+ enable_admin_mode!(admin)
+ visit admin_application_settings_appearances_path
- click_link 'Remove icon'
- expect(page).not_to have_css(pwa_icon_selector)
- end
+ attach_file(:appearance_logo, logo_fixture)
+ click_button 'Update appearance settings'
+ expect(page).to have_css(logo_selector)
- it 'header logos' do
- sign_in(admin)
- gitlab_enable_admin_mode_sign_in(admin)
- visit admin_application_settings_appearances_path
+ click_link 'Remove logo'
+ expect(page).not_to have_css(logo_selector)
+ end
- attach_file(:appearance_header_logo, logo_fixture)
- click_button 'Update appearance settings'
- expect(page).to have_css(header_logo_selector)
+ it 'appearance pwa icon' do
+ sign_in(admin)
+ enable_admin_mode!(admin)
+ visit admin_application_settings_appearances_path
- click_link 'Remove header logo'
- expect(page).not_to have_css(header_logo_selector)
- end
+ attach_file(:appearance_pwa_icon, logo_fixture)
+ click_button 'Update appearance settings'
+ expect(page).to have_css(pwa_icon_selector)
- it 'Favicon' do
- sign_in(admin)
- gitlab_enable_admin_mode_sign_in(admin)
- visit admin_application_settings_appearances_path
+ click_link 'Remove icon'
+ expect(page).not_to have_css(pwa_icon_selector)
+ end
- attach_file(:appearance_favicon, logo_fixture)
- click_button 'Update appearance settings'
+ it 'header logos' do
+ sign_in(admin)
+ enable_admin_mode!(admin)
+ visit admin_application_settings_appearances_path
- expect(page).to have_css('.appearance-light-logo-preview')
+ attach_file(:appearance_header_logo, logo_fixture)
+ click_button 'Update appearance settings'
+ expect(page).to have_css(header_logo_selector)
- click_link 'Remove favicon'
+ click_link 'Remove header logo'
+ expect(page).not_to have_css(header_logo_selector)
+ end
- expect(page).not_to have_css('.appearance-light-logo-preview')
+ it 'Favicon' do
+ sign_in(admin)
+ enable_admin_mode!(admin)
+ visit admin_application_settings_appearances_path
- # allowed file types
- attach_file(:appearance_favicon, Rails.root.join('spec', 'fixtures', 'sanitized.svg'))
- click_button 'Update appearance settings'
+ attach_file(:appearance_favicon, logo_fixture)
+ click_button 'Update appearance settings'
- expect(page).to have_content 'Favicon You are not allowed to upload "svg" files, allowed types: png, ico'
- end
- end
+ expect(page).to have_css('.appearance-light-logo-preview')
+
+ click_link 'Remove favicon'
+
+ expect(page).not_to have_css('.appearance-light-logo-preview')
+
+ # allowed file types
+ attach_file(:appearance_favicon, Rails.root.join('spec', 'fixtures', 'sanitized.svg'))
+ click_button 'Update appearance settings'
+
+ expect(page).to have_content 'Favicon You are not allowed to upload "svg" files, allowed types: png, ico'
end
def expect_custom_sign_in_appearance(appearance)
diff --git a/spec/features/admin/admin_browse_spam_logs_spec.rb b/spec/features/admin/admin_browse_spam_logs_spec.rb
index f781e2adf07..1f89232759c 100644
--- a/spec/features/admin/admin_browse_spam_logs_spec.rb
+++ b/spec/features/admin/admin_browse_spam_logs_spec.rb
@@ -8,7 +8,7 @@ RSpec.describe 'Admin browse spam logs', feature_category: :shared do
before do
sign_in(admin)
- gitlab_enable_admin_mode_sign_in(admin)
+ enable_admin_mode!(admin)
end
it 'browse spam logs' do
diff --git a/spec/features/admin/admin_deploy_keys_spec.rb b/spec/features/admin/admin_deploy_keys_spec.rb
index f9510ef296a..d6d021aeafc 100644
--- a/spec/features/admin/admin_deploy_keys_spec.rb
+++ b/spec/features/admin/admin_deploy_keys_spec.rb
@@ -12,7 +12,7 @@ RSpec.describe 'admin deploy keys', :js, feature_category: :system_access do
before do
sign_in(admin)
- gitlab_enable_admin_mode_sign_in(admin)
+ enable_admin_mode!(admin)
end
it 'show all public deploy keys' do
diff --git a/spec/features/admin/admin_dev_ops_reports_spec.rb b/spec/features/admin/admin_dev_ops_reports_spec.rb
index 99d43e6b0da..e820ae866a1 100644
--- a/spec/features/admin/admin_dev_ops_reports_spec.rb
+++ b/spec/features/admin/admin_dev_ops_reports_spec.rb
@@ -6,7 +6,7 @@ RSpec.describe 'DevOps Report page', :js, feature_category: :devops_reports do
before do
admin = create(:admin)
sign_in(admin)
- gitlab_enable_admin_mode_sign_in(admin)
+ enable_admin_mode!(admin)
end
context 'without licensed feature devops adoption' do
diff --git a/spec/features/admin/admin_disables_git_access_protocol_spec.rb b/spec/features/admin/admin_disables_git_access_protocol_spec.rb
index 76620b93557..039968025a9 100644
--- a/spec/features/admin/admin_disables_git_access_protocol_spec.rb
+++ b/spec/features/admin/admin_disables_git_access_protocol_spec.rb
@@ -12,7 +12,7 @@ RSpec.describe 'Admin disables Git access protocol', :js, feature_category: :sou
before do
stub_env('IN_MEMORY_APPLICATION_SETTINGS', 'false')
sign_in(admin)
- gitlab_enable_admin_mode_sign_in(admin)
+ enable_admin_mode!(admin)
end
context 'with HTTP disabled' do
diff --git a/spec/features/admin/admin_disables_two_factor_spec.rb b/spec/features/admin/admin_disables_two_factor_spec.rb
index eed20d449cd..cb3c8973872 100644
--- a/spec/features/admin/admin_disables_two_factor_spec.rb
+++ b/spec/features/admin/admin_disables_two_factor_spec.rb
@@ -8,7 +8,7 @@ RSpec.describe 'Admin disables 2FA for a user', feature_category: :system_access
it 'successfully', :js do
admin = create(:admin)
sign_in(admin)
- gitlab_enable_admin_mode_sign_in(admin)
+ enable_admin_mode!(admin)
user = create(:user, :two_factor)
edit_user(user)
@@ -27,7 +27,7 @@ RSpec.describe 'Admin disables 2FA for a user', feature_category: :system_access
it 'for a user without 2FA enabled' do
admin = create(:admin)
sign_in(admin)
- gitlab_enable_admin_mode_sign_in(admin)
+ enable_admin_mode!(admin)
user = create(:user)
edit_user(user)
diff --git a/spec/features/admin/admin_groups_spec.rb b/spec/features/admin/admin_groups_spec.rb
index f071da1835a..283caddab6a 100644
--- a/spec/features/admin/admin_groups_spec.rb
+++ b/spec/features/admin/admin_groups_spec.rb
@@ -15,7 +15,7 @@ RSpec.describe 'Admin Groups', feature_category: :groups_and_projects do
before do
sign_in(current_user)
- gitlab_enable_admin_mode_sign_in(current_user)
+ enable_admin_mode!(current_user)
stub_application_setting(default_group_visibility: internal)
end
diff --git a/spec/features/admin/admin_health_check_spec.rb b/spec/features/admin/admin_health_check_spec.rb
index 66014e676d5..fa91159c5f5 100644
--- a/spec/features/admin/admin_health_check_spec.rb
+++ b/spec/features/admin/admin_health_check_spec.rb
@@ -10,7 +10,7 @@ RSpec.describe "Admin Health Check", :js, feature_category: :error_budgets do
before do
stub_env('IN_MEMORY_APPLICATION_SETTINGS', 'false')
sign_in(admin)
- gitlab_enable_admin_mode_sign_in(admin)
+ enable_admin_mode!(admin)
end
describe '#show' do
diff --git a/spec/features/admin/admin_hook_logs_spec.rb b/spec/features/admin/admin_hook_logs_spec.rb
index 0a537e65b99..146c83dbeee 100644
--- a/spec/features/admin/admin_hook_logs_spec.rb
+++ b/spec/features/admin/admin_hook_logs_spec.rb
@@ -9,7 +9,7 @@ RSpec.describe 'Admin::HookLogs', feature_category: :webhooks do
before do
sign_in(admin)
- gitlab_enable_admin_mode_sign_in(admin)
+ enable_admin_mode!(admin)
end
it 'show list of hook logs' do
diff --git a/spec/features/admin/admin_hooks_spec.rb b/spec/features/admin/admin_hooks_spec.rb
index 2aec5baf351..9bdb7ce000f 100644
--- a/spec/features/admin/admin_hooks_spec.rb
+++ b/spec/features/admin/admin_hooks_spec.rb
@@ -9,7 +9,7 @@ RSpec.describe 'Admin::Hooks', feature_category: :webhooks do
before do
sign_in(user)
- gitlab_enable_admin_mode_sign_in(user)
+ enable_admin_mode!(user)
end
describe 'GET /admin/hooks' do
diff --git a/spec/features/admin/admin_jobs_spec.rb b/spec/features/admin/admin_jobs_spec.rb
index b3e21d02354..fc775c180ff 100644
--- a/spec/features/admin/admin_jobs_spec.rb
+++ b/spec/features/admin/admin_jobs_spec.rb
@@ -8,7 +8,7 @@ RSpec.describe 'Admin Jobs', :js, feature_category: :continuous_integration do
before do
admin = create(:admin)
sign_in(admin)
- gitlab_enable_admin_mode_sign_in(admin)
+ enable_admin_mode!(admin)
end
describe 'GET /admin/jobs' do
diff --git a/spec/features/admin/admin_labels_spec.rb b/spec/features/admin/admin_labels_spec.rb
index 47dc8577037..2fe68363062 100644
--- a/spec/features/admin/admin_labels_spec.rb
+++ b/spec/features/admin/admin_labels_spec.rb
@@ -11,7 +11,7 @@ RSpec.describe 'admin issues labels', feature_category: :team_planning do
before do
admin = create(:admin)
sign_in(admin)
- gitlab_enable_admin_mode_sign_in(admin)
+ enable_admin_mode!(admin)
end
describe 'list' do
diff --git a/spec/features/admin/admin_manage_applications_spec.rb b/spec/features/admin/admin_manage_applications_spec.rb
index b4c77e802a8..3e351af6121 100644
--- a/spec/features/admin/admin_manage_applications_spec.rb
+++ b/spec/features/admin/admin_manage_applications_spec.rb
@@ -10,7 +10,7 @@ RSpec.describe 'admin manage applications', feature_category: :system_access do
before do
admin = create(:admin)
sign_in(admin)
- gitlab_enable_admin_mode_sign_in(admin)
+ enable_admin_mode!(admin)
end
include_examples 'manage applications'
diff --git a/spec/features/admin/admin_mode/login_spec.rb b/spec/features/admin/admin_mode/login_spec.rb
index f2262464386..5298407deb3 100644
--- a/spec/features/admin/admin_mode/login_spec.rb
+++ b/spec/features/admin/admin_mode/login_spec.rb
@@ -13,256 +13,248 @@ RSpec.describe 'Admin Mode Login', feature_category: :system_access do
click_button 'Verify code'
end
- flag_values = [true, false]
- flag_values.each do |val|
- context "with #{val}" do
- before do
- stub_feature_flags(restyle_login_page: val)
- end
- context 'with valid username/password' do
- let(:user) { create(:admin, :two_factor) }
+ context 'with valid username/password' do
+ let(:user) { create(:admin, :two_factor) }
- context 'using one-time code' do
- it 'blocks login if we reuse the same code immediately' do
- gitlab_sign_in(user, remember: true)
+ context 'using one-time code' do
+ it 'blocks login if we reuse the same code immediately' do
+ gitlab_sign_in(user, remember: true)
- expect(page).to have_content(_('Enter verification code'))
+ expect(page).to have_content(_('Enter verification code'))
- repeated_otp = user.current_otp
- enter_code(repeated_otp)
- gitlab_enable_admin_mode_sign_in(user, use_mock_admin_mode: false)
+ repeated_otp = user.current_otp
+ enter_code(repeated_otp)
+ enable_admin_mode!(user, use_ui: true)
- expect(page).to have_content(_('Enter verification code'))
+ expect(page).to have_content(_('Enter verification code'))
- enter_code(repeated_otp)
+ enter_code(repeated_otp)
- expect(page).to have_current_path admin_session_path, ignore_query: true
- expect(page).to have_content('Invalid two-factor code')
- end
+ expect(page).to have_current_path admin_session_path, ignore_query: true
+ expect(page).to have_content('Invalid two-factor code')
+ end
- context 'not re-using codes' do
- before do
- gitlab_sign_in(user, remember: true)
+ context 'not re-using codes' do
+ before do
+ gitlab_sign_in(user, remember: true)
- expect(page).to have_content('Enter verification code')
+ expect(page).to have_content('Enter verification code')
- enter_code(user.current_otp)
- gitlab_enable_admin_mode_sign_in(user, use_mock_admin_mode: false)
+ enter_code(user.current_otp)
+ enable_admin_mode!(user, use_ui: true)
- expect(page).to have_content(_('Enter verification code'))
- end
+ expect(page).to have_content(_('Enter verification code'))
+ end
- it 'allows login with valid code' do
- # Cannot reuse the TOTP
- travel_to(30.seconds.from_now) do
- enter_code(user.current_otp)
+ it 'allows login with valid code' do
+ # Cannot reuse the TOTP
+ travel_to(30.seconds.from_now) do
+ enter_code(user.current_otp)
- expect(page).to have_current_path admin_root_path, ignore_query: true
- expect(page).to have_content('Admin mode enabled')
- end
- end
+ expect(page).to have_current_path admin_root_path, ignore_query: true
+ expect(page).to have_content('Admin mode enabled')
+ end
+ end
- it 'blocks login with invalid code' do
- # Cannot reuse the TOTP
- travel_to(30.seconds.from_now) do
- enter_code('foo')
+ it 'blocks login with invalid code' do
+ # Cannot reuse the TOTP
+ travel_to(30.seconds.from_now) do
+ enter_code('foo')
- expect(page).to have_content('Invalid two-factor code')
- end
- end
+ expect(page).to have_content('Invalid two-factor code')
+ end
+ end
- it 'allows login with invalid code, then valid code' do
- # Cannot reuse the TOTP
- travel_to(30.seconds.from_now) do
- enter_code('foo')
+ it 'allows login with invalid code, then valid code' do
+ # Cannot reuse the TOTP
+ travel_to(30.seconds.from_now) do
+ enter_code('foo')
- expect(page).to have_content('Invalid two-factor code')
+ expect(page).to have_content('Invalid two-factor code')
- enter_code(user.current_otp)
+ enter_code(user.current_otp)
- expect(page).to have_current_path admin_root_path, ignore_query: true
- expect(page).to have_content('Admin mode enabled')
- end
- end
+ expect(page).to have_current_path admin_root_path, ignore_query: true
+ expect(page).to have_content('Admin mode enabled')
+ end
+ end
- context 'using backup code' do
- let(:codes) { user.generate_otp_backup_codes! }
+ context 'using backup code' do
+ let(:codes) { user.generate_otp_backup_codes! }
- before do
- expect(codes.size).to eq 10
+ before do
+ expect(codes.size).to eq 10
- # Ensure the generated codes get saved
- user.save!
- end
+ # Ensure the generated codes get saved
+ user.save!
+ end
- context 'with valid code' do
- it 'allows login' do
- enter_code(codes.sample)
+ context 'with valid code' do
+ it 'allows login' do
+ enter_code(codes.sample)
- expect(page).to have_current_path admin_root_path, ignore_query: true
- expect(page).to have_content('Admin mode enabled')
- end
+ expect(page).to have_current_path admin_root_path, ignore_query: true
+ expect(page).to have_content('Admin mode enabled')
+ end
- it 'invalidates the used code' do
- expect { enter_code(codes.sample) }
- .to change { user.reload.otp_backup_codes.size }.by(-1)
- end
- end
+ it 'invalidates the used code' do
+ expect { enter_code(codes.sample) }
+ .to change { user.reload.otp_backup_codes.size }.by(-1)
+ end
+ end
- context 'with invalid code' do
- it 'blocks login' do
- code = codes.sample
- expect(user.invalidate_otp_backup_code!(code)).to eq true
+ context 'with invalid code' do
+ it 'blocks login' do
+ code = codes.sample
+ expect(user.invalidate_otp_backup_code!(code)).to eq true
- user.save!
- expect(user.reload.otp_backup_codes.size).to eq 9
+ user.save!
+ expect(user.reload.otp_backup_codes.size).to eq 9
- enter_code(code)
+ enter_code(code)
- expect(page).to have_content('Invalid two-factor code.')
- end
- end
+ expect(page).to have_content('Invalid two-factor code.')
end
end
end
+ end
+ end
- context 'when logging in via omniauth' do
- let(:user) { create(:omniauth_user, :admin, :two_factor, extern_uid: 'my-uid', provider: 'saml', password_automatically_set: false) }
- let(:mock_saml_response) do
- File.read('spec/fixtures/authentication/saml_response.xml')
- end
+ context 'when logging in via omniauth' do
+ let(:user) { create(:omniauth_user, :admin, :two_factor, extern_uid: 'my-uid', provider: 'saml', password_automatically_set: false) }
+ let(:mock_saml_response) do
+ File.read('spec/fixtures/authentication/saml_response.xml')
+ end
- before do
- stub_omniauth_saml_config(enabled: true, auto_link_saml_user: true, allow_single_sign_on: ['saml'], providers: [mock_saml_config_with_upstream_two_factor_authn_contexts])
- end
+ before do
+ stub_omniauth_saml_config(enabled: true, auto_link_saml_user: true, allow_single_sign_on: ['saml'], providers: [mock_saml_config_with_upstream_two_factor_authn_contexts])
+ end
- context 'when authn_context is worth two factors' do
- let(:mock_saml_response) do
- File.read('spec/fixtures/authentication/saml_response.xml')
- .gsub(
- 'urn:oasis:names:tc:SAML:2.0:ac:classes:Password',
- 'urn:oasis:names:tc:SAML:2.0:ac:classes:SecondFactorOTPSMS'
- )
- end
+ context 'when authn_context is worth two factors' do
+ let(:mock_saml_response) do
+ File.read('spec/fixtures/authentication/saml_response.xml')
+ .gsub(
+ 'urn:oasis:names:tc:SAML:2.0:ac:classes:Password',
+ 'urn:oasis:names:tc:SAML:2.0:ac:classes:SecondFactorOTPSMS'
+ )
+ end
- it 'signs user in without prompting for second factor' do
- sign_in_using_saml!
+ it 'signs user in without prompting for second factor' do
+ sign_in_using_saml!
- expect(page).not_to have_content(_('Enter verification code'))
+ expect(page).not_to have_content(_('Enter verification code'))
- enable_admin_mode_using_saml!
+ enable_admin_mode_using_saml!
- expect(page).not_to have_content(_('Enter verification code'))
- expect(page).to have_current_path admin_root_path, ignore_query: true
- expect(page).to have_content('Admin mode enabled')
- end
- end
+ expect(page).not_to have_content(_('Enter verification code'))
+ expect(page).to have_current_path admin_root_path, ignore_query: true
+ expect(page).to have_content('Admin mode enabled')
+ end
+ end
- context 'when two factor authentication is required' do
- it 'shows 2FA prompt after omniauth login' do
- sign_in_using_saml!
+ context 'when two factor authentication is required' do
+ it 'shows 2FA prompt after omniauth login' do
+ sign_in_using_saml!
- expect(page).to have_content(_('Enter verification code'))
- enter_code(user.current_otp)
+ expect(page).to have_content(_('Enter verification code'))
+ enter_code(user.current_otp)
- enable_admin_mode_using_saml!
+ enable_admin_mode_using_saml!
- expect(page).to have_content(_('Enter verification code'))
+ expect(page).to have_content(_('Enter verification code'))
- # Cannot reuse the TOTP
- travel_to(30.seconds.from_now) do
- enter_code(user.current_otp)
+ # Cannot reuse the TOTP
+ travel_to(30.seconds.from_now) do
+ enter_code(user.current_otp)
- expect(page).to have_current_path admin_root_path, ignore_query: true
- expect(page).to have_content('Admin mode enabled')
- end
- end
+ expect(page).to have_current_path admin_root_path, ignore_query: true
+ expect(page).to have_content('Admin mode enabled')
end
+ end
+ end
- def sign_in_using_saml!
- gitlab_sign_in_via('saml', user, 'my-uid', mock_saml_response)
- end
+ def sign_in_using_saml!
+ gitlab_sign_in_via('saml', user, 'my-uid', mock_saml_response)
+ end
- def enable_admin_mode_using_saml!
- gitlab_enable_admin_mode_sign_in_via('saml', user, 'my-uid', mock_saml_response)
- end
- end
+ def enable_admin_mode_using_saml!
+ gitlab_enable_admin_mode_sign_in_via('saml', user, 'my-uid', mock_saml_response)
+ end
+ end
- context 'when logging in via ldap' do
- let(:uid) { 'my-uid' }
- let(:provider_label) { 'Main LDAP' }
- let(:provider_name) { 'main' }
- let(:provider) { "ldap#{provider_name}" }
- let(:ldap_server_config) do
- {
- 'label' => provider_label,
- 'provider_name' => provider,
- 'attributes' => {},
- 'encryption' => 'plain',
- 'uid' => 'uid',
- 'base' => 'dc=example,dc=com'
- }
- end
+ context 'when logging in via ldap' do
+ let(:uid) { 'my-uid' }
+ let(:provider_label) { 'Main LDAP' }
+ let(:provider_name) { 'main' }
+ let(:provider) { "ldap#{provider_name}" }
+ let(:ldap_server_config) do
+ {
+ 'label' => provider_label,
+ 'provider_name' => provider,
+ 'attributes' => {},
+ 'encryption' => 'plain',
+ 'uid' => 'uid',
+ 'base' => 'dc=example,dc=com'
+ }
+ end
- let(:user) { create(:omniauth_user, :admin, :two_factor, extern_uid: uid, provider: provider) }
+ let(:user) { create(:omniauth_user, :admin, :two_factor, extern_uid: uid, provider: provider) }
- before do
- setup_ldap(provider, user, uid, ldap_server_config)
- end
+ before do
+ setup_ldap(provider, user, uid, ldap_server_config)
+ end
- context 'when two factor authentication is required' do
- it 'shows 2FA prompt after ldap login' do
- sign_in_using_ldap!(user, provider_label)
- expect(page).to have_content(_('Enter verification code'))
+ context 'when two factor authentication is required' do
+ it 'shows 2FA prompt after ldap login' do
+ sign_in_using_ldap!(user, provider_label)
+ expect(page).to have_content(_('Enter verification code'))
- enter_code(user.current_otp)
- enable_admin_mode_using_ldap!(user)
+ enter_code(user.current_otp)
+ enable_admin_mode_using_ldap!(user)
- expect(page).to have_content(_('Enter verification code'))
+ expect(page).to have_content(_('Enter verification code'))
- # Cannot reuse the TOTP
- travel_to(30.seconds.from_now) do
- enter_code(user.current_otp)
+ # Cannot reuse the TOTP
+ travel_to(30.seconds.from_now) do
+ enter_code(user.current_otp)
- expect(page).to have_current_path admin_root_path, ignore_query: true
- expect(page).to have_content('Admin mode enabled')
- end
- end
+ expect(page).to have_current_path admin_root_path, ignore_query: true
+ expect(page).to have_content('Admin mode enabled')
end
+ end
+ end
- def setup_ldap(provider, user, uid, ldap_server_config)
- stub_ldap_setting(enabled: true)
+ def setup_ldap(provider, user, uid, ldap_server_config)
+ stub_ldap_setting(enabled: true)
- allow(::Gitlab::Auth::Ldap::Config).to receive_messages(enabled: true, servers: [ldap_server_config])
- allow(Gitlab::Auth::OAuth::Provider).to receive_messages(providers: [provider.to_sym])
+ allow(::Gitlab::Auth::Ldap::Config).to receive_messages(enabled: true, servers: [ldap_server_config])
+ allow(Gitlab::Auth::OAuth::Provider).to receive_messages(providers: [provider.to_sym])
- Ldap::OmniauthCallbacksController.define_providers!
- Rails.application.reload_routes!
+ Ldap::OmniauthCallbacksController.define_providers!
+ Rails.application.reload_routes!
- mock_auth_hash(provider, uid, user.email)
- allow(Gitlab::Auth::Ldap::Access).to receive(:allowed?).with(user).and_return(true)
+ mock_auth_hash(provider, uid, user.email)
+ allow(Gitlab::Auth::Ldap::Access).to receive(:allowed?).with(user).and_return(true)
- allow_any_instance_of(ActionDispatch::Routing::RoutesProxy)
- .to receive(:"user_#{provider}_omniauth_callback_path")
- .and_return("/users/auth/#{provider}/callback")
- end
+ allow_any_instance_of(ActionDispatch::Routing::RoutesProxy)
+ .to receive(:"user_#{provider}_omniauth_callback_path")
+ .and_return("/users/auth/#{provider}/callback")
+ end
- def sign_in_using_ldap!(user, provider_label)
- visit new_user_session_path
- click_link provider_label
- fill_in 'username', with: user.username
- fill_in 'password', with: user.password
- click_button 'Sign in'
- end
+ def sign_in_using_ldap!(user, provider_label)
+ visit new_user_session_path
+ click_link provider_label
+ fill_in 'username', with: user.username
+ fill_in 'password', with: user.password
+ click_button 'Sign in'
+ end
- def enable_admin_mode_using_ldap!(user)
- visit new_admin_session_path
- click_link provider_label
- fill_in 'username', with: user.username
- fill_in 'password', with: user.password
- click_button 'Enter admin mode'
- end
- end
+ def enable_admin_mode_using_ldap!(user)
+ visit new_admin_session_path
+ click_link provider_label
+ fill_in 'username', with: user.username
+ fill_in 'password', with: user.password
+ click_button 'Enter admin mode'
end
end
end
diff --git a/spec/features/admin/admin_mode/logout_spec.rb b/spec/features/admin/admin_mode/logout_spec.rb
index 584151726a6..aa0569b9f0d 100644
--- a/spec/features/admin/admin_mode/logout_spec.rb
+++ b/spec/features/admin/admin_mode/logout_spec.rb
@@ -12,7 +12,7 @@ RSpec.describe 'Admin Mode Logout', :js, feature_category: :system_access do
# TODO: This used to use gitlab_sign_in, instead of sign_in, but that is buggy. See
# this issue to look into why: https://gitlab.com/gitlab-org/gitlab/-/issues/331851
sign_in(user)
- gitlab_enable_admin_mode_sign_in(user, use_mock_admin_mode: false)
+ enable_admin_mode!(user, use_ui: true)
visit admin_root_path
end
@@ -21,7 +21,7 @@ RSpec.describe 'Admin Mode Logout', :js, feature_category: :system_access do
expect(page).to have_current_path root_path, ignore_query: true
- click_button 'Search or go to…'
+ find_by_testid('user-menu-toggle').click
expect(page).to have_link(href: new_admin_session_path)
end
@@ -42,7 +42,7 @@ RSpec.describe 'Admin Mode Logout', :js, feature_category: :system_access do
expect(page).to have_current_path root_path, ignore_query: true
- click_button 'Search or go to…'
+ find_by_testid('user-menu-toggle').click
expect(page).to have_link(href: new_admin_session_path)
end
diff --git a/spec/features/admin/admin_mode/workers_spec.rb b/spec/features/admin/admin_mode/workers_spec.rb
index 124c43eef9d..f0cea425bb7 100644
--- a/spec/features/admin/admin_mode/workers_spec.rb
+++ b/spec/features/admin/admin_mode/workers_spec.rb
@@ -34,7 +34,7 @@ RSpec.describe 'Admin mode for workers', :request_store, feature_category: :syst
context 'when admin mode enabled', :delete do
before do
- gitlab_enable_admin_mode_sign_in(user)
+ enable_admin_mode!(user)
end
it 'can delete user', :js do
@@ -67,6 +67,6 @@ RSpec.describe 'Admin mode for workers', :request_store, feature_category: :syst
Sidekiq::Worker.drain_all
sign_in(user)
- gitlab_enable_admin_mode_sign_in(user)
+ enable_admin_mode!(user)
end
end
diff --git a/spec/features/admin/admin_mode_spec.rb b/spec/features/admin/admin_mode_spec.rb
index 2a655cdb1f4..b58953989d2 100644
--- a/spec/features/admin/admin_mode_spec.rb
+++ b/spec/features/admin/admin_mode_spec.rb
@@ -20,18 +20,18 @@ RSpec.describe 'Admin mode', :js, feature_category: :shared do
context 'when not in admin mode' do
it 'has no leave admin mode button' do
visit new_admin_session_path
- open_search_modal
+ open_user_menu
expect(page).not_to have_link(href: destroy_admin_session_path)
end
it 'can open pages not in admin scope' do
visit new_admin_session_path
- open_search_modal
+ open_user_menu
- click_link('View all my projects')
+ click_link('Edit profile')
- expect(page).to have_current_path(dashboard_projects_path)
+ expect(page).to have_current_path(profile_path)
end
it 'is necessary to provide credentials again before opening pages in admin scope' do
@@ -41,11 +41,14 @@ RSpec.describe 'Admin mode', :js, feature_category: :shared do
end
it 'can enter admin mode' do
- visit new_admin_session_path
+ visit root_dashboard_path
+ open_user_menu
+ click_link 'Enter Admin Mode'
fill_in 'user_password', with: admin.password
click_button 'Enter admin mode'
+ click_link 'Admin Area'
expect(page).to have_current_path(admin_root_path)
end
@@ -69,38 +72,30 @@ RSpec.describe 'Admin mode', :js, feature_category: :shared do
context 'when in admin_mode' do
before do
- gitlab_enable_admin_mode_sign_in(admin, use_mock_admin_mode: false)
+ enable_admin_mode!(admin, use_ui: true)
end
it 'contains link to leave admin mode' do
- open_search_modal
-
- expect(page).to have_link(href: destroy_admin_session_path)
- end
-
- it 'can leave admin mode using main dashboard link' do
- gitlab_disable_admin_mode
-
- open_search_modal
-
- expect(page).to have_link(href: new_admin_session_path)
+ open_user_menu
+ click_link('Leave Admin Mode', href: destroy_admin_session_path)
+ expect(page).to have_text 'Admin mode disabled'
end
it 'can open pages not in admin scope' do
- open_search_modal
+ open_user_menu
- click_link('View all my projects')
+ click_link('Edit profile')
- expect(page).to have_current_path(dashboard_projects_path)
+ expect(page).to have_current_path(profile_path)
end
- context 'nav bar' do
- it 'shows admin dashboard links on bigger screen' do
+ context 'sidebar' do
+ it 'shows admin dashboard link' do
visit root_dashboard_path
- open_search_modal
- expect(page).to have_link(text: 'Admin', href: admin_root_path, visible: true)
- expect(page).to have_link(text: 'Leave admin mode', href: destroy_admin_session_path, visible: true)
+ within '#super-sidebar' do
+ expect(page).to have_link('Admin Area')
+ end
end
end
@@ -112,7 +107,7 @@ RSpec.describe 'Admin mode', :js, feature_category: :shared do
it 'can leave admin mode' do
gitlab_disable_admin_mode
- open_search_modal
+ open_user_menu
expect(page).to have_link(href: new_admin_session_path)
end
@@ -128,14 +123,14 @@ RSpec.describe 'Admin mode', :js, feature_category: :shared do
it 'shows no admin mode buttons in navbar' do
visit admin_root_path
- open_search_modal
+ open_user_menu
expect(page).not_to have_link(href: new_admin_session_path)
expect(page).not_to have_link(href: destroy_admin_session_path)
end
end
- def open_search_modal
- click_button 'Search or go to…'
+ def open_user_menu
+ find_by_testid('user-menu-toggle').click
end
end
diff --git a/spec/features/admin/admin_projects_spec.rb b/spec/features/admin/admin_projects_spec.rb
index b793299e253..978b2176431 100644
--- a/spec/features/admin/admin_projects_spec.rb
+++ b/spec/features/admin/admin_projects_spec.rb
@@ -14,7 +14,7 @@ RSpec.describe "Admin::Projects", feature_category: :groups_and_projects do
before do
sign_in(current_user)
- gitlab_enable_admin_mode_sign_in(current_user)
+ enable_admin_mode!(current_user)
end
describe 'when membership is set to expire', :js do
diff --git a/spec/features/admin/admin_runners_spec.rb b/spec/features/admin/admin_runners_spec.rb
index 653458710e3..2b4ceb05f00 100644
--- a/spec/features/admin/admin_runners_spec.rb
+++ b/spec/features/admin/admin_runners_spec.rb
@@ -11,7 +11,7 @@ RSpec.describe "Admin Runners", feature_category: :fleet_visibility do
before do
sign_in(admin)
- gitlab_enable_admin_mode_sign_in(admin)
+ enable_admin_mode!(admin)
end
describe "Admin Runners page", :js do
diff --git a/spec/features/admin/admin_search_settings_spec.rb b/spec/features/admin/admin_search_settings_spec.rb
index 3254bf75738..c20a50cf218 100644
--- a/spec/features/admin/admin_search_settings_spec.rb
+++ b/spec/features/admin/admin_search_settings_spec.rb
@@ -8,7 +8,7 @@ RSpec.describe 'Admin searches application settings', :js, feature_category: :gl
before do
sign_in(admin)
- gitlab_enable_admin_mode_sign_in(admin)
+ enable_admin_mode!(admin)
end
context 'in appearances page' do
diff --git a/spec/features/admin/admin_sees_background_migrations_spec.rb b/spec/features/admin/admin_sees_background_migrations_spec.rb
index ae307b8038c..43b920c657c 100644
--- a/spec/features/admin/admin_sees_background_migrations_spec.rb
+++ b/spec/features/admin/admin_sees_background_migrations_spec.rb
@@ -18,7 +18,7 @@ RSpec.describe "Admin > Admin sees background migrations", feature_category: :da
before do
sign_in(admin)
- gitlab_enable_admin_mode_sign_in(admin)
+ enable_admin_mode!(admin)
end
it 'can navigate to background migrations', :js do
diff --git a/spec/features/admin/admin_sees_project_statistics_spec.rb b/spec/features/admin/admin_sees_project_statistics_spec.rb
index d977735daf8..07f90d247cd 100644
--- a/spec/features/admin/admin_sees_project_statistics_spec.rb
+++ b/spec/features/admin/admin_sees_project_statistics_spec.rb
@@ -7,7 +7,7 @@ RSpec.describe "Admin > Admin sees project statistics", feature_category: :group
before do
sign_in(current_user)
- gitlab_enable_admin_mode_sign_in(current_user)
+ enable_admin_mode!(current_user)
visit admin_project_path(project)
end
diff --git a/spec/features/admin/admin_sees_projects_statistics_spec.rb b/spec/features/admin/admin_sees_projects_statistics_spec.rb
index 3363a67ea90..cc6ccbab0a0 100644
--- a/spec/features/admin/admin_sees_projects_statistics_spec.rb
+++ b/spec/features/admin/admin_sees_projects_statistics_spec.rb
@@ -10,7 +10,7 @@ RSpec.describe "Admin > Admin sees projects statistics", feature_category: :grou
create(:project, :repository) { |project| project.statistics.destroy! }
sign_in(current_user)
- gitlab_enable_admin_mode_sign_in(current_user)
+ enable_admin_mode!(current_user)
visit admin_projects_path
end
diff --git a/spec/features/admin/admin_settings_spec.rb b/spec/features/admin/admin_settings_spec.rb
index 77707a67d58..d1fdbfc5329 100644
--- a/spec/features/admin/admin_settings_spec.rb
+++ b/spec/features/admin/admin_settings_spec.rb
@@ -13,7 +13,7 @@ RSpec.describe 'Admin updates settings', feature_category: :shared do
before do
stub_env('IN_MEMORY_APPLICATION_SETTINGS', 'false')
sign_in(admin)
- gitlab_enable_admin_mode_sign_in(admin, use_mock_admin_mode: false)
+ enable_admin_mode!(admin, use_ui: true)
end
context 'General page' do
diff --git a/spec/features/admin/admin_system_info_spec.rb b/spec/features/admin/admin_system_info_spec.rb
index 71a0b829932..f285fefe6db 100644
--- a/spec/features/admin/admin_system_info_spec.rb
+++ b/spec/features/admin/admin_system_info_spec.rb
@@ -6,7 +6,7 @@ RSpec.describe 'Admin System Info', feature_category: :shared do
before do
admin = create(:admin)
sign_in(admin)
- gitlab_enable_admin_mode_sign_in(admin)
+ enable_admin_mode!(admin)
end
describe 'GET /admin/system_info' do
diff --git a/spec/features/admin/admin_users_impersonation_tokens_spec.rb b/spec/features/admin/admin_users_impersonation_tokens_spec.rb
index 543dc2cc2a6..7cfe0cdbc81 100644
--- a/spec/features/admin/admin_users_impersonation_tokens_spec.rb
+++ b/spec/features/admin/admin_users_impersonation_tokens_spec.rb
@@ -11,7 +11,7 @@ RSpec.describe 'Admin > Users > Impersonation Tokens', :js, feature_category: :s
before do
sign_in(admin)
- gitlab_enable_admin_mode_sign_in(admin)
+ enable_admin_mode!(admin)
end
describe "token creation" do
diff --git a/spec/features/admin/admin_users_spec.rb b/spec/features/admin/admin_users_spec.rb
index 9ab5b1fd3bb..f09c1a59b0d 100644
--- a/spec/features/admin/admin_users_spec.rb
+++ b/spec/features/admin/admin_users_spec.rb
@@ -7,7 +7,7 @@ RSpec.describe "Admin::Users", feature_category: :user_management do
before do
sign_in(current_user)
- gitlab_enable_admin_mode_sign_in(current_user)
+ enable_admin_mode!(current_user)
end
describe 'Tabs' do
diff --git a/spec/features/admin/admin_uses_repository_checks_spec.rb b/spec/features/admin/admin_uses_repository_checks_spec.rb
index 05232de35e5..a628344bfeb 100644
--- a/spec/features/admin/admin_uses_repository_checks_spec.rb
+++ b/spec/features/admin/admin_uses_repository_checks_spec.rb
@@ -25,7 +25,7 @@ RSpec.describe 'Admin uses repository checks', :request_store, feature_category:
context 'when admin mode is enabled' do
before do
- gitlab_enable_admin_mode_sign_in(admin)
+ enable_admin_mode!(admin)
end
it 'to trigger a single check', :js do
diff --git a/spec/features/admin/broadcast_messages_spec.rb b/spec/features/admin/broadcast_messages_spec.rb
index e4a2e31ee1c..16651ffa07e 100644
--- a/spec/features/admin/broadcast_messages_spec.rb
+++ b/spec/features/admin/broadcast_messages_spec.rb
@@ -7,7 +7,7 @@ RSpec.describe 'Admin Broadcast Messages', :js, feature_category: :onboarding do
it 'previews, creates and edits a broadcast message' do
admin = create(:admin)
sign_in(admin)
- gitlab_enable_admin_mode_sign_in(admin)
+ enable_admin_mode!(admin)
# create
visit admin_broadcast_messages_path
diff --git a/spec/features/admin/dashboard_spec.rb b/spec/features/admin/dashboard_spec.rb
index 06f9c531e74..9e8f5138815 100644
--- a/spec/features/admin/dashboard_spec.rb
+++ b/spec/features/admin/dashboard_spec.rb
@@ -8,7 +8,7 @@ RSpec.describe 'admin visits dashboard' do
before do
admin = create(:admin)
sign_in(admin)
- gitlab_enable_admin_mode_sign_in(admin)
+ enable_admin_mode!(admin)
end
context 'counting forks', :js, feature_category: :source_code_management do
diff --git a/spec/features/admin/users/admin_impersonates_user_spec.rb b/spec/features/admin/users/admin_impersonates_user_spec.rb
new file mode 100644
index 00000000000..e37b4bf1562
--- /dev/null
+++ b/spec/features/admin/users/admin_impersonates_user_spec.rb
@@ -0,0 +1,208 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe 'Admin impersonates user', feature_category: :user_management do
+ let_it_be(:user) { create(:omniauth_user, provider: 'twitter', extern_uid: '123456') }
+ let_it_be(:current_user) { create(:admin) }
+
+ before do
+ sign_in(current_user)
+ enable_admin_mode!(current_user, use_ui: true)
+ end
+
+ describe 'GET /admin/users/:id' do
+ describe 'Impersonation' do
+ let_it_be(:another_user) { create(:user) }
+
+ context 'before impersonating' do
+ subject { visit admin_user_path(user_to_visit) }
+
+ let_it_be(:user_to_visit) { another_user }
+
+ shared_examples "user that cannot be impersonated" do
+ it 'disables impersonate button' do
+ subject
+
+ impersonate_btn = find_by_testid('impersonate-user-link')
+
+ expect(impersonate_btn).not_to be_nil
+ expect(impersonate_btn['disabled']).not_to be_nil
+ end
+
+ it "shows tooltip with correct error message" do
+ subject
+
+ expect(find("span[title='#{impersonation_error_msg}']")).not_to be_nil
+ end
+ end
+
+ context 'for other users' do
+ it 'shows impersonate button for other users' do
+ subject
+
+ expect(page).to have_content('Impersonate')
+ impersonate_btn = find_by_testid('impersonate-user-link')
+ expect(impersonate_btn['disabled']).to be_nil
+ end
+ end
+
+ context 'for admin itself' do
+ let(:user_to_visit) { current_user }
+
+ it 'does not show impersonate button for admin itself' do
+ subject
+
+ expect(page).to have_no_content('Impersonate')
+ end
+ end
+
+ context 'for blocked user' do
+ let_it_be(:blocked_user) { create(:user, :blocked) }
+ let(:user_to_visit) { blocked_user }
+ let(:impersonation_error_msg) { _('You cannot impersonate a blocked user') }
+
+ it_behaves_like "user that cannot be impersonated"
+ end
+
+ context 'for user with expired password' do
+ let_it_be(:user_to_visit) do
+ another_user.update!(password_expires_at: Time.zone.now - 5.minutes)
+ another_user
+ end
+
+ let(:impersonation_error_msg) { _("You cannot impersonate a user with an expired password") }
+
+ it_behaves_like "user that cannot be impersonated"
+ end
+
+ context 'for internal user' do
+ let_it_be(:internal_user) { create(:user, :bot) }
+ let(:user_to_visit) { internal_user }
+ let(:impersonation_error_msg) { _("You cannot impersonate an internal user") }
+
+ it_behaves_like "user that cannot be impersonated"
+ end
+
+ context 'for locked user' do
+ let_it_be(:locked_user) { create(:user, :locked) }
+ let(:user_to_visit) { locked_user }
+ let(:impersonation_error_msg) { _("You cannot impersonate a user who cannot log in") }
+
+ it_behaves_like "user that cannot be impersonated"
+ end
+
+ context 'when already impersonating another user' do
+ let_it_be(:admin_user) { create(:user, :admin) }
+ let(:impersonation_error_msg) { _("You are already impersonating another user") }
+
+ subject do
+ visit admin_user_path(admin_user)
+ click_link 'Impersonate'
+ visit admin_user_path(another_user)
+ end
+
+ it_behaves_like "user that cannot be impersonated"
+ end
+
+ context 'when impersonation is disabled' do
+ before do
+ stub_config_setting(impersonation_enabled: false)
+ end
+
+ it 'does not show impersonate button' do
+ subject
+
+ expect(page).to have_no_content('Impersonate')
+ end
+ end
+ end
+
+ context 'when impersonating' do
+ subject { click_link 'Impersonate' }
+
+ before do
+ visit admin_user_path(another_user)
+ end
+
+ it 'logs in as the user when impersonate is clicked', :js do
+ subject
+
+ expect(page).to have_button("#{another_user.name} user’s menu")
+ end
+
+ it 'sees impersonation log out icon', :js do
+ subject
+
+ icon = first('[data-testid="incognito-icon"]')
+ expect(icon).not_to be nil
+ end
+
+ context 'when viewing the confirm email warning', :js do
+ before do
+ stub_application_setting_enum('email_confirmation_setting', 'soft')
+ end
+
+ let_it_be(:another_user) { create(:user, :unconfirmed) }
+ let(:warning_alert) { page.find(:css, '[data-testid="alert-warning"]') }
+
+ context 'with an email that does not contain HTML' do
+ before do
+ subject
+ end
+
+ it 'displays the warning alert including the email' do
+ expect(warning_alert.text).to include("Please check your email (#{another_user.email}) to verify")
+ end
+ end
+
+ context 'with an email that contains HTML' do
+ let(:malicious_email) { "malicious@test.com<form><input/title='<script>alert(document.domain)</script>'>" }
+ let(:another_user) { create(:user, confirmed_at: nil, unconfirmed_email: malicious_email) }
+
+ before do
+ subject
+ end
+
+ it 'displays the impersonation alert, excludes email, and disables links' do
+ expect(warning_alert.text).to include("check your email (#{another_user.unconfirmed_email}) to verify")
+ end
+ end
+ end
+ end
+
+ context 'ending impersonation', :js do
+ subject { click_on 'Stop impersonating' }
+
+ before do
+ visit admin_user_path(another_user)
+ click_link 'Impersonate'
+ end
+
+ it 'logs out of impersonated user back to original user' do
+ subject
+
+ expect(page).to have_button("#{current_user.name} user’s menu")
+ end
+
+ it 'is redirected back to the impersonated users page in the admin after stopping' do
+ subject
+
+ expect(page).to have_current_path("/admin/users/#{another_user.username}", ignore_query: true)
+ end
+
+ context 'a user with an expired password' do
+ before do
+ another_user.update!(password_expires_at: Time.zone.now - 5.minutes)
+ end
+
+ it 'is redirected back to the impersonated users page in the admin after stopping' do
+ subject
+
+ expect(page).to have_current_path("/admin/users/#{another_user.username}", ignore_query: true)
+ end
+ end
+ end
+ end
+ end
+end
diff --git a/spec/features/admin/users/admin_manages_user_identities_spec.rb b/spec/features/admin/users/admin_manages_user_identities_spec.rb
new file mode 100644
index 00000000000..cbb7eed9e1c
--- /dev/null
+++ b/spec/features/admin/users/admin_manages_user_identities_spec.rb
@@ -0,0 +1,56 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe 'Admin manages user identities', feature_category: :user_management do
+ let_it_be(:user) { create(:omniauth_user, provider: 'twitter', extern_uid: '123456') }
+ let_it_be(:current_user) { create(:admin) }
+
+ before do
+ sign_in(current_user)
+ enable_admin_mode!(current_user, use_ui: true)
+ end
+
+ describe 'GET /admin/users/:id' do
+ describe 'show user identities' do
+ it 'shows user identities', :aggregate_failures do
+ visit admin_user_identities_path(user)
+
+ expect(page).to(
+ have_content(user.name)
+ .and(have_content('twitter'))
+ )
+ end
+ end
+
+ describe 'update user identities' do
+ before do
+ allow(Gitlab::Auth::OAuth::Provider).to receive(:providers).and_return([:twitter, :twitter_updated])
+ end
+
+ it 'modifies twitter identity', :aggregate_failures do
+ visit admin_user_identities_path(user)
+
+ find('.table').find(:link, 'Edit').click
+ fill_in 'identity_extern_uid', with: '654321'
+ select 'twitter_updated', from: 'identity_provider'
+ click_button 'Save changes'
+
+ expect(page).to have_content(user.name)
+ expect(page).to have_content('twitter_updated')
+ expect(page).to have_content('654321')
+ end
+ end
+
+ describe 'remove user with identities' do
+ it 'removes user with twitter identity', :aggregate_failures do
+ visit admin_user_identities_path(user)
+
+ click_link 'Delete'
+
+ expect(page).to have_content(user.name)
+ expect(page).not_to have_content('twitter')
+ end
+ end
+ end
+end
diff --git a/spec/features/admin/users/admin_sees_unconfirmed_user_spec.rb b/spec/features/admin/users/admin_sees_unconfirmed_user_spec.rb
new file mode 100644
index 00000000000..7b45e5b5cde
--- /dev/null
+++ b/spec/features/admin/users/admin_sees_unconfirmed_user_spec.rb
@@ -0,0 +1,52 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe 'Admin sees unconfirmed user', feature_category: :user_management do
+ include Spec::Support::Helpers::ModalHelpers
+
+ let_it_be(:user) { create(:omniauth_user, provider: 'twitter', extern_uid: '123456') }
+ let_it_be(:current_user) { create(:admin) }
+
+ before do
+ sign_in(current_user)
+ enable_admin_mode!(current_user, use_ui: true)
+ end
+
+ context 'when user has an unconfirmed email', :js do
+ # Email address contains HTML to ensure email address is displayed in an HTML safe way.
+ let_it_be(:unconfirmed_email) { "#{generate(:email)}<h2>testing<img/src=http://localhost:8000/test.png>" }
+ let_it_be(:unconfirmed_user) { create(:user, :unconfirmed, unconfirmed_email: unconfirmed_email) }
+
+ where(:path_helper) do
+ [
+ [-> (user) { admin_user_path(user) }],
+ [-> (user) { projects_admin_user_path(user) }],
+ [-> (user) { keys_admin_user_path(user) }],
+ [-> (user) { admin_user_identities_path(user) }],
+ [-> (user) { admin_user_impersonation_tokens_path(user) }]
+ ]
+ end
+
+ with_them do
+ it "allows an admin to force confirmation of the user's email", :aggregate_failures do
+ visit path_helper.call(unconfirmed_user)
+
+ click_button 'Confirm user'
+
+ within_modal do
+ expect(page).to have_content("Confirm user #{unconfirmed_user.name}?")
+ expect(page).to(
+ have_content(
+ "This user has an unconfirmed email address (#{unconfirmed_email}). You may force a confirmation.")
+ )
+
+ click_button 'Confirm user'
+ end
+
+ expect(page).to have_content('Successfully confirmed')
+ expect(page).not_to have_button('Confirm user')
+ end
+ end
+ end
+end
diff --git a/spec/features/admin/users/admin_sees_user_spec.rb b/spec/features/admin/users/admin_sees_user_spec.rb
new file mode 100644
index 00000000000..d8c142d402f
--- /dev/null
+++ b/spec/features/admin/users/admin_sees_user_spec.rb
@@ -0,0 +1,248 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe 'Admin::Users::User', feature_category: :user_management do
+ include Features::AdminUsersHelpers
+ include Spec::Support::Helpers::ModalHelpers
+
+ let_it_be(:user) { create(:omniauth_user, provider: 'twitter', extern_uid: '123456') }
+ let_it_be(:current_user) { create(:admin) }
+
+ before do
+ sign_in(current_user)
+ enable_admin_mode!(current_user, use_ui: true)
+ end
+
+ describe 'GET /admin/users/:id' do
+ it 'has user info', :js, :aggregate_failures do
+ visit admin_user_path(user)
+
+ expect(page).to have_content(user.email)
+ expect(page).to have_content(user.name)
+ expect(page).to have_content("ID: #{user.id}")
+ expect(page).to have_content("Namespace ID: #{user.namespace_id}")
+
+ click_user_dropdown_toggle(user.id)
+
+ expect(page).to have_button('Block')
+ expect(page).to have_button('Deactivate')
+ expect(page).to have_button('Delete user')
+ expect(page).to have_button('Delete user and contributions')
+ end
+
+ context 'when blocking/unblocking the user' do
+ it 'shows confirmation and allows blocking and unblocking', :js do
+ visit admin_user_path(user)
+
+ click_action_in_user_dropdown(user.id, 'Block')
+
+ expect(page).to have_content('Block user')
+ expect(page).to have_content('You can always unblock their account, their data will remain intact.')
+
+ find('.modal-footer button', text: 'Block').click
+
+ wait_for_requests
+
+ expect(page).to have_content('Successfully blocked')
+
+ click_action_in_user_dropdown(user.id, 'Unblock')
+
+ expect(page).to have_content('Unblock user')
+ expect(page).to have_content('You can always block their account again if needed.')
+
+ find('.modal-footer button', text: 'Unblock').click
+
+ expect(page).to have_content('Successfully unblocked')
+
+ click_user_dropdown_toggle(user.id)
+ expect(page).to have_content('Block')
+ end
+ end
+
+ context 'when deactivating/re-activating the user' do
+ it 'shows confirmation and allows deactivating/re-activating', :js do
+ visit admin_user_path(user)
+
+ click_action_in_user_dropdown(user.id, 'Deactivate')
+
+ expect(page).to have_content('Deactivate user')
+ expect(page).to have_content('You can always re-activate their account, their data will remain intact.')
+
+ find('.modal-footer button', text: 'Deactivate').click
+
+ wait_for_requests
+
+ expect(page).to have_content('Successfully deactivated')
+
+ click_action_in_user_dropdown(user.id, 'Activate')
+
+ expect(page).to have_content('Activate user')
+ expect(page).to have_content('You can always deactivate their account again if needed.')
+
+ find('.modal-footer button', text: 'Activate').click
+
+ wait_for_requests
+
+ expect(page).to have_content('Successfully activated')
+
+ click_user_dropdown_toggle(user.id)
+ expect(page).to have_content('Deactivate')
+ end
+ end
+
+ context 'when user is the sole owner of a group' do
+ let_it_be(:group) { create(:group) }
+ let_it_be(:user_sole_owner_of_group) { create(:user) }
+
+ before do
+ group.add_owner(user_sole_owner_of_group)
+ end
+
+ it 'shows `Delete user and contributions` action but not `Delete user` action', :js do
+ visit admin_user_path(user_sole_owner_of_group)
+
+ click_user_dropdown_toggle(user_sole_owner_of_group.id)
+
+ expect(page).to have_button('Delete user and contributions')
+ expect(page).not_to have_button('Delete user', exact: true)
+ end
+
+ it 'allows user to be deleted by using the `Delete user and contributions` action', :js do
+ visit admin_user_path(user_sole_owner_of_group)
+
+ click_action_in_user_dropdown(user_sole_owner_of_group.id, 'Delete user and contributions')
+
+ within_modal do
+ fill_in('username', with: user_sole_owner_of_group.name)
+ click_button('Delete user and contributions')
+ end
+
+ wait_for_requests
+
+ expect(page).to have_content('The user is being deleted.')
+ end
+ end
+
+ context 'when a user is locked', time_travel_to: '2020-02-02 10:30:45 -0700' do
+ let_it_be(:locked_user) { create(:user, locked_at: DateTime.parse('2020-02-02 10:30:00 -0700')) }
+
+ before do
+ visit admin_user_path(locked_user)
+ end
+
+ it "displays `(Locked)` next to user's name" do
+ expect(page).to have_content("#{locked_user.name} (Locked)")
+ end
+
+ it 'allows a user to be unlocked from the `User administration dropdown', :js do
+ accept_gl_confirm("Unlock user #{locked_user.name}?", button_text: 'Unlock') do
+ click_action_in_user_dropdown(locked_user.id, 'Unlock')
+ end
+
+ expect(page).not_to have_content("#{locked_user.name} (Locked)")
+ end
+ end
+
+ describe 'Two-factor Authentication status' do
+ it 'shows when enabled' do
+ user.update!(otp_required_for_login: true)
+
+ visit admin_user_path(user)
+
+ expect_two_factor_status('Enabled')
+ end
+
+ it 'shows when disabled' do
+ user.update!(otp_required_for_login: false)
+
+ visit admin_user_path(user)
+
+ expect_two_factor_status('Disabled')
+ end
+
+ def expect_two_factor_status(status)
+ page.within('.two-factor-status') do
+ expect(page).to have_content(status)
+ end
+ end
+ end
+
+ describe 'Email verification status' do
+ let_it_be(:secondary_email) do
+ create :email, email: 'secondary@example.com', user: user
+ end
+
+ it 'displays the correct status for an unverified email address', :aggregate_failures do
+ user.update!(confirmed_at: nil, unconfirmed_email: user.email)
+ visit admin_user_path(user)
+
+ expect(page).to have_content("#{user.email} Unverified")
+ expect(page).to have_content("#{secondary_email.email} Unverified")
+ end
+
+ it 'displays the correct status for a verified email address' do
+ visit admin_user_path(user)
+ expect(page).to have_content("#{user.email} Verified")
+
+ secondary_email.confirm
+ expect(secondary_email.confirmed?).to be_truthy
+
+ visit admin_user_path(user)
+ expect(page).to have_content("#{secondary_email.email} Verified")
+ end
+ end
+
+ describe 'remove users secondary email', :js do
+ let_it_be(:secondary_email) do
+ create :email, email: 'secondary@example.com', user: user
+ end
+
+ it do
+ visit admin_user_path(user.username)
+
+ expect(page).to have_content("Secondary email: #{secondary_email.email}")
+
+ accept_gl_confirm { find("#remove_email_#{secondary_email.id}").click }
+
+ expect(page).not_to have_content(secondary_email.email)
+ end
+ end
+
+ describe 'show user keys', :js do
+ it do
+ key1 = create(:key, user: user, title: 'ssh-rsa Key1')
+ key2 = create(:key, user: user, title: 'ssh-rsa Key2')
+
+ visit admin_user_path(user)
+
+ click_link 'SSH keys'
+
+ expect(page).to have_content(key1.title)
+ expect(page).to have_content(key2.title)
+
+ click_link key2.title
+
+ expect(page).to have_content(key2.title)
+ expect(page).to have_content(key2.key)
+
+ click_button 'Delete'
+
+ page.within('.modal') do
+ page.click_button('Delete')
+ end
+
+ expect(page).not_to have_content(key2.title)
+ end
+ end
+
+ describe 'show user attributes' do
+ it 'has expected attributes', :aggregate_failures do
+ visit admin_user_path(user)
+
+ expect(page).to have_content 'Account'
+ expect(page).to have_content 'Personal projects limit'
+ end
+ end
+ end
+end
diff --git a/spec/features/admin/users/user_spec.rb b/spec/features/admin/users/user_spec.rb
deleted file mode 100644
index d0110b3e013..00000000000
--- a/spec/features/admin/users/user_spec.rb
+++ /dev/null
@@ -1,537 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe 'Admin::Users::User', feature_category: :user_management do
- include Features::AdminUsersHelpers
- include Spec::Support::Helpers::ModalHelpers
-
- let_it_be(:user) { create(:omniauth_user, provider: 'twitter', extern_uid: '123456') }
- let_it_be(:current_user) { create(:admin) }
-
- before do
- sign_in(current_user)
- gitlab_enable_admin_mode_sign_in(current_user, use_mock_admin_mode: false)
- end
-
- describe 'GET /admin/users/:id' do
- it 'has user info', :js, :aggregate_failures do
- visit admin_user_path(user)
-
- expect(page).to have_content(user.email)
- expect(page).to have_content(user.name)
- expect(page).to have_content("ID: #{user.id}")
- expect(page).to have_content("Namespace ID: #{user.namespace_id}")
-
- click_user_dropdown_toggle(user.id)
-
- expect(page).to have_button('Block')
- expect(page).to have_button('Deactivate')
- expect(page).to have_button('Delete user')
- expect(page).to have_button('Delete user and contributions')
- end
-
- context 'when blocking/unblocking the user' do
- it 'shows confirmation and allows blocking and unblocking', :js do
- visit admin_user_path(user)
-
- click_action_in_user_dropdown(user.id, 'Block')
-
- expect(page).to have_content('Block user')
- expect(page).to have_content('You can always unblock their account, their data will remain intact.')
-
- find('.modal-footer button', text: 'Block').click
-
- wait_for_requests
-
- expect(page).to have_content('Successfully blocked')
-
- click_action_in_user_dropdown(user.id, 'Unblock')
-
- expect(page).to have_content('Unblock user')
- expect(page).to have_content('You can always block their account again if needed.')
-
- find('.modal-footer button', text: 'Unblock').click
-
- expect(page).to have_content('Successfully unblocked')
-
- click_user_dropdown_toggle(user.id)
- expect(page).to have_content('Block')
- end
- end
-
- context 'when deactivating/re-activating the user' do
- it 'shows confirmation and allows deactivating/re-activating', :js do
- visit admin_user_path(user)
-
- click_action_in_user_dropdown(user.id, 'Deactivate')
-
- expect(page).to have_content('Deactivate user')
- expect(page).to have_content('You can always re-activate their account, their data will remain intact.')
-
- find('.modal-footer button', text: 'Deactivate').click
-
- wait_for_requests
-
- expect(page).to have_content('Successfully deactivated')
-
- click_action_in_user_dropdown(user.id, 'Activate')
-
- expect(page).to have_content('Activate user')
- expect(page).to have_content('You can always deactivate their account again if needed.')
-
- find('.modal-footer button', text: 'Activate').click
-
- wait_for_requests
-
- expect(page).to have_content('Successfully activated')
-
- click_user_dropdown_toggle(user.id)
- expect(page).to have_content('Deactivate')
- end
- end
-
- context 'when user is the sole owner of a group' do
- let_it_be(:group) { create(:group) }
- let_it_be(:user_sole_owner_of_group) { create(:user) }
-
- before do
- group.add_owner(user_sole_owner_of_group)
- end
-
- it 'shows `Delete user and contributions` action but not `Delete user` action', :js do
- visit admin_user_path(user_sole_owner_of_group)
-
- click_user_dropdown_toggle(user_sole_owner_of_group.id)
-
- expect(page).to have_button('Delete user and contributions')
- expect(page).not_to have_button('Delete user', exact: true)
- end
-
- it 'allows user to be deleted by using the `Delete user and contributions` action', :js do
- visit admin_user_path(user_sole_owner_of_group)
-
- click_action_in_user_dropdown(user_sole_owner_of_group.id, 'Delete user and contributions')
-
- within_modal do
- fill_in('username', with: user_sole_owner_of_group.name)
- click_button('Delete user and contributions')
- end
-
- wait_for_requests
-
- expect(page).to have_content('The user is being deleted.')
- end
- end
-
- context 'when a user is locked', time_travel_to: '2020-02-02 10:30:45 -0700' do
- let_it_be(:locked_user) { create(:user, locked_at: DateTime.parse('2020-02-02 10:30:00 -0700')) }
-
- before do
- visit admin_user_path(locked_user)
- end
-
- it "displays `(Locked)` next to user's name" do
- expect(page).to have_content("#{locked_user.name} (Locked)")
- end
-
- it 'allows a user to be unlocked from the `User administration dropdown', :js do
- accept_gl_confirm("Unlock user #{locked_user.name}?", button_text: 'Unlock') do
- click_action_in_user_dropdown(locked_user.id, 'Unlock')
- end
-
- expect(page).not_to have_content("#{locked_user.name} (Locked)")
- end
- end
-
- describe 'Impersonation' do
- let_it_be(:another_user) { create(:user) }
-
- context 'before impersonating' do
- subject { visit admin_user_path(user_to_visit) }
-
- let_it_be(:user_to_visit) { another_user }
-
- shared_examples "user that cannot be impersonated" do
- it 'disables impersonate button' do
- subject
-
- impersonate_btn = find_by_testid('impersonate-user-link')
-
- expect(impersonate_btn).not_to be_nil
- expect(impersonate_btn['disabled']).not_to be_nil
- end
-
- it "shows tooltip with correct error message" do
- subject
-
- expect(find("span[title='#{impersonation_error_msg}']")).not_to be_nil
- end
- end
-
- context 'for other users' do
- it 'shows impersonate button for other users' do
- subject
-
- expect(page).to have_content('Impersonate')
- impersonate_btn = find_by_testid('impersonate-user-link')
- expect(impersonate_btn['disabled']).to be_nil
- end
- end
-
- context 'for admin itself' do
- let(:user_to_visit) { current_user }
-
- it 'does not show impersonate button for admin itself' do
- subject
-
- expect(page).not_to have_content('Impersonate')
- end
- end
-
- context 'for blocked user' do
- let_it_be(:blocked_user) { create(:user, :blocked) }
- let(:user_to_visit) { blocked_user }
- let(:impersonation_error_msg) { _('You cannot impersonate a blocked user') }
-
- it_behaves_like "user that cannot be impersonated"
- end
-
- context 'for user with expired password' do
- let(:user_to_visit) do
- another_user.update!(password_expires_at: Time.zone.now - 5.minutes)
- another_user
- end
-
- let(:impersonation_error_msg) { _("You cannot impersonate a user with an expired password") }
-
- it_behaves_like "user that cannot be impersonated"
- end
-
- context 'for internal user' do
- let_it_be(:internal_user) { create(:user, :bot) }
- let(:user_to_visit) { internal_user }
- let(:impersonation_error_msg) { _("You cannot impersonate an internal user") }
-
- it_behaves_like "user that cannot be impersonated"
- end
-
- context 'for locked user' do
- let_it_be(:locked_user) { create(:user, :locked) }
- let(:user_to_visit) { locked_user }
- let(:impersonation_error_msg) { _("You cannot impersonate a user who cannot log in") }
-
- it_behaves_like "user that cannot be impersonated"
- end
-
- context 'when already impersonating another user' do
- let_it_be(:admin_user) { create(:user, :admin) }
- let(:impersonation_error_msg) { _("You are already impersonating another user") }
-
- subject do
- visit admin_user_path(admin_user)
- click_link 'Impersonate'
- visit admin_user_path(another_user)
- end
-
- it_behaves_like "user that cannot be impersonated"
- end
-
- context 'when impersonation is disabled' do
- before do
- stub_config_setting(impersonation_enabled: false)
- end
-
- it 'does not show impersonate button' do
- subject
-
- expect(page).not_to have_content('Impersonate')
- end
- end
- end
-
- context 'when impersonating' do
- subject { click_link 'Impersonate' }
-
- before do
- visit admin_user_path(another_user)
- end
-
- it 'logs in as the user when impersonate is clicked', :js do
- subject
-
- expect(page).to have_button("#{another_user.name} user’s menu")
- end
-
- it 'sees impersonation log out icon', :js do
- subject
-
- icon = first('[data-testid="incognito-icon"]')
- expect(icon).not_to be nil
- end
-
- context 'when viewing the confirm email warning', :js do
- before do
- stub_application_setting_enum('email_confirmation_setting', 'soft')
- end
-
- let_it_be(:another_user) { create(:user, :unconfirmed) }
- let(:warning_alert) { page.find(:css, '[data-testid="alert-warning"]') }
- let(:expected_styling) { { 'pointer-events' => 'none', 'cursor' => 'default' } }
-
- context 'with an email that does not contain HTML' do
- before do
- subject
- end
-
- it 'displays the warning alert including the email' do
- expect(warning_alert.text).to include("Please check your email (#{another_user.email}) to verify")
- end
- end
-
- context 'with an email that contains HTML' do
- let(:malicious_email) { "malicious@test.com<form><input/title='<script>alert(document.domain)</script>'>" }
- let(:another_user) { create(:user, confirmed_at: nil, unconfirmed_email: malicious_email) }
-
- before do
- subject
- end
-
- it 'displays the impersonation alert, excludes email, and disables links' do
- expect(warning_alert.text).to include("check your email (#{another_user.unconfirmed_email}) to verify")
- end
- end
- end
- end
-
- context 'ending impersonation', :js do
- subject { click_on 'Stop impersonating' }
-
- before do
- visit admin_user_path(another_user)
- click_link 'Impersonate'
- end
-
- it 'logs out of impersonated user back to original user' do
- subject
-
- expect(page).to have_button("#{current_user.name} user’s menu")
- end
-
- it 'is redirected back to the impersonated users page in the admin after stopping' do
- subject
-
- expect(page).to have_current_path("/admin/users/#{another_user.username}", ignore_query: true)
- end
-
- context 'a user with an expired password' do
- before do
- another_user.update!(password_expires_at: Time.zone.now - 5.minutes)
- end
-
- it 'is redirected back to the impersonated users page in the admin after stopping' do
- subject
-
- expect(page).to have_current_path("/admin/users/#{another_user.username}", ignore_query: true)
- end
- end
- end
- end
-
- describe 'Two-factor Authentication status' do
- it 'shows when enabled' do
- user.update!(otp_required_for_login: true)
-
- visit admin_user_path(user)
-
- expect_two_factor_status('Enabled')
- end
-
- it 'shows when disabled' do
- user.update!(otp_required_for_login: false)
-
- visit admin_user_path(user)
-
- expect_two_factor_status('Disabled')
- end
-
- def expect_two_factor_status(status)
- page.within('.two-factor-status') do
- expect(page).to have_content(status)
- end
- end
- end
-
- describe 'Email verification status' do
- let_it_be(:secondary_email) do
- create :email, email: 'secondary@example.com', user: user
- end
-
- it 'displays the correct status for an unverified email address', :aggregate_failures do
- user.update!(confirmed_at: nil, unconfirmed_email: user.email)
- visit admin_user_path(user)
-
- expect(page).to have_content("#{user.email} Unverified")
- expect(page).to have_content("#{secondary_email.email} Unverified")
- end
-
- it 'displays the correct status for a verified email address' do
- visit admin_user_path(user)
- expect(page).to have_content("#{user.email} Verified")
-
- secondary_email.confirm
- expect(secondary_email.confirmed?).to be_truthy
-
- visit admin_user_path(user)
- expect(page).to have_content("#{secondary_email.email} Verified")
- end
- end
-
- describe 'show user identities' do
- it 'shows user identities', :aggregate_failures do
- visit admin_user_identities_path(user)
-
- expect(page).to have_content(user.name)
- expect(page).to have_content('twitter')
- end
- end
-
- describe 'update user identities' do
- before do
- allow(Gitlab::Auth::OAuth::Provider).to receive(:providers).and_return([:twitter, :twitter_updated])
- end
-
- it 'modifies twitter identity', :aggregate_failures do
- visit admin_user_identities_path(user)
-
- find('.table').find(:link, 'Edit').click
- fill_in 'identity_extern_uid', with: '654321'
- select 'twitter_updated', from: 'identity_provider'
- click_button 'Save changes'
-
- expect(page).to have_content(user.name)
- expect(page).to have_content('twitter_updated')
- expect(page).to have_content('654321')
- end
- end
-
- describe 'remove users secondary email', :js do
- let_it_be(:secondary_email) do
- create :email, email: 'secondary@example.com', user: user
- end
-
- it do
- visit admin_user_path(user.username)
-
- expect(page).to have_content("Secondary email: #{secondary_email.email}")
-
- accept_gl_confirm { find("#remove_email_#{secondary_email.id}").click }
-
- expect(page).not_to have_content(secondary_email.email)
- end
- end
-
- describe 'remove user with identities' do
- it 'removes user with twitter identity', :aggregate_failures do
- visit admin_user_identities_path(user)
-
- click_link 'Delete'
-
- expect(page).to have_content(user.name)
- expect(page).not_to have_content('twitter')
- end
- end
-
- describe 'show user keys', :js do
- it do
- key1 = create(:key, user: user, title: 'ssh-rsa Key1')
- key2 = create(:key, user: user, title: 'ssh-rsa Key2')
-
- visit admin_user_path(user)
-
- click_link 'SSH keys'
-
- expect(page).to have_content(key1.title)
- expect(page).to have_content(key2.title)
-
- click_link key2.title
-
- expect(page).to have_content(key2.title)
- expect(page).to have_content(key2.key)
-
- click_button 'Delete'
-
- page.within('.modal') do
- page.click_button('Delete')
- end
-
- expect(page).not_to have_content(key2.title)
- end
- end
-
- describe 'show user attributes' do
- it 'has expected attributes', :aggregate_failures do
- visit admin_user_path(user)
-
- expect(page).to have_content 'Account'
- expect(page).to have_content 'Personal projects limit'
- end
- end
- end
-
- describe 'GET /admin/users', :js do
- context 'user pending approval' do
- it 'shows user info', :aggregate_failures do
- user = create(:user, :blocked_pending_approval)
-
- visit admin_users_path
- click_link 'Pending approval'
- click_link user.name
-
- expect(page).to have_content(user.name)
- expect(page).to have_content('Pending approval')
-
- click_user_dropdown_toggle(user.id)
-
- expect(page).to have_button('Approve')
- expect(page).to have_button('Reject')
- end
- end
- end
-
- context 'when user has an unconfirmed email', :js do
- # Email address contains HTML to ensure email address is displayed in an HTML safe way.
- let_it_be(:unconfirmed_email) { "#{generate(:email)}<h2>testing<img/src=http://localhost:8000/test.png>" }
- let_it_be(:unconfirmed_user) { create(:user, :unconfirmed, unconfirmed_email: unconfirmed_email) }
-
- where(:path_helper) do
- [
- [-> (user) { admin_user_path(user) }],
- [-> (user) { projects_admin_user_path(user) }],
- [-> (user) { keys_admin_user_path(user) }],
- [-> (user) { admin_user_identities_path(user) }],
- [-> (user) { admin_user_impersonation_tokens_path(user) }]
- ]
- end
-
- with_them do
- it "allows an admin to force confirmation of the user's email", :aggregate_failures do
- visit path_helper.call(unconfirmed_user)
-
- click_button 'Confirm user'
-
- within_modal do
- expect(page).to have_content("Confirm user #{unconfirmed_user.name}?")
- expect(page).to have_content(
- "This user has an unconfirmed email address (#{unconfirmed_email}). You may force a confirmation."
- )
-
- click_button 'Confirm user'
- end
-
- expect(page).to have_content('Successfully confirmed')
- expect(page).not_to have_button('Confirm user')
- end
- end
- end
-end
diff --git a/spec/features/admin/users/users_spec.rb b/spec/features/admin/users/users_spec.rb
index 4e988674858..20cedda626b 100644
--- a/spec/features/admin/users/users_spec.rb
+++ b/spec/features/admin/users/users_spec.rb
@@ -12,7 +12,7 @@ RSpec.describe 'Admin::Users', feature_category: :user_management do
before do
sign_in(current_user)
- gitlab_enable_admin_mode_sign_in(current_user)
+ enable_admin_mode!(current_user)
end
describe 'GET /admin/users', :js do
@@ -381,6 +381,24 @@ RSpec.describe 'Admin::Users', feature_category: :user_management do
expect(find_by_testid("user-group-count-#{current_user.id}").text).to eq("2")
end
end
+
+ context 'user pending approval' do
+ it 'shows user info', :aggregate_failures do
+ user = create(:user, :blocked_pending_approval)
+
+ visit admin_users_path
+ click_link 'Pending approval'
+ click_link user.name
+
+ expect(page).to have_content(user.name)
+ expect(page).to have_content('Pending approval')
+
+ click_user_dropdown_toggle(user.id)
+
+ expect(page).to have_button('Approve')
+ expect(page).to have_button('Reject')
+ end
+ end
end
describe 'GET /admin/users/new' do
diff --git a/spec/features/admin_variables_spec.rb b/spec/features/admin_variables_spec.rb
index caa94209e50..0fc0ae9e199 100644
--- a/spec/features/admin_variables_spec.rb
+++ b/spec/features/admin_variables_spec.rb
@@ -11,7 +11,7 @@ RSpec.describe 'Instance variables', :js, feature_category: :secrets_management
before do
stub_env('IN_MEMORY_APPLICATION_SETTINGS', 'false')
sign_in(admin)
- gitlab_enable_admin_mode_sign_in(admin)
+ enable_admin_mode!(admin)
visit page_path
wait_for_requests
diff --git a/spec/features/boards/board_filters_spec.rb b/spec/features/boards/board_filters_spec.rb
index a6d5d4926ff..92cb688e4e3 100644
--- a/spec/features/boards/board_filters_spec.rb
+++ b/spec/features/boards/board_filters_spec.rb
@@ -194,8 +194,6 @@ RSpec.describe 'Issue board filters', :js, feature_category: :team_planning do
let_it_be(:child_project_member) { create(:user).tap { |u| project.add_maintainer(u) } }
before do
- stub_feature_flags(apollo_boards: false)
-
group.add_maintainer(user)
sign_in(user)
end
diff --git a/spec/features/boards/keyboard_shortcut_spec.rb b/spec/features/boards/keyboard_shortcut_spec.rb
index 6f03f6db3ab..d4777fa671c 100644
--- a/spec/features/boards/keyboard_shortcut_spec.rb
+++ b/spec/features/boards/keyboard_shortcut_spec.rb
@@ -11,7 +11,7 @@ RSpec.describe 'Issue Boards shortcut', :js, feature_category: :team_planning do
admin = create(:admin)
sign_in(admin)
- gitlab_enable_admin_mode_sign_in(admin)
+ enable_admin_mode!(admin)
visit project_path(project)
end
@@ -30,7 +30,7 @@ RSpec.describe 'Issue Boards shortcut', :js, feature_category: :team_planning do
before do
admin = create(:admin)
sign_in(admin)
- gitlab_enable_admin_mode_sign_in(admin)
+ enable_admin_mode!(admin)
visit project_path(project)
end
diff --git a/spec/features/broadcast_messages_spec.rb b/spec/features/broadcast_messages_spec.rb
index f887242384c..867ed3c3acb 100644
--- a/spec/features/broadcast_messages_spec.rb
+++ b/spec/features/broadcast_messages_spec.rb
@@ -121,7 +121,7 @@ RSpec.describe 'Broadcast Messages', feature_category: :onboarding do
stub_const('Gitlab::Cache::JsonCaches::JsonKeyed::STRATEGY_KEY_COMPONENTS', original_strategy_value)
admin = create(:admin)
sign_in(admin)
- gitlab_enable_admin_mode_sign_in(admin)
+ enable_admin_mode!(admin)
visit admin_broadcast_messages_path
diff --git a/spec/features/clusters/cluster_detail_page_spec.rb b/spec/features/clusters/cluster_detail_page_spec.rb
index 31dec5e38da..78ac752a375 100644
--- a/spec/features/clusters/cluster_detail_page_spec.rb
+++ b/spec/features/clusters/cluster_detail_page_spec.rb
@@ -145,7 +145,7 @@ RSpec.describe 'Clusterable > Show page', feature_category: :deployment_manageme
let(:cluster) { create(:cluster, :provided_by_gcp, :instance) }
before do
- gitlab_enable_admin_mode_sign_in(current_user)
+ enable_admin_mode!(current_user)
end
it_behaves_like 'show page' do
diff --git a/spec/features/dashboard/todos/todos_spec.rb b/spec/features/dashboard/todos/todos_spec.rb
index 21dfa1cbc0b..88b3635153c 100644
--- a/spec/features/dashboard/todos/todos_spec.rb
+++ b/spec/features/dashboard/todos/todos_spec.rb
@@ -33,11 +33,11 @@ RSpec.describe 'Dashboard Todos', :js, feature_category: :team_planning do
sign_in(user)
end
- it 'shows "Are you looking for things to do?" message' do
+ it 'shows "Not sure where to go next?" message' do
create(:todo, :assigned, :done, user: user, project: project, target: issue, author: user2)
visit dashboard_todos_path
- expect(page).to have_content 'Are you looking for things to do? Take a look at open issues, contribute to a merge request, or mention someone in a comment to automatically assign them a new to-do item.'
+ expect(page).to have_content 'Not sure where to go next? Take a look at your assigned issues or merge requests.'
end
end
end
diff --git a/spec/features/expand_collapse_diffs_spec.rb b/spec/features/expand_collapse_diffs_spec.rb
index 7fbd6c4e235..523ef8990bb 100644
--- a/spec/features/expand_collapse_diffs_spec.rb
+++ b/spec/features/expand_collapse_diffs_spec.rb
@@ -12,7 +12,7 @@ RSpec.describe 'Expand and collapse diffs', :js, feature_category: :source_code_
allow(Gitlab::CurrentSettings).to receive(:diff_max_patch_bytes).and_return(100.kilobytes)
sign_in(admin)
- gitlab_enable_admin_mode_sign_in(admin)
+ enable_admin_mode!(admin)
wait_for_requests
diff --git a/spec/features/explore/catalog/catalog_details_spec.rb b/spec/features/explore/catalog/catalog_details_spec.rb
new file mode 100644
index 00000000000..8def565ac01
--- /dev/null
+++ b/spec/features/explore/catalog/catalog_details_spec.rb
@@ -0,0 +1,48 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe 'CI/CD Catalog details page', :js, feature_category: :pipeline_composition do
+ let_it_be(:namespace) { create(:group) }
+ let_it_be(:user) { create(:user) }
+ let_it_be(:project) { create(:project, :public, :repository, namespace: namespace) }
+
+ shared_examples_for 'has correct viewing permissions' do
+ context 'when the resource is published' do
+ let(:published_catalog_resource) { create(:ci_catalog_resource, :published, project: project) }
+
+ before do
+ visit explore_catalog_path(published_catalog_resource)
+ end
+
+ it 'navigates to the details page' do
+ expect(page).to have_content('Go to the project')
+ end
+ end
+
+ context 'when the resource is not published' do
+ let(:draft_catalog_resource) { create(:ci_catalog_resource, project: project, state: :draft) }
+
+ before do
+ visit explore_catalog_path(draft_catalog_resource)
+ end
+
+ it 'returns a 404' do
+ expect(page).to have_title('Not Found')
+ expect(page).to have_content('Page Not Found')
+ end
+ end
+ end
+
+ context 'when authenticated' do
+ before do
+ sign_in(user)
+ end
+
+ it_behaves_like 'has correct viewing permissions'
+ end
+
+ context 'when unauthenticated' do
+ it_behaves_like 'has correct viewing permissions'
+ end
+end
diff --git a/spec/features/explore/catalog/catalog_releases_spec.rb b/spec/features/explore/catalog/catalog_releases_spec.rb
new file mode 100644
index 00000000000..27b7aa17551
--- /dev/null
+++ b/spec/features/explore/catalog/catalog_releases_spec.rb
@@ -0,0 +1,111 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe 'CI/CD Catalog releases', :js, feature_category: :pipeline_composition do
+ let_it_be(:tag_name) { 'catalog_release_tag' }
+ let_it_be(:user) { create(:user) }
+ let_it_be_with_reload(:namespace) { create(:group) }
+ let_it_be_with_reload(:project) do
+ create(
+ :project,
+ :catalog_resource_with_components,
+ description: 'Brand new thing',
+ namespace: namespace
+ )
+ end
+
+ let_it_be(:draft_catalog_resource) do
+ create(:ci_catalog_resource, project: project)
+ end
+
+ before_all do
+ namespace.add_owner(user)
+ end
+
+ before do
+ sign_in(user)
+ end
+
+ context 'when a resource is in draft' do
+ it 'does not render it in the Catalog', :aggregate_failures do
+ visit explore_catalog_index_path
+
+ expect(find_all('[data-testid="catalog-resource-item"]').length).to be(0)
+ expect(page).not_to have_content(project.name)
+ end
+ end
+
+ describe 'when releasing a Catalog resource' do
+ before do
+ visit new_project_tag_path(project)
+ fill_in('tag_name', with: tag_name)
+ click_button 'Create tag'
+
+ # Click on the option to create release from the tags page
+ find('a', text: 'Create release').click
+
+ # Makes the actual release
+ click_button 'Create release'
+ wait_for_requests
+
+ visit explore_catalog_index_path
+ end
+
+ it 'appears in the CI/CD Catalog', :aggregate_failures do
+ expect(find_all('[data-testid="catalog-resource-item"]').length).to be(1)
+ within_testid('catalog-list-container') do
+ expect(page).to have_content(project.name)
+ expect(page).to have_content(tag_name)
+ expect(page).to have_content("Released")
+ end
+
+ visit explore_catalog_path(draft_catalog_resource)
+
+ expect(page).to have_content("Last release at")
+ expect(page).to have_content(tag_name)
+ end
+ end
+
+ describe 'when a resource has multiple releases' do
+ let_it_be(:project_with_components) do
+ create(
+ :project,
+ :catalog_resource_with_components,
+ description: 'Brand new thing',
+ namespace: namespace
+ )
+ end
+
+ let_it_be(:ci_resource) do
+ create(:ci_catalog_resource, :published, project: project_with_components)
+ end
+
+ let_it_be(:old_tag_name) { 'v0.5' }
+ let_it_be(:new_tag_name) { 'v1.0' }
+
+ let_it_be(:release_1) do
+ create(:release, :with_catalog_resource_version, project: project_with_components, tag: old_tag_name,
+ author: user)
+ end
+
+ let_it_be(:release_2) do
+ create(:release, :with_catalog_resource_version, project: project_with_components, tag: new_tag_name,
+ author: user)
+ end
+
+ it 'renders the last version on the catalog list item' do
+ visit explore_catalog_index_path
+
+ expect(page).to have_content(release_2.tag)
+ expect(page).not_to have_content(release_1.tag)
+ end
+
+ it 'renders the last version on the catalog details page' do
+ visit explore_catalog_path(ci_resource)
+
+ expect(page).to have_content(release_2.tag)
+ expect(page).not_to have_content(release_1.tag)
+ end
+ end
+end
diff --git a/spec/features/explore/catalog/catalog_settings_spec.rb b/spec/features/explore/catalog/catalog_settings_spec.rb
index bf324eafd7f..edaa8951a27 100644
--- a/spec/features/explore/catalog/catalog_settings_spec.rb
+++ b/spec/features/explore/catalog/catalog_settings_spec.rb
@@ -5,13 +5,22 @@ require 'spec_helper'
RSpec.describe 'CI/CD Catalog settings', :js, feature_category: :pipeline_composition do
let_it_be(:user) { create(:user) }
let_it_be_with_reload(:namespace) { create(:group) }
- let_it_be_with_reload(:new_project) { create(:project, :repository, namespace: namespace) }
+ let_it_be_with_reload(:project_with_ci_components) do
+ create(
+ :project,
+ :catalog_resource_with_components,
+ description: "catalog resource description",
+ namespace: namespace
+ )
+ end
context 'when user is not the owner' do
+ before_all do
+ namespace.add_maintainer(user)
+ end
+
before do
sign_in(user)
- visit edit_project_path(new_project)
- wait_for_requests
end
it 'does not show the CI/CD toggle settings' do
@@ -29,50 +38,96 @@ RSpec.describe 'CI/CD Catalog settings', :js, feature_category: :pipeline_compos
end
it 'shows the CI/CD toggle settings' do
- visit edit_project_path(new_project)
+ visit edit_project_path(project_with_ci_components)
wait_for_requests
expect(page).to have_content('CI/CD Catalog resource')
end
- describe 'when setting a project as a Catalog resource' do
+ context 'when a project is not a Catalog resource' do
before do
- visit project_path(new_project)
- wait_for_requests
+ visit project_path(project_with_ci_components)
end
- it 'adds the project to the CI/CD Catalog' do
+ it 'does not render the CI/CD resource badge' do
+ expect(page).to have_content(project_with_ci_components.name)
expect(page).not_to have_content('CI/CD catalog resource')
+ end
+ end
- visit edit_project_path(new_project)
+ describe 'when listing a project as a Catalog resource' do
+ let_it_be(:tag_name) { 'v0.1' }
+ before do
+ visit edit_project_path(project_with_ci_components)
find('[data-testid="catalog-resource-toggle"] button').click
+ wait_for_requests
+ end
- visit project_path(new_project)
+ it 'marks the project as a CI/CD Catalog' do
+ visit project_path(project_with_ci_components)
expect(page).to have_content('CI/CD catalog resource')
end
+
+ context 'and there are no releases' do
+ before do
+ visit explore_catalog_index_path
+ end
+
+ it 'does not add the resource to the catalog', :aggregate_failures do
+ expect(page).to have_content("CI/CD Catalog")
+ expect(find_all('[data-testid="catalog-resource-item"]').length).to be(0)
+ end
+ end
+
+ context 'and there is a release' do
+ before do
+ create(:release, :with_catalog_resource_version, tag: tag_name, author: user,
+ project: project_with_ci_components)
+ # This call to `publish` is necessary to simulate what creating a release would really do
+ project_with_ci_components.catalog_resource.publish!
+ visit explore_catalog_index_path
+ end
+
+ it 'adds the resource to the catalog', :aggregate_failures do
+ expect(page).to have_content("CI/CD Catalog")
+ expect(find_all('[data-testid="catalog-resource-item"]').length).to be(1)
+ expect(page).to have_content(tag_name)
+ end
+ end
end
describe 'when unlisting a project from the CI/CD Catalog' do
before do
- create(:ci_catalog_resource, project: new_project, state: :published)
- visit project_path(new_project)
- wait_for_requests
- end
+ create(:ci_catalog_resource, project: project_with_ci_components)
+ create(:release, :with_catalog_resource_version, tag: 'v0.1', author: user, project: project_with_ci_components)
+ project_with_ci_components.catalog_resource.publish!
- it 'removes the project to the CI/CD Catalog' do
- expect(page).to have_content('CI/CD catalog resource')
-
- visit edit_project_path(new_project)
+ visit edit_project_path(project_with_ci_components)
find('[data-testid="catalog-resource-toggle"] button').click
click_button 'Remove from the CI/CD catalog'
+ end
- visit project_path(new_project)
+ it 'removes the CI/CD Catalog tag on the project' do
+ visit project_path(project_with_ci_components)
expect(page).not_to have_content('CI/CD catalog resource')
end
+
+ it 'removes the resource from the catalog' do
+ visit explore_catalog_index_path
+
+ expect(page).not_to have_content(project_with_ci_components.name)
+ expect(find_all('[data-testid="catalog-resource-item"]').length).to be(0)
+ end
+
+ it 'does not destroy existing releases' do
+ visit project_releases_path(project_with_ci_components)
+
+ expect(page).to have_content(project_with_ci_components.releases.last.name)
+ end
end
end
end
diff --git a/spec/features/explore/catalog/catalog_spec.rb b/spec/features/explore/catalog/catalog_spec.rb
index 00bbb02ebbf..1ad0e9679b8 100644
--- a/spec/features/explore/catalog/catalog_spec.rb
+++ b/spec/features/explore/catalog/catalog_spec.rb
@@ -5,52 +5,123 @@ require 'spec_helper'
RSpec.describe 'CI/CD Catalog', :js, feature_category: :pipeline_composition do
let_it_be(:namespace) { create(:group) }
let_it_be(:user) { create(:user) }
+ let_it_be(:public_projects_with_components) do
+ create_list(
+ :project,
+ 3,
+ :catalog_resource_with_components,
+ :public,
+ description: 'A simple component',
+ namespace: namespace
+ )
+ end
before_all do
- namespace.add_developer(user)
+ public_projects_with_components.map do |current_project|
+ create(:ci_catalog_resource, :published, project: current_project)
+ end
end
- before do
- sign_in(user)
- end
+ shared_examples 'basic page viewing' do
+ it 'shows CI Catalog title and description', :aggregate_failures do
+ expect(page).to have_content('CI/CD Catalog')
+ expect(page).to have_content(
+ 'Discover CI/CD components that can improve your pipeline with additional functionality'
+ )
+ end
- describe 'GET explore/catalog' do
- let_it_be(:project) { create(:project, :repository, namespace: namespace) }
+ it 'renders CI Catalog resources list' do
+ expect(find_all('[data-testid="catalog-resource-item"]').length).to be(3)
+ end
- let_it_be(:ci_resource_projects) do
- create_list(
- :project,
- 3,
- :repository,
- description: 'A simple component',
- namespace: namespace
- )
+ it 'renders resource details', :aggregate_failures do
+ within_testid('catalog-resource-item', match: :first) do
+ expect(page).to have_content(public_projects_with_components[2].name)
+ expect(page).to have_content(public_projects_with_components[2].description)
+ expect(page).to have_content(namespace.name)
+ end
end
+ end
- let_it_be(:ci_catalog_resources) do
- ci_resource_projects.map do |current_project|
- create(:ci_catalog_resource, :published, project: current_project)
+ shared_examples 'navigates to the details page' do
+ context 'when clicking on a resource' do
+ before do
+ find_by_testid('ci-resource-link', match: :first).click
+ end
+
+ it 'navigates to the details page' do
+ expect(page).to have_content('Go to the project')
end
end
+ end
+ context 'when unauthenticated' do
before do
visit explore_catalog_index_path
- wait_for_requests
end
- it 'shows CI Catalog title and description', :aggregate_failures do
- expect(page).to have_content('CI/CD Catalog')
- expect(page).to have_content(
- 'Discover CI/CD components that can improve your pipeline with additional functionality'
+ it_behaves_like 'basic page viewing'
+ it_behaves_like 'navigates to the details page'
+ end
+
+ context 'when authenticated' do
+ before do
+ sign_in(user)
+ visit explore_catalog_index_path
+ end
+
+ it_behaves_like 'basic page viewing'
+ it_behaves_like 'navigates to the details page'
+ end
+
+ context 'for private catalog resources' do
+ let_it_be(:private_project) do
+ create(
+ :project,
+ :catalog_resource_with_components,
+ description: 'Our private project',
+ namespace: namespace
)
end
- it 'renders CI Catalog resources list' do
- expect(find_all('[data-testid="catalog-resource-item"]').length).to be(3)
+ let_it_be(:catalog_resource) { create(:ci_catalog_resource, :published, project: private_project) }
+ let_it_be(:developer) { create(:user) }
+ let_it_be(:browsing_user) { create(:user) }
+
+ context 'when browsing as a developer + member' do
+ before_all do
+ namespace.add_developer(developer)
+ end
+
+ before do
+ sign_in(developer)
+ visit explore_catalog_index_path
+ end
+
+ it 'shows the catalog resource' do
+ expect(page).to have_content(private_project.name)
+ end
+ end
+
+ context 'when browsing as a non-member of the project' do
+ before do
+ sign_in(browsing_user)
+ visit explore_catalog_index_path
+ end
+
+ it 'does not show the catalog resource' do
+ expect(page).not_to have_content(private_project.name)
+ end
+ end
+ end
+
+ describe 'Search and sorting' do
+ before do
+ visit explore_catalog_index_path
end
context 'when searching for a resource' do
- let(:project_name) { ci_resource_projects[0].name }
+ let(:project_name) { public_projects_with_components[0].name }
before do
find('input[data-testid="catalog-search-bar"]').send_keys project_name
@@ -70,8 +141,12 @@ RSpec.describe 'CI/CD Catalog', :js, feature_category: :pipeline_composition do
context 'with the creation date option' do
it 'sorts resources from last to first by default' do
expect(find_all('[data-testid="catalog-resource-item"]').length).to be(3)
- expect(find_all('[data-testid="catalog-resource-item"]')[0]).to have_content(ci_resource_projects[2].name)
- expect(find_all('[data-testid="catalog-resource-item"]')[2]).to have_content(ci_resource_projects[0].name)
+ expect(find_all('[data-testid="catalog-resource-item"]')[0]).to have_content(
+ public_projects_with_components[2].name
+ )
+ expect(find_all('[data-testid="catalog-resource-item"]')[2]).to have_content(
+ public_projects_with_components[0].name
+ )
end
context 'when changing the sort direction' do
@@ -82,56 +157,15 @@ RSpec.describe 'CI/CD Catalog', :js, feature_category: :pipeline_composition do
it 'sorts resources from first to last' do
expect(find_all('[data-testid="catalog-resource-item"]').length).to be(3)
- expect(find_all('[data-testid="catalog-resource-item"]')[0]).to have_content(ci_resource_projects[0].name)
- expect(find_all('[data-testid="catalog-resource-item"]')[2]).to have_content(ci_resource_projects[2].name)
+ expect(find_all('[data-testid="catalog-resource-item"]')[0]).to have_content(
+ public_projects_with_components[0].name
+ )
+ expect(find_all('[data-testid="catalog-resource-item"]')[2]).to have_content(
+ public_projects_with_components[2].name
+ )
end
end
end
end
-
- context 'for a single CI/CD catalog resource' do
- it 'renders resource details', :aggregate_failures do
- within_testid('catalog-resource-item', match: :first) do
- expect(page).to have_content(ci_resource_projects[2].name)
- expect(page).to have_content(ci_resource_projects[2].description)
- expect(page).to have_content(namespace.name)
- end
- end
-
- context 'when clicked' do
- before do
- find_by_testid('ci-resource-link', match: :first).click
- end
-
- it 'navigates to the details page' do
- expect(page).to have_content('Go to the project')
- end
- end
- end
- end
-
- describe 'GET explore/catalog/:id' do
- let_it_be(:project) { create(:project, :repository, namespace: namespace) }
-
- before do
- visit explore_catalog_path(new_ci_resource)
- end
-
- context 'when the resource is published' do
- let(:new_ci_resource) { create(:ci_catalog_resource, :published, project: project) }
-
- it 'navigates to the details page' do
- expect(page).to have_content('Go to the project')
- end
- end
-
- context 'when the resource is not published' do
- let(:new_ci_resource) { create(:ci_catalog_resource, project: project, state: :draft) }
-
- it 'returns a 404' do
- expect(page).to have_title('Not Found')
- expect(page).to have_content('Page Not Found')
- end
- end
end
end
diff --git a/spec/features/gitlab_experiments_spec.rb b/spec/features/gitlab_experiments_spec.rb
index facf4994c44..0d0afa801c8 100644
--- a/spec/features/gitlab_experiments_spec.rb
+++ b/spec/features/gitlab_experiments_spec.rb
@@ -12,7 +12,7 @@ RSpec.describe "Gitlab::Experiment", :js, feature_category: :activation do
before do
admin = create(:admin)
sign_in(admin)
- gitlab_enable_admin_mode_sign_in(admin)
+ enable_admin_mode!(admin)
stub_experiments(null_hypothesis: :candidate)
end
diff --git a/spec/features/groups/clusters/user_spec.rb b/spec/features/groups/clusters/user_spec.rb
index 9089fba1886..24552daa11b 100644
--- a/spec/features/groups/clusters/user_spec.rb
+++ b/spec/features/groups/clusters/user_spec.rb
@@ -130,7 +130,7 @@ RSpec.describe 'User Cluster', :js, feature_category: :environment_management do
gitlab_sign_out
gitlab_sign_in(admin)
- gitlab_enable_admin_mode_sign_in(admin)
+ enable_admin_mode!(admin)
visit group_clusters_path(group)
end
diff --git a/spec/features/groups/members/list_members_spec.rb b/spec/features/groups/members/list_members_spec.rb
index b16d61a5fe4..b6e0deb2e73 100644
--- a/spec/features/groups/members/list_members_spec.rb
+++ b/spec/features/groups/members/list_members_spec.rb
@@ -66,7 +66,7 @@ RSpec.describe 'Groups > Members > List members', :js, feature_category: :groups
it 'shows 2FA badge to admins' do
sign_in(admin)
- gitlab_enable_admin_mode_sign_in(admin)
+ enable_admin_mode!(admin)
visit group_group_members_path(group)
diff --git a/spec/features/groups/settings/user_enables_namespace_hierarchy_cache_spec.rb b/spec/features/groups/settings/user_enables_namespace_hierarchy_cache_spec.rb
new file mode 100644
index 00000000000..4b362a9561d
--- /dev/null
+++ b/spec/features/groups/settings/user_enables_namespace_hierarchy_cache_spec.rb
@@ -0,0 +1,58 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe 'Groups > Permission and group features > Enable caching of hierarchical objects', :js, feature_category: :value_stream_management do
+ include ListboxHelpers
+
+ let_it_be_with_reload(:group) { create(:group) }
+ let_it_be(:user) { create(:user).tap { |u| group.add_owner(u) } }
+
+ before do
+ sign_in(user)
+ end
+
+ context 'with the group_hierarchy_optimization feature flag enabled' do
+ before do
+ stub_feature_flags(group_hierarchy_optimization: true)
+ end
+
+ it 'enables the setting' do
+ visit edit_group_path(group)
+
+ page.within('#js-permissions-settings') do
+ check 'group[enable_namespace_descendants_cache]'
+
+ click_on 'Save changes'
+ end
+
+ expect(group.namespace_descendants).to be_present
+ end
+
+ it 'disables the setting' do
+ create(:namespace_descendants, namespace: group)
+
+ visit edit_group_path(group)
+
+ page.within('#js-permissions-settings') do
+ uncheck 'group[enable_namespace_descendants_cache]'
+
+ click_on 'Save changes'
+ end
+
+ expect(group.reload.namespace_descendants).not_to be_present
+ end
+ end
+
+ context 'with the group_hierarchy_optimization feature flag disabled' do
+ before do
+ stub_feature_flags(group_hierarchy_optimization: false)
+ end
+
+ it 'does not render the setting' do
+ visit edit_group_path(group)
+
+ expect(page).not_to have_selector('group[enable_namespace_descendants_cache]')
+ end
+ end
+end
diff --git a/spec/features/groups/show_spec.rb b/spec/features/groups/show_spec.rb
index c2ab5edf79c..553c2f0266e 100644
--- a/spec/features/groups/show_spec.rb
+++ b/spec/features/groups/show_spec.rb
@@ -81,6 +81,7 @@ RSpec.describe 'Group show page', feature_category: :groups_and_projects do
expect(find('.group-row:nth-child(1) .namespace-title > a')).to have_content(project2.title)
expect(find('.group-row:nth-child(2) .namespace-title > a')).to have_content(project1.title)
expect(find('.group-row:nth-child(3) .namespace-title > a')).to have_content(project3.title)
+ expect(page).to have_selector('button[data-testid="base-dropdown-toggle"]', text: 'Stars')
end
end
end
diff --git a/spec/features/groups_spec.rb b/spec/features/groups_spec.rb
index c0aaa7f818a..aac2e3c4e46 100644
--- a/spec/features/groups_spec.rb
+++ b/spec/features/groups_spec.rb
@@ -205,7 +205,10 @@ RSpec.describe 'Group', feature_category: :groups_and_projects do
describe 'not showing personalization questions on group creation when it is enabled' do
before do
stub_application_setting(hide_third_party_offers: true)
- visit new_group_path(anchor: 'create-group-pane')
+
+ # If visiting directly via path, personalization setting is not being picked up correctly
+ visit new_group_path
+ click_link 'Create group'
end
it 'does not render personalization questions' do
@@ -350,10 +353,16 @@ RSpec.describe 'Group', feature_category: :groups_and_projects do
visit path
end
- it_behaves_like 'dirty submit form', [{ form: '.js-general-settings-form', input: 'input[name="group[name]"]', submit: 'button[type="submit"]' },
- { form: '.js-general-settings-form', input: '#group_visibility_level_0', submit: 'button[type="submit"]' },
- { form: '.js-general-permissions-form', input: '#group_request_access_enabled', submit: 'button[type="submit"]' },
- { form: '.js-general-permissions-form', input: 'input[name="group[two_factor_grace_period]"]', submit: 'button[type="submit"]' }]
+ it_behaves_like 'dirty submit form', [
+ { form: '.js-general-settings-form', input: 'input[name="group[name]"]', submit: 'button[type="submit"]' },
+ { form: '.js-general-settings-form', input: '#group_visibility_level_0', submit: 'button[type="submit"]' },
+ { form: '.js-general-permissions-form', input: '#group_request_access_enabled', submit: 'button[type="submit"]' },
+ {
+ form: '.js-general-permissions-form',
+ input: 'input[name="group[two_factor_grace_period]"]',
+ submit: 'button[type="submit"]'
+ }
+ ]
it 'saves new settings' do
page.within('.gs-general') do
diff --git a/spec/features/help_dropdown_spec.rb b/spec/features/help_dropdown_spec.rb
index 3e4c0bc55fe..89040ec9ab6 100644
--- a/spec/features/help_dropdown_spec.rb
+++ b/spec/features/help_dropdown_spec.rb
@@ -27,7 +27,7 @@ RSpec.describe "Help Dropdown", :js, feature_category: :shared do
context "when severity is #{severity}" do
before do
sign_in(admin)
- gitlab_enable_admin_mode_sign_in(admin)
+ enable_admin_mode!(admin)
allow_next_instance_of(VersionCheck) do |instance|
allow(instance).to receive(:response).and_return({ "severity" => severity })
diff --git a/spec/features/issuables/issuable_list_spec.rb b/spec/features/issuables/issuable_list_spec.rb
index 1020ea341ce..fee0f8a8f32 100644
--- a/spec/features/issuables/issuable_list_spec.rb
+++ b/spec/features/issuables/issuable_list_spec.rb
@@ -16,11 +16,11 @@ RSpec.describe 'issuable list', :js, feature_category: :team_planning do
issuable_types.each do |issuable_type|
it "avoids N+1 database queries for #{issuable_type.to_s.humanize.pluralize}", quarantine: { issue: 'https://gitlab.com/gitlab-org/gitlab/-/issues/231426' } do
- control_count = ActiveRecord::QueryRecorder.new { visit_issuable_list(issuable_type) }.count
+ control = ActiveRecord::QueryRecorder.new { visit_issuable_list(issuable_type) }
create_issuables(issuable_type)
- expect { visit_issuable_list(issuable_type) }.not_to exceed_query_limit(control_count)
+ expect { visit_issuable_list(issuable_type) }.not_to exceed_query_limit(control)
end
it "counts upvotes, downvotes and notes count for each #{issuable_type.to_s.humanize}" do
diff --git a/spec/features/issues/gfm_autocomplete_spec.rb b/spec/features/issues/gfm_autocomplete_spec.rb
index 728ba07e5c4..58c84d26fea 100644
--- a/spec/features/issues/gfm_autocomplete_spec.rb
+++ b/spec/features/issues/gfm_autocomplete_spec.rb
@@ -185,7 +185,7 @@ RSpec.describe 'GFM autocomplete', :js, feature_category: :team_planning do
end
end
- describe 'assignees' do
+ shared_examples 'autocomplete user mentions' do
it 'does not wrap with quotes for assignee values' do
fill_in 'Comment', with: "@#{user.username}"
@@ -252,6 +252,16 @@ RSpec.describe 'GFM autocomplete', :js, feature_category: :team_planning do
end
end
+ it_behaves_like 'autocomplete user mentions'
+
+ context 'when mention_autocomplete_backend_filtering is disabled' do
+ before do
+ stub_feature_flags(mention_autocomplete_backend_filtering: false)
+ end
+
+ it_behaves_like 'autocomplete user mentions'
+ end
+
context 'if a selected value has special characters' do
it 'wraps the result in double quotes' do
fill_in 'Comment', with: "~#{label.title[0..2]}"
diff --git a/spec/features/issues/issue_sidebar_spec.rb b/spec/features/issues/issue_sidebar_spec.rb
index e4df106de07..1a51dd0bc01 100644
--- a/spec/features/issues/issue_sidebar_spec.rb
+++ b/spec/features/issues/issue_sidebar_spec.rb
@@ -73,7 +73,6 @@ RSpec.describe 'Issue Sidebar', feature_category: :team_planning do
page.within '.dropdown-menu-user' do
expect(page).not_to have_content 'Unassigned'
- click_button user2.name
end
find('.participants').click
@@ -82,7 +81,7 @@ RSpec.describe 'Issue Sidebar', feature_category: :team_planning do
open_assignees_dropdown
page.within('.assignee') do
- expect(page.all('[data-testid="selected-participant"]').length).to eq(1)
+ expect(page.all('[data-testid="unselected-participant"]').length).to eq(1)
end
expect(find('[data-testid="user-search-input"]').value).to eq(user2.name)
diff --git a/spec/features/merge_request/user_creates_mr_spec.rb b/spec/features/merge_request/user_creates_mr_spec.rb
index 950b64bb395..5f6e465d011 100644
--- a/spec/features/merge_request/user_creates_mr_spec.rb
+++ b/spec/features/merge_request/user_creates_mr_spec.rb
@@ -74,6 +74,9 @@ RSpec.describe 'Merge request > User creates MR', feature_category: :code_review
visit project_new_merge_request_path(source_project)
+ find('.js-source-branch').click
+ select_listbox_item('master')
+
first('.js-target-project').click
select_listbox_item(target_project.full_path)
diff --git a/spec/features/merge_request/user_selects_branches_for_new_mr_spec.rb b/spec/features/merge_request/user_selects_branches_for_new_mr_spec.rb
index daa84227adc..a68bdfd7356 100644
--- a/spec/features/merge_request/user_selects_branches_for_new_mr_spec.rb
+++ b/spec/features/merge_request/user_selects_branches_for_new_mr_spec.rb
@@ -43,6 +43,9 @@ RSpec.describe 'Merge request > User selects branches for new MR', :js, feature_
expect(page).to have_content('Source branch')
expect(page).to have_content('Target branch')
+ find('.js-source-branch').click
+ select_listbox_item('master')
+
first('.js-target-branch').click
find('.gl-listbox-search-input').native.send_keys 'v1.1.0'
select_listbox_item('v1.1.0')
diff --git a/spec/features/merge_requests/admin_views_hidden_merge_requests_spec.rb b/spec/features/merge_requests/admin_views_hidden_merge_requests_spec.rb
index 7e33946f713..f52520313b2 100644
--- a/spec/features/merge_requests/admin_views_hidden_merge_requests_spec.rb
+++ b/spec/features/merge_requests/admin_views_hidden_merge_requests_spec.rb
@@ -11,7 +11,7 @@ RSpec.describe 'Admin views hidden merge requests', feature_category: :insider_t
before do
sign_in(admin)
- gitlab_enable_admin_mode_sign_in(admin)
+ enable_admin_mode!(admin)
visit(project_merge_requests_path(project))
end
diff --git a/spec/features/profiles/two_factor_auths_spec.rb b/spec/features/profiles/two_factor_auths_spec.rb
index cea3172898f..bc6f88474a9 100644
--- a/spec/features/profiles/two_factor_auths_spec.rb
+++ b/spec/features/profiles/two_factor_auths_spec.rb
@@ -63,6 +63,35 @@ RSpec.describe 'Two factor auths', feature_category: :system_access do
end
end
+ context 'when two factor is enforced for administrator users' do
+ let_it_be(:admin) { create(:admin) }
+
+ before do
+ stub_application_setting(require_admin_two_factor_authentication: require_admin_two_factor_authentication)
+ sign_in(admin)
+ end
+
+ context 'when visiting root dashboard path' do
+ let(:require_admin_two_factor_authentication) { true }
+
+ it 'renders alert for administrator users' do
+ visit profile_two_factor_auth_path
+ expect(page).to have_content('Administrator users are required to enable Two-Factor Authentication for their account. You need to do this before ')
+ end
+ end
+ end
+
+ context 'when two factor is disabled for administrator users' do
+ context 'when visiting root dashboard path' do
+ let(:require_admin_two_factor_authentication) { false }
+
+ it 'does not render an alert for administrator users' do
+ visit profile_two_factor_auth_path
+ expect(page).not_to have_content('Administrator users are required to enable Two-Factor Authentication for their account. You need to do this before ')
+ end
+ end
+ end
+
context 'when two factor is enforced in global settings' do
before do
stub_application_setting(require_two_factor_authentication: true)
diff --git a/spec/features/profiles/user_visits_profile_preferences_page_spec.rb b/spec/features/profiles/user_visits_profile_preferences_page_spec.rb
index 033d69d29b9..fc0992973e8 100644
--- a/spec/features/profiles/user_visits_profile_preferences_page_spec.rb
+++ b/spec/features/profiles/user_visits_profile_preferences_page_spec.rb
@@ -94,7 +94,7 @@ RSpec.describe 'User visits the profile preferences page', :js, feature_category
end
def expect_preferences_saved_message
- page.within('.flash-container') do
+ page.within('.b-toaster') do
expect(page).to have_content('Preferences saved.')
end
end
diff --git a/spec/features/projects/branches_spec.rb b/spec/features/projects/branches_spec.rb
index a29d643b15b..41b8ad7825c 100644
--- a/spec/features/projects/branches_spec.rb
+++ b/spec/features/projects/branches_spec.rb
@@ -171,12 +171,12 @@ RSpec.describe 'Branches', feature_category: :source_code_management do
new_branches_count = 20
sql_queries_count_threshold = 10
- control_count = ActiveRecord::QueryRecorder.new { visit project_branches_path(project) }.count
+ control = ActiveRecord::QueryRecorder.new { visit project_branches_path(project) }
(1..new_branches_count).each { |number| repository.add_branch(user, "new-branch-#{number}", 'master') }
expect { visit project_branches_filtered_path(project, state: 'all') }
- .not_to exceed_query_limit(control_count).with_threshold(sql_queries_count_threshold)
+ .not_to exceed_query_limit(control).with_threshold(sql_queries_count_threshold)
end
end
diff --git a/spec/features/projects/clusters/gcp_spec.rb b/spec/features/projects/clusters/gcp_spec.rb
index c223053606b..e7e419a4c60 100644
--- a/spec/features/projects/clusters/gcp_spec.rb
+++ b/spec/features/projects/clusters/gcp_spec.rb
@@ -110,7 +110,7 @@ RSpec.describe 'Gcp Cluster', :js, feature_category: :deployment_management do
before do
stub_env('IN_MEMORY_APPLICATION_SETTINGS', 'false')
- gitlab_enable_admin_mode_sign_in(user)
+ enable_admin_mode!(user)
visit general_admin_application_settings_path
end
diff --git a/spec/features/projects/clusters/user_spec.rb b/spec/features/projects/clusters/user_spec.rb
index 067963e06e0..bc4d64dad21 100644
--- a/spec/features/projects/clusters/user_spec.rb
+++ b/spec/features/projects/clusters/user_spec.rb
@@ -120,7 +120,7 @@ RSpec.describe 'User Cluster', :js, feature_category: :deployment_management do
gitlab_sign_out
gitlab_sign_in(admin)
- gitlab_enable_admin_mode_sign_in(admin)
+ enable_admin_mode!(admin)
visit project_clusters_path(project)
end
diff --git a/spec/features/projects/environments/environment_spec.rb b/spec/features/projects/environments/environment_spec.rb
index de4ce3a23b6..57eaef2621e 100644
--- a/spec/features/projects/environments/environment_spec.rb
+++ b/spec/features/projects/environments/environment_spec.rb
@@ -319,7 +319,7 @@ RSpec.describe 'Environment', feature_category: :environment_management do
params = {
change: {
oldrev: project.commit(branch).id,
- newrev: Gitlab::Git::BLANK_SHA,
+ newrev: Gitlab::Git::SHA1_BLANK_SHA,
ref: "refs/heads/#{branch}"
}
}
diff --git a/spec/features/projects/features_visibility_spec.rb b/spec/features/projects/features_visibility_spec.rb
index c6a770cee9e..5f1bc4f86c1 100644
--- a/spec/features/projects/features_visibility_spec.rb
+++ b/spec/features/projects/features_visibility_spec.rb
@@ -153,7 +153,7 @@ RSpec.describe 'Edit Project Settings', feature_category: :groups_and_projects d
before do
non_member.update_attribute(:admin, true)
sign_in(non_member)
- gitlab_enable_admin_mode_sign_in(non_member)
+ enable_admin_mode!(non_member)
end
it 'renders 404 if feature is disabled' do
diff --git a/spec/features/projects/members/manage_members_spec.rb b/spec/features/projects/members/manage_members_spec.rb
index 3423c636c2b..e6b6241c2fe 100644
--- a/spec/features/projects/members/manage_members_spec.rb
+++ b/spec/features/projects/members/manage_members_spec.rb
@@ -93,7 +93,8 @@ RSpec.describe 'Projects > Members > Manage members', :js, feature_category: :on
end
end
- context 'uses ProjectMember valid_access_level_roles for the invite members modal options', :aggregate_failures do
+ context 'uses ProjectMember valid_access_level_roles for the invite members modal options', :aggregate_failures,
+ quarantine: 'https://gitlab.com/gitlab-org/gitlab/-/issues/436958' do
before do
sign_in(current_user)
@@ -265,7 +266,7 @@ RSpec.describe 'Projects > Members > Manage members', :js, feature_category: :on
it 'shows 2FA badge to admins' do
sign_in(admin)
- gitlab_enable_admin_mode_sign_in(admin)
+ enable_admin_mode!(admin)
visit_members_page
diff --git a/spec/features/projects/navbar_spec.rb b/spec/features/projects/navbar_spec.rb
index 348a661855c..9babe3c3b71 100644
--- a/spec/features/projects/navbar_spec.rb
+++ b/spec/features/projects/navbar_spec.rb
@@ -16,6 +16,7 @@ RSpec.describe 'Project navbar', :with_license, :js, feature_category: :groups_a
stub_config(registry: { enabled: false })
stub_feature_flags(ml_experiment_tracking: false)
+ stub_feature_flags(model_registry: false)
insert_package_nav
insert_infrastructure_registry_nav
insert_infrastructure_google_cloud_nav
@@ -93,4 +94,16 @@ RSpec.describe 'Project navbar', :with_license, :js, feature_category: :groups_a
it_behaves_like 'verified navigation bar'
end
+
+ context 'when model registry is available' do
+ before do
+ stub_feature_flags(model_registry: true)
+
+ insert_model_registry_nav(_('Package Registry'))
+
+ visit project_path(project)
+ end
+
+ it_behaves_like 'verified navigation bar'
+ end
end
diff --git a/spec/features/projects/new_project_spec.rb b/spec/features/projects/new_project_spec.rb
index a0df18ea6f3..d6b27d8c618 100644
--- a/spec/features/projects/new_project_spec.rb
+++ b/spec/features/projects/new_project_spec.rb
@@ -3,6 +3,8 @@
require 'spec_helper'
RSpec.describe 'New project', :js, feature_category: :groups_and_projects do
+ include ListboxHelpers
+
before do
stub_application_setting(import_sources: Gitlab::ImportSources.values)
end
@@ -311,7 +313,7 @@ RSpec.describe 'New project', :js, feature_category: :groups_and_projects do
it 'does not select the user namespace' do
click_on 'Pick a group or namespace'
- expect(page).to have_button user.username
+ expect_listbox_item(user.username)
end
end
@@ -359,28 +361,28 @@ RSpec.describe 'New project', :js, feature_category: :groups_and_projects do
it 'enables the correct visibility options' do
click_button public_group.full_path
- click_button user.username
+ select_listbox_item user.username
expect(find("#project_visibility_level_#{Gitlab::VisibilityLevel::PRIVATE}")).not_to be_disabled
expect(find("#project_visibility_level_#{Gitlab::VisibilityLevel::INTERNAL}")).not_to be_disabled
expect(find("#project_visibility_level_#{Gitlab::VisibilityLevel::PUBLIC}")).not_to be_disabled
click_button user.username
- click_button public_group.full_path
+ select_listbox_item public_group.full_path
expect(find("#project_visibility_level_#{Gitlab::VisibilityLevel::PRIVATE}")).not_to be_disabled
expect(find("#project_visibility_level_#{Gitlab::VisibilityLevel::INTERNAL}")).not_to be_disabled
expect(find("#project_visibility_level_#{Gitlab::VisibilityLevel::PUBLIC}")).not_to be_disabled
click_button public_group.full_path
- click_button internal_group.full_path
+ select_listbox_item internal_group.full_path
expect(find("#project_visibility_level_#{Gitlab::VisibilityLevel::PRIVATE}")).not_to be_disabled
expect(find("#project_visibility_level_#{Gitlab::VisibilityLevel::INTERNAL}")).not_to be_disabled
expect(find("#project_visibility_level_#{Gitlab::VisibilityLevel::PUBLIC}")).to be_disabled
click_button internal_group.full_path
- click_button private_group.full_path
+ select_listbox_item private_group.full_path
expect(find("#project_visibility_level_#{Gitlab::VisibilityLevel::PRIVATE}")).not_to be_disabled
expect(find("#project_visibility_level_#{Gitlab::VisibilityLevel::INTERNAL}")).to be_disabled
@@ -467,7 +469,7 @@ RSpec.describe 'New project', :js, feature_category: :groups_and_projects do
)
click_on 'Pick a group or namespace'
- click_on user.username
+ select_listbox_item user.username
click_on 'Create project'
expect(page).to have_css('#import-project-pane.active')
diff --git a/spec/features/projects/settings/registry_settings_spec.rb b/spec/features/projects/settings/registry_settings_spec.rb
index d6e08628721..dc4251aa92f 100644
--- a/spec/features/projects/settings/registry_settings_spec.rb
+++ b/spec/features/projects/settings/registry_settings_spec.rb
@@ -27,7 +27,7 @@ RSpec.describe 'Project > Settings > Packages and registries > Container registr
wait_for_requests
expect(page).to be_axe_clean.within('[data-testid="packages-and-registries-project-settings"]')
- .skipping :'link-in-text-block', :'heading-order'
+ .skipping :'heading-order'
end
it 'shows active tab on sidebar' do
diff --git a/spec/features/projects/settings/user_interacts_with_deploy_keys_spec.rb b/spec/features/projects/settings/user_interacts_with_deploy_keys_spec.rb
index 9305467cbe4..3d9addfe456 100644
--- a/spec/features/projects/settings/user_interacts_with_deploy_keys_spec.rb
+++ b/spec/features/projects/settings/user_interacts_with_deploy_keys_spec.rb
@@ -1,6 +1,6 @@
# frozen_string_literal: true
-require "spec_helper"
+require 'spec_helper'
RSpec.describe "User interacts with deploy keys", :js, feature_category: :continuous_delivery do
let(:project) { create(:project, :repository) }
@@ -10,43 +10,59 @@ RSpec.describe "User interacts with deploy keys", :js, feature_category: :contin
sign_in(user)
end
- shared_examples "attaches a key" do
- it "attaches key" do
+ shared_examples 'attaches a key' do
+ it 'attaches key' do
visit(project_deploy_keys_path(project))
- page.within(".deploy-keys") do
- find(".badge", text: "1").click
+ page.within('.deploy-keys') do
+ click_link(scope)
- click_button("Enable")
+ click_button('Enable')
- expect(page).not_to have_selector(".gl-spinner")
+ expect(page).not_to have_selector('.gl-spinner')
expect(page).to have_current_path(project_settings_repository_path(project), ignore_query: true)
- find(".js-deployKeys-tab-enabled_keys").click
+ click_link('Enabled deploy keys')
expect(page).to have_content(deploy_key.title)
end
end
end
- context "viewing deploy keys" do
+ context 'viewing deploy keys' do
let(:deploy_key) { create(:deploy_key) }
- context "when project has keys" do
+ context 'when project has keys' do
before do
create(:deploy_keys_project, project: project, deploy_key: deploy_key)
end
- it "shows deploy keys" do
+ it 'shows deploy keys' do
visit(project_deploy_keys_path(project))
- page.within(".deploy-keys") do
+ page.within('.deploy-keys') do
expect(page).to have_content(deploy_key.title)
end
end
end
- context "when another project has keys" do
+ context 'when the project has many deploy keys' do
+ before do
+ create(:deploy_keys_project, project: project, deploy_key: deploy_key)
+ create_list(:deploy_keys_project, 5, project: project)
+ end
+
+ it 'shows pagination' do
+ visit(project_deploy_keys_path(project))
+
+ page.within('.deploy-keys') do
+ expect(page).to have_link('Next')
+ expect(page).to have_link('2')
+ end
+ end
+ end
+
+ context 'when another project has keys' do
let(:another_project) { create(:project) }
before do
@@ -55,26 +71,25 @@ RSpec.describe "User interacts with deploy keys", :js, feature_category: :contin
another_project.add_maintainer(user)
end
- it "shows deploy keys" do
+ it 'shows deploy keys' do
visit(project_deploy_keys_path(project))
- page.within(".deploy-keys") do
- find('.js-deployKeys-tab-available_project_keys').click
+ page.within('.deploy-keys') do
+ click_link('Privately accessible deploy keys')
expect(page).to have_content(deploy_key.title)
- expect(find(".js-deployKeys-tab-available_project_keys .badge")).to have_content("1")
end
end
end
- context "when there are public deploy keys" do
+ context 'when there are public deploy keys' do
let!(:deploy_key) { create(:deploy_key, public: true) }
- it "shows public deploy keys" do
+ it 'shows public deploy keys' do
visit(project_deploy_keys_path(project))
- page.within(".deploy-keys") do
- find(".js-deployKeys-tab-public_keys").click
+ page.within('.deploy-keys') do
+ click_link('Publicly accessible deploy keys')
expect(page).to have_content(deploy_key.title)
end
@@ -82,43 +97,44 @@ RSpec.describe "User interacts with deploy keys", :js, feature_category: :contin
end
end
- context "adding deploy keys" do
+ context 'adding deploy keys' do
before do
visit(project_deploy_keys_path(project))
end
- it "adds new key" do
+ it 'adds new key' do
deploy_key_title = attributes_for(:key)[:title]
deploy_key_body = attributes_for(:key)[:key]
- click_button("Add new key")
- fill_in("deploy_key_title", with: deploy_key_title)
- fill_in("deploy_key_key", with: deploy_key_body)
+ click_button('Add new key')
+ fill_in('deploy_key_title', with: deploy_key_title)
+ fill_in('deploy_key_key', with: deploy_key_body)
- click_button("Add key")
+ click_button('Add key')
expect(page).to have_current_path(project_settings_repository_path(project), ignore_query: true)
- page.within(".deploy-keys") do
+ page.within('.deploy-keys') do
expect(page).to have_content(deploy_key_title)
end
end
- it "click on cancel hides the form" do
- click_button("Add new key")
+ it 'click on cancel hides the form' do
+ click_button('Add new key')
expect(page).to have_css('.gl-new-card-add-form')
- click_button("Cancel")
+ click_button('Cancel')
expect(page).not_to have_css('.gl-new-card-add-form')
end
end
- context "attaching existing keys" do
- context "from another project" do
+ context 'attaching existing keys' do
+ context 'from another project' do
let(:another_project) { create(:project) }
let(:deploy_key) { create(:deploy_key) }
+ let(:scope) { 'Privately accessible deploy keys' }
before do
create(:deploy_keys_project, project: another_project, deploy_key: deploy_key)
@@ -126,13 +142,14 @@ RSpec.describe "User interacts with deploy keys", :js, feature_category: :contin
another_project.add_maintainer(user)
end
- it_behaves_like "attaches a key"
+ it_behaves_like 'attaches a key'
end
- context "when keys are public" do
+ context 'when keys are public' do
let!(:deploy_key) { create(:deploy_key, public: true) }
+ let(:scope) { 'Publicly accessible deploy keys' }
- it_behaves_like "attaches a key"
+ it_behaves_like 'attaches a key'
end
end
end
diff --git a/spec/features/projects/show/clone_button_spec.rb b/spec/features/projects/show/clone_button_spec.rb
index e3964a37bcf..83e75101427 100644
--- a/spec/features/projects/show/clone_button_spec.rb
+++ b/spec/features/projects/show/clone_button_spec.rb
@@ -19,8 +19,10 @@ RSpec.describe 'Projects > Show > Clone button', feature_category: :groups_and_p
expect(page).to have_content project.name
end
- it 'sees clone button' do
+ it 'sees clone button', :js do
+ find_by_testid('clone-dropdown').click
expect(page).to have_content _('Clone')
+ expect(page).to be_axe_clean.within('.clone-options-dropdown')
end
end
diff --git a/spec/features/projects/show/redirects_spec.rb b/spec/features/projects/show/redirects_spec.rb
index ef326b92b98..a0424831973 100644
--- a/spec/features/projects/show/redirects_spec.rb
+++ b/spec/features/projects/show/redirects_spec.rb
@@ -14,7 +14,7 @@ RSpec.describe 'Projects > Show > Redirects', feature_category: :groups_and_proj
it 'shows public project page' do
visit project_path(public_project)
- page.within '.breadcrumbs .breadcrumb-item-text' do
+ page.within '.breadcrumbs .js-breadcrumb-item-text' do
expect(page).to have_content(public_project.name)
end
end
diff --git a/spec/features/projects/show/user_sees_git_instructions_spec.rb b/spec/features/projects/show/user_sees_git_instructions_spec.rb
index 4933b3f239c..40549beae9f 100644
--- a/spec/features/projects/show/user_sees_git_instructions_spec.rb
+++ b/spec/features/projects/show/user_sees_git_instructions_spec.rb
@@ -49,7 +49,7 @@ RSpec.describe 'Projects > Show > User sees Git instructions', feature_category:
let(:user_has_ssh_key) { false }
it 'shows details' do
- page.within('.breadcrumbs .breadcrumb-item-text') do
+ page.within('.breadcrumbs .js-breadcrumb-item-text') do
expect(page).to have_content(project.title)
end
diff --git a/spec/features/projects/user_creates_project_spec.rb b/spec/features/projects/user_creates_project_spec.rb
index a590d637801..96156f14cfc 100644
--- a/spec/features/projects/user_creates_project_spec.rb
+++ b/spec/features/projects/user_creates_project_spec.rb
@@ -3,6 +3,8 @@
require 'spec_helper'
RSpec.describe 'User creates a project', :js, feature_category: :groups_and_projects do
+ include ListboxHelpers
+
let(:user) { create(:user) }
before do
@@ -110,7 +112,7 @@ RSpec.describe 'User creates a project', :js, feature_category: :groups_and_proj
fill_in :project_path, with: 'a-subgroup-project'
click_on 'Pick a group or namespace'
- click_button subgroup.full_path
+ select_listbox_item subgroup.full_path
click_button('Create project')
diff --git a/spec/features/projects/user_sorts_projects_spec.rb b/spec/features/projects/user_sorts_projects_spec.rb
index b80caca5810..3576225a417 100644
--- a/spec/features/projects/user_sorts_projects_spec.rb
+++ b/spec/features/projects/user_sorts_projects_spec.rb
@@ -10,6 +10,10 @@ RSpec.describe 'User sorts projects and order persists', feature_category: :grou
let_it_be(:group_member) { create(:group_member, :maintainer, user: user, group: group) }
let_it_be(:project) { create(:project, :public, group: group) }
+ def find_dropdown_toggle
+ find('button[data-testid=base-dropdown-toggle]')
+ end
+
shared_examples_for "sort order persists across all views" do |project_paths_label, group_paths_label|
it "is set on the dashboard_projects_path" do
visit(dashboard_projects_path)
@@ -27,7 +31,7 @@ RSpec.describe 'User sorts projects and order persists', feature_category: :grou
visit(group_canonical_path(group))
within '[data-testid=group_sort_by_dropdown]' do
- expect(find('.gl-dropdown-toggle')).to have_content(group_paths_label)
+ expect(find_dropdown_toggle).to have_content(group_paths_label)
end
end
@@ -35,7 +39,7 @@ RSpec.describe 'User sorts projects and order persists', feature_category: :grou
visit(details_group_path(group))
within '[data-testid=group_sort_by_dropdown]' do
- expect(find('.gl-dropdown-toggle')).to have_content(group_paths_label)
+ expect(find_dropdown_toggle).to have_content(group_paths_label)
end
end
end
@@ -67,8 +71,8 @@ RSpec.describe 'User sorts projects and order persists', feature_category: :grou
sign_in(user)
visit(group_canonical_path(group))
within '[data-testid=group_sort_by_dropdown]' do
- find('button.gl-dropdown-toggle').click
- first(:button, 'Created').click
+ find_dropdown_toggle.click
+ find('li', text: 'Created').click
wait_for_requests
end
end
@@ -81,8 +85,8 @@ RSpec.describe 'User sorts projects and order persists', feature_category: :grou
sign_in(user)
visit(details_group_path(group))
within '[data-testid=group_sort_by_dropdown]' do
- find('button.gl-dropdown-toggle').click
- first(:button, 'Updated').click
+ find_dropdown_toggle.click
+ find('li', text: 'Updated').click
wait_for_requests
end
end
diff --git a/spec/features/projects/user_views_empty_project_spec.rb b/spec/features/projects/user_views_empty_project_spec.rb
index ed34b109d29..7dc3dd1da1e 100644
--- a/spec/features/projects/user_views_empty_project_spec.rb
+++ b/spec/features/projects/user_views_empty_project_spec.rb
@@ -43,7 +43,7 @@ RSpec.describe 'User views an empty project', feature_category: :groups_and_proj
context 'when admin mode is enabled' do
before do
sign_in(user)
- gitlab_enable_admin_mode_sign_in(user)
+ enable_admin_mode!(user)
end
it_behaves_like 'allowing push to default branch'
diff --git a/spec/features/projects/work_items/linked_work_items_spec.rb b/spec/features/projects/work_items/linked_work_items_spec.rb
index 49f723c3055..f9cdd7b78ab 100644
--- a/spec/features/projects/work_items/linked_work_items_spec.rb
+++ b/spec/features/projects/work_items/linked_work_items_spec.rb
@@ -9,8 +9,16 @@ RSpec.describe 'Work item linked items', :js, feature_category: :team_planning d
let_it_be(:work_item) { create(:work_item, project: project) }
let(:work_items_path) { project_work_item_path(project, work_item.iid) }
let_it_be(:task) { create(:work_item, :task, project: project, title: 'Task 1') }
+ let_it_be(:milestone) { create(:milestone, project: project, title: '1.0') }
+ let_it_be(:label) { create(:label, project: project) }
+ let_it_be(:objective) do
+ create(:work_item, :objective, project: project, milestone: milestone,
+ title: 'Objective 1', labels: [label])
+ end
context 'for signed in user' do
+ let(:token_input_selector) { '[data-testid="work-item-token-select-input"] .gl-token-selector-input' }
+
before_all do
project.add_developer(user)
end
@@ -62,7 +70,27 @@ RSpec.describe 'Work item linked items', :js, feature_category: :team_planning d
end
end
- it 'links a new item', :aggregate_failures do
+ it 'links a new item with work item text', :aggregate_failures do
+ verify_linked_item_added(task.title)
+ end
+
+ it 'links a new item with work item iid', :aggregate_failures do
+ verify_linked_item_added(task.iid)
+ end
+
+ it 'links a new item with work item wildcard iid', :aggregate_failures do
+ verify_linked_item_added("##{task.iid}")
+ end
+
+ it 'links a new item with work item reference', :aggregate_failures do
+ verify_linked_item_added(task.to_reference(full: true))
+ end
+
+ it 'links a new item with work item url', :aggregate_failures do
+ verify_linked_item_added("#{task.project.web_url}/-/work_items/#{task.iid}")
+ end
+
+ it 'removes a linked item', :aggregate_failures do
page.within('.work-item-relationships') do
click_button 'Add'
@@ -80,35 +108,64 @@ RSpec.describe 'Work item linked items', :js, feature_category: :team_planning d
end
expect(find('.work-items-list')).to have_content('Task 1')
+
+ find_by_testid('links-child').hover
+ find_by_testid('remove-work-item-link').click
+
+ wait_for_all_requests
+
+ expect(page).not_to have_content('Task 1')
end
end
- it 'removes a linked item', :aggregate_failures do
+ it 'passes axe automated accessibility testing for linked items empty state' do
+ expect(page).to be_axe_clean.within('.work-item-relationships').skipping :'link-in-text-block'
+ end
+
+ it 'passes axe automated accessibility testing for linked items' do
page.within('.work-item-relationships') do
click_button 'Add'
- within_testid('link-work-item-form') do
- expect(page).to have_button('Add', disabled: true)
- find_by_testid('work-item-token-select-input').set(task.title)
- wait_for_all_requests
- click_button task.title
+ find_by_testid('work-item-token-select-input').set(objective.title)
+ wait_for_all_requests
- expect(page).to have_button('Add', disabled: false)
+ form_selector = '.work-item-relationships'
+ expect(page).to be_axe_clean.within(form_selector).skipping :'aria-input-field-name',
+ :'aria-required-children'
- click_button 'Add'
+ within_testid('link-work-item-form') do
+ click_button objective.title
- wait_for_all_requests
+ click_button 'Add'
end
- expect(find('.work-items-list')).to have_content('Task 1')
+ wait_for_all_requests
- find_by_testid('links-child').hover
- find_by_testid('remove-work-item-link').click
+ expect(page).to be_axe_clean.within(form_selector)
+ end
+ end
+ end
+ def verify_linked_item_added(input)
+ page.within('.work-item-relationships') do
+ click_button 'Add'
+
+ within_testid('link-work-item-form') do
+ expect(page).to have_button('Add', disabled: true)
+
+ find(token_input_selector).set(input)
wait_for_all_requests
- expect(page).not_to have_content('Task 1')
+ click_button task.title
+
+ expect(page).to have_button('Add', disabled: false)
+
+ click_button 'Add'
+
+ wait_for_all_requests
end
+
+ expect(find('.work-items-list')).to have_content('Task 1')
end
end
end
diff --git a/spec/features/protected_branches_spec.rb b/spec/features/protected_branches_spec.rb
index 41105630204..c29323cba27 100644
--- a/spec/features/protected_branches_spec.rb
+++ b/spec/features/protected_branches_spec.rb
@@ -82,7 +82,7 @@ RSpec.describe 'Protected Branches', :js, feature_category: :source_code_managem
context 'logged in as admin' do
before do
sign_in(admin)
- gitlab_enable_admin_mode_sign_in(admin)
+ enable_admin_mode!(admin)
end
describe "explicit protected branches" do
diff --git a/spec/features/search/user_searches_for_code_spec.rb b/spec/features/search/user_searches_for_code_spec.rb
index 9329b1f2a5e..f893bf9b608 100644
--- a/spec/features/search/user_searches_for_code_spec.rb
+++ b/spec/features/search/user_searches_for_code_spec.rb
@@ -74,6 +74,20 @@ RSpec.describe 'User searches for code', :js, :disable_rate_limiter, feature_cat
it_behaves_like 'code highlight' do
subject { page }
end
+
+ context 'no search term' do
+ before do
+          submit_dashboard_search('')
+          # An empty term must still render the scope navigation; this mirrors
+          # the "no search term" specs added to the other search spec files.
+ end
+
+ it 'shows scopes' do
+ page.within('[data-testid="search-filter"]') do
+ expect(page).to have_selector('[data-testid="nav-item"]', minimum: 5)
+ end
+ end
+ end
end
it 'search multiple words with refs switching' do
diff --git a/spec/features/search/user_searches_for_comments_spec.rb b/spec/features/search/user_searches_for_comments_spec.rb
index f7af1797c71..92e9174295b 100644
--- a/spec/features/search/user_searches_for_comments_spec.rb
+++ b/spec/features/search/user_searches_for_comments_spec.rb
@@ -33,6 +33,14 @@ RSpec.describe 'User searches for comments', :js, :disable_rate_limiter, feature
end
end
+ it 'shows scopes when there is no search term' do
+ submit_dashboard_search('')
+
+ within_testid('search-filter') do
+ expect(page).to have_selector('[data-testid="nav-item"]', minimum: 5)
+ end
+ end
+
context 'when a comment is in a snippet' do
let(:snippet) { create(:project_snippet, :private, project: project, author: user, title: 'Some title') }
let(:comment) { create(:note, noteable: snippet, author: user, note: 'Supercalifragilisticexpialidocious', project: project) }
diff --git a/spec/features/search/user_searches_for_commits_spec.rb b/spec/features/search/user_searches_for_commits_spec.rb
index 724daf9277d..2510a7f9b20 100644
--- a/spec/features/search/user_searches_for_commits_spec.rb
+++ b/spec/features/search/user_searches_for_commits_spec.rb
@@ -19,6 +19,14 @@ RSpec.describe 'User searches for commits', :js, :clean_gitlab_redis_rate_limiti
let(:additional_params) { { project_id: project.id } }
end
+ it 'shows scopes when there is no search term' do
+ submit_dashboard_search('')
+
+ within_testid('search-filter') do
+ expect(page).to have_selector('[data-testid="nav-item"]', minimum: 5)
+ end
+ end
+
context 'when searching by SHA' do
it 'finds a commit and redirects to its page' do
submit_search(sha)
diff --git a/spec/features/search/user_searches_for_issues_spec.rb b/spec/features/search/user_searches_for_issues_spec.rb
index caddf8b698e..610b9e2f09d 100644
--- a/spec/features/search/user_searches_for_issues_spec.rb
+++ b/spec/features/search/user_searches_for_issues_spec.rb
@@ -103,6 +103,14 @@ RSpec.describe 'User searches for issues', :js, :clean_gitlab_redis_rate_limitin
end
end
end
+
+ it 'shows scopes when there is no search term' do
+ search_for_issue('')
+
+ page.within('[data-testid="search-filter"]') do
+ expect(page).to have_selector('[data-testid="nav-item"]', minimum: 5)
+ end
+ end
end
context 'when signed out' do
diff --git a/spec/features/search/user_searches_for_merge_requests_spec.rb b/spec/features/search/user_searches_for_merge_requests_spec.rb
index 7819e036f21..faefa55586d 100644
--- a/spec/features/search/user_searches_for_merge_requests_spec.rb
+++ b/spec/features/search/user_searches_for_merge_requests_spec.rb
@@ -23,6 +23,14 @@ RSpec.describe 'User searches for merge requests', :js, :clean_gitlab_redis_rate
include_examples 'top right search form'
include_examples 'search timeouts', 'merge_requests'
+ it 'shows scopes when there is no search term' do
+ submit_dashboard_search('')
+
+ page.within('[data-testid="search-filter"]') do
+ expect(page).to have_selector('[data-testid="nav-item"]', minimum: 5)
+ end
+ end
+
it 'finds a merge request' do
search_for_mr(merge_request1.title)
diff --git a/spec/features/search/user_searches_for_milestones_spec.rb b/spec/features/search/user_searches_for_milestones_spec.rb
index 334fe6f0170..2700785ac1a 100644
--- a/spec/features/search/user_searches_for_milestones_spec.rb
+++ b/spec/features/search/user_searches_for_milestones_spec.rb
@@ -20,6 +20,14 @@ RSpec.describe 'User searches for milestones', :js, :clean_gitlab_redis_rate_lim
include_examples 'top right search form'
include_examples 'search timeouts', 'milestones'
+ it 'shows scopes when there is no search term' do
+ submit_dashboard_search('')
+
+ page.within('[data-testid="search-filter"]') do
+ expect(page).to have_selector('[data-testid="nav-item"]', minimum: 5)
+ end
+ end
+
it 'finds a milestone' do
submit_dashboard_search(milestone1.title)
select_search_scope('Milestones')
diff --git a/spec/features/search/user_searches_for_projects_spec.rb b/spec/features/search/user_searches_for_projects_spec.rb
index ee5a3ec9806..8d94ed2a08e 100644
--- a/spec/features/search/user_searches_for_projects_spec.rb
+++ b/spec/features/search/user_searches_for_projects_spec.rb
@@ -14,6 +14,14 @@ RSpec.describe 'User searches for projects', :js, :disable_rate_limiter, feature
include_examples 'top right search form'
include_examples 'search timeouts', 'projects'
+ it 'shows scopes when there is no search term' do
+ submit_dashboard_search('')
+
+ within_testid('search-filter') do
+ expect(page).to have_selector('[data-testid="nav-item"]', minimum: 5)
+ end
+ end
+
it 'finds a project' do
visit(search_path)
submit_dashboard_search(project.name[0..3])
diff --git a/spec/features/search/user_searches_for_users_spec.rb b/spec/features/search/user_searches_for_users_spec.rb
index e0a07c5103d..2628b329d96 100644
--- a/spec/features/search/user_searches_for_users_spec.rb
+++ b/spec/features/search/user_searches_for_users_spec.rb
@@ -17,6 +17,14 @@ RSpec.describe 'User searches for users', :js, :clean_gitlab_redis_rate_limiting
end
end
+ it 'shows scopes when there is no search term' do
+ submit_dashboard_search('')
+
+ within_testid('search-filter') do
+ expect(page).to have_selector('[data-testid="nav-item"]', minimum: 5)
+ end
+ end
+
context 'when on the dashboard' do
it 'finds the user' do
visit dashboard_projects_path
diff --git a/spec/features/search/user_searches_for_wiki_pages_spec.rb b/spec/features/search/user_searches_for_wiki_pages_spec.rb
index 4de28a99c21..85cc3900fad 100644
--- a/spec/features/search/user_searches_for_wiki_pages_spec.rb
+++ b/spec/features/search/user_searches_for_wiki_pages_spec.rb
@@ -23,6 +23,14 @@ RSpec.describe 'User searches for wiki pages', :js, :clean_gitlab_redis_rate_lim
let(:additional_params) { { project_id: project.id } }
end
+ it 'shows scopes when there is no search term' do
+ submit_dashboard_search('')
+
+ page.within('[data-testid="search-filter"]') do
+ expect(page).to have_selector('[data-testid="nav-item"]', minimum: 5)
+ end
+ end
+
shared_examples 'search wiki blobs' do
it 'finds a page' do
find('[data-testid="project-filter"]').click
diff --git a/spec/features/tags/developer_views_tags_spec.rb b/spec/features/tags/developer_views_tags_spec.rb
index 154311853f8..bc2d33b3a02 100644
--- a/spec/features/tags/developer_views_tags_spec.rb
+++ b/spec/features/tags/developer_views_tags_spec.rb
@@ -41,11 +41,11 @@ RSpec.describe 'Developer views tags', feature_category: :source_code_management
end
it 'avoids a N+1 query in branches index' do
- control_count = ActiveRecord::QueryRecorder.new { visit project_tags_path(project) }.count
+ control = ActiveRecord::QueryRecorder.new { visit project_tags_path(project) }
%w[one two three four five].each { |tag| repository.add_tag(user, tag, 'master', 'foo') }
- expect { visit project_tags_path(project) }.not_to exceed_query_limit(control_count)
+ expect { visit project_tags_path(project) }.not_to exceed_query_limit(control)
end
it 'views the tags list page' do
diff --git a/spec/features/usage_stats_consent_spec.rb b/spec/features/usage_stats_consent_spec.rb
index ebf1cd9e143..5436018250d 100644
--- a/spec/features/usage_stats_consent_spec.rb
+++ b/spec/features/usage_stats_consent_spec.rb
@@ -19,7 +19,7 @@ RSpec.describe 'Usage stats consent', feature_category: :service_ping do
end
gitlab_sign_in(user)
- gitlab_enable_admin_mode_sign_in(user)
+ enable_admin_mode!(user)
end
shared_examples 'dismissible banner' do |button_text|
diff --git a/spec/features/user_settings/active_sessions_spec.rb b/spec/features/user_settings/active_sessions_spec.rb
index 5d1d4bc6490..bc0693d79e1 100644
--- a/spec/features/user_settings/active_sessions_spec.rb
+++ b/spec/features/user_settings/active_sessions_spec.rb
@@ -47,7 +47,7 @@ RSpec.describe 'Profile > Active Sessions', :clean_gitlab_redis_shared_state, fe
)
gitlab_sign_in(admin)
- gitlab_enable_admin_mode_sign_in(admin)
+ enable_admin_mode!(admin)
visit admin_user_path(user)
diff --git a/spec/features/users/login_spec.rb b/spec/features/users/login_spec.rb
index 87ed4ced684..c5ad7bca824 100644
--- a/spec/features/users/login_spec.rb
+++ b/spec/features/users/login_spec.rb
@@ -789,7 +789,7 @@ RSpec.describe 'Login', :clean_gitlab_redis_sessions, feature_category: :system_
visit new_user_session_path
image = find('img.js-portrait-logo-detection')
- expect(image['class']).to include('gl-h-9')
+ expect(image['class']).to include('gl-h-10')
end
it 'renders link to sign up path' do
diff --git a/spec/features/users/signup_spec.rb b/spec/features/users/signup_spec.rb
index bb08ed7d07d..6f13dfa08cc 100644
--- a/spec/features/users/signup_spec.rb
+++ b/spec/features/users/signup_spec.rb
@@ -3,54 +3,40 @@
require 'spec_helper'
RSpec.shared_examples 'Signup name validation' do |field, max_length, label|
- shared_examples 'signup validation' do
- before do
- visit new_user_registration_path
- end
-
- describe "#{field} validation" do
- it "does not show an error border if the user's fullname length is not longer than #{max_length} characters" do
- fill_in field, with: 'u' * max_length
-
- expect(find('.name')).not_to have_css '.gl-field-error-outline'
- end
+ before do
+ visit new_user_registration_path
+ end
- it 'shows an error border if the user\'s fullname contains an emoji' do
- simulate_input("##{field}", 'Ehsan 🦋')
+ describe "#{field} validation" do
+ it "does not show an error border if the user's fullname length is not longer than #{max_length} characters" do
+ fill_in field, with: 'u' * max_length
- expect(find('.name')).to have_css '.gl-field-error-outline'
- end
+ expect(find('.name')).not_to have_css '.gl-field-error-outline'
+ end
- it "shows an error border if the user\'s fullname is longer than #{max_length} characters" do
- fill_in field, with: 'n' * (max_length + 1)
+ it 'shows an error border if the user\'s fullname contains an emoji' do
+ simulate_input("##{field}", 'Ehsan 🦋')
- expect(find('.name')).to have_css '.gl-field-error-outline'
- end
+ expect(find('.name')).to have_css '.gl-field-error-outline'
+ end
- it "shows an error message if the user\'s #{label} is longer than #{max_length} characters" do
- fill_in field, with: 'n' * (max_length + 1)
+ it "shows an error border if the user\'s fullname is longer than #{max_length} characters" do
+ fill_in field, with: 'n' * (max_length + 1)
- expect(page).to have_content("#{label} is too long (maximum is #{max_length} characters).")
- end
+ expect(find('.name')).to have_css '.gl-field-error-outline'
+ end
- it 'shows an error message if the username contains emojis' do
- simulate_input("##{field}", 'Ehsan 🦋')
+ it "shows an error message if the user\'s #{label} is longer than #{max_length} characters" do
+ fill_in field, with: 'n' * (max_length + 1)
- expect(page).to have_content("Invalid input, please avoid emoji")
- end
+ expect(page).to have_content("#{label} is too long (maximum is #{max_length} characters).")
end
- end
- include_examples 'signup validation'
+ it 'shows an error message if the username contains emojis' do
+ simulate_input("##{field}", 'Ehsan 🦋')
- # Inline `shared_example 'signup validation'` again after feature flag
- # `restyle_login_page` was removed.
- context 'with feature flag restyle_login_page disabled' do
- before do
- stub_feature_flags(restyle_login_page: false)
+ expect(page).to have_content("Invalid input, please avoid emoji")
end
-
- include_examples 'signup validation'
end
end
@@ -66,344 +52,323 @@ RSpec.describe 'Signup', :js, feature_category: :user_management do
TEXT
end
- shared_examples 'signup process' do
+ before do
+ stub_feature_flags(arkose_labs_signup_challenge: false)
+ stub_application_setting(require_admin_approval_after_user_signup: false)
+ end
+
+ describe 'username validation' do
before do
- stub_feature_flags(arkose_labs_signup_challenge: false)
- stub_application_setting(require_admin_approval_after_user_signup: false)
+ visit new_user_registration_path
end
- describe 'username validation' do
- before do
- visit new_user_registration_path
- end
+ it 'does not show an error border if the username is available' do
+ fill_in 'new_user_username', with: 'new-user'
+ wait_for_requests
- it 'does not show an error border if the username is available' do
- fill_in 'new_user_username', with: 'new-user'
- wait_for_requests
+ expect(find('.username')).not_to have_css '.gl-field-error-outline'
+ end
- expect(find('.username')).not_to have_css '.gl-field-error-outline'
- end
+ it 'does not show an error border if the username contains dots (.)' do
+ simulate_input('#new_user_username', 'new.user.username')
+ wait_for_requests
- it 'does not show an error border if the username contains dots (.)' do
- simulate_input('#new_user_username', 'new.user.username')
- wait_for_requests
+ expect(find('.username')).not_to have_css '.gl-field-error-outline'
+ end
- expect(find('.username')).not_to have_css '.gl-field-error-outline'
- end
+ it 'does not show an error border if the username length is not longer than 255 characters' do
+ fill_in 'new_user_username', with: 'u' * 255
+ wait_for_requests
- it 'does not show an error border if the username length is not longer than 255 characters' do
- fill_in 'new_user_username', with: 'u' * 255
- wait_for_requests
+ expect(find('.username')).not_to have_css '.gl-field-error-outline'
+ end
- expect(find('.username')).not_to have_css '.gl-field-error-outline'
- end
+ it 'shows an error border if the username already exists' do
+ existing_user = create(:user)
- it 'shows an error border if the username already exists' do
- existing_user = create(:user)
+ fill_in 'new_user_username', with: existing_user.username
+ wait_for_requests
- fill_in 'new_user_username', with: existing_user.username
- wait_for_requests
+ expect(find('.username')).to have_css '.gl-field-error-outline'
+ end
- expect(find('.username')).to have_css '.gl-field-error-outline'
- end
+ it 'shows a success border if the username is available' do
+ fill_in 'new_user_username', with: 'new-user'
+ wait_for_requests
- it 'shows a success border if the username is available' do
- fill_in 'new_user_username', with: 'new-user'
- wait_for_requests
+ expect(find('.username')).to have_css '.gl-field-success-outline'
+ end
- expect(find('.username')).to have_css '.gl-field-success-outline'
- end
+ it 'shows an error border if the username contains special characters' do
+ fill_in 'new_user_username', with: 'new$user!username'
+ wait_for_requests
- it 'shows an error border if the username contains special characters' do
- fill_in 'new_user_username', with: 'new$user!username'
- wait_for_requests
+ expect(find('.username')).to have_css '.gl-field-error-outline'
+ end
- expect(find('.username')).to have_css '.gl-field-error-outline'
- end
+ it 'shows an error border if the username is longer than 255 characters' do
+ fill_in 'new_user_username', with: 'u' * 256
+ wait_for_requests
- it 'shows an error border if the username is longer than 255 characters' do
- fill_in 'new_user_username', with: 'u' * 256
- wait_for_requests
+ expect(find('.username')).to have_css '.gl-field-error-outline'
+ end
- expect(find('.username')).to have_css '.gl-field-error-outline'
- end
+ it 'shows an error message if the username is longer than 255 characters' do
+ fill_in 'new_user_username', with: 'u' * 256
+ wait_for_requests
- it 'shows an error message if the username is longer than 255 characters' do
- fill_in 'new_user_username', with: 'u' * 256
- wait_for_requests
+ expect(page).to have_content("Username is too long (maximum is 255 characters).")
+ end
- expect(page).to have_content("Username is too long (maximum is 255 characters).")
- end
+ it 'shows an error message if the username is less than 2 characters' do
+ fill_in 'new_user_username', with: 'u'
+ wait_for_requests
- it 'shows an error message if the username is less than 2 characters' do
- fill_in 'new_user_username', with: 'u'
- wait_for_requests
+ expect(page).to have_content("Username is too short (minimum is 2 characters).")
+ end
- expect(page).to have_content("Username is too short (minimum is 2 characters).")
- end
+ it 'shows an error message on submit if the username contains special characters' do
+ fill_in 'new_user_username', with: 'new$user!username'
+ wait_for_requests
- it 'shows an error message on submit if the username contains special characters' do
- fill_in 'new_user_username', with: 'new$user!username'
- wait_for_requests
+ click_button "Register"
- click_button "Register"
+ expect(page).to have_content("Please create a username with only alphanumeric characters.")
+ end
- expect(page).to have_content("Please create a username with only alphanumeric characters.")
- end
+ it 'shows an error border if the username contains emojis' do
+ simulate_input('#new_user_username', 'ehsan😀')
- it 'shows an error border if the username contains emojis' do
- simulate_input('#new_user_username', 'ehsan😀')
+ expect(find('.username')).to have_css '.gl-field-error-outline'
+ end
- expect(find('.username')).to have_css '.gl-field-error-outline'
- end
+ it 'shows an error message if the username contains emojis' do
+ simulate_input('#new_user_username', 'ehsan😀')
- it 'shows an error message if the username contains emojis' do
- simulate_input('#new_user_username', 'ehsan😀')
+ expect(page).to have_content("Invalid input, please avoid emoji")
+ end
- expect(page).to have_content("Invalid input, please avoid emoji")
- end
+ it 'shows a pending message if the username availability is being fetched',
+ quarantine: 'https://gitlab.com/gitlab-org/gitlab/-/issues/31484' do
+ fill_in 'new_user_username', with: 'new-user'
- it 'shows a pending message if the username availability is being fetched',
- quarantine: 'https://gitlab.com/gitlab-org/gitlab/-/issues/31484' do
- fill_in 'new_user_username', with: 'new-user'
+ expect(find('.username > .validation-pending')).not_to have_css '.hide'
+ end
- expect(find('.username > .validation-pending')).not_to have_css '.hide'
- end
+ it 'shows a success message if the username is available' do
+ fill_in 'new_user_username', with: 'new-user'
+ wait_for_requests
- it 'shows a success message if the username is available' do
- fill_in 'new_user_username', with: 'new-user'
- wait_for_requests
+ expect(find('.username > .validation-success')).not_to have_css '.hide'
+ end
- expect(find('.username > .validation-success')).not_to have_css '.hide'
- end
+ it 'shows an error message if the username is unavailable' do
+ existing_user = create(:user)
- it 'shows an error message if the username is unavailable' do
- existing_user = create(:user)
+ fill_in 'new_user_username', with: existing_user.username
+ wait_for_requests
- fill_in 'new_user_username', with: existing_user.username
- wait_for_requests
+ expect(find('.username > .validation-error')).not_to have_css '.hide'
+ end
- expect(find('.username > .validation-error')).not_to have_css '.hide'
- end
+ it 'shows a success message if the username is corrected and then available' do
+ fill_in 'new_user_username', with: 'new-user$'
+ wait_for_requests
+ fill_in 'new_user_username', with: 'new-user'
+ wait_for_requests
- it 'shows a success message if the username is corrected and then available' do
- fill_in 'new_user_username', with: 'new-user$'
- wait_for_requests
- fill_in 'new_user_username', with: 'new-user'
- wait_for_requests
+ expect(page).to have_content("Username is available.")
+ end
+ end
- expect(page).to have_content("Username is available.")
+ context 'with no errors' do
+ context 'when sending confirmation email' do
+ before do
+ stub_application_setting_enum('email_confirmation_setting', 'hard')
end
- end
- context 'with no errors' do
- context 'when sending confirmation email' do
+ context 'when email confirmation setting is not `soft`' do
before do
- stub_application_setting_enum('email_confirmation_setting', 'hard')
+ stub_feature_flags(identity_verification: false)
end
- context 'when email confirmation setting is not `soft`' do
- before do
- stub_feature_flags(identity_verification: false)
- end
-
- it 'creates the user account and sends a confirmation email, and pre-fills email address after confirming' do
- visit new_user_registration_path
-
- expect { fill_in_sign_up_form(new_user) }.to change { User.count }.by(1)
- expect(page).to have_current_path users_almost_there_path, ignore_query: true
- expect(page).to have_content("Please check your email (#{new_user.email}) to confirm your account")
-
- confirm_email(new_user)
-
- expect(find_field('Username or primary email').value).to eq(new_user.email)
- end
- end
+ it 'creates the user account and sends a confirmation email, and pre-fills email address after confirming' do
+ visit new_user_registration_path
- context 'when email confirmation setting is `soft`' do
- before do
- stub_application_setting_enum('email_confirmation_setting', 'soft')
- end
+ expect { fill_in_sign_up_form(new_user) }.to change { User.count }.by(1)
+ expect(page).to have_current_path users_almost_there_path, ignore_query: true
+ expect(page).to have_content("Please check your email (#{new_user.email}) to confirm your account")
- it 'creates the user account and sends a confirmation email' do
- visit new_user_registration_path
+ confirm_email(new_user)
- expect { fill_in_sign_up_form(new_user) }.to change { User.count }.by(1)
- expect(page).to have_current_path dashboard_projects_path
- end
+ expect(find_field('Username or primary email').value).to eq(new_user.email)
end
end
- context "when not sending confirmation email" do
+ context 'when email confirmation setting is `soft`' do
before do
- stub_application_setting_enum('email_confirmation_setting', 'off')
+ stub_application_setting_enum('email_confirmation_setting', 'soft')
end
- it 'creates the user account and goes to dashboard' do
+ it 'creates the user account and sends a confirmation email' do
visit new_user_registration_path
- fill_in_sign_up_form(new_user)
-
+ expect { fill_in_sign_up_form(new_user) }.to change { User.count }.by(1)
expect(page).to have_current_path dashboard_projects_path
end
end
+ end
- context 'with required admin approval enabled' do
- before do
- stub_application_setting(require_admin_approval_after_user_signup: true)
- end
+ context "when not sending confirmation email" do
+ before do
+ stub_application_setting_enum('email_confirmation_setting', 'off')
+ end
- it 'creates the user but does not sign them in' do
- visit new_user_registration_path
+ it 'creates the user account and goes to dashboard' do
+ visit new_user_registration_path
- expect { fill_in_sign_up_form(new_user) }.to change { User.count }.by(1)
- expect(page).to have_current_path new_user_session_path, ignore_query: true
- expect(page).to have_content(<<~TEXT.squish)
+ fill_in_sign_up_form(new_user)
+
+ expect(page).to have_current_path dashboard_projects_path
+ end
+ end
+
+ context 'with required admin approval enabled' do
+ before do
+ stub_application_setting(require_admin_approval_after_user_signup: true)
+ end
+
+ it 'creates the user but does not sign them in' do
+ visit new_user_registration_path
+
+ expect { fill_in_sign_up_form(new_user) }.to change { User.count }.by(1)
+ expect(page).to have_current_path new_user_session_path, ignore_query: true
+ expect(page).to have_content(<<~TEXT.squish)
You have signed up successfully. However, we could not sign you in
because your account is awaiting approval from your GitLab administrator
- TEXT
- end
+ TEXT
end
end
+ end
- context 'with errors' do
- it "displays the errors" do
- create(:user, email: new_user.email)
- visit new_user_registration_path
+ context 'with errors' do
+ it "displays the errors" do
+ create(:user, email: new_user.email)
+ visit new_user_registration_path
- fill_in_sign_up_form(new_user)
+ fill_in_sign_up_form(new_user)
- expect(page).to have_current_path user_registration_path, ignore_query: true
- expect(page).to have_content("error prohibited this user from being saved")
- expect(page).to have_content("Email has already been taken")
- end
+ expect(page).to have_current_path user_registration_path, ignore_query: true
+ expect(page).to have_content("error prohibited this user from being saved")
+ expect(page).to have_content("Email has already been taken")
+ end
- it 'redisplays all fields except password' do
- create(:user, email: new_user.email)
- visit new_user_registration_path
+ it 'redisplays all fields except password' do
+ create(:user, email: new_user.email)
+ visit new_user_registration_path
- fill_in_sign_up_form(new_user)
+ fill_in_sign_up_form(new_user)
- expect(page).to have_current_path user_registration_path, ignore_query: true
- expect(page.body).not_to match(/#{new_user.password}/)
+ expect(page).to have_current_path user_registration_path, ignore_query: true
+ expect(page.body).not_to match(/#{new_user.password}/)
- expect(find_field('First name').value).to eq(new_user.first_name)
- expect(find_field('Last name').value).to eq(new_user.last_name)
- expect(find_field('Username').value).to eq(new_user.username)
- expect(find_field('Email').value).to eq(new_user.email)
- end
+ expect(find_field('First name').value).to eq(new_user.first_name)
+ expect(find_field('Last name').value).to eq(new_user.last_name)
+ expect(find_field('Username').value).to eq(new_user.username)
+ expect(find_field('Email').value).to eq(new_user.email)
end
+ end
- context 'when terms are enforced' do
- before do
- enforce_terms
- end
+ context 'when terms are enforced' do
+ before do
+ enforce_terms
+ end
- it 'renders text that the user confirms terms by signing in' do
- visit new_user_registration_path
- expect(page).to have_content(terms_text)
+ it 'renders text that the user confirms terms by signing in' do
+ visit new_user_registration_path
+ expect(page).to have_content(terms_text)
- fill_in_sign_up_form(new_user)
+ fill_in_sign_up_form(new_user)
- expect(page).to have_current_path(dashboard_projects_path)
- end
+ expect(page).to have_current_path(dashboard_projects_path)
+ end
- it_behaves_like 'Signup name validation', 'new_user_first_name', 127, 'First name'
- it_behaves_like 'Signup name validation', 'new_user_last_name', 127, 'Last name'
+ it_behaves_like 'Signup name validation', 'new_user_first_name', 127, 'First name'
+ it_behaves_like 'Signup name validation', 'new_user_last_name', 127, 'Last name'
+ end
+
+ context 'when reCAPTCHA and invisible captcha are enabled' do
+ before do
+ stub_application_setting(invisible_captcha_enabled: true)
+ stub_application_setting(recaptcha_enabled: true)
+ allow_next_instance_of(RegistrationsController) do |instance|
+ allow(instance).to receive(:verify_recaptcha).and_return(true)
+ end
end
- context 'when reCAPTCHA and invisible captcha are enabled' do
+ context 'when reCAPTCHA detects malicious behaviour' do
before do
- stub_application_setting(invisible_captcha_enabled: true)
- stub_application_setting(recaptcha_enabled: true)
allow_next_instance_of(RegistrationsController) do |instance|
- allow(instance).to receive(:verify_recaptcha).and_return(true)
+ allow(instance).to receive(:verify_recaptcha).and_return(false)
end
end
- context 'when reCAPTCHA detects malicious behaviour' do
- before do
- allow_next_instance_of(RegistrationsController) do |instance|
- allow(instance).to receive(:verify_recaptcha).and_return(false)
- end
- end
+ it 'prevents from signing up' do
+ visit new_user_registration_path
+ expect { fill_in_sign_up_form(new_user) }.not_to change { User.count }
+ expect(page).to have_content(_('There was an error with the reCAPTCHA. Please solve the reCAPTCHA again.'))
+ expect(page).to have_content(
+ "Minimum length is #{Gitlab::CurrentSettings.minimum_password_length} characters")
+ end
+ end
+
+ context 'when invisible captcha detects malicious behaviour' do
+ context 'with form submitted quicker than timestamp_threshold', :freeze_time do
it 'prevents from signing up' do
visit new_user_registration_path
expect { fill_in_sign_up_form(new_user) }.not_to change { User.count }
- expect(page).to have_content(_('There was an error with the reCAPTCHA. Please solve the reCAPTCHA again.'))
- expect(page).to have_content(
- "Minimum length is #{Gitlab::CurrentSettings.minimum_password_length} characters")
+ expect(page).to have_content('That was a bit too quick! Please resubmit.')
end
end
- context 'when invisible captcha detects malicious behaviour' do
- context 'with form submitted quicker than timestamp_threshold', :freeze_time do
- it 'prevents from signing up' do
- visit new_user_registration_path
-
- expect { fill_in_sign_up_form(new_user) }.not_to change { User.count }
- expect(page).to have_content('That was a bit too quick! Please resubmit.')
- end
- end
-
- context 'with honeypot field is filled' do
- it 'prevents from signing up' do
- visit new_user_registration_path
+ context 'with honeypot field is filled' do
+ it 'prevents from signing up' do
+ visit new_user_registration_path
- find_field('If you are human, please ignore this field.',
- visible: false).execute_script("this.value = 'bot'")
+ find_field('If you are human, please ignore this field.',
+ visible: false).execute_script("this.value = 'bot'")
- expect { fill_in_sign_up_form(new_user) }.not_to change { User.count }
- end
+ expect { fill_in_sign_up_form(new_user) }.not_to change { User.count }
end
end
end
+ end
- it 'allows visiting of a page after initial registration' do
- visit new_user_registration_path
-
- fill_in_sign_up_form(new_user)
+ it 'allows visiting of a page after initial registration' do
+ visit new_user_registration_path
- visit new_project_path
+ fill_in_sign_up_form(new_user)
- expect(page).to have_current_path(new_project_path)
- end
+ visit new_project_path
- it 'does not redisplay the password' do
- create(:user, email: new_user.email)
- visit new_user_registration_path
+ expect(page).to have_current_path(new_project_path)
+ end
- fill_in_sign_up_form(new_user)
+ it 'does not redisplay the password' do
+ create(:user, email: new_user.email)
+ visit new_user_registration_path
- expect(page).to have_current_path user_registration_path, ignore_query: true
- expect(page.body).not_to match(/#{new_user.password}/)
- end
+ fill_in_sign_up_form(new_user)
- context 'with invalid email' do
- it_behaves_like 'user email validation' do
- let(:path) { new_user_registration_path }
- end
- end
+ expect(page).to have_current_path user_registration_path, ignore_query: true
+ expect(page.body).not_to match(/#{new_user.password}/)
end
- include_examples 'signup process'
-
- # Inline `shared_example 'signup process'` again after feature flag
- # `restyle_login_page` was removed.
- context 'with feature flag restyle_login_page disabled' do
- let(:terms_text) do
- <<~TEXT.squish
- By clicking Register, I agree that I have read and accepted the Terms of
- Use and Privacy Statement
- TEXT
+ context 'with invalid email' do
+ it_behaves_like 'user email validation' do
+ let(:path) { new_user_registration_path }
end
-
- before do
- stub_feature_flags(restyle_login_page: false)
- end
-
- include_examples 'signup process'
end
end
diff --git a/spec/finders/ci/catalog/resources/versions_finder_spec.rb b/spec/finders/ci/catalog/resources/versions_finder_spec.rb
index b541b84f198..dbde77101ee 100644
--- a/spec/finders/ci/catalog/resources/versions_finder_spec.rb
+++ b/spec/finders/ci/catalog/resources/versions_finder_spec.rb
@@ -5,9 +5,10 @@ require 'spec_helper'
RSpec.describe Ci::Catalog::Resources::VersionsFinder, feature_category: :pipeline_composition do
include_context 'when there are catalog resources with versions'
+ let(:name) { nil }
let(:sort) { nil }
let(:latest) { nil }
- let(:params) { { sort: sort, latest: latest }.compact }
+ let(:params) { { name: name, sort: sort, latest: latest }.compact }
subject(:execute) { described_class.new([resource1, resource2], current_user, params).execute }
@@ -18,7 +19,7 @@ RSpec.describe Ci::Catalog::Resources::VersionsFinder, feature_category: :pipeli
new_user = create(:user)
expect do
- described_class.new([resource1, resource2, resource3], new_user, params).execute
+ described_class.new([resource1, resource2], new_user, params).execute
end.not_to exceed_query_limit(control_count)
end
@@ -37,6 +38,23 @@ RSpec.describe Ci::Catalog::Resources::VersionsFinder, feature_category: :pipeli
expect(execute).to match_array([v1_0, v1_1])
end
+ context 'with name parameter' do
+ let(:name) { 'v1.0' }
+
+ it 'returns the version that matches the name' do
+ expect(execute.count).to eq(1)
+ expect(execute.first.name).to eq('v1.0')
+ end
+
+ context 'when no version matches the name' do
+ let(:name) { 'does_not_exist' }
+
+ it 'returns empty response' do
+ is_expected.to be_empty
+ end
+ end
+ end
+
context 'with sort parameter' do
it 'returns versions ordered by released_at descending by default' do
expect(execute).to eq([v1_1, v1_0])
diff --git a/spec/finders/ci/runner_jobs_finder_spec.rb b/spec/finders/ci/runner_jobs_finder_spec.rb
index 755b21ec08f..66cdde756be 100644
--- a/spec/finders/ci/runner_jobs_finder_spec.rb
+++ b/spec/finders/ci/runner_jobs_finder_spec.rb
@@ -2,26 +2,28 @@
require 'spec_helper'
-RSpec.describe Ci::RunnerJobsFinder do
- let(:project) { create(:project) }
- let(:runner) { create(:ci_runner, :instance) }
- let(:user) { create(:user) }
+RSpec.describe Ci::RunnerJobsFinder, feature_category: :fleet_visibility do
+ let_it_be(:project) { create(:project) }
+ let_it_be(:runner) { create(:ci_runner, :instance) }
+ let_it_be(:user) { create(:user) }
+ let_it_be(:runner_manager) { create(:ci_runner_machine, runner: runner) }
+ let_it_be(:jobs) { create_list(:ci_build, 5, runner_manager: runner_manager, project: project) }
+
let(:params) { {} }
- subject { described_class.new(runner, user, params).execute }
+ subject(:returned_jobs) { described_class.new(runner, user, params).execute }
- before do
+ before_all do
project.add_developer(user)
end
describe '#execute' do
context 'when params is empty' do
- let!(:job) { create(:ci_build, runner: runner, project: project) }
let!(:job1) { create(:ci_build, project: project) }
it 'returns all jobs assigned to Runner' do
- is_expected.to match_array(job)
- is_expected.not_to match_array(job1)
+ is_expected.to match_array(jobs)
+ is_expected.not_to include(job1)
end
end
@@ -36,35 +38,34 @@ RSpec.describe Ci::RunnerJobsFinder do
end
end
- context 'when the user has permission to read all resources' do
- let(:user) { create(:user, :admin) }
+ context 'when the user is admin', :enable_admin_mode do
+ let_it_be(:user) { create(:user, :admin) }
- it 'returns all the jobs assigned to a runner' do
- jobs = create_list(:ci_build, 5, runner: runner, project: project)
+ it { is_expected.to match_array(jobs) }
+ end
- is_expected.to match_array(jobs)
+ context 'when user is developer' do
+ before_all do
+ project.add_developer(user)
end
+
+ it { is_expected.to match_array(jobs) }
end
context 'when the user has different access levels in different projects' do
- it 'returns only the jobs the user has permission to see' do
- guest_project = create(:project)
- reporter_project = create(:project)
-
- _guest_jobs = create_list(:ci_build, 2, runner: runner, project: guest_project)
- reporter_jobs = create_list(:ci_build, 3, runner: runner, project: reporter_project)
-
- guest_project.add_guest(user)
- reporter_project.add_reporter(user)
-
- is_expected.to match_array(reporter_jobs)
+ let_it_be(:guest_project) { create(:project).tap { |p| p.add_guest(user) } }
+ let_it_be(:guest_jobs) { create_list(:ci_build, 2, runner: runner, project: guest_project) }
+ let_it_be(:reporter_project) { create(:project).tap { |p| p.add_reporter(user) } }
+ let_it_be(:reporter_jobs) { create_list(:ci_build, 3, runner: runner, project: reporter_project) }
+
+ it 'returns only the jobs the user has permission to see', :aggregate_failures do
+ is_expected.to include(*reporter_jobs)
+ is_expected.not_to include(*guest_jobs)
end
end
context 'when the user has reporter access level or greater' do
- it 'returns jobs assigned to the Runner that the user has accesss to' do
- jobs = create_list(:ci_build, 3, runner: runner, project: project)
-
+ it 'returns jobs assigned to the Runner that the user has access to' do
is_expected.to match_array(jobs)
end
end
@@ -73,24 +74,38 @@ RSpec.describe Ci::RunnerJobsFinder do
Ci::HasStatus::AVAILABLE_STATUSES.each do |target_status|
context "when status is #{target_status}" do
let(:params) { { status: target_status } }
+ let(:exception_status) { (Ci::HasStatus::AVAILABLE_STATUSES - [target_status]).first }
let!(:job) { create(:ci_build, runner: runner, project: project, status: target_status) }
+ let!(:other_job) { create(:ci_build, runner: runner, project: project, status: exception_status) }
- before do
- exception_status = Ci::HasStatus::AVAILABLE_STATUSES - [target_status]
- create(:ci_build, runner: runner, project: project, status: exception_status.first)
- end
-
- it 'returns matched job' do
- is_expected.to eq([job])
+ it 'returns matched job', :aggregate_failures do
+ is_expected.to include(job)
+ is_expected.not_to include(other_job)
end
end
end
end
+ context 'when system_id is specified' do
+ let_it_be(:runner_manager2) { create(:ci_runner_machine, runner: runner) }
+ let_it_be(:job2) { create(:ci_build, runner_manager: runner_manager2, project: project) }
+
+ let(:params) { { system_id: runner_manager.system_xid } }
+
+ it 'returns jobs from the specified system' do
+ expect(returned_jobs).to match_array(jobs)
+ end
+
+ context 'when specified system_id does not exist' do
+ let(:params) { { system_id: 'unknown_system' } }
+
+ it { is_expected.to be_empty }
+ end
+ end
+
context 'when order_by and sort are specified' do
context 'when order_by id and sort is asc' do
let(:params) { { order_by: 'id', sort: 'asc' } }
- let!(:jobs) { create_list(:ci_build, 2, runner: runner, project: project, user: create(:user)) }
it 'sorts as id: :asc' do
is_expected.to eq(jobs.sort_by(&:id))
@@ -101,7 +116,6 @@ RSpec.describe Ci::RunnerJobsFinder do
context 'when order_by is specified and sort is not specified' do
context 'when order_by id and sort is not specified' do
let(:params) { { order_by: 'id' } }
- let!(:jobs) { create_list(:ci_build, 2, runner: runner, project: project, user: create(:user)) }
it 'sorts as id: :desc' do
is_expected.to eq(jobs.sort_by(&:id).reverse)
diff --git a/spec/finders/ci/runner_managers_finder_spec.rb b/spec/finders/ci/runner_managers_finder_spec.rb
new file mode 100644
index 00000000000..c62c05d415e
--- /dev/null
+++ b/spec/finders/ci/runner_managers_finder_spec.rb
@@ -0,0 +1,77 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Ci::RunnerManagersFinder, '#execute', feature_category: :fleet_visibility do
+ subject(:runner_managers) { described_class.new(runner: runner, params: params).execute }
+
+ let_it_be(:runner) { create(:ci_runner) }
+
+ describe 'filter by status' do
+ before_all do
+ freeze_time
+ end
+
+ after :all do
+ unfreeze_time
+ end
+
+ let_it_be(:offline_runner_manager) { create(:ci_runner_machine, runner: runner, contacted_at: 2.hours.ago) }
+ let_it_be(:online_runner_manager) { create(:ci_runner_machine, runner: runner, contacted_at: 1.second.ago) }
+ let_it_be(:never_contacted_runner_manager) { create(:ci_runner_machine, runner: runner, contacted_at: nil) }
+ let_it_be(:stale_runner_manager) do
+ create(
+ :ci_runner_machine,
+ runner: runner,
+ created_at: Ci::RunnerManager.stale_deadline - 1.second,
+ contacted_at: nil
+ )
+ end
+
+ let(:params) { { status: status } }
+
+ context 'for offline' do
+ let(:status) { :offline }
+
+ it { is_expected.to contain_exactly(offline_runner_manager) }
+ end
+
+ context 'for online' do
+ let(:status) { :online }
+
+ it { is_expected.to contain_exactly(online_runner_manager) }
+ end
+
+ context 'for stale' do
+ let(:status) { :stale }
+
+ it { is_expected.to contain_exactly(stale_runner_manager) }
+ end
+
+ context 'for never_contacted' do
+ let(:status) { :never_contacted }
+
+ it { is_expected.to contain_exactly(never_contacted_runner_manager, stale_runner_manager) }
+ end
+
+ context 'for invalid status' do
+ let(:status) { :invalid_status }
+
+ it 'returns all runner managers' do
+ expect(runner_managers).to contain_exactly(
+ offline_runner_manager, online_runner_manager, never_contacted_runner_manager, stale_runner_manager
+ )
+ end
+ end
+ end
+
+ context 'without any filters' do
+ let(:params) { {} }
+
+ let_it_be(:runner_manager) { create(:ci_runner_machine, runner: runner) }
+
+ it 'returns all runner managers' do
+ expect(runner_managers).to contain_exactly(runner_manager)
+ end
+ end
+end
diff --git a/spec/finders/ci/runners_finder_spec.rb b/spec/finders/ci/runners_finder_spec.rb
index fbe44244dba..7e9ef2139c9 100644
--- a/spec/finders/ci/runners_finder_spec.rb
+++ b/spec/finders/ci/runners_finder_spec.rb
@@ -112,7 +112,7 @@ RSpec.describe Ci::RunnersFinder, feature_category: :fleet_visibility do
context 'by status' do
Ci::Runner::AVAILABLE_STATUSES.each do |status|
it "calls the corresponding :#{status} scope on Ci::Runner" do
- expect(Ci::Runner).to receive(status.to_sym).and_call_original
+ expect(Ci::Runner).to receive(:with_status).with(status).and_call_original
described_class.new(current_user: admin, params: { status_status: status }).execute
end
@@ -134,10 +134,14 @@ RSpec.describe Ci::RunnersFinder, feature_category: :fleet_visibility do
end
context 'by runner type' do
- it 'calls the corresponding scope on Ci::Runner' do
- expect(Ci::Runner).to receive(:project_type).and_call_original
+ Ci::Runner.runner_types.each_key do |runner_type|
+ context "when runner type is #{runner_type}" do
+ it "calls the corresponding scope on Ci::Runner" do
+ expect(Ci::Runner).to receive(:with_runner_type).with(runner_type).and_call_original
- described_class.new(current_user: admin, params: { type_type: 'project_type' }).execute
+ described_class.new(current_user: admin, params: { type_type: runner_type }).execute
+ end
+ end
end
end
@@ -656,12 +660,13 @@ RSpec.describe Ci::RunnersFinder, feature_category: :fleet_visibility do
end
context 'by creator' do
- let_it_be(:runner_creator_1) { create(:ci_runner, creator_id: '1') }
+ let_it_be(:creator) { create(:user) }
+ let_it_be(:runner_with_creator) { create(:ci_runner, creator: creator) }
- let(:extra_params) { { creator_id: '1' } }
+ let(:extra_params) { { creator_id: creator.id } }
it 'returns correct runners' do
- is_expected.to contain_exactly(runner_creator_1)
+ is_expected.to contain_exactly(runner_with_creator)
end
end
diff --git a/spec/finders/deployments_finder_spec.rb b/spec/finders/deployments_finder_spec.rb
index 807a7ca8e26..f45042d9c36 100644
--- a/spec/finders/deployments_finder_spec.rb
+++ b/spec/finders/deployments_finder_spec.rb
@@ -343,14 +343,14 @@ RSpec.describe DeploymentsFinder, feature_category: :deployment_management do
it 'avoids N+1 queries' do
execute_queries = -> { described_class.new({ group: group }).execute.first }
- control_count = ActiveRecord::QueryRecorder.new { execute_queries }.count
+ control = ActiveRecord::QueryRecorder.new { execute_queries }
new_project = create(:project, :repository, group: group)
new_env = create(:environment, project: new_project, name: "production")
create_list(:deployment, 2, status: :success, project: new_project, environment: new_env)
group.reload
- expect { execute_queries }.not_to exceed_query_limit(control_count)
+ expect { execute_queries }.not_to exceed_query_limit(control)
end
end
end
diff --git a/spec/finders/members_finder_spec.rb b/spec/finders/members_finder_spec.rb
index e0fc494d033..9c8b8658538 100644
--- a/spec/finders/members_finder_spec.rb
+++ b/spec/finders/members_finder_spec.rb
@@ -166,12 +166,12 @@ RSpec.describe MembersFinder, feature_category: :groups_and_projects do
# warm up
# We need this warm up because there is 1 query being fired in one of the policies,
- # and policy results are cached. Without a warm up, the control_count will be X queries
+ # and policy results are cached. Without a warm up, the control.count will be X queries
# but the test phase will only fire X-1 queries, due the fact that the
# result of the policy is already available in the cache.
described_class.new(project, user2).execute.map(&:user)
- control_count = ActiveRecord::QueryRecorder.new do
+ control = ActiveRecord::QueryRecorder.new do
described_class.new(project, user2).execute.map(&:user)
end
@@ -179,7 +179,7 @@ RSpec.describe MembersFinder, feature_category: :groups_and_projects do
expect do
described_class.new(project, user2).execute.map(&:user)
- end.to issue_same_number_of_queries_as(control_count)
+ end.to issue_same_number_of_queries_as(control)
end
context 'with :shared_into_ancestors' do
diff --git a/spec/finders/packages/terraform_module/packages_finder_spec.rb b/spec/finders/packages/terraform_module/packages_finder_spec.rb
new file mode 100644
index 00000000000..4550b3be055
--- /dev/null
+++ b/spec/finders/packages/terraform_module/packages_finder_spec.rb
@@ -0,0 +1,65 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe ::Packages::TerraformModule::PackagesFinder, feature_category: :package_registry do
+ let_it_be(:project) { create(:project) }
+ let_it_be(:package1) { create(:terraform_module_package, project: project, version: '1.0.0') }
+ let_it_be(:package2) { create(:terraform_module_package, project: project, version: '2.0.0', name: package1.name) }
+
+ let(:params) { {} }
+
+ subject { described_class.new(project, params).execute }
+
+ describe '#execute' do
+ context 'without project' do
+ let(:project) { nil }
+
+ it { is_expected.to be_empty }
+ end
+
+ context 'without package_name' do
+ let(:params) { { package_name: nil } }
+
+ it { is_expected.to be_empty }
+ end
+
+ context 'with package_name' do
+ let(:params) { { package_name: package1.name } }
+
+ it 'returns packages with the given name ordered by version desc' do
+ is_expected.to eq([package2, package1])
+ end
+
+ context 'with package_version' do
+ let(:params) { { package_name: package1.name, package_version: package1.version } }
+
+ it { is_expected.to eq([package1]) }
+ end
+
+ context 'when package is not installable' do
+ before do
+ package1.update_column(:status, 3)
+ end
+
+ it { is_expected.to eq([package2]) }
+ end
+
+ context 'when package has no version' do
+ before do
+ package1.update_column(:version, nil)
+ end
+
+ it { is_expected.to eq([package2]) }
+ end
+
+ context 'when package is not a terraform module' do
+ before do
+ package1.update_column(:package_type, 1)
+ end
+
+ it { is_expected.to eq([package2]) }
+ end
+ end
+ end
+end
diff --git a/spec/finders/projects/ml/experiment_finder_spec.rb b/spec/finders/projects/ml/experiment_finder_spec.rb
new file mode 100644
index 00000000000..2cf5086d5db
--- /dev/null
+++ b/spec/finders/projects/ml/experiment_finder_spec.rb
@@ -0,0 +1,51 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Projects::Ml::ExperimentFinder, feature_category: :mlops do
+ let_it_be(:project) { create(:project) }
+ let_it_be(:experiment1) { create(:ml_experiments, project: project) }
+ let_it_be(:experiment2) { create(:ml_experiments, project: project) }
+ let_it_be(:experiment3) do
+ create(:ml_experiments, name: "#{experiment1.name}_1", project: project, updated_at: 1.week.ago)
+ end
+
+ let_it_be(:other_experiment) { create(:ml_experiments) }
+ let_it_be(:project_experiments) { [experiment1, experiment2, experiment3] }
+
+ let(:params) { {} }
+
+ subject(:experiments) { described_class.new(project, params).execute.to_a }
+
+ describe 'default params' do
+ it 'returns models for project ordered by id, descending' do
+ is_expected.to eq([experiment3, experiment2, experiment1])
+ end
+
+ it 'including the latest version and project', :aggregate_failures do
+ expect(experiments[0].association_cached?(:project)).to be(true)
+ end
+
+ it 'does not return models belonging to a different project' do
+ is_expected.not_to include(other_experiment)
+ end
+ end
+
+ describe 'sorting' do
+ using RSpec::Parameterized::TableSyntax
+
+ where(:test_case, :order_by, :direction, :expected_order) do
+ 'default params' | nil | nil | [2, 1, 0]
+ 'ascending order' | 'id' | 'ASC' | [0, 1, 2]
+ 'by column' | 'name' | 'ASC' | [0, 2, 1]
+ 'invalid sort' | nil | 'UP' | [2, 1, 0]
+ 'invalid order by' | 'INVALID' | nil | [2, 1, 0]
+ 'order by updated_at' | 'updated_at' | nil | [1, 0, 2]
+ end
+ with_them do
+ let(:params) { { order_by: order_by, sort: direction } }
+
+ it { is_expected.to eq(project_experiments.values_at(*expected_order)) }
+ end
+ end
+end
diff --git a/spec/finders/projects_finder_spec.rb b/spec/finders/projects_finder_spec.rb
index e570b49e1da..f991ecd369c 100644
--- a/spec/finders/projects_finder_spec.rb
+++ b/spec/finders/projects_finder_spec.rb
@@ -482,11 +482,11 @@ RSpec.describe ProjectsFinder, feature_category: :groups_and_projects do
it { is_expected.to match_array([internal_project]) }
end
- describe 'filter by organization_id' do
+ describe 'filter by organization' do
let_it_be(:organization) { create(:organization) }
let_it_be(:organization_project) { create(:project, organization: organization) }
- let(:params) { { organization_id: organization.id } }
+ let(:params) { { organization: organization } }
before do
organization_project.add_maintainer(current_user)
diff --git a/spec/finders/releases/group_releases_finder_spec.rb b/spec/finders/releases/group_releases_finder_spec.rb
index daefc94828b..3430fe834d1 100644
--- a/spec/finders/releases/group_releases_finder_spec.rb
+++ b/spec/finders/releases/group_releases_finder_spec.rb
@@ -168,9 +168,9 @@ RSpec.describe Releases::GroupReleasesFinder, feature_category: :groups_and_proj
let(:params) { query_params }
it 'subgroups avoids N+1 queries' do
- control_count = ActiveRecord::QueryRecorder.new(skip_cached: false) do
+ control = ActiveRecord::QueryRecorder.new(skip_cached: false) do
releases
- end.count
+ end
subgroups = create_list(:group, 10, parent: group)
projects = create_list(:project, 10, namespace: subgroups[0])
@@ -178,7 +178,7 @@ RSpec.describe Releases::GroupReleasesFinder, feature_category: :groups_and_proj
expect do
releases
- end.not_to exceed_all_query_limit(control_count)
+ end.not_to exceed_all_query_limit(control)
end
end
end
diff --git a/spec/finders/resource_milestone_event_finder_spec.rb b/spec/finders/resource_milestone_event_finder_spec.rb
index 27e124afe2e..a05059328e3 100644
--- a/spec/finders/resource_milestone_event_finder_spec.rb
+++ b/spec/finders/resource_milestone_event_finder_spec.rb
@@ -49,8 +49,8 @@ RSpec.describe ResourceMilestoneEventFinder do
milestone1 = create(:milestone, project: issue_project)
milestone2 = create(:milestone, project: issue_project)
- control_count = ActiveRecord::QueryRecorder.new { described_class.new(user, issue).execute }.count
- expect(control_count).to eq(1) # 1 events query
+ control = ActiveRecord::QueryRecorder.new { described_class.new(user, issue).execute }
+ expect(control.count).to eq(1) # 1 events query
create_event(milestone1, :add)
create_event(milestone1, :remove)
@@ -60,7 +60,7 @@ RSpec.describe ResourceMilestoneEventFinder do
create_event(milestone2, :remove)
# 1 milestones + 1 project + 1 user + 4 ability
- expect { described_class.new(user, issue).execute }.not_to exceed_query_limit(control_count + 6)
+ expect { described_class.new(user, issue).execute }.not_to exceed_query_limit(control).with_threshold(6)
end
end
diff --git a/spec/finders/users_finder_spec.rb b/spec/finders/users_finder_spec.rb
index c931de92d1c..4cd3e13d4fa 100644
--- a/spec/finders/users_finder_spec.rb
+++ b/spec/finders/users_finder_spec.rb
@@ -180,6 +180,31 @@ RSpec.describe UsersFinder do
let_it_be(:user) { create(:user) }
it_behaves_like 'executes users finder as normal user'
+
+ context 'with group argument is passed' do
+ let_it_be(:group) { create(:group, :private) }
+ let_it_be(:subgroup) { create(:group, :private, parent: group) }
+ let_it_be(:not_group_member) { create(:user) }
+
+ let_it_be(:indirect_group_member) do
+ create(:user).tap { |u| subgroup.add_developer(u) }
+ end
+
+ let_it_be(:direct_group_members) do
+ [user, omniauth_user, internal_user].each { |u| group.add_developer(u) }
+ end
+
+ it 'filtered by search' do
+ users = described_class.new(user, group: group).execute
+ expect(users).to contain_exactly(indirect_group_member, *direct_group_members)
+ end
+
+ context 'when user cannot read group' do
+ it 'filtered by search' do
+ expect { described_class.new(not_group_member, group: group).execute }.to raise_error(Gitlab::Access::AccessDeniedError)
+ end
+ end
+ end
end
context 'with an admin user' do
diff --git a/spec/fixtures/api/schemas/ml/get_latest_versions.json b/spec/fixtures/api/schemas/ml/get_latest_versions.json
index cb2308fa637..3bf9a43a795 100644
--- a/spec/fixtures/api/schemas/ml/get_latest_versions.json
+++ b/spec/fixtures/api/schemas/ml/get_latest_versions.json
@@ -21,7 +21,7 @@
"run_id",
"status",
"status_message",
- "metadata",
+ "tags",
"run_link",
"aliases"
],
@@ -59,7 +59,7 @@
"status_message": {
"type": "string"
},
- "metadata": {
+ "tags": {
"type": "array",
"items": {
}
diff --git a/spec/fixtures/api/schemas/ml/get_model_version.json b/spec/fixtures/api/schemas/ml/get_model_version.json
index 214c993ed73..89779428edf 100644
--- a/spec/fixtures/api/schemas/ml/get_model_version.json
+++ b/spec/fixtures/api/schemas/ml/get_model_version.json
@@ -18,7 +18,7 @@
"run_id",
"status",
"status_message",
- "metadata",
+ "tags",
"run_link",
"aliases"
],
@@ -56,9 +56,22 @@
"status_message": {
"type": "string"
},
- "metadata": {
+ "tags": {
"type": "array",
"items": {
+ "type": "object",
+ "required": [
+ "key",
+ "value"
+ ],
+ "properties": {
+ "key": {
+ "type": "string"
+ },
+ "value": {
+ "type": "string"
+ }
+ }
}
},
"run_link": {
diff --git a/spec/fixtures/api/schemas/ml/search_experiments.json b/spec/fixtures/api/schemas/ml/search_experiments.json
new file mode 100644
index 00000000000..3df7ff1a676
--- /dev/null
+++ b/spec/fixtures/api/schemas/ml/search_experiments.json
@@ -0,0 +1,39 @@
+{
+ "type": "object",
+ "required": [
+ "experiments",
+ "next_page_token"
+ ],
+ "properties": {
+ "experiments": {
+ "type": "array",
+ "items": {
+ "type": "object",
+ "required": [
+ "experiment_id",
+ "name",
+ "artifact_location",
+ "lifecycle_stage"
+ ],
+ "properties": {
+ "experiment_id": {
+ "type": "string"
+ },
+ "name": {
+ "type": "string"
+ },
+ "artifact_location": {
+ "type": "string"
+ },
+ "lifecycle_stage": {
+ "type": "string",
+ "enum": [
+ "active",
+ "deleted"
+ ]
+ }
+ }
+ }
+ }
+ }
+}
diff --git a/spec/fixtures/api/schemas/public_api/v4/job.json b/spec/fixtures/api/schemas/public_api/v4/job.json
index 6265fbcff69..3a0c69786e9 100644
--- a/spec/fixtures/api/schemas/public_api/v4/job.json
+++ b/spec/fixtures/api/schemas/public_api/v4/job.json
@@ -22,7 +22,8 @@
"artifacts_expire_at",
"tag_list",
"runner",
- "project"
+ "project",
+ "archived"
],
"properties": {
"id": { "type": "integer" },
@@ -70,7 +71,8 @@
},
"project": {
"ci_job_token_scope_enabled": { "type": "boolean" }
- }
+ },
+ "archived": { "type": "boolean" }
},
"additionalProperties":false
}
diff --git a/spec/fixtures/api/schemas/variable.json b/spec/fixtures/api/schemas/variable.json
index 440e812d95b..b708c6ec14f 100644
--- a/spec/fixtures/api/schemas/variable.json
+++ b/spec/fixtures/api/schemas/variable.json
@@ -11,6 +11,12 @@
"id": {
"type": "integer"
},
+ "description": {
+ "type": [
+ "string",
+ "null"
+ ]
+ },
"key": {
"type": "string"
},
@@ -35,4 +41,4 @@
}
},
"additionalProperties": false
-} \ No newline at end of file
+}
diff --git a/spec/fixtures/lib/gitlab/import_export/with_invalid_issues_and_milestones/tree/project.json b/spec/fixtures/lib/gitlab/import_export/with_invalid_issues_and_milestones/tree/project.json
new file mode 100644
index 00000000000..a522590d037
--- /dev/null
+++ b/spec/fixtures/lib/gitlab/import_export/with_invalid_issues_and_milestones/tree/project.json
@@ -0,0 +1,11 @@
+{
+ "id": 5,
+ "description": "Nisi et repellendus ut enim quo accusamus vel magnam.",
+ "import_type": "gitlab_project",
+ "creator_id": 999,
+ "visibility_level": 10,
+ "archived": false,
+ "hooks": [
+
+ ]
+}
diff --git a/spec/fixtures/lib/gitlab/import_export/with_invalid_issues_and_milestones/tree/project/issues.ndjson b/spec/fixtures/lib/gitlab/import_export/with_invalid_issues_and_milestones/tree/project/issues.ndjson
new file mode 100644
index 00000000000..cf7d9211460
--- /dev/null
+++ b/spec/fixtures/lib/gitlab/import_export/with_invalid_issues_and_milestones/tree/project/issues.ndjson
@@ -0,0 +1,2 @@
+{"id":39,"author_id":22,"project_id":null,"created_at":"2016-06-14T15:02:08.233Z","updated_at":"2016-06-14T15:02:48.194Z","position":0,"branch_name":null,"description":"Voluptate vel reprehenderit facilis omnis voluptas magnam tenetur.","state":"opened","iid":9,"updated_by_id":null,"confidential":false,"due_date":"2020-08-14","moved_to_id":null,"issue_assignees":[],"milestone":{"id":1,"title":"test milestone","project_id":8,"description":"test milestone","due_date":null,"created_at":"2016-06-14T15:02:04.415Z","updated_at":"2016-06-14T15:02:04.415Z","state":"active","iid":1,"events":[{"id":487,"target_type":"Milestone","target_id":1,"project_id":46,"created_at":"2016-06-14T15:02:04.418Z","updated_at":"2016-06-14T15:02:04.418Z","action":1,"author_id":18}]},"notes":[{"id":359,"note":"Quo eius velit quia et id quam.","noteable_type":"Issue","author_id":26,"created_at":"2016-06-14T15:02:48.009Z","updated_at":"2016-06-14T15:02:48.009Z","project_id":5,"attachment":{"url":null},"line_code":null,"commit_id":null,"noteable_id":39,"system":false,"st_diff":null,"updated_by_id":null,"author":{"name":"User 4"},"events":[]},{"id":360,"note":"Nulla commodi ratione cumque id autem.","noteable_type":"Issue","author_id":25,"created_at":"2016-06-14T15:02:48.032Z","updated_at":"2016-06-14T15:02:48.032Z","project_id":5,"attachment":{"url":null},"line_code":null,"commit_id":null,"noteable_id":39,"system":false,"st_diff":null,"updated_by_id":null,"author":{"name":"User 3"},"events":[]},{"id":361,"note":"Illum non ea sed dolores corrupti.","noteable_type":"Issue","author_id":22,"created_at":"2016-06-14T15:02:48.056Z","updated_at":"2016-06-14T15:02:48.056Z","project_id":5,"attachment":{"url":null},"line_code":null,"commit_id":null,"noteable_id":39,"system":false,"st_diff":null,"updated_by_id":null,"author":{"name":"User 0"},"events":[]},{"id":362,"note":"Facere dolores ipsum dolorum maiores omnis occaecati 
ab.","noteable_type":"Issue","author_id":20,"created_at":"2016-06-14T15:02:48.082Z","updated_at":"2016-06-14T15:02:48.082Z","project_id":5,"attachment":{"url":null},"line_code":null,"commit_id":null,"noteable_id":39,"system":false,"st_diff":null,"updated_by_id":null,"author":{"name":"Ottis Schuster II"},"events":[]},{"id":363,"note":"Quod laudantium similique sint aut est ducimus.","noteable_type":"Issue","author_id":16,"created_at":"2016-06-14T15:02:48.113Z","updated_at":"2016-06-14T15:02:48.113Z","project_id":5,"attachment":{"url":null},"line_code":null,"commit_id":null,"noteable_id":39,"system":false,"st_diff":null,"updated_by_id":null,"author":{"name":"Rhett Emmerich IV"},"events":[]},{"id":364,"note":"Aut omnis eos esse incidunt vero reiciendis.","noteable_type":"Issue","author_id":15,"created_at":"2016-06-14T15:02:48.139Z","updated_at":"2016-06-14T15:02:48.139Z","project_id":5,"attachment":{"url":null},"line_code":null,"commit_id":null,"noteable_id":39,"system":false,"st_diff":null,"updated_by_id":null,"author":{"name":"Burdette Bernier"},"events":[]},{"id":365,"note":"Beatae dolore et doloremque asperiores sunt.","noteable_type":"Issue","author_id":6,"created_at":"2016-06-14T15:02:48.162Z","updated_at":"2016-06-14T15:02:48.162Z","project_id":5,"attachment":{"url":null},"line_code":null,"commit_id":null,"noteable_id":39,"system":false,"st_diff":null,"updated_by_id":null,"author":{"name":"Ari Wintheiser"},"events":[]},{"id":366,"note":"Doloribus ipsam ex delectus rerum libero recusandae modi repellendus.","noteable_type":"Issue","author_id":1,"created_at":"2016-06-14T15:02:48.192Z","updated_at":"2016-06-14T15:02:48.192Z","project_id":5,"attachment":{"url":null},"line_code":null,"commit_id":null,"noteable_id":39,"system":false,"st_diff":null,"updated_by_id":null,"author":{"name":"Administrator"},"events":[]}]}
+{"id":39,"author_id":22,"project_id":null,"created_at":"2016-06-14T15:02:08.233Z","updated_at":"2016-06-14T15:02:48.194Z","position":0,"branch_name":null,"description":"Voluptate vel reprehenderit facilis omnis voluptas magnam tenetur.","state":"opened","updated_by_id":null,"confidential":false,"due_date":"2020-08-14","moved_to_id":null,"issue_assignees":[],"milestone":{"id":1,"title":"test milestone","project_id":8,"description":"test milestone","due_date":null,"created_at":"2016-06-14T15:02:04.415Z","updated_at":"2016-06-14T15:02:04.415Z","state":"active","iid":1,"events":[{"id":487,"target_type":"Milestone","target_id":1,"project_id":46,"created_at":"2016-06-14T15:02:04.418Z","updated_at":"2016-06-14T15:02:04.418Z","action":1,"author_id":18}]},"notes":[{"id":359,"note":"Quo eius velit quia et id quam.","noteable_type":"Issue","author_id":26,"created_at":"2016-06-14T15:02:48.009Z","updated_at":"2016-06-14T15:02:48.009Z","project_id":5,"attachment":{"url":null},"line_code":null,"commit_id":null,"noteable_id":39,"system":false,"st_diff":null,"updated_by_id":null,"author":{"name":"User 4"},"events":[]},{"id":360,"note":"Nulla commodi ratione cumque id autem.","noteable_type":"Issue","author_id":25,"created_at":"2016-06-14T15:02:48.032Z","updated_at":"2016-06-14T15:02:48.032Z","project_id":5,"attachment":{"url":null},"line_code":null,"commit_id":null,"noteable_id":39,"system":false,"st_diff":null,"updated_by_id":null,"author":{"name":"User 3"},"events":[]},{"id":361,"note":"Illum non ea sed dolores corrupti.","noteable_type":"Issue","author_id":22,"created_at":"2016-06-14T15:02:48.056Z","updated_at":"2016-06-14T15:02:48.056Z","project_id":5,"attachment":{"url":null},"line_code":null,"commit_id":null,"noteable_id":39,"system":false,"st_diff":null,"updated_by_id":null,"author":{"name":"User 0"},"events":[]},{"id":362,"note":"Facere dolores ipsum dolorum maiores omnis occaecati 
ab.","noteable_type":"Issue","author_id":20,"created_at":"2016-06-14T15:02:48.082Z","updated_at":"2016-06-14T15:02:48.082Z","project_id":5,"attachment":{"url":null},"line_code":null,"commit_id":null,"noteable_id":39,"system":false,"st_diff":null,"updated_by_id":null,"author":{"name":"Ottis Schuster II"},"events":[]},{"id":363,"note":"Quod laudantium similique sint aut est ducimus.","noteable_type":"Issue","author_id":16,"created_at":"2016-06-14T15:02:48.113Z","updated_at":"2016-06-14T15:02:48.113Z","project_id":5,"attachment":{"url":null},"line_code":null,"commit_id":null,"noteable_id":39,"system":false,"st_diff":null,"updated_by_id":null,"author":{"name":"Rhett Emmerich IV"},"events":[]},{"id":364,"note":"Aut omnis eos esse incidunt vero reiciendis.","noteable_type":"Issue","author_id":15,"created_at":"2016-06-14T15:02:48.139Z","updated_at":"2016-06-14T15:02:48.139Z","project_id":5,"attachment":{"url":null},"line_code":null,"commit_id":null,"noteable_id":39,"system":false,"st_diff":null,"updated_by_id":null,"author":{"name":"Burdette Bernier"},"events":[]},{"id":365,"note":"Beatae dolore et doloremque asperiores sunt.","noteable_type":"Issue","author_id":6,"created_at":"2016-06-14T15:02:48.162Z","updated_at":"2016-06-14T15:02:48.162Z","project_id":5,"attachment":{"url":null},"line_code":null,"commit_id":null,"noteable_id":39,"system":false,"st_diff":null,"updated_by_id":null,"author":{"name":"Ari Wintheiser"},"events":[]},{"id":366,"note":"Doloribus ipsam ex delectus rerum libero recusandae modi repellendus.","noteable_type":"Issue","author_id":1,"created_at":"2016-06-14T15:02:48.192Z","updated_at":"2016-06-14T15:02:48.192Z","project_id":5,"attachment":{"url":null},"line_code":null,"commit_id":null,"noteable_id":39,"system":false,"st_diff":null,"updated_by_id":null,"author":{"name":"Administrator"},"events":[]}]}
diff --git a/spec/fixtures/lib/gitlab/import_export/with_invalid_issues_and_milestones/tree/project/milestones.ndjson b/spec/fixtures/lib/gitlab/import_export/with_invalid_issues_and_milestones/tree/project/milestones.ndjson
new file mode 100644
index 00000000000..28e737fa43c
--- /dev/null
+++ b/spec/fixtures/lib/gitlab/import_export/with_invalid_issues_and_milestones/tree/project/milestones.ndjson
@@ -0,0 +1,2 @@
+{"id":1,"title":null,"project_id":8,"description":123,"due_date":null,"created_at":"NOT A DATE","updated_at":"NOT A DATE","state":"active","iid":1,"group_id":null}
+{"id":42,"title":"A valid milestone","project_id":8,"description":"Project-level milestone","due_date":null,"created_at":"2016-06-14T15:02:04.415Z","updated_at":"2016-06-14T15:02:04.415Z","state":"active","iid":1,"group_id":null}
diff --git a/spec/fixtures/scripts/internal_events/metrics/ee_total_28d_single_event.yml b/spec/fixtures/scripts/internal_events/metrics/ee_total_28d_single_event.yml
index ba56d782871..5238e997044 100644
--- a/spec/fixtures/scripts/internal_events/metrics/ee_total_28d_single_event.yml
+++ b/spec/fixtures/scripts/internal_events/metrics/ee_total_28d_single_event.yml
@@ -12,7 +12,6 @@ introduced_by_url: TODO
time_frame: 28d
data_source: internal_events
data_category: optional
-instrumentation_class: TotalCountMetric
distribution:
- ee
tier:
diff --git a/spec/fixtures/scripts/internal_events/metrics/ee_total_7d_single_event.yml b/spec/fixtures/scripts/internal_events/metrics/ee_total_7d_single_event.yml
index e6bdcb9d2ae..fdbf137f699 100644
--- a/spec/fixtures/scripts/internal_events/metrics/ee_total_7d_single_event.yml
+++ b/spec/fixtures/scripts/internal_events/metrics/ee_total_7d_single_event.yml
@@ -12,7 +12,6 @@ introduced_by_url: TODO
time_frame: 7d
data_source: internal_events
data_category: optional
-instrumentation_class: TotalCountMetric
distribution:
- ee
tier:
diff --git a/spec/fixtures/scripts/internal_events/metrics/ee_total_single_event.yml b/spec/fixtures/scripts/internal_events/metrics/ee_total_single_event.yml
index b1bf89dc095..e928869ca9a 100644
--- a/spec/fixtures/scripts/internal_events/metrics/ee_total_single_event.yml
+++ b/spec/fixtures/scripts/internal_events/metrics/ee_total_single_event.yml
@@ -12,7 +12,6 @@ introduced_by_url: TODO
time_frame: all
data_source: internal_events
data_category: optional
-instrumentation_class: TotalCountMetric
distribution:
- ee
tier:
diff --git a/spec/fixtures/scripts/internal_events/metrics/keyboard_smashed_metric_28d.yml b/spec/fixtures/scripts/internal_events/metrics/keyboard_smashed_metric_28d.yml
index 8476cb8561b..4d40e2122cb 100644
--- a/spec/fixtures/scripts/internal_events/metrics/keyboard_smashed_metric_28d.yml
+++ b/spec/fixtures/scripts/internal_events/metrics/keyboard_smashed_metric_28d.yml
@@ -12,7 +12,6 @@ introduced_by_url: TODO
time_frame: 28d
data_source: internal_events
data_category: optional
-instrumentation_class: RedisHLLMetric
distribution:
- ce
- ee
diff --git a/spec/fixtures/scripts/internal_events/metrics/keyboard_smashed_metric_7d.yml b/spec/fixtures/scripts/internal_events/metrics/keyboard_smashed_metric_7d.yml
index b4cc2fc8b55..166cef90412 100644
--- a/spec/fixtures/scripts/internal_events/metrics/keyboard_smashed_metric_7d.yml
+++ b/spec/fixtures/scripts/internal_events/metrics/keyboard_smashed_metric_7d.yml
@@ -12,7 +12,6 @@ introduced_by_url: TODO
time_frame: 7d
data_source: internal_events
data_category: optional
-instrumentation_class: RedisHLLMetric
distribution:
- ce
- ee
diff --git a/spec/fixtures/scripts/internal_events/metrics/project_id_28d_multiple_events.yml b/spec/fixtures/scripts/internal_events/metrics/project_id_28d_multiple_events.yml
index 754702c8c74..122043e6cc0 100644
--- a/spec/fixtures/scripts/internal_events/metrics/project_id_28d_multiple_events.yml
+++ b/spec/fixtures/scripts/internal_events/metrics/project_id_28d_multiple_events.yml
@@ -12,7 +12,6 @@ introduced_by_url: TODO
time_frame: 28d
data_source: internal_events
data_category: optional
-instrumentation_class: RedisHLLMetric
distribution:
- ce
- ee
diff --git a/spec/fixtures/scripts/internal_events/metrics/project_id_7d_multiple_events.yml b/spec/fixtures/scripts/internal_events/metrics/project_id_7d_multiple_events.yml
index 95f429e9b40..11a4ba41c07 100644
--- a/spec/fixtures/scripts/internal_events/metrics/project_id_7d_multiple_events.yml
+++ b/spec/fixtures/scripts/internal_events/metrics/project_id_7d_multiple_events.yml
@@ -12,7 +12,6 @@ introduced_by_url: TODO
time_frame: 7d
data_source: internal_events
data_category: optional
-instrumentation_class: RedisHLLMetric
distribution:
- ce
- ee
diff --git a/spec/fixtures/scripts/internal_events/metrics/total_single_event.yml b/spec/fixtures/scripts/internal_events/metrics/total_single_event.yml
index 5bdb4c45a52..038fc738f25 100644
--- a/spec/fixtures/scripts/internal_events/metrics/total_single_event.yml
+++ b/spec/fixtures/scripts/internal_events/metrics/total_single_event.yml
@@ -12,7 +12,6 @@ introduced_by_url: TODO
time_frame: all
data_source: internal_events
data_category: optional
-instrumentation_class: TotalCountMetric
distribution:
- ce
- ee
diff --git a/spec/fixtures/scripts/internal_events/metrics/user_id_28d_single_event.yml b/spec/fixtures/scripts/internal_events/metrics/user_id_28d_single_event.yml
index b176b23b46a..b27e69bd43b 100644
--- a/spec/fixtures/scripts/internal_events/metrics/user_id_28d_single_event.yml
+++ b/spec/fixtures/scripts/internal_events/metrics/user_id_28d_single_event.yml
@@ -12,7 +12,6 @@ introduced_by_url: TODO
time_frame: 28d
data_source: internal_events
data_category: optional
-instrumentation_class: RedisHLLMetric
distribution:
- ce
- ee
diff --git a/spec/fixtures/scripts/internal_events/metrics/user_id_7d_single_event.yml b/spec/fixtures/scripts/internal_events/metrics/user_id_7d_single_event.yml
index 8a0fca2cbdc..e08733a6bc9 100644
--- a/spec/fixtures/scripts/internal_events/metrics/user_id_7d_single_event.yml
+++ b/spec/fixtures/scripts/internal_events/metrics/user_id_7d_single_event.yml
@@ -12,7 +12,6 @@ introduced_by_url: TODO
time_frame: 7d
data_source: internal_events
data_category: optional
-instrumentation_class: RedisHLLMetric
distribution:
- ce
- ee
diff --git a/spec/fixtures/security_reports/master/gl-common-scanning-report-with-unicode-null-character.json b/spec/fixtures/security_reports/master/gl-common-scanning-report-with-unicode-null-character.json
new file mode 100644
index 00000000000..852327fc084
--- /dev/null
+++ b/spec/fixtures/security_reports/master/gl-common-scanning-report-with-unicode-null-character.json
@@ -0,0 +1,79 @@
+{
+ "vulnerabilities": [
+ {
+ "id": "vulnerability-1",
+ "category": "dependency_scanning",
+ "name": "Vulnerability for remediation testing 1",
+ "message": "This vulnerability should have ONE remediation",
+ "description": "",
+ "severity": "High",
+ "solution": "Upgrade to latest version.\u0000",
+ "scanner": {
+ "id": "gemnasium",
+ "name": "Gemnasium"
+ },
+ "cvss_vectors": [
+ {
+ "vendor": "GitLab",
+ "vector": "CVSS:3.1/AV:N/AC:L/PR:N/UI:R/S:U/C:H/I:H/A:H"
+ }
+ ],
+ "location": {
+ "file": "some/kind/of/file.c",
+ "dependency": {
+ "package": {
+ "name": "io.netty/netty"
+ },
+ "version": "3.9.1.Final"
+ }
+ },
+ "identifiers": [
+ {
+ "type": "GitLab",
+ "name": "Foo vulnerability",
+ "value": "foo"
+ }
+ ],
+ "links": [
+ {
+ "url": "https://cve.mitre.org/cgi-bin/cvename.cgi?name=CVE-2020-2137"
+ }
+ ],
+ "details": {
+ "commit": {
+ "name": "the commit",
+ "description": "description",
+ "type": "commit",
+ "value": "41df7b7eb3be2b5be2c406c2f6d28cd6631eeb19"
+ }
+ }
+ }
+ ],
+ "remediations": [],
+ "dependency_files": [],
+ "scan": {
+ "analyzer": {
+ "id": "common-analyzer",
+ "name": "Common Analyzer",
+ "url": "https://site.com/analyzer/common",
+ "version": "2.0.1",
+ "vendor": {
+ "name": "Common"
+ }
+ },
+ "scanner": {
+ "id": "gemnasium",
+ "name": "Gemnasium top-level",
+ "url": "https://gitlab.com/gitlab-org/security-products/analyzers/gemnasium-maven",
+ "vendor": {
+ "name": "GitLab"
+ },
+ "version": "2.18.0"
+ },
+ "type": "dependency_scanning",
+ "start_time": "2022-08-10T21:37:00",
+ "end_time": "2022-08-10T21:38:00",
+ "status": "success"
+ },
+ "version": "14.0.2"
+}
diff --git a/spec/frontend/admin/abuse_report/components/user_details_spec.js b/spec/frontend/admin/abuse_report/components/user_details_spec.js
index 24ec0cdb1b2..42c219b1b11 100644
--- a/spec/frontend/admin/abuse_report/components/user_details_spec.js
+++ b/spec/frontend/admin/abuse_report/components/user_details_spec.js
@@ -1,6 +1,5 @@
import { GlLink, GlSprintf } from '@gitlab/ui';
import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
-import { sprintf } from '~/locale';
import UserDetails from '~/admin/abuse_report/components/user_details.vue';
import TimeAgoTooltip from '~/vue_shared/components/time_ago_tooltip.vue';
import { USER_DETAILS_I18N } from '~/admin/abuse_report/constants';
@@ -61,7 +60,7 @@ describe('UserDetails', () => {
describe('verification', () => {
it('renders the users verification with the correct label', () => {
expect(findUserDetailLabel('verification')).toBe(USER_DETAILS_I18N.verification);
- expect(findUserDetailValue('verification')).toBe('Email, Credit card');
+ expect(findUserDetailValue('verification')).toBe('Email, Phone, Credit card');
});
});
@@ -73,7 +72,7 @@ describe('UserDetails', () => {
describe('similar credit cards', () => {
it('renders the number of similar records', () => {
expect(findUserDetail('credit-card-verification').text()).toContain(
- sprintf('Card matches %{similarRecordsCount} accounts', { ...user.creditCard }),
+ `Card matches ${user.creditCard.similarRecordsCount} accounts`,
);
});
@@ -83,7 +82,7 @@ describe('UserDetails', () => {
);
expect(findLinkFor('credit-card-verification').text()).toBe(
- sprintf('%{similarRecordsCount} accounts', { ...user.creditCard }),
+ `${user.creditCard.similarRecordsCount} accounts`,
);
expect(findLinkFor('credit-card-verification').text()).toContain(
@@ -100,7 +99,7 @@ describe('UserDetails', () => {
it('does not render the number of similar records', () => {
expect(findUserDetail('credit-card-verification').text()).not.toContain(
- sprintf('Card matches %{similarRecordsCount} accounts', { ...user.creditCard }),
+ `Card matches ${user.creditCard.similarRecordsCount} accounts`,
);
});
@@ -123,6 +122,60 @@ describe('UserDetails', () => {
});
});
+ describe('phoneNumber', () => {
+ it('renders the correct label', () => {
+ expect(findUserDetailLabel('phone-number-verification')).toBe(USER_DETAILS_I18N.phoneNumber);
+ });
+
+ describe('similar phone numbers', () => {
+ it('renders the number of similar records', () => {
+ expect(findUserDetail('phone-number-verification').text()).toContain(
+ `Phone matches ${user.phoneNumber.similarRecordsCount} accounts`,
+ );
+ });
+
+ it('renders a link to the matching phone numbers', () => {
+ expect(findLinkFor('phone-number-verification').attributes('href')).toBe(
+ user.phoneNumber.phoneMatchesLink,
+ );
+
+ expect(findLinkFor('phone-number-verification').text()).toBe(
+ `${user.phoneNumber.similarRecordsCount} accounts`,
+ );
+ });
+
+ describe('when the number of similar phone numbers is less than 2', () => {
+ beforeEach(() => {
+ createComponent({
+ user: { ...user, phoneNumber: { ...user.phoneNumber, similarRecordsCount: 1 } },
+ });
+ });
+
+ it('does not render the number of similar records', () => {
+ expect(findUserDetail('phone-number-verification').text()).not.toContain(
+ `Phone matches ${user.phoneNumber.similarRecordsCount} accounts`,
+ );
+ });
+
+ it('does not render a link to the matching phone numbers', () => {
+ expect(findLinkFor('phone-number-verification').exists()).toBe(false);
+ });
+ });
+ });
+
+ describe('when the users phoneNumber is blank', () => {
+ beforeEach(() => {
+ createComponent({
+ user: { ...user, phoneNumber: undefined },
+ });
+ });
+
+ it('does not render the users phoneNumber', () => {
+ expect(findUserDetail('phone-number-verification').exists()).toBe(false);
+ });
+ });
+ });
+
describe('otherReports', () => {
it('renders the correct label', () => {
expect(findUserDetailLabel('past-closed-reports')).toBe(USER_DETAILS_I18N.pastReports);
@@ -132,9 +185,7 @@ describe('UserDetails', () => {
const index = user.pastClosedReports.indexOf(pastReport);
it('renders the category', () => {
- expect(findPastReport(index).text()).toContain(
- sprintf('Reported for %{category}', { ...pastReport }),
- );
+ expect(findPastReport(index).text()).toContain(`Reported for ${pastReport.category}`);
});
it('renders a link to the report', () => {
diff --git a/spec/frontend/admin/abuse_report/mock_data.js b/spec/frontend/admin/abuse_report/mock_data.js
index 9790b44c976..f02986fb5bb 100644
--- a/spec/frontend/admin/abuse_report/mock_data.js
+++ b/spec/frontend/admin/abuse_report/mock_data.js
@@ -9,12 +9,16 @@ export const mockAbuseReport = {
path: '/spamuser417',
adminPath: '/admin/users/spamuser417',
plan: 'Free',
- verificationState: { email: true, phone: false, creditCard: true },
+ verificationState: { email: true, phone: true, creditCard: true },
creditCard: {
name: 'S. User',
similarRecordsCount: 2,
cardMatchesLink: '/admin/users/spamuser417/card_match',
},
+ phoneNumber: {
+ similarRecordsCount: 2,
+ phoneMatchesLink: '/admin/users/spamuser417/phone_match',
+ },
pastClosedReports: [
{
category: 'offensive',
diff --git a/spec/frontend/alerts_settings/components/alerts_settings_form_spec.js b/spec/frontend/alerts_settings/components/alerts_settings_form_spec.js
index e6b38a1e824..41690e1b5be 100644
--- a/spec/frontend/alerts_settings/components/alerts_settings_form_spec.js
+++ b/spec/frontend/alerts_settings/components/alerts_settings_form_spec.js
@@ -6,30 +6,48 @@ import {
GlFormTextarea,
GlTab,
GlLink,
+ GlModal,
} from '@gitlab/ui';
import { mount } from '@vue/test-utils';
-import { nextTick } from 'vue';
+import Vue, { nextTick } from 'vue';
+import VueApollo from 'vue-apollo';
import { extendedWrapper } from 'helpers/vue_test_utils_helper';
import waitForPromises from 'helpers/wait_for_promises';
import MappingBuilder from '~/alerts_settings/components/alert_mapping_builder.vue';
import AlertsSettingsForm from '~/alerts_settings/components/alerts_settings_form.vue';
import { typeSet } from '~/alerts_settings/constants';
-import alertFields from '../mocks/alert_fields.json';
+import createMockApollo from 'helpers/mock_apollo_helper';
import parsedMapping from '../mocks/parsed_mapping.json';
+import alertFields from '../mocks/alert_fields.json';
const scrollIntoViewMock = jest.fn();
HTMLElement.prototype.scrollIntoView = scrollIntoViewMock;
+Vue.use(VueApollo);
+
describe('AlertsSettingsForm', () => {
let wrapper;
const mockToastShow = jest.fn();
+ let apolloProvider;
+
+ const createComponent = async ({
+ props = {},
+ multiIntegrations = true,
+ currentIntegration = null,
+ } = {}) => {
+ const mockResolvers = {
+ Query: {
+ currentIntegration() {
+ return currentIntegration;
+ },
+ },
+ };
+
+ apolloProvider = createMockApollo([], mockResolvers);
- const createComponent = ({ data = {}, props = {}, multiIntegrations = true } = {}) => {
wrapper = extendedWrapper(
mount(AlertsSettingsForm, {
- data() {
- return { ...data };
- },
+ apolloProvider,
propsData: {
loading: false,
canAddIntegration: true,
@@ -39,15 +57,14 @@ describe('AlertsSettingsForm', () => {
multiIntegrations,
},
mocks: {
- $apollo: {
- query: jest.fn(),
- },
$toast: {
show: mockToastShow,
},
},
}),
);
+
+ await waitForPromises();
};
const findForm = () => wrapper.findComponent(GlForm);
@@ -55,6 +72,7 @@ describe('AlertsSettingsForm', () => {
const findFormFields = () => wrapper.findAllComponents(GlFormInput);
const findFormToggle = () => wrapper.findComponent(GlToggle);
const findSamplePayloadSection = () => wrapper.findByTestId('sample-payload-section');
+ const findResetPayloadModal = () => wrapper.findComponent(GlModal);
const findMappingBuilder = () => wrapper.findComponent(MappingBuilder);
const findSubmitButton = () => wrapper.findByTestId('integration-form-submit');
const findMultiSupportText = () => wrapper.findByTestId('multi-integrations-not-supported');
@@ -76,9 +94,13 @@ describe('AlertsSettingsForm', () => {
findFormToggle().vm.$emit('change', true);
};
+ afterEach(() => {
+ apolloProvider = null;
+ });
+
describe('with default values', () => {
- beforeEach(() => {
- createComponent();
+ beforeEach(async () => {
+ await createComponent();
});
it('render the initial form with only an integration type dropdown', () => {
@@ -94,21 +116,23 @@ describe('AlertsSettingsForm', () => {
expect(findFormFields().at(0).isVisible()).toBe(true);
});
- it('disables the dropdown and shows help text when multi integrations are not supported', () => {
- createComponent({ props: { canAddIntegration: false } });
+ it('disables the dropdown and shows help text when multi integrations are not supported', async () => {
+ await createComponent({ props: { canAddIntegration: false } });
+
expect(findSelect().attributes('disabled')).toBeDefined();
expect(findMultiSupportText().exists()).toBe(true);
});
it('hides the name input when the selected value is prometheus', async () => {
- createComponent();
+ await createComponent();
await selectOptionAtIndex(2);
expect(findFormFields()).toHaveLength(0);
});
- it('verify pricing link url', () => {
- createComponent({ props: { canAddIntegration: false } });
+ it('verify pricing link url', async () => {
+ await createComponent({ props: { canAddIntegration: false } });
+
const link = findMultiSupportText().findComponent(GlLink);
expect(link.attributes('href')).toMatch(/https:\/\/about.gitlab.(com|cn)\/pricing/);
});
@@ -118,24 +142,19 @@ describe('AlertsSettingsForm', () => {
expect(findTabs()).toHaveLength(3);
});
- it('only first tab is enabled on integration create', () => {
- createComponent({
- data: {
- currentIntegration: null,
- },
- });
+ it('only first tab is enabled on integration create', async () => {
+ await createComponent();
+
const tabs = findTabs();
expect(tabs.at(0).find('[role="tabpanel"]').classes('disabled')).toBe(false);
expect(tabs.at(1).find('[role="tabpanel"]').classes('disabled')).toBe(true);
expect(tabs.at(2).find('[role="tabpanel"]').classes('disabled')).toBe(true);
});
- it('all tabs are enabled on integration edit', () => {
- createComponent({
- data: {
- currentIntegration: { id: 1 },
- },
- });
+ it('all tabs are enabled on integration edit', async () => {
+ const currentIntegration = { id: 1 };
+ await createComponent({ currentIntegration });
+
const tabs = findTabs();
expect(tabs.at(0).find('[role="tabpanel"]').classes('disabled')).toBe(false);
expect(tabs.at(1).find('[role="tabpanel"]').classes('disabled')).toBe(false);
@@ -147,10 +166,7 @@ describe('AlertsSettingsForm', () => {
describe('submitting integration form', () => {
describe('HTTP', () => {
it('create with custom mapping', async () => {
- createComponent({
- multiIntegrations: true,
- props: { alertFields },
- });
+ await createComponent({ props: { alertFields } });
const integrationName = 'Test integration';
await selectOptionAtIndex(1);
@@ -172,25 +188,23 @@ describe('AlertsSettingsForm', () => {
});
});
- it('update', () => {
- createComponent({
- data: {
- integrationForm: { id: '1', name: 'Test integration pre', type: typeSet.http },
- currentIntegration: { id: '1' },
- },
- props: {
- loading: false,
- },
- });
+ it('update', async () => {
+ const currentIntegration = {
+ id: '1',
+ name: 'Test integration pre',
+ type: typeSet.http,
+ };
+ await createComponent({ currentIntegration });
const updatedIntegrationName = 'Test integration post';
enableIntegration(0, updatedIntegrationName);
- const submitBtn = findSubmitButton();
- expect(submitBtn.exists()).toBe(true);
- expect(submitBtn.text()).toBe('Save integration');
+ expect(findSubmitButton().exists()).toBe(true);
+ expect(findSubmitButton().text()).toBe('Save integration');
+
+ await nextTick();
+ await findSubmitButton().trigger('click');
- submitBtn.trigger('click');
expect(wrapper.emitted('update-integration')[0][0]).toMatchObject({
type: typeSet.http,
variables: {
@@ -205,13 +219,12 @@ describe('AlertsSettingsForm', () => {
describe('PROMETHEUS', () => {
it('create', async () => {
- createComponent();
+ await createComponent();
await selectOptionAtIndex(2);
enableIntegration(0);
- const submitBtn = findSubmitButton();
- expect(submitBtn.exists()).toBe(true);
- expect(submitBtn.text()).toBe('Save integration');
+ expect(findSubmitButton().exists()).toBe(true);
+ expect(findSubmitButton().text()).toBe('Save integration');
findForm().trigger('submit');
@@ -221,22 +234,17 @@ describe('AlertsSettingsForm', () => {
});
});
- it('update', () => {
- createComponent({
- data: {
- integrationForm: { id: '1', type: typeSet.prometheus },
- currentIntegration: { id: '1' },
- },
- props: {
- loading: false,
- },
- });
+ it('update', async () => {
+ const currentIntegration = {
+ id: '1',
+ type: typeSet.prometheus,
+ };
+ await createComponent({ currentIntegration });
enableIntegration(0);
- const submitBtn = findSubmitButton();
- expect(submitBtn.exists()).toBe(true);
- expect(submitBtn.text()).toBe('Save integration');
+ expect(findSubmitButton().exists()).toBe(true);
+ expect(findSubmitButton().text()).toBe('Save integration');
findForm().trigger('submit');
@@ -249,16 +257,9 @@ describe('AlertsSettingsForm', () => {
});
describe('submitting the integration with a JSON test payload', () => {
- beforeEach(() => {
- createComponent({
- data: {
- currentIntegration: { id: '1', name: 'Test' },
- active: true,
- },
- props: {
- loading: false,
- },
- });
+ beforeEach(async () => {
+ const currentIntegration = { id: '1', name: 'Test' };
+ await createComponent({ currentIntegration });
});
it('should not allow a user to test invalid JSON', async () => {
@@ -285,17 +286,18 @@ describe('AlertsSettingsForm', () => {
describe('Test payload section for HTTP integration', () => {
const validSamplePayload = JSON.stringify(alertFields);
const emptySamplePayload = '{}';
- beforeEach(() => {
- createComponent({
- multiIntegrations: true,
- data: {
- integrationForm: { type: typeSet.http },
- currentIntegration: {
- payloadExample: emptySamplePayload,
- },
- active: false,
- resetPayloadAndMappingConfirmed: false,
- },
+
+ beforeEach(async () => {
+ const currentIntegration = {
+ id: '1',
+ name: 'Test',
+ type: typeSet.http,
+ payloadExample: emptySamplePayload,
+ payloadAttributeMappings: [],
+ };
+
+ await createComponent({
+ currentIntegration,
props: { alertFields },
});
});
@@ -314,14 +316,25 @@ describe('AlertsSettingsForm', () => {
const validPayloadMsg = payload === emptySamplePayload ? 'not valid' : 'valid';
it(`textarea should be ${enabledState} when payload reset ${payloadResetMsg} and payload is ${validPayloadMsg}`, async () => {
- // setData usage is discouraged. See https://gitlab.com/groups/gitlab-org/-/epics/7330 for details
- // eslint-disable-next-line no-restricted-syntax
- wrapper.setData({
- currentIntegration: { payloadExample: payload },
- resetPayloadAndMappingConfirmed,
+ const currentIntegration = {
+ id: '1',
+ name: 'Test',
+ type: typeSet.http,
+ payloadExample: payload,
+ payloadAttributeMappings: [],
+ };
+
+ await createComponent({
+ currentIntegration,
+ props: { alertFields },
});
+ if (resetPayloadAndMappingConfirmed) {
+ findResetPayloadModal().vm.$emit('ok');
+ }
+
await nextTick();
+
expect(
findSamplePayloadSection().findComponent(GlFormTextarea).attributes('disabled'),
).toBe(disabled);
@@ -342,14 +355,21 @@ describe('AlertsSettingsForm', () => {
: 'was not confirmed';
it(`shows ${caption} button when sample payload ${samplePayloadMsg} and payload reset ${payloadResetMsg}`, async () => {
- // setData usage is discouraged. See https://gitlab.com/groups/gitlab-org/-/epics/7330 for details
- // eslint-disable-next-line no-restricted-syntax
- wrapper.setData({
- currentIntegration: {
- payloadExample,
- },
- resetPayloadAndMappingConfirmed,
+ const currentIntegration = {
+ type: typeSet.http,
+ payloadExample,
+ payloadAttributeMappings: [],
+ };
+
+ await createComponent({
+ currentIntegration,
+ props: { alertFields },
});
+
+ if (resetPayloadAndMappingConfirmed) {
+ findResetPayloadModal().vm.$emit('ok');
+ }
+
await nextTick();
expect(findActionBtn().text()).toBe(caption);
});
@@ -357,14 +377,6 @@ describe('AlertsSettingsForm', () => {
});
describe('Parsing payload', () => {
- beforeEach(() => {
- // setData usage is discouraged. See https://gitlab.com/groups/gitlab-org/-/epics/7330 for details
- // eslint-disable-next-line no-restricted-syntax
- wrapper.setData({
- resetPayloadAndMappingConfirmed: true,
- });
- });
-
it('displays a toast message on successful parse', async () => {
jest.spyOn(wrapper.vm.$apollo, 'query').mockResolvedValue({
data: {
@@ -408,7 +420,7 @@ describe('AlertsSettingsForm', () => {
const multiIntegrationsEnabled = multiIntegrations ? 'enabled' : 'not enabled';
it(`is ${visibleMsg} when multiIntegrations are ${multiIntegrationsEnabled}, integration type is ${integrationType} and alert fields are ${alertFieldsMsg}`, async () => {
- createComponent({
+ await createComponent({
multiIntegrations,
props: {
alertFields: alertFieldsProvided ? alertFields : [],
@@ -423,8 +435,8 @@ describe('AlertsSettingsForm', () => {
});
describe('Form validation', () => {
- beforeEach(() => {
- createComponent();
+ beforeEach(async () => {
+ await createComponent();
});
it('should not be able to submit when no integration type is selected', async () => {
@@ -452,39 +464,29 @@ describe('AlertsSettingsForm', () => {
});
it('should be able to submit when form is dirty', async () => {
- // setData usage is discouraged. See https://gitlab.com/groups/gitlab-org/-/epics/7330 for details
- // eslint-disable-next-line no-restricted-syntax
- wrapper.setData({
- currentIntegration: { type: typeSet.http, name: 'Existing integration' },
- });
- await nextTick();
- await findFormFields().at(0).vm.$emit('input', 'Updated name');
+ const currentIntegration = { type: typeSet.http, name: 'Existing integration' };
+ await createComponent({ currentIntegration });
+ await findFormFields().at(0).vm.$emit('input', 'Updated name');
expect(findSubmitButton().attributes('disabled')).toBe(undefined);
});
it('should not be able to submit when form is pristine', async () => {
- // setData usage is discouraged. See https://gitlab.com/groups/gitlab-org/-/epics/7330 for details
- // eslint-disable-next-line no-restricted-syntax
- wrapper.setData({
- currentIntegration: { type: typeSet.http, name: 'Existing integration' },
- });
- await nextTick();
-
+ const currentIntegration = { type: typeSet.http, name: 'Existing integration' };
+ await createComponent({ currentIntegration });
expect(findSubmitButton().attributes('disabled')).toBeDefined();
});
it('should disable submit button after click on validation failure', async () => {
await selectOptionAtIndex(1);
- findSubmitButton().trigger('click');
- await nextTick();
+ await findSubmitButton().trigger('click');
expect(findSubmitButton().attributes('disabled')).toBeDefined();
});
it('should scroll to invalid field on validation failure', async () => {
await selectOptionAtIndex(1);
- findSubmitButton().trigger('click');
+ await findSubmitButton().trigger('click');
expect(scrollIntoViewMock).toHaveBeenCalledWith({ behavior: 'smooth', block: 'center' });
});
diff --git a/spec/frontend/behaviors/secret_values_spec.js b/spec/frontend/behaviors/secret_values_spec.js
deleted file mode 100644
index 06155017dd1..00000000000
--- a/spec/frontend/behaviors/secret_values_spec.js
+++ /dev/null
@@ -1,230 +0,0 @@
-import SecretValues from '~/behaviors/secret_values';
-
-function generateValueMarkup(
- secret,
- valueClass = 'js-secret-value',
- placeholderClass = 'js-secret-value-placeholder',
-) {
- return `
- <div class="${placeholderClass}">
- ***
- </div>
- <div class="hidden ${valueClass}">
- ${secret}
- </div>
- `;
-}
-
-function generateFixtureMarkup(secrets, isRevealed, valueClass, placeholderClass) {
- return `
- <div class="js-secret-container">
- ${secrets.map((secret) => generateValueMarkup(secret, valueClass, placeholderClass)).join('')}
- <button
- class="js-secret-value-reveal-button"
- data-secret-reveal-status="${isRevealed}"
- >
- ...
- </button>
- </div>
- `;
-}
-
-function setupSecretFixture(
- secrets,
- isRevealed,
- valueClass = 'js-secret-value',
- placeholderClass = 'js-secret-value-placeholder',
-) {
- const wrapper = document.createElement('div');
- wrapper.innerHTML = generateFixtureMarkup(secrets, isRevealed, valueClass, placeholderClass);
-
- const secretValues = new SecretValues({
- container: wrapper.querySelector('.js-secret-container'),
- valueSelector: `.${valueClass}`,
- placeholderSelector: `.${placeholderClass}`,
- });
- secretValues.init();
-
- return wrapper;
-}
-
-describe('setupSecretValues', () => {
- describe('with a single secret', () => {
- const secrets = ['mysecret123'];
-
- it('should have correct "Reveal" label when values are hidden', () => {
- const wrapper = setupSecretFixture(secrets, false);
- const revealButton = wrapper.querySelector('.js-secret-value-reveal-button');
-
- expect(revealButton.textContent).toEqual('Reveal value');
- });
-
- it('should have correct "Hide" label when values are shown', () => {
- const wrapper = setupSecretFixture(secrets, true);
- const revealButton = wrapper.querySelector('.js-secret-value-reveal-button');
-
- expect(revealButton.textContent).toEqual('Hide value');
- });
-
- it('should have value hidden initially', () => {
- const wrapper = setupSecretFixture(secrets, false);
- const values = wrapper.querySelectorAll('.js-secret-value');
- const placeholders = wrapper.querySelectorAll('.js-secret-value-placeholder');
-
- expect(values.length).toEqual(1);
- expect(values[0].classList.contains('hide')).toEqual(true);
- expect(placeholders.length).toEqual(1);
- expect(placeholders[0].classList.contains('hide')).toEqual(false);
- });
-
- it('should toggle value and placeholder', () => {
- const wrapper = setupSecretFixture(secrets, false);
- const revealButton = wrapper.querySelector('.js-secret-value-reveal-button');
- const values = wrapper.querySelectorAll('.js-secret-value');
- const placeholders = wrapper.querySelectorAll('.js-secret-value-placeholder');
-
- revealButton.click();
-
- expect(values.length).toEqual(1);
- expect(values[0].classList.contains('hide')).toEqual(false);
- expect(placeholders.length).toEqual(1);
- expect(placeholders[0].classList.contains('hide')).toEqual(true);
-
- revealButton.click();
-
- expect(values.length).toEqual(1);
- expect(values[0].classList.contains('hide')).toEqual(true);
- expect(placeholders.length).toEqual(1);
- expect(placeholders[0].classList.contains('hide')).toEqual(false);
- });
- });
-
- describe('with a multiple secrets', () => {
- const secrets = ['mysecret123', 'happygoat456', 'tanuki789'];
-
- it('should have correct "Reveal" label when values are hidden', () => {
- const wrapper = setupSecretFixture(secrets, false);
- const revealButton = wrapper.querySelector('.js-secret-value-reveal-button');
-
- expect(revealButton.textContent).toEqual('Reveal values');
- });
-
- it('should have correct "Hide" label when values are shown', () => {
- const wrapper = setupSecretFixture(secrets, true);
- const revealButton = wrapper.querySelector('.js-secret-value-reveal-button');
-
- expect(revealButton.textContent).toEqual('Hide values');
- });
-
- it('should have all values hidden initially', () => {
- const wrapper = setupSecretFixture(secrets, false);
- const values = wrapper.querySelectorAll('.js-secret-value');
- const placeholders = wrapper.querySelectorAll('.js-secret-value-placeholder');
-
- expect(values.length).toEqual(3);
- values.forEach((value) => {
- expect(value.classList.contains('hide')).toEqual(true);
- });
-
- expect(placeholders.length).toEqual(3);
- placeholders.forEach((placeholder) => {
- expect(placeholder.classList.contains('hide')).toEqual(false);
- });
- });
-
- it('should toggle values and placeholders', () => {
- const wrapper = setupSecretFixture(secrets, false);
- const revealButton = wrapper.querySelector('.js-secret-value-reveal-button');
- const values = wrapper.querySelectorAll('.js-secret-value');
- const placeholders = wrapper.querySelectorAll('.js-secret-value-placeholder');
-
- revealButton.click();
-
- expect(values.length).toEqual(3);
- values.forEach((value) => {
- expect(value.classList.contains('hide')).toEqual(false);
- });
-
- expect(placeholders.length).toEqual(3);
- placeholders.forEach((placeholder) => {
- expect(placeholder.classList.contains('hide')).toEqual(true);
- });
-
- revealButton.click();
-
- expect(values.length).toEqual(3);
- values.forEach((value) => {
- expect(value.classList.contains('hide')).toEqual(true);
- });
-
- expect(placeholders.length).toEqual(3);
- placeholders.forEach((placeholder) => {
- expect(placeholder.classList.contains('hide')).toEqual(false);
- });
- });
- });
-
- describe('with dynamic secrets', () => {
- const secrets = ['mysecret123', 'happygoat456', 'tanuki789'];
-
- it('should toggle values and placeholders', () => {
- const wrapper = setupSecretFixture(secrets, false);
- // Insert the new dynamic row
- wrapper
- .querySelector('.js-secret-container')
- .insertAdjacentHTML('afterbegin', generateValueMarkup('foobarbazdynamic'));
-
- const revealButton = wrapper.querySelector('.js-secret-value-reveal-button');
- const values = wrapper.querySelectorAll('.js-secret-value');
- const placeholders = wrapper.querySelectorAll('.js-secret-value-placeholder');
-
- revealButton.click();
-
- expect(values.length).toEqual(4);
- values.forEach((value) => {
- expect(value.classList.contains('hide')).toEqual(false);
- });
-
- expect(placeholders.length).toEqual(4);
- placeholders.forEach((placeholder) => {
- expect(placeholder.classList.contains('hide')).toEqual(true);
- });
-
- revealButton.click();
-
- expect(values.length).toEqual(4);
- values.forEach((value) => {
- expect(value.classList.contains('hide')).toEqual(true);
- });
-
- expect(placeholders.length).toEqual(4);
- placeholders.forEach((placeholder) => {
- expect(placeholder.classList.contains('hide')).toEqual(false);
- });
- });
- });
-
- describe('selector options', () => {
- const secrets = ['mysecret123'];
-
- it('should respect `valueSelector` and `placeholderSelector` options', () => {
- const valueClass = 'js-some-custom-placeholder-selector';
- const placeholderClass = 'js-some-custom-value-selector';
-
- const wrapper = setupSecretFixture(secrets, false, valueClass, placeholderClass);
- const values = wrapper.querySelectorAll(`.${valueClass}`);
- const placeholders = wrapper.querySelectorAll(`.${placeholderClass}`);
- const revealButton = wrapper.querySelector('.js-secret-value-reveal-button');
-
- expect(values.length).toEqual(1);
- expect(placeholders.length).toEqual(1);
-
- revealButton.click();
-
- expect(values.length).toEqual(1);
- expect(values[0].classList.contains('hide')).toEqual(false);
- expect(placeholders.length).toEqual(1);
- expect(placeholders[0].classList.contains('hide')).toEqual(true);
- });
- });
-});
diff --git a/spec/frontend/blob/openapi/index_spec.js b/spec/frontend/blob/openapi/index_spec.js
index c96a021550d..fe98f46d013 100644
--- a/spec/frontend/blob/openapi/index_spec.js
+++ b/spec/frontend/blob/openapi/index_spec.js
@@ -1,24 +1,25 @@
-import axios from 'axios';
-import MockAdapter from 'axios-mock-adapter';
+import SwaggerClient from 'swagger-client';
import { TEST_HOST } from 'helpers/test_constants';
import { setHTMLFixture, resetHTMLFixture } from 'helpers/fixtures';
import renderOpenApi from '~/blob/openapi';
-import { HTTP_STATUS_OK } from '~/lib/utils/http_status';
import setWindowLocation from 'helpers/set_window_location_helper';
describe('OpenAPI blob viewer', () => {
const id = 'js-openapi-viewer';
const mockEndpoint = 'some/endpoint';
- let mock;
beforeEach(() => {
+ jest.spyOn(SwaggerClient, 'resolve').mockReturnValue(Promise.resolve({ spec: 'some spec' }));
setHTMLFixture(`<div id="${id}" data-endpoint="${mockEndpoint}"></div>`);
- mock = new MockAdapter(axios).onGet().reply(HTTP_STATUS_OK);
});
afterEach(() => {
resetHTMLFixture();
- mock.restore();
+ });
+
+ it('bundles the spec file', async () => {
+ await renderOpenApi();
+ expect(SwaggerClient.resolve).toHaveBeenCalledWith({ url: mockEndpoint });
});
describe('without config options', () => {
diff --git a/spec/frontend/boards/board_list_helper.js b/spec/frontend/boards/board_list_helper.js
index e3afd2dec2f..1ee4a7353ce 100644
--- a/spec/frontend/boards/board_list_helper.js
+++ b/spec/frontend/boards/board_list_helper.js
@@ -64,7 +64,6 @@ export default function createComponent({
disabled: false,
boardType: 'group',
issuableType: 'issue',
- isApolloBoard: true,
...provide,
},
stubs,
diff --git a/spec/frontend/boards/board_list_spec.js b/spec/frontend/boards/board_list_spec.js
index 8d59cb2692e..ad5804f6eb7 100644
--- a/spec/frontend/boards/board_list_spec.js
+++ b/spec/frontend/boards/board_list_spec.js
@@ -5,6 +5,7 @@ import { DraggableItemTypes, ListType } from 'ee_else_ce/boards/constants';
import { useFakeRequestAnimationFrame } from 'helpers/fake_request_animation_frame';
import waitForPromises from 'helpers/wait_for_promises';
import createComponent from 'jest/boards/board_list_helper';
+import { ESC_KEY_CODE } from '~/lib/utils/keycodes';
import BoardCard from '~/boards/components/board_card.vue';
import eventHub from '~/boards/eventhub';
import BoardCardMoveToPosition from '~/boards/components/board_card_move_to_position.vue';
@@ -203,9 +204,38 @@ describe('Board list component', () => {
expect(document.body.classList.contains('is-dragging')).toBe(true);
});
+
+ it('attaches `keyup` event listener on document', async () => {
+ jest.spyOn(document, 'addEventListener');
+ findDraggable().vm.$emit('start', {
+ item: {
+ dataset: {
+ draggableItemType: DraggableItemTypes.card,
+ },
+ },
+ });
+ await nextTick();
+
+ expect(document.addEventListener).toHaveBeenCalledWith('keyup', expect.any(Function));
+ });
});
describe('handleDragOnEnd', () => {
+ const getDragEndParam = (draggableItemType) => ({
+ oldIndex: 1,
+ newIndex: 0,
+ item: {
+ dataset: {
+ draggableItemType,
+ itemId: mockIssues[0].id,
+ itemIid: mockIssues[0].iid,
+ itemPath: mockIssues[0].referencePath,
+ },
+ },
+ to: { children: [], dataset: { listId: 'gid://gitlab/List/1' } },
+ from: { dataset: { listId: 'gid://gitlab/List/2' } },
+ });
+
beforeEach(() => {
startDrag();
});
@@ -213,42 +243,39 @@ describe('Board list component', () => {
it('removes class `is-dragging` from document body', () => {
document.body.classList.add('is-dragging');
- endDrag({
- oldIndex: 1,
- newIndex: 0,
- item: {
- dataset: {
- draggableItemType: DraggableItemTypes.card,
- itemId: mockIssues[0].id,
- itemIid: mockIssues[0].iid,
- itemPath: mockIssues[0].referencePath,
- },
- },
- to: { children: [], dataset: { listId: 'gid://gitlab/List/1' } },
- from: { dataset: { listId: 'gid://gitlab/List/2' } },
- });
+ endDrag(getDragEndParam(DraggableItemTypes.card));
expect(document.body.classList.contains('is-dragging')).toBe(false);
});
it(`should not handle the event if the dragged item is not a "${DraggableItemTypes.card}"`, () => {
- endDrag({
- oldIndex: 1,
- newIndex: 0,
- item: {
- dataset: {
- draggableItemType: DraggableItemTypes.list,
- itemId: mockIssues[0].id,
- itemIid: mockIssues[0].iid,
- itemPath: mockIssues[0].referencePath,
- },
- },
- to: { children: [], dataset: { listId: 'gid://gitlab/List/1' } },
- from: { dataset: { listId: 'gid://gitlab/List/2' } },
- });
+ endDrag(getDragEndParam(DraggableItemTypes.list));
expect(document.body.classList.contains('is-dragging')).toBe(true);
});
+
+ it('detaches `keyup` event listener on document', async () => {
+ jest.spyOn(document, 'removeEventListener');
+
+ findDraggable().vm.$emit('end', getDragEndParam(DraggableItemTypes.card));
+ await nextTick();
+
+ expect(document.removeEventListener).toHaveBeenCalledWith('keyup', expect.any(Function));
+ });
+ });
+
+ describe('handleKeyUp', () => {
+ it('dispatches `mouseup` event when Escape key is pressed', () => {
+ jest.spyOn(document, 'dispatchEvent');
+
+ document.dispatchEvent(
+ new Event('keyup', {
+ keyCode: ESC_KEY_CODE,
+ }),
+ );
+
+ expect(document.dispatchEvent).toHaveBeenCalledWith(new Event('mouseup'));
+ });
});
});
diff --git a/spec/frontend/boards/components/board_add_new_column_form_spec.js b/spec/frontend/boards/components/board_add_new_column_form_spec.js
index 719e36629c2..406ce007088 100644
--- a/spec/frontend/boards/components/board_add_new_column_form_spec.js
+++ b/spec/frontend/boards/components/board_add_new_column_form_spec.js
@@ -1,37 +1,17 @@
-import Vue from 'vue';
-// eslint-disable-next-line no-restricted-imports
-import Vuex from 'vuex';
import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
import BoardAddNewColumnForm from '~/boards/components/board_add_new_column_form.vue';
-import defaultState from '~/boards/stores/state';
import { mockLabelList } from '../mock_data';
-Vue.use(Vuex);
-
describe('BoardAddNewColumnForm', () => {
let wrapper;
- const createStore = ({ actions = {}, getters = {}, state = {} } = {}) => {
- return new Vuex.Store({
- state: {
- ...defaultState,
- ...state,
- },
- actions,
- getters,
- });
- };
-
- const mountComponent = ({ searchLabel = '', selectedIdValid = true, actions, slots } = {}) => {
+ const mountComponent = ({ searchLabel = '', selectedIdValid = true, slots } = {}) => {
wrapper = shallowMountExtended(BoardAddNewColumnForm, {
propsData: {
searchLabel,
selectedIdValid,
},
slots,
- store: createStore({
- actions,
- }),
});
};
diff --git a/spec/frontend/boards/components/board_add_new_column_trigger_spec.js b/spec/frontend/boards/components/board_add_new_column_trigger_spec.js
index 396ec7d67cd..f536a1e6c64 100644
--- a/spec/frontend/boards/components/board_add_new_column_trigger_spec.js
+++ b/spec/frontend/boards/components/board_add_new_column_trigger_spec.js
@@ -1,14 +1,8 @@
import { GlButton } from '@gitlab/ui';
-import Vue from 'vue';
-// eslint-disable-next-line no-restricted-imports
-import Vuex from 'vuex';
import { mountExtended } from 'helpers/vue_test_utils_helper';
import BoardAddNewColumnTrigger from '~/boards/components/board_add_new_column_trigger.vue';
-import { createStore } from '~/boards/stores';
import { createMockDirective, getBinding } from 'helpers/vue_mock_directive';
-Vue.use(Vuex);
-
describe('BoardAddNewColumnTrigger', () => {
let wrapper;
@@ -24,7 +18,6 @@ describe('BoardAddNewColumnTrigger', () => {
propsData: {
isNewListShowing,
},
- store: createStore(),
});
};
diff --git a/spec/frontend/boards/components/board_app_spec.js b/spec/frontend/boards/components/board_app_spec.js
index 157c76b4fff..9452e3e10c9 100644
--- a/spec/frontend/boards/components/board_app_spec.js
+++ b/spec/frontend/boards/components/board_app_spec.js
@@ -47,7 +47,7 @@ describe('BoardApp', () => {
beforeEach(async () => {
cacheUpdates.setError = jest.fn();
- createComponent({ isApolloBoard: true });
+ createComponent();
await nextTick();
});
@@ -60,7 +60,7 @@ describe('BoardApp', () => {
});
it('should not have is-compact class when no card is selected', async () => {
- createComponent({ isApolloBoard: true, issue: {} });
+ createComponent({ issue: {} });
await nextTick();
expect(wrapper.classes()).not.toContain('is-compact');
@@ -69,14 +69,14 @@ describe('BoardApp', () => {
it('refetches lists when updateBoard event is received', async () => {
jest.spyOn(eventHub, '$on').mockImplementation(() => {});
- createComponent({ isApolloBoard: true });
+ createComponent();
await waitForPromises();
expect(eventHub.$on).toHaveBeenCalledWith('updateBoard', wrapper.vm.refetchLists);
});
it('sets error on fetch lists failure', async () => {
- createComponent({ isApolloBoard: true, handler: boardListQueryHandlerFailure });
+ createComponent({ handler: boardListQueryHandlerFailure });
await waitForPromises();
diff --git a/spec/frontend/boards/components/board_card_move_to_position_spec.js b/spec/frontend/boards/components/board_card_move_to_position_spec.js
index d3c43a4e054..27cb575c067 100644
--- a/spec/frontend/boards/components/board_card_move_to_position_spec.js
+++ b/spec/frontend/boards/components/board_card_move_to_position_spec.js
@@ -1,7 +1,4 @@
import { shallowMount } from '@vue/test-utils';
-import Vue from 'vue';
-// eslint-disable-next-line no-restricted-imports
-import Vuex from 'vuex';
import { GlDisclosureDropdown, GlDisclosureDropdownItem } from '@gitlab/ui';
import {
BOARD_CARD_MOVE_TO_POSITIONS_START_OPTION,
@@ -11,8 +8,6 @@ import BoardCardMoveToPosition from '~/boards/components/board_card_move_to_posi
import { mockList, mockIssue2 } from 'jest/boards/mock_data';
import { mockTracking, unmockTracking } from 'helpers/tracking_helper';
-Vue.use(Vuex);
-
const dropdownOptions = [
{
text: BOARD_CARD_MOVE_TO_POSITIONS_START_OPTION,
@@ -27,15 +22,10 @@ const dropdownOptions = [
describe('Board Card Move to position', () => {
let wrapper;
let trackingSpy;
- let store;
const itemIndex = 1;
- const createComponent = (propsData, isApolloBoard = false) => {
+ const createComponent = (propsData) => {
wrapper = shallowMount(BoardCardMoveToPosition, {
- store,
- provide: {
- isApolloBoard,
- },
propsData: {
item: mockIssue2,
list: mockList,
diff --git a/spec/frontend/boards/components/board_card_spec.js b/spec/frontend/boards/components/board_card_spec.js
index dae0db27104..1781c58c11f 100644
--- a/spec/frontend/boards/components/board_card_spec.js
+++ b/spec/frontend/boards/components/board_card_spec.js
@@ -76,7 +76,6 @@ describe('Board card', () => {
isGroupBoard: true,
disabled: false,
allowSubEpics: false,
- isApolloBoard: true,
...provide,
},
});
diff --git a/spec/frontend/boards/components/board_content_spec.js b/spec/frontend/boards/components/board_content_spec.js
index 706f84ad319..3b02a33bf7d 100644
--- a/spec/frontend/boards/components/board_content_spec.js
+++ b/spec/frontend/boards/components/board_content_spec.js
@@ -147,7 +147,7 @@ describe('BoardContent', () => {
describe('when error is passed', () => {
beforeEach(async () => {
- createComponent({ props: { apolloError: 'Error' } });
+ createComponent({ props: { error: 'Error' } });
await waitForPromises();
});
diff --git a/spec/frontend/boards/components/board_top_bar_spec.js b/spec/frontend/boards/components/board_top_bar_spec.js
index 03526600114..477c504ecba 100644
--- a/spec/frontend/boards/components/board_top_bar_spec.js
+++ b/spec/frontend/boards/components/board_top_bar_spec.js
@@ -56,7 +56,6 @@ describe('BoardTopBar', () => {
isIssueBoard: true,
isEpicBoard: false,
isGroupBoard: true,
- // isApolloBoard: false,
...provide,
},
stubs: { IssueBoardFilteredSearch },
diff --git a/spec/frontend/boards/components/boards_selector_spec.js b/spec/frontend/boards/components/boards_selector_spec.js
index 8766b1c25f2..db5243732c6 100644
--- a/spec/frontend/boards/components/boards_selector_spec.js
+++ b/spec/frontend/boards/components/boards_selector_spec.js
@@ -94,7 +94,6 @@ describe('BoardsSelector', () => {
boardType: isGroupBoard ? 'group' : 'project',
isGroupBoard,
isProjectBoard,
- // isApolloBoard: false,
...provide,
},
});
diff --git a/spec/frontend/boards/components/config_toggle_spec.js b/spec/frontend/boards/components/config_toggle_spec.js
index 3d505038331..915dafc8a89 100644
--- a/spec/frontend/boards/components/config_toggle_spec.js
+++ b/spec/frontend/boards/components/config_toggle_spec.js
@@ -1,22 +1,15 @@
-// eslint-disable-next-line no-restricted-imports
-import Vuex from 'vuex';
-import Vue from 'vue';
import { shallowMount } from '@vue/test-utils';
import { GlButton } from '@gitlab/ui';
import { __ } from '~/locale';
import ConfigToggle from '~/boards/components/config_toggle.vue';
import eventHub from '~/boards/eventhub';
-import store from '~/boards/stores';
import { mockTracking } from 'helpers/tracking_helper';
describe('ConfigToggle', () => {
let wrapper;
- Vue.use(Vuex);
-
const createComponent = (provide = {}, props = {}) =>
shallowMount(ConfigToggle, {
- store,
provide: {
canAdminList: true,
...provide,
diff --git a/spec/frontend/boards/mock_data.js b/spec/frontend/boards/mock_data.js
index 3a5e108ac07..c2587b17409 100644
--- a/spec/frontend/boards/mock_data.js
+++ b/spec/frontend/boards/mock_data.js
@@ -1,6 +1,5 @@
import { GlFilteredSearchToken } from '@gitlab/ui';
import { keyBy } from 'lodash';
-import { ListType } from '~/boards/constants';
import {
OPERATORS_IS,
OPERATORS_IS_NOT,
@@ -70,19 +69,6 @@ export const mockGroupBoardResponse = {
},
};
-export const mockBoardConfig = {
- milestoneId: 'gid://gitlab/Milestone/114',
- milestoneTitle: '14.9',
- iterationId: 'gid://gitlab/Iteration/124',
- iterationTitle: 'Iteration 9',
- iterationCadenceId: 'gid://gitlab/Iteration::Cadence/134',
- assigneeId: 'gid://gitlab/User/1',
- assigneeUsername: 'admin',
- labels: [{ id: 'gid://gitlab/Label/32', title: 'Deliverable' }],
- labelIds: ['gid://gitlab/Label/32'],
- weight: 2,
-};
-
export const boardObj = {
id: 1,
name: 'test',
@@ -238,17 +224,6 @@ export const mockMilestone = {
due_date: '2019-12-31',
};
-export const mockMilestones = [
- {
- id: 'gid://gitlab/Milestone/1',
- title: 'Milestone 1',
- },
- {
- id: 'gid://gitlab/Milestone/2',
- title: 'Milestone 2',
- },
-];
-
export const assignees = [
{
id: 'gid://gitlab/User/2',
@@ -405,14 +380,6 @@ export const mockEpic = {
},
};
-export const mockActiveIssue = {
- ...mockIssue,
- id: 'gid://gitlab/Issue/436',
- iid: '27',
- subscribed: false,
- emailsDisabled: false,
-};
-
export const mockIssue2 = {
...rawIssue,
id: 'gid://gitlab/Issue/437',
@@ -588,11 +555,6 @@ export const mockLists = [mockList, mockLabelList];
export const mockListsById = keyBy(mockLists, 'id');
-export const mockIssuesByListId = {
- 'gid://gitlab/List/1': [mockIssue.id, mockIssue3.id, mockIssue4.id],
- 'gid://gitlab/List/2': mockIssues.map(({ id }) => id),
-};
-
export const participants = [
{
id: '1',
@@ -633,21 +595,8 @@ export const mockGroupProject2 = {
archived: false,
};
-export const mockArchivedGroupProject = {
- id: 2,
- name: 'Archived Project',
- nameWithNamespace: 'Awesome Group / Archived Project',
- fullPath: 'awesome-group/archived-project',
- archived: true,
-};
-
export const mockGroupProjects = [mockGroupProject1, mockGroupProject2];
-export const mockActiveGroupProjects = [
- { ...mockGroupProject1, archived: false },
- { ...mockGroupProject2, archived: false },
-];
-
export const mockIssueGroupPath = 'gitlab-org';
export const mockIssueProjectPath = `${mockIssueGroupPath}/gitlab-test`;
@@ -778,33 +727,6 @@ export const mockMoveIssueParams = {
moveAfterId: undefined,
};
-export const mockMoveState = {
- boardLists: {
- 'gid://gitlab/List/1': {
- listType: ListType.backlog,
- },
- 'gid://gitlab/List/2': {
- listType: ListType.closed,
- },
- },
- boardItems: {
- [mockMoveIssueParams.itemId]: { foo: 'bar' },
- },
- boardItemsByListId: {
- [mockMoveIssueParams.fromListId]: [mockMoveIssueParams.itemId],
- [mockMoveIssueParams.toListId]: [],
- },
-};
-
-export const mockMoveData = {
- reordering: false,
- shouldClone: false,
- itemNotInToList: true,
- originalIndex: 0,
- originalIssue: { foo: 'bar' },
- ...mockMoveIssueParams,
-};
-
export const mockEmojiToken = {
type: TOKEN_TYPE_MY_REACTION,
icon: 'thumb-up',
diff --git a/spec/frontend/boards/project_select_spec.js b/spec/frontend/boards/project_select_spec.js
index f1daccfadda..6d2db10d7b8 100644
--- a/spec/frontend/boards/project_select_spec.js
+++ b/spec/frontend/boards/project_select_spec.js
@@ -69,6 +69,12 @@ describe('ProjectSelect component', () => {
expect(findGlCollapsibleListBox().exists()).toBe(true);
expect(findGlCollapsibleListBox().text()).toContain('Select a project');
});
+
+ it('passes down non archived projects to dropdown', async () => {
+ findGlCollapsibleListBox().vm.$emit('shown');
+ await nextTick();
+ expect(findGlCollapsibleListBox().props('items').length).toEqual(mockProjects.length - 1);
+ });
});
describe('when dropdown menu is open', () => {
diff --git a/spec/frontend/boards/stores/actions_spec.js b/spec/frontend/boards/stores/actions_spec.js
deleted file mode 100644
index 616bb083211..00000000000
--- a/spec/frontend/boards/stores/actions_spec.js
+++ /dev/null
@@ -1,2098 +0,0 @@
-import { cloneDeep } from 'lodash';
-import Vue from 'vue';
-// eslint-disable-next-line no-restricted-imports
-import Vuex from 'vuex';
-import * as Sentry from '~/sentry/sentry_browser_wrapper';
-import { inactiveId, ISSUABLE, ListType, DraggableItemTypes } from 'ee_else_ce/boards/constants';
-import issueMoveListMutation from 'ee_else_ce/boards/graphql/issue_move_list.mutation.graphql';
-import testAction from 'helpers/vuex_action_helper';
-import {
- formatListIssues,
- formatBoardLists,
- formatIssueInput,
- formatIssue,
- getMoveData,
- updateListPosition,
-} from 'ee_else_ce/boards/boards_util';
-import { defaultClient as gqlClient } from '~/graphql_shared/issuable_client';
-import destroyBoardListMutation from '~/boards/graphql/board_list_destroy.mutation.graphql';
-import issueCreateMutation from '~/boards/graphql/issue_create.mutation.graphql';
-import actions from '~/boards/stores/actions';
-import * as types from '~/boards/stores/mutation_types';
-import mutations from '~/boards/stores/mutations';
-import { getIdFromGraphQLId } from '~/graphql_shared/utils';
-import { TYPE_ISSUE, WORKSPACE_GROUP, WORKSPACE_PROJECT } from '~/issues/constants';
-
-import projectBoardMilestones from '~/boards/graphql/project_board_milestones.query.graphql';
-import groupBoardMilestones from '~/boards/graphql/group_board_milestones.query.graphql';
-import {
- mockBoard,
- mockBoardConfig,
- mockLists,
- mockListsById,
- mockIssue,
- mockIssue2,
- rawIssue,
- mockIssues,
- labels,
- mockActiveIssue,
- mockGroupProjects,
- mockMoveIssueParams,
- mockMoveState,
- mockMoveData,
- mockList,
- mockMilestones,
-} from '../mock_data';
-
-jest.mock('~/alert');
-
-// We need this helper to make sure projectPath is including
-// subgroups when the movIssue action is called.
-const getProjectPath = (path) => path.split('#')[0];
-
-Vue.use(Vuex);
-
-beforeEach(() => {
- window.gon = { features: {} };
-});
-
-describe('fetchBoard', () => {
- const payload = {
- fullPath: 'gitlab-org',
- fullBoardId: 'gid://gitlab/Board/1',
- boardType: 'project',
- };
-
- const queryResponse = {
- data: {
- workspace: {
- board: mockBoard,
- },
- },
- };
-
- it('should commit mutation REQUEST_CURRENT_BOARD and dispatch setBoard on success', async () => {
- jest.spyOn(gqlClient, 'query').mockResolvedValue(queryResponse);
-
- await testAction({
- action: actions.fetchBoard,
- payload,
- expectedMutations: [
- {
- type: types.REQUEST_CURRENT_BOARD,
- },
- ],
- expectedActions: [{ type: 'setBoard', payload: mockBoard }],
- });
- });
-
- it('should commit mutation RECEIVE_BOARD_FAILURE on failure', async () => {
- jest.spyOn(gqlClient, 'query').mockResolvedValue(Promise.reject());
-
- await testAction({
- action: actions.fetchBoard,
- payload,
- expectedMutations: [
- {
- type: types.REQUEST_CURRENT_BOARD,
- },
- {
- type: types.RECEIVE_BOARD_FAILURE,
- },
- ],
- });
- });
-});
-
-describe('setInitialBoardData', () => {
- it('sets data object', () => {
- const mockData = {
- foo: 'bar',
- bar: 'baz',
- };
-
- return testAction({
- action: actions.setInitialBoardData,
- payload: mockData,
- expectedMutations: [{ type: types.SET_INITIAL_BOARD_DATA, payload: mockData }],
- });
- });
-});
-
-describe('setBoardConfig', () => {
- it('sets board config object from board object', () => {
- return testAction({
- action: actions.setBoardConfig,
- payload: mockBoard,
- expectedMutations: [{ type: types.SET_BOARD_CONFIG, payload: mockBoardConfig }],
- });
- });
-});
-
-describe('setBoard', () => {
- it('dispatches setBoardConfig', () => {
- return testAction({
- action: actions.setBoard,
- payload: mockBoard,
- expectedMutations: [{ type: types.RECEIVE_BOARD_SUCCESS, payload: mockBoard }],
- expectedActions: [
- { type: 'setBoardConfig', payload: mockBoard },
- { type: 'performSearch', payload: { resetLists: true } },
- ],
- });
- });
-});
-
-describe('setFilters', () => {
- it.each([
- [
- 'with correct filters as payload',
- {
- filters: { labelName: 'label', foobar: 'not-a-filter', search: 'quick brown fox' },
- filterVariables: { labelName: 'label', search: 'quick brown fox', not: {} },
- },
- ],
- [
- "and use 'assigneeWildcardId' as filter variable for 'assigneeId' param",
- {
- filters: { assigneeId: 'None' },
- filterVariables: { assigneeWildcardId: 'NONE', not: {} },
- },
- ],
- ])('should commit mutation SET_FILTERS %s', (_, { filters, filterVariables }) => {
- const state = {
- filters: {},
- issuableType: TYPE_ISSUE,
- };
-
- return testAction(
- actions.setFilters,
- filters,
- state,
- [{ type: types.SET_FILTERS, payload: filterVariables }],
- [],
- );
- });
-});
-
-describe('performSearch', () => {
- it('should dispatch setFilters, fetchLists and resetIssues action', () => {
- return testAction(
- actions.performSearch,
- {},
- {},
- [],
- [
- { type: 'setFilters', payload: {} },
- { type: 'fetchLists', payload: { resetLists: false } },
- { type: 'resetIssues' },
- ],
- );
- });
-});
-
-describe('setActiveId', () => {
- it('should commit mutation SET_ACTIVE_ID', () => {
- const state = {
- activeId: inactiveId,
- };
-
- return testAction(
- actions.setActiveId,
- { id: 1, sidebarType: 'something' },
- state,
- [{ type: types.SET_ACTIVE_ID, payload: { id: 1, sidebarType: 'something' } }],
- [],
- );
- });
-});
-
-describe('fetchLists', () => {
- let state = {
- fullPath: 'gitlab-org',
- fullBoardId: 'gid://gitlab/Board/1',
- filterParams: {},
- boardType: 'group',
- issuableType: 'issue',
- };
-
- let queryResponse = {
- data: {
- group: {
- board: {
- hideBacklogList: true,
- lists: {
- nodes: [mockLists[1]],
- },
- },
- },
- },
- };
-
- const formattedLists = formatBoardLists(queryResponse.data.group.board.lists);
-
- it('should commit mutations RECEIVE_BOARD_LISTS_SUCCESS on success', () => {
- jest.spyOn(gqlClient, 'query').mockResolvedValue(queryResponse);
-
- return testAction(
- actions.fetchLists,
- {},
- state,
- [
- {
- type: types.RECEIVE_BOARD_LISTS_SUCCESS,
- payload: formattedLists,
- },
- ],
- [],
- );
- });
-
- it('should commit mutations RECEIVE_BOARD_LISTS_FAILURE on failure', () => {
- jest.spyOn(gqlClient, 'query').mockResolvedValue(Promise.reject());
-
- return testAction(
- actions.fetchLists,
- {},
- state,
- [
- {
- type: types.RECEIVE_BOARD_LISTS_FAILURE,
- },
- ],
- [],
- );
- });
-
- it('dispatch createList action when backlog list does not exist and is not hidden', () => {
- queryResponse = {
- data: {
- group: {
- board: {
- hideBacklogList: false,
- lists: {
- nodes: [mockLists[1]],
- },
- },
- },
- },
- };
- jest.spyOn(gqlClient, 'query').mockResolvedValue(queryResponse);
-
- return testAction(
- actions.fetchLists,
- {},
- state,
- [
- {
- type: types.RECEIVE_BOARD_LISTS_SUCCESS,
- payload: formattedLists,
- },
- ],
- [{ type: 'createList', payload: { backlog: true } }],
- );
- });
-
- it.each`
- issuableType | boardType | fullBoardId | isGroup | isProject
- ${TYPE_ISSUE} | ${WORKSPACE_GROUP} | ${'gid://gitlab/Board/1'} | ${true} | ${false}
- ${TYPE_ISSUE} | ${WORKSPACE_PROJECT} | ${'gid://gitlab/Board/1'} | ${false} | ${true}
- `(
- 'calls $issuableType query with correct variables',
- async ({ issuableType, boardType, fullBoardId, isGroup, isProject }) => {
- const commit = jest.fn();
- const dispatch = jest.fn();
-
- state = {
- fullPath: 'gitlab-org',
- fullBoardId,
- filterParams: {},
- boardType,
- issuableType,
- };
-
- const variables = {
- fullPath: 'gitlab-org',
- boardId: fullBoardId,
- filters: {},
- isGroup,
- isProject,
- };
-
- jest.spyOn(gqlClient, 'query').mockResolvedValue(queryResponse);
-
- await actions.fetchLists({ commit, state, dispatch });
-
- expect(gqlClient.query).toHaveBeenCalledWith(expect.objectContaining({ variables }));
- },
- );
-});
-
-describe('fetchMilestones', () => {
- const queryResponse = {
- data: {
- workspace: {
- milestones: {
- nodes: mockMilestones,
- },
- },
- },
- };
-
- const queryErrors = {
- data: {
- workspace: {
- errors: ['You cannot view these milestones'],
- milestones: {},
- },
- },
- };
-
- function createStore({
- state = {
- boardType: 'project',
- fullPath: 'gitlab-org/gitlab',
- milestones: [],
- milestonesLoading: false,
- },
- } = {}) {
- return new Vuex.Store({
- state,
- mutations,
- });
- }
-
- it('throws error if state.boardType is not group or project', () => {
- const store = createStore({
- state: {
- boardType: 'invalid',
- },
- });
-
- expect(() => actions.fetchMilestones(store)).toThrow(new Error('Unknown board type'));
- });
-
- it.each([
- [
- 'project',
- {
- query: projectBoardMilestones,
- variables: { fullPath: 'gitlab-org/gitlab' },
- },
- ],
- [
- 'group',
- {
- query: groupBoardMilestones,
- variables: { fullPath: 'gitlab-org/gitlab' },
- },
- ],
- ])(
- 'when boardType is %s it calls fetchMilestones with the correct query and variables',
- (boardType, variables) => {
- jest.spyOn(gqlClient, 'query').mockResolvedValue(queryResponse);
-
- const store = createStore();
-
- store.state.boardType = boardType;
-
- actions.fetchMilestones(store);
-
- expect(gqlClient.query).toHaveBeenCalledWith(variables);
- },
- );
-
- it('sets milestonesLoading to true', () => {
- jest.spyOn(gqlClient, 'query').mockResolvedValue(queryResponse);
-
- const store = createStore();
-
- actions.fetchMilestones(store);
-
- expect(store.state.milestonesLoading).toBe(true);
- });
-
- describe('success', () => {
- it('sets state.milestones from query result', async () => {
- jest.spyOn(gqlClient, 'query').mockResolvedValue(queryResponse);
-
- const store = createStore();
-
- await actions.fetchMilestones(store);
-
- expect(store.state.milestonesLoading).toBe(false);
- expect(store.state.milestones).toBe(mockMilestones);
- });
- });
-
- describe('failure', () => {
- it('sets state.milestones from query result', async () => {
- jest.spyOn(gqlClient, 'query').mockResolvedValue(queryErrors);
-
- const store = createStore();
-
- await expect(actions.fetchMilestones(store)).rejects.toThrow();
-
- expect(store.state.milestonesLoading).toBe(false);
- expect(store.state.error).toBe('Failed to load milestones.');
- });
- });
-});
-
-describe('createList', () => {
- it('should dispatch createIssueList action', () => {
- return testAction({
- action: actions.createList,
- payload: { backlog: true },
- expectedActions: [{ type: 'createIssueList', payload: { backlog: true } }],
- });
- });
-});
-
-describe('createIssueList', () => {
- let commit;
- let dispatch;
- let getters;
- let state;
-
- beforeEach(() => {
- state = {
- fullPath: 'gitlab-org',
- fullBoardId: 'gid://gitlab/Board/1',
- boardType: 'group',
- disabled: false,
- boardLists: [{ type: 'closed' }],
- };
- commit = jest.fn();
- dispatch = jest.fn();
- getters = {
- getListByLabelId: jest.fn(),
- };
- });
-
- it('should dispatch addList action when creating backlog list', async () => {
- const backlogList = {
- id: 'gid://gitlab/List/1',
- listType: 'backlog',
- title: 'Open',
- position: 0,
- };
-
- jest.spyOn(gqlClient, 'mutate').mockReturnValue(
- Promise.resolve({
- data: {
- boardListCreate: {
- list: backlogList,
- errors: [],
- },
- },
- }),
- );
-
- await actions.createIssueList({ getters, state, commit, dispatch }, { backlog: true });
-
- expect(dispatch).toHaveBeenCalledWith('addList', backlogList);
- });
-
- it('dispatches highlightList after addList has succeeded', async () => {
- const list = {
- id: 'gid://gitlab/List/1',
- listType: 'label',
- title: 'Open',
- labelId: '4',
- };
-
- jest.spyOn(gqlClient, 'mutate').mockResolvedValue({
- data: {
- boardListCreate: {
- list,
- errors: [],
- },
- },
- });
-
- await actions.createIssueList({ getters, state, commit, dispatch }, { labelId: '4' });
-
- expect(dispatch).toHaveBeenCalledWith('addList', list);
- expect(dispatch).toHaveBeenCalledWith('highlightList', list.id);
- });
-
- it('should commit CREATE_LIST_FAILURE mutation when API returns an error', async () => {
- jest.spyOn(gqlClient, 'mutate').mockReturnValue(
- Promise.resolve({
- data: {
- boardListCreate: {
- list: {},
- errors: ['foo'],
- },
- },
- }),
- );
-
- await actions.createIssueList({ getters, state, commit, dispatch }, { backlog: true });
-
- expect(commit).toHaveBeenCalledWith(types.CREATE_LIST_FAILURE, 'foo');
- });
-
- it('highlights list and does not re-query if it already exists', async () => {
- const existingList = {
- id: 'gid://gitlab/List/1',
- listType: 'label',
- title: 'Some label',
- position: 1,
- };
-
- getters = {
- getListByLabelId: jest.fn().mockReturnValue(existingList),
- };
-
- await actions.createIssueList({ getters, state, commit, dispatch }, { backlog: true });
-
- expect(dispatch).toHaveBeenCalledWith('highlightList', existingList.id);
- expect(dispatch).toHaveBeenCalledTimes(1);
- expect(commit).not.toHaveBeenCalled();
- });
-});
-
-describe('addList', () => {
- const getters = {
- getListByTitle: jest.fn().mockReturnValue(mockList),
- };
-
- it('should commit RECEIVE_ADD_LIST_SUCCESS mutation and dispatch fetchItemsForList action', () => {
- return testAction({
- action: actions.addList,
- payload: mockLists[1],
- state: { ...getters },
- expectedMutations: [
- { type: types.RECEIVE_ADD_LIST_SUCCESS, payload: updateListPosition(mockLists[1]) },
- ],
- expectedActions: [{ type: 'fetchItemsForList', payload: { listId: mockList.id } }],
- });
- });
-});
-
-describe('fetchLabels', () => {
- it('should commit mutation RECEIVE_LABELS_SUCCESS on success', async () => {
- const queryResponse = {
- data: {
- group: {
- labels: {
- nodes: labels,
- },
- },
- },
- };
- jest.spyOn(gqlClient, 'query').mockResolvedValue(queryResponse);
-
- const commit = jest.fn();
- const state = { boardType: 'group' };
-
- await actions.fetchLabels({ state, commit });
-
- expect(commit).toHaveBeenCalledWith(types.RECEIVE_LABELS_SUCCESS, labels);
- });
-});
-
-describe('moveList', () => {
- const backlogListId = 'gid://1';
- const closedListId = 'gid://5';
-
- const boardLists1 = {
- 'gid://3': { listType: '', position: 0 },
- 'gid://4': { listType: '', position: 1 },
- 'gid://5': { listType: '', position: 2 },
- };
-
- const boardLists2 = {
- [backlogListId]: { listType: ListType.backlog, position: -Infinity },
- [closedListId]: { listType: ListType.closed, position: Infinity },
- ...cloneDeep(boardLists1),
- };
-
- const movableListsOrder = ['gid://3', 'gid://4', 'gid://5'];
- const allListsOrder = [backlogListId, ...movableListsOrder, closedListId];
-
- it(`should not handle the event if the dragged item is not a "${DraggableItemTypes.list}"`, () => {
- return testAction({
- action: actions.moveList,
- payload: {
- item: { dataset: { listId: '', draggableItemType: DraggableItemTypes.card } },
- to: {
- children: [],
- },
- },
- state: {},
- expectedMutations: [],
- expectedActions: [],
- });
- });
-
- describe.each`
- draggableFrom | draggableTo | boardLists | boardListsOrder | expectedMovableListsOrder
- ${0} | ${2} | ${boardLists1} | ${movableListsOrder} | ${['gid://4', 'gid://5', 'gid://3']}
- ${2} | ${0} | ${boardLists1} | ${movableListsOrder} | ${['gid://5', 'gid://3', 'gid://4']}
- ${0} | ${1} | ${boardLists1} | ${movableListsOrder} | ${['gid://4', 'gid://3', 'gid://5']}
- ${1} | ${2} | ${boardLists1} | ${movableListsOrder} | ${['gid://3', 'gid://5', 'gid://4']}
- ${2} | ${1} | ${boardLists1} | ${movableListsOrder} | ${['gid://3', 'gid://5', 'gid://4']}
- ${1} | ${3} | ${boardLists2} | ${allListsOrder} | ${['gid://4', 'gid://5', 'gid://3']}
- ${3} | ${1} | ${boardLists2} | ${allListsOrder} | ${['gid://5', 'gid://3', 'gid://4']}
- ${1} | ${2} | ${boardLists2} | ${allListsOrder} | ${['gid://4', 'gid://3', 'gid://5']}
- ${2} | ${3} | ${boardLists2} | ${allListsOrder} | ${['gid://3', 'gid://5', 'gid://4']}
- ${3} | ${2} | ${boardLists2} | ${allListsOrder} | ${['gid://3', 'gid://5', 'gid://4']}
- `(
- 'when moving a list from position $draggableFrom to $draggableTo with lists $boardListsOrder',
- ({ draggableFrom, draggableTo, boardLists, boardListsOrder, expectedMovableListsOrder }) => {
- const movedListId = boardListsOrder[draggableFrom];
- const displacedListId = boardListsOrder[draggableTo];
- const buildDraggablePayload = () => {
- return {
- item: {
- dataset: {
- listId: boardListsOrder[draggableFrom],
- draggableItemType: DraggableItemTypes.list,
- },
- },
- newIndex: draggableTo,
- to: {
- children: boardListsOrder.map((listId) => ({ dataset: { listId } })),
- },
- };
- };
-
- it('should commit MOVE_LIST mutations and dispatch updateList action with correct payloads', () => {
- return testAction({
- action: actions.moveList,
- payload: buildDraggablePayload(),
- state: { boardLists },
- expectedMutations: [
- {
- type: types.MOVE_LISTS,
- payload: expectedMovableListsOrder.map((listId, i) => ({ listId, position: i })),
- },
- ],
- expectedActions: [
- {
- type: 'updateList',
- payload: {
- listId: movedListId,
- position: movableListsOrder.findIndex((i) => i === displacedListId),
- },
- },
- ],
- });
- });
- },
- );
-
- describe('when moving from and to the same position', () => {
- it('should not commit MOVE_LIST and should not dispatch updateList', () => {
- const listId = 'gid://1000';
-
- return testAction({
- action: actions.moveList,
- payload: {
- item: { dataset: { listId, draggbaleItemType: DraggableItemTypes.list } },
- newIndex: 0,
- to: {
- children: [{ dataset: { listId } }],
- },
- },
- state: { boardLists: { [listId]: { position: 0 } } },
- expectedMutations: [],
- expectedActions: [],
- });
- });
- });
-});
-
-describe('updateList', () => {
- const listId = 'gid://gitlab/List/1';
- const createState = (boardItemsByListId = {}) => ({
- fullPath: 'gitlab-org',
- fullBoardId: 'gid://gitlab/Board/1',
- boardType: 'group',
- disabled: false,
- boardLists: [{ type: 'closed' }],
- issuableType: TYPE_ISSUE,
- boardItemsByListId,
- });
-
- describe('when state doesnt have list items', () => {
- it('calls fetchItemsByList', async () => {
- const dispatch = jest.fn();
-
- jest.spyOn(gqlClient, 'mutate').mockResolvedValue({
- data: {
- updateBoardList: {
- errors: [],
- list: {
- id: listId,
- },
- },
- },
- });
-
- await actions.updateList({ commit: () => {}, state: createState(), dispatch }, { listId });
-
- expect(dispatch.mock.calls).toEqual([['fetchItemsForList', { listId }]]);
- });
- });
-
- describe('when state has list items', () => {
- it('doesnt call fetchItemsByList', async () => {
- const commit = jest.fn();
- const dispatch = jest.fn();
-
- jest.spyOn(gqlClient, 'mutate').mockResolvedValue({
- data: {
- updateBoardList: {
- errors: [],
- list: {
- id: listId,
- },
- },
- },
- });
-
- await actions.updateList(
- { commit, state: createState({ [listId]: [] }), dispatch },
- { listId },
- );
-
- expect(dispatch.mock.calls).toEqual([]);
- });
- });
-
- it('should dispatch handleUpdateListFailure when API returns an error', () => {
- jest.spyOn(gqlClient, 'mutate').mockResolvedValue({
- data: {
- updateBoardList: {
- list: {},
- errors: [{ foo: 'bar' }],
- },
- },
- });
-
- return testAction(
- actions.updateList,
- { listId: 'gid://gitlab/List/1', position: 1 },
- createState(),
- [],
- [{ type: 'handleUpdateListFailure' }],
- );
- });
-});
-
-describe('handleUpdateListFailure', () => {
- it('should dispatch fetchLists action and commit SET_ERROR mutation', async () => {
- await testAction({
- action: actions.handleUpdateListFailure,
- expectedMutations: [
- {
- type: types.SET_ERROR,
- payload: 'An error occurred while updating the board list. Please try again.',
- },
- ],
- expectedActions: [{ type: 'fetchLists' }],
- });
- });
-});
-
-describe('toggleListCollapsed', () => {
- it('should commit TOGGLE_LIST_COLLAPSED mutation', async () => {
- const payload = { listId: 'gid://gitlab/List/1', collapsed: true };
- await testAction({
- action: actions.toggleListCollapsed,
- payload,
- expectedMutations: [
- {
- type: types.TOGGLE_LIST_COLLAPSED,
- payload,
- },
- ],
- });
- });
-});
-
-describe('removeList', () => {
- let state;
- let getters;
- const list = mockLists[1];
- const listId = list.id;
- const mutationVariables = {
- mutation: destroyBoardListMutation,
- variables: {
- listId,
- },
- };
-
- beforeEach(() => {
- state = {
- boardLists: mockListsById,
- issuableType: TYPE_ISSUE,
- };
- getters = {
- getListByTitle: jest.fn().mockReturnValue(mockList),
- };
- });
-
- afterEach(() => {
- state = null;
- });
-
- it('optimistically deletes the list', () => {
- const commit = jest.fn();
-
- actions.removeList({ commit, state, getters, dispatch: () => {} }, listId);
-
- expect(commit.mock.calls).toEqual([[types.REMOVE_LIST, listId]]);
- });
-
- it('keeps the updated list if remove succeeds', async () => {
- const commit = jest.fn();
- const dispatch = jest.fn();
-
- jest.spyOn(gqlClient, 'mutate').mockResolvedValue({
- data: {
- destroyBoardList: {
- errors: [],
- },
- },
- });
-
- await actions.removeList({ commit, state, getters, dispatch }, listId);
-
- expect(gqlClient.mutate).toHaveBeenCalledWith(mutationVariables);
- expect(commit.mock.calls).toEqual([[types.REMOVE_LIST, listId]]);
- expect(dispatch.mock.calls).toEqual([['fetchItemsForList', { listId: mockList.id }]]);
- });
-
- it('restores the list if update fails', async () => {
- const commit = jest.fn();
- jest.spyOn(gqlClient, 'mutate').mockResolvedValue(Promise.reject());
-
- await actions.removeList({ commit, state, getters, dispatch: () => {} }, listId);
-
- expect(gqlClient.mutate).toHaveBeenCalledWith(mutationVariables);
- expect(commit.mock.calls).toEqual([
- [types.REMOVE_LIST, listId],
- [types.REMOVE_LIST_FAILURE, mockListsById],
- ]);
- });
-
- it('restores the list if update response has errors', async () => {
- const commit = jest.fn();
- jest.spyOn(gqlClient, 'mutate').mockResolvedValue({
- data: {
- destroyBoardList: {
- errors: ['update failed, ID invalid'],
- },
- },
- });
-
- await actions.removeList({ commit, state, getters, dispatch: () => {} }, listId);
-
- expect(gqlClient.mutate).toHaveBeenCalledWith(mutationVariables);
- expect(commit.mock.calls).toEqual([
- [types.REMOVE_LIST, listId],
- [types.REMOVE_LIST_FAILURE, mockListsById],
- ]);
- });
-});
-
-describe('fetchItemsForList', () => {
- const listId = mockLists[0].id;
-
- const state = {
- fullPath: 'gitlab-org',
- fullBoardId: 'gid://gitlab/Board/1',
- filterParams: {},
- boardType: 'group',
- };
-
- const mockIssuesNodes = mockIssues.map((issue) => ({ node: issue }));
-
- const pageInfo = {
- endCursor: '',
- hasNextPage: false,
- };
-
- const queryResponse = {
- data: {
- group: {
- board: {
- lists: {
- nodes: [
- {
- id: listId,
- issues: {
- edges: mockIssuesNodes,
- pageInfo,
- },
- },
- ],
- },
- },
- },
- },
- };
-
- const formattedIssues = formatListIssues(queryResponse.data.group.board.lists);
-
- const listPageInfo = {
- [listId]: pageInfo,
- };
-
- describe('when list id is undefined', () => {
- it('does not call the query', async () => {
- jest.spyOn(gqlClient, 'query').mockResolvedValue(queryResponse);
-
- await actions.fetchItemsForList(
- { state, getters: () => {}, commit: () => {} },
- { listId: undefined },
- );
-
- expect(gqlClient.query).toHaveBeenCalledTimes(0);
- });
- });
-
- it('should commit mutations REQUEST_ITEMS_FOR_LIST and RECEIVE_ITEMS_FOR_LIST_SUCCESS on success', () => {
- jest.spyOn(gqlClient, 'query').mockResolvedValue(queryResponse);
-
- return testAction(
- actions.fetchItemsForList,
- { listId },
- state,
- [
- {
- type: types.REQUEST_ITEMS_FOR_LIST,
- payload: { listId, fetchNext: false },
- },
- {
- type: types.RECEIVE_ITEMS_FOR_LIST_SUCCESS,
- payload: { listItems: formattedIssues, listPageInfo, listId },
- },
- ],
- [],
- );
- });
-
- it('should commit mutations REQUEST_ITEMS_FOR_LIST and RECEIVE_ITEMS_FOR_LIST_FAILURE on failure', () => {
- jest.spyOn(gqlClient, 'query').mockResolvedValue(Promise.reject());
-
- return testAction(
- actions.fetchItemsForList,
- { listId },
- state,
- [
- {
- type: types.REQUEST_ITEMS_FOR_LIST,
- payload: { listId, fetchNext: false },
- },
- { type: types.RECEIVE_ITEMS_FOR_LIST_FAILURE, payload: listId },
- ],
- [],
- );
- });
-});
-
-describe('resetIssues', () => {
- it('commits RESET_ISSUES mutation', () => {
- return testAction(actions.resetIssues, {}, {}, [{ type: types.RESET_ISSUES }], []);
- });
-});
-
-describe('moveItem', () => {
- it('should dispatch moveIssue action with payload', () => {
- const payload = { mock: 'payload' };
-
- return testAction({
- action: actions.moveItem,
- payload,
- expectedActions: [{ type: 'moveIssue', payload }],
- });
- });
-});
-
-describe('moveIssue', () => {
- it('should dispatch a correct set of actions', () => {
- return testAction({
- action: actions.moveIssue,
- payload: mockMoveIssueParams,
- state: mockMoveState,
- expectedActions: [
- { type: 'moveIssueCard', payload: mockMoveData },
- { type: 'updateMovedIssue', payload: mockMoveData },
- { type: 'updateIssueOrder', payload: { moveData: mockMoveData } },
- ],
- });
- });
-});
-
-describe('moveIssueCard and undoMoveIssueCard', () => {
- describe('card should move without cloning', () => {
- let state;
- let params;
- let moveMutations;
- let undoMutations;
-
- describe('when re-ordering card', () => {
- beforeEach(() => {
- const itemId = 123;
- const fromListId = 'gid://gitlab/List/1';
- const toListId = 'gid://gitlab/List/1';
- const originalIssue = { foo: 'bar' };
- const originalIndex = 0;
- const moveBeforeId = undefined;
- const moveAfterId = undefined;
- const allItemsLoadedInList = true;
- const listPosition = undefined;
-
- state = {
- boardLists: {
- [toListId]: { listType: ListType.backlog },
- [fromListId]: { listType: ListType.backlog },
- },
- boardItems: { [itemId]: originalIssue },
- boardItemsByListId: { [fromListId]: [123] },
- };
- params = {
- itemId,
- fromListId,
- toListId,
- moveBeforeId,
- moveAfterId,
- listPosition,
- allItemsLoadedInList,
- };
- moveMutations = [
- { type: types.REMOVE_BOARD_ITEM_FROM_LIST, payload: { itemId, listId: fromListId } },
- {
- type: types.ADD_BOARD_ITEM_TO_LIST,
- payload: {
- itemId,
- listId: toListId,
- moveBeforeId,
- moveAfterId,
- listPosition,
- allItemsLoadedInList,
- atIndex: originalIndex,
- },
- },
- ];
- undoMutations = [
- { type: types.UPDATE_BOARD_ITEM, payload: originalIssue },
- { type: types.REMOVE_BOARD_ITEM_FROM_LIST, payload: { itemId, listId: fromListId } },
- {
- type: types.ADD_BOARD_ITEM_TO_LIST,
- payload: { itemId, listId: fromListId, atIndex: originalIndex },
- },
- ];
- });
-
- it('moveIssueCard commits a correct set of actions', () => {
- return testAction({
- action: actions.moveIssueCard,
- state,
- payload: getMoveData(state, params),
- expectedMutations: moveMutations,
- });
- });
-
- it('undoMoveIssueCard commits a correct set of actions', () => {
- return testAction({
- action: actions.undoMoveIssueCard,
- state,
- payload: getMoveData(state, params),
- expectedMutations: undoMutations,
- });
- });
- });
-
- describe.each([
- [
- 'issue moves out of backlog',
- {
- fromListType: ListType.backlog,
- toListType: ListType.label,
- },
- ],
- [
- 'issue card moves to closed',
- {
- fromListType: ListType.label,
- toListType: ListType.closed,
- },
- ],
- [
- 'issue card moves to non-closed, non-backlog list of the same type',
- {
- fromListType: ListType.label,
- toListType: ListType.label,
- },
- ],
- ])('when %s', (_, { toListType, fromListType }) => {
- beforeEach(() => {
- const itemId = 123;
- const fromListId = 'gid://gitlab/List/1';
- const toListId = 'gid://gitlab/List/2';
- const originalIssue = { foo: 'bar' };
- const originalIndex = 0;
- const moveBeforeId = undefined;
- const moveAfterId = undefined;
-
- state = {
- boardLists: {
- [fromListId]: { listType: fromListType },
- [toListId]: { listType: toListType },
- },
- boardItems: { [itemId]: originalIssue },
- boardItemsByListId: { [fromListId]: [123], [toListId]: [] },
- };
- params = { itemId, fromListId, toListId, moveBeforeId, moveAfterId };
- moveMutations = [
- { type: types.REMOVE_BOARD_ITEM_FROM_LIST, payload: { itemId, listId: fromListId } },
- {
- type: types.ADD_BOARD_ITEM_TO_LIST,
- payload: { itemId, listId: toListId, moveBeforeId, moveAfterId },
- },
- ];
- undoMutations = [
- { type: types.UPDATE_BOARD_ITEM, payload: originalIssue },
- { type: types.REMOVE_BOARD_ITEM_FROM_LIST, payload: { itemId, listId: toListId } },
- {
- type: types.ADD_BOARD_ITEM_TO_LIST,
- payload: { itemId, listId: fromListId, atIndex: originalIndex },
- },
- ];
- });
-
- it('moveIssueCard commits a correct set of actions', () => {
- return testAction({
- action: actions.moveIssueCard,
- state,
- payload: getMoveData(state, params),
- expectedMutations: moveMutations,
- });
- });
-
- it('undoMoveIssueCard commits a correct set of actions', () => {
- return testAction({
- action: actions.undoMoveIssueCard,
- state,
- payload: getMoveData(state, params),
- expectedMutations: undoMutations,
- });
- });
- });
- });
-
- describe('card should clone on move', () => {
- let state;
- let params;
- let moveMutations;
- let undoMutations;
-
- describe.each([
- [
- 'issue card moves to non-closed, non-backlog list of a different type',
- {
- fromListType: ListType.label,
- toListType: ListType.assignee,
- },
- ],
- ])('when %s', (_, { toListType, fromListType }) => {
- beforeEach(() => {
- const itemId = 123;
- const fromListId = 'gid://gitlab/List/1';
- const toListId = 'gid://gitlab/List/2';
- const originalIssue = { foo: 'bar' };
- const originalIndex = 0;
- const moveBeforeId = undefined;
- const moveAfterId = undefined;
-
- state = {
- boardLists: {
- [fromListId]: { listType: fromListType },
- [toListId]: { listType: toListType },
- },
- boardItems: { [itemId]: originalIssue },
- boardItemsByListId: { [fromListId]: [123], [toListId]: [] },
- };
- params = { itemId, fromListId, toListId, moveBeforeId, moveAfterId };
- moveMutations = [
- { type: types.REMOVE_BOARD_ITEM_FROM_LIST, payload: { itemId, listId: fromListId } },
- {
- type: types.ADD_BOARD_ITEM_TO_LIST,
- payload: { itemId, listId: toListId, moveBeforeId, moveAfterId },
- },
- {
- type: types.ADD_BOARD_ITEM_TO_LIST,
- payload: { itemId, listId: fromListId, atIndex: originalIndex },
- },
- ];
- undoMutations = [
- { type: types.UPDATE_BOARD_ITEM, payload: originalIssue },
- { type: types.REMOVE_BOARD_ITEM_FROM_LIST, payload: { itemId, listId: fromListId } },
- { type: types.REMOVE_BOARD_ITEM_FROM_LIST, payload: { itemId, listId: toListId } },
- {
- type: types.ADD_BOARD_ITEM_TO_LIST,
- payload: { itemId, listId: fromListId, atIndex: originalIndex },
- },
- ];
- });
-
- it('moveIssueCard commits a correct set of actions', () => {
- return testAction({
- action: actions.moveIssueCard,
- state,
- payload: getMoveData(state, params),
- expectedMutations: moveMutations,
- });
- });
-
- it('undoMoveIssueCard commits a correct set of actions', () => {
- return testAction({
- action: actions.undoMoveIssueCard,
- state,
- payload: getMoveData(state, params),
- expectedMutations: undoMutations,
- });
- });
- });
- });
-});
-
-describe('updateMovedIssueCard', () => {
- const label1 = {
- id: 'label1',
- };
-
- it.each([
- [
- 'issue without a label is moved to a label list',
- {
- state: {
- boardLists: {
- from: {},
- to: {
- listType: ListType.label,
- label: label1,
- },
- },
- boardItems: {
- 1: {
- labels: [],
- },
- },
- },
- moveData: {
- itemId: 1,
- fromListId: 'from',
- toListId: 'to',
- },
- updatedIssue: { labels: [label1] },
- },
- ],
- ])(
- 'should commit UPDATE_BOARD_ITEM with a correctly updated issue data when %s',
- (_, { state, moveData, updatedIssue }) => {
- return testAction({
- action: actions.updateMovedIssue,
- payload: moveData,
- state,
- expectedMutations: [{ type: types.UPDATE_BOARD_ITEM, payload: updatedIssue }],
- });
- },
- );
-});
-
-describe('updateIssueOrder', () => {
- const issues = {
- [mockIssue.id]: mockIssue,
- [mockIssue2.id]: mockIssue2,
- };
-
- const state = {
- boardItems: issues,
- fullBoardId: 'gid://gitlab/Board/1',
- };
-
- const moveData = {
- itemId: mockIssue.id,
- fromListId: 'gid://gitlab/List/1',
- toListId: 'gid://gitlab/List/2',
- };
-
- it('calls mutate with the correct variables', () => {
- const mutationVariables = {
- mutation: issueMoveListMutation,
- variables: {
- projectPath: getProjectPath(mockIssue.referencePath),
- boardId: state.fullBoardId,
- iid: mockIssue.iid,
- fromListId: 1,
- toListId: 2,
- moveBeforeId: undefined,
- moveAfterId: undefined,
- },
- update: expect.anything(),
- };
- jest.spyOn(gqlClient, 'mutate').mockResolvedValue({
- data: {
- issuableMoveList: {
- issuable: rawIssue,
- errors: [],
- },
- },
- });
-
- actions.updateIssueOrder({ state, commit: () => {}, dispatch: () => {} }, { moveData });
-
- expect(gqlClient.mutate).toHaveBeenCalledWith(mutationVariables);
- });
-
- it('should commit MUTATE_ISSUE_SUCCESS mutation when successful', () => {
- jest.spyOn(gqlClient, 'mutate').mockResolvedValue({
- data: {
- issuableMoveList: {
- issuable: rawIssue,
- errors: [],
- },
- },
- });
-
- return testAction(
- actions.updateIssueOrder,
- { moveData },
- state,
- [
- {
- type: types.MUTATE_ISSUE_IN_PROGRESS,
- payload: true,
- },
- {
- type: types.MUTATE_ISSUE_SUCCESS,
- payload: { issue: rawIssue },
- },
- {
- type: types.MUTATE_ISSUE_IN_PROGRESS,
- payload: false,
- },
- ],
- [],
- );
- });
-
- it('should commit SET_ERROR and dispatch undoMoveIssueCard', () => {
- jest.spyOn(gqlClient, 'mutate').mockResolvedValue({
- data: {
- issuableMoveList: {
- issuable: {},
- errors: [{ foo: 'bar' }],
- },
- },
- });
-
- return testAction(
- actions.updateIssueOrder,
- { moveData },
- state,
- [
- {
- type: types.MUTATE_ISSUE_IN_PROGRESS,
- payload: true,
- },
- {
- type: types.MUTATE_ISSUE_IN_PROGRESS,
- payload: false,
- },
- {
- type: types.SET_ERROR,
- payload: 'An error occurred while moving the issue. Please try again.',
- },
- ],
- [{ type: 'undoMoveIssueCard', payload: moveData }],
- );
- });
-});
-
-describe('setAssignees', () => {
- const node = { username: 'name' };
-
- describe('when succeeds', () => {
- it('calls the correct mutation with the correct values', () => {
- return testAction(
- actions.setAssignees,
- { assignees: [node], iid: '1' },
- { commit: () => {} },
- [
- {
- type: 'UPDATE_BOARD_ITEM_BY_ID',
- payload: { prop: 'assignees', itemId: undefined, value: [node] },
- },
- ],
- [],
- );
- });
- });
-});
-
-describe('addListItem', () => {
- it('should commit ADD_BOARD_ITEM_TO_LIST and UPDATE_BOARD_ITEM mutations', () => {
- const payload = {
- list: mockLists[0],
- item: mockIssue,
- position: 0,
- inProgress: true,
- };
-
- return testAction(
- actions.addListItem,
- payload,
- {},
- [
- {
- type: types.ADD_BOARD_ITEM_TO_LIST,
- payload: {
- listId: mockLists[0].id,
- itemId: mockIssue.id,
- atIndex: 0,
- inProgress: true,
- },
- },
- { type: types.UPDATE_BOARD_ITEM, payload: mockIssue },
- ],
- [],
- );
- });
-
- it('should commit ADD_BOARD_ITEM_TO_LIST and UPDATE_BOARD_ITEM mutations, dispatch setActiveId action when inProgress is false', () => {
- const payload = {
- list: mockLists[0],
- item: mockIssue,
- position: 0,
- };
-
- return testAction(
- actions.addListItem,
- payload,
- {},
- [
- {
- type: types.ADD_BOARD_ITEM_TO_LIST,
- payload: {
- listId: mockLists[0].id,
- itemId: mockIssue.id,
- atIndex: 0,
- inProgress: false,
- },
- },
- { type: types.UPDATE_BOARD_ITEM, payload: mockIssue },
- ],
- [{ type: 'setActiveId', payload: { id: mockIssue.id, sidebarType: ISSUABLE } }],
- );
- });
-});
-
-describe('removeListItem', () => {
- it('should commit REMOVE_BOARD_ITEM_FROM_LIST and REMOVE_BOARD_ITEM mutations', () => {
- const payload = {
- listId: mockLists[0].id,
- itemId: mockIssue.id,
- };
-
- return testAction(actions.removeListItem, payload, {}, [
- { type: types.REMOVE_BOARD_ITEM_FROM_LIST, payload },
- { type: types.REMOVE_BOARD_ITEM, payload: mockIssue.id },
- ]);
- });
-});
-
-describe('addListNewIssue', () => {
- const state = {
- boardType: 'group',
- fullPath: 'gitlab-org/gitlab',
- boardConfig: {
- labelIds: [],
- assigneeId: null,
- milestoneId: -1,
- },
- };
-
- const stateWithBoardConfig = {
- boardConfig: {
- labels: [
- {
- id: 5,
- title: 'Test',
- color: '#ff0000',
- description: 'testing;',
- textColor: 'white',
- },
- ],
- assigneeId: 2,
- milestoneId: 3,
- },
- };
-
- const fakeList = { id: 'gid://gitlab/List/123' };
-
- it('should add board scope to the issue being created', async () => {
- jest.spyOn(gqlClient, 'mutate').mockResolvedValue({
- data: {
- createIssuable: {
- issuable: mockIssue,
- errors: [],
- },
- },
- });
-
- await actions.addListNewIssue(
- { dispatch: jest.fn(), commit: jest.fn(), state: stateWithBoardConfig },
- { issueInput: mockIssue, list: fakeList },
- );
-
- expect(gqlClient.mutate).toHaveBeenCalledWith({
- mutation: issueCreateMutation,
- variables: {
- input: formatIssueInput(mockIssue, stateWithBoardConfig.boardConfig),
- },
- update: expect.anything(),
- });
- });
-
- it('should add board scope by merging attributes to the issue being created', async () => {
- const issue = {
- ...mockIssue,
- assigneeIds: ['gid://gitlab/User/1'],
- labelIds: ['gid://gitlab/GroupLabel/4'],
- };
-
- jest.spyOn(gqlClient, 'mutate').mockResolvedValue({
- data: {
- createIssue: {
- issue,
- errors: [],
- },
- },
- });
-
- const payload = formatIssueInput(issue, stateWithBoardConfig.boardConfig);
-
- await actions.addListNewIssue(
- { dispatch: jest.fn(), commit: jest.fn(), state: stateWithBoardConfig },
- { issueInput: issue, list: fakeList },
- );
-
- expect(gqlClient.mutate).toHaveBeenCalledWith({
- mutation: issueCreateMutation,
- variables: {
- input: formatIssueInput(issue, stateWithBoardConfig.boardConfig),
- },
- update: expect.anything(),
- });
- expect(payload.labelIds).toEqual(['gid://gitlab/GroupLabel/4', 'gid://gitlab/GroupLabel/5']);
- expect(payload.assigneeIds).toEqual(['gid://gitlab/User/1', 'gid://gitlab/User/2']);
- });
-
- describe('when issue creation mutation request succeeds', () => {
- it('dispatches a correct set of mutations', () => {
- jest.spyOn(gqlClient, 'mutate').mockResolvedValue({
- data: {
- createIssuable: {
- issuable: mockIssue,
- errors: [],
- },
- },
- });
-
- return testAction({
- action: actions.addListNewIssue,
- payload: {
- issueInput: mockIssue,
- list: fakeList,
- placeholderId: 'tmp',
- },
- state,
- expectedActions: [
- {
- type: 'addListItem',
- payload: {
- list: fakeList,
- item: formatIssue({ ...mockIssue, id: 'tmp', isLoading: true }),
- position: 0,
- inProgress: true,
- },
- },
- { type: 'removeListItem', payload: { listId: fakeList.id, itemId: 'tmp' } },
- {
- type: 'addListItem',
- payload: {
- list: fakeList,
- item: formatIssue(mockIssue),
- position: 0,
- },
- },
- ],
- });
- });
- });
-
- describe('when issue creation mutation request fails', () => {
- it('dispatches a correct set of mutations', () => {
- jest.spyOn(gqlClient, 'mutate').mockResolvedValue({
- data: {
- createIssue: {
- issue: mockIssue,
- errors: [{ foo: 'bar' }],
- },
- },
- });
-
- return testAction({
- action: actions.addListNewIssue,
- payload: {
- issueInput: mockIssue,
- list: fakeList,
- placeholderId: 'tmp',
- },
- state,
- expectedActions: [
- {
- type: 'addListItem',
- payload: {
- list: fakeList,
- item: formatIssue({ ...mockIssue, id: 'tmp', isLoading: true }),
- position: 0,
- inProgress: true,
- },
- },
- { type: 'removeListItem', payload: { listId: fakeList.id, itemId: 'tmp' } },
- ],
- expectedMutations: [
- {
- type: types.SET_ERROR,
- payload: 'An error occurred while creating the issue. Please try again.',
- },
- ],
- });
- });
- });
-});
-
-describe('setActiveIssueLabels', () => {
- const state = { boardItems: { [mockIssue.id]: mockIssue } };
- const getters = { activeBoardItem: { ...mockIssue, labels } };
- const testLabelIds = labels.map((label) => label.id);
- const input = {
- labelIds: testLabelIds,
- removeLabelIds: [],
- projectPath: 'h/b',
- labels,
- };
-
- it('should assign labels', () => {
- const payload = {
- itemId: getters.activeBoardItem.id,
- prop: 'labels',
- value: labels,
- };
-
- return testAction(
- actions.setActiveIssueLabels,
- input,
- { ...state, ...getters },
- [
- {
- type: types.UPDATE_BOARD_ITEM_BY_ID,
- payload,
- },
- ],
- [],
- );
- });
-
- it('should remove label', () => {
- const payload = {
- itemId: getters.activeBoardItem.id,
- prop: 'labels',
- value: [labels[1]],
- };
-
- return testAction(
- actions.setActiveIssueLabels,
- { ...input, removeLabelIds: [getIdFromGraphQLId(labels[0].id)] },
- { ...state, ...getters },
- [
- {
- type: types.UPDATE_BOARD_ITEM_BY_ID,
- payload,
- },
- ],
- [],
- );
- });
-});
-
-describe('setActiveItemSubscribed', () => {
- const state = {
- boardItems: {
- [mockActiveIssue.id]: mockActiveIssue,
- },
- fullPath: 'gitlab-org',
- issuableType: TYPE_ISSUE,
- };
- const getters = { activeBoardItem: mockActiveIssue, isEpicBoard: false };
- const subscribedState = true;
- const input = {
- subscribedState,
- projectPath: 'gitlab-org/gitlab-test',
- };
-
- it('should commit subscribed status', () => {
- jest.spyOn(gqlClient, 'mutate').mockResolvedValue({
- data: {
- updateIssuableSubscription: {
- issue: {
- subscribed: subscribedState,
- },
- errors: [],
- },
- },
- });
-
- const payload = {
- itemId: getters.activeBoardItem.id,
- prop: 'subscribed',
- value: subscribedState,
- };
-
- return testAction(
- actions.setActiveItemSubscribed,
- input,
- { ...state, ...getters },
- [
- {
- type: types.UPDATE_BOARD_ITEM_BY_ID,
- payload,
- },
- ],
- [],
- );
- });
-
- it('throws error if fails', async () => {
- jest
- .spyOn(gqlClient, 'mutate')
- .mockResolvedValue({ data: { updateIssuableSubscription: { errors: ['failed mutation'] } } });
-
- await expect(actions.setActiveItemSubscribed({ getters }, input)).rejects.toThrow(Error);
- });
-});
-
-describe('setActiveItemTitle', () => {
- const state = {
- boardItems: { [mockIssue.id]: mockIssue },
- issuableType: TYPE_ISSUE,
- fullPath: 'path/f',
- };
- const getters = { activeBoardItem: mockIssue, isEpicBoard: false };
- const testTitle = 'Test Title';
- const input = {
- title: testTitle,
- projectPath: 'h/b',
- };
-
- it('should commit title after setting the issue', () => {
- jest.spyOn(gqlClient, 'mutate').mockResolvedValue({
- data: {
- updateIssuableTitle: {
- issue: {
- title: testTitle,
- },
- errors: [],
- },
- },
- });
-
- const payload = {
- itemId: getters.activeBoardItem.id,
- prop: 'title',
- value: testTitle,
- };
-
- return testAction(
- actions.setActiveItemTitle,
- input,
- { ...state, ...getters },
- [
- {
- type: types.UPDATE_BOARD_ITEM_BY_ID,
- payload,
- },
- ],
- [],
- );
- });
-
- it('throws error if fails', async () => {
- jest
- .spyOn(gqlClient, 'mutate')
- .mockResolvedValue({ data: { updateIssue: { errors: ['failed mutation'] } } });
-
- await expect(actions.setActiveItemTitle({ getters }, input)).rejects.toThrow(Error);
- });
-});
-
-describe('setActiveItemConfidential', () => {
- const state = { boardItems: { [mockIssue.id]: mockIssue } };
- const getters = { activeBoardItem: mockIssue };
-
- it('set confidential value on board item', () => {
- const payload = {
- itemId: getters.activeBoardItem.id,
- prop: 'confidential',
- value: true,
- };
-
- return testAction(
- actions.setActiveItemConfidential,
- true,
- { ...state, ...getters },
- [
- {
- type: types.UPDATE_BOARD_ITEM_BY_ID,
- payload,
- },
- ],
- [],
- );
- });
-});
-
-describe('fetchGroupProjects', () => {
- const state = {
- fullPath: 'gitlab-org',
- };
-
- const pageInfo = {
- endCursor: '',
- hasNextPage: false,
- };
-
- const queryResponse = {
- data: {
- group: {
- projects: {
- nodes: mockGroupProjects,
- pageInfo: {
- endCursor: '',
- hasNextPage: false,
- },
- },
- },
- },
- };
-
- it('should commit mutations REQUEST_GROUP_PROJECTS and RECEIVE_GROUP_PROJECTS_SUCCESS on success', () => {
- jest.spyOn(gqlClient, 'query').mockResolvedValue(queryResponse);
-
- return testAction(
- actions.fetchGroupProjects,
- {},
- state,
- [
- {
- type: types.REQUEST_GROUP_PROJECTS,
- payload: false,
- },
- {
- type: types.RECEIVE_GROUP_PROJECTS_SUCCESS,
- payload: { projects: mockGroupProjects, pageInfo, fetchNext: false },
- },
- ],
- [],
- );
- });
-
- it('should commit mutations REQUEST_GROUP_PROJECTS and RECEIVE_GROUP_PROJECTS_FAILURE on failure', () => {
- jest.spyOn(gqlClient, 'query').mockRejectedValue();
-
- return testAction(
- actions.fetchGroupProjects,
- {},
- state,
- [
- {
- type: types.REQUEST_GROUP_PROJECTS,
- payload: false,
- },
- {
- type: types.RECEIVE_GROUP_PROJECTS_FAILURE,
- },
- ],
- [],
- );
- });
-});
-
-describe('setSelectedProject', () => {
- it('should commit mutation SET_SELECTED_PROJECT', () => {
- const project = mockGroupProjects[0];
-
- return testAction(
- actions.setSelectedProject,
- project,
- {},
- [
- {
- type: types.SET_SELECTED_PROJECT,
- payload: project,
- },
- ],
- [],
- );
- });
-});
-
-describe('toggleBoardItemMultiSelection', () => {
- const boardItem = mockIssue;
- const boardItem2 = mockIssue2;
-
- it('should commit mutation ADD_BOARD_ITEM_TO_SELECTION if item is not on selection state', () => {
- return testAction(
- actions.toggleBoardItemMultiSelection,
- boardItem,
- { selectedBoardItems: [] },
- [
- {
- type: types.ADD_BOARD_ITEM_TO_SELECTION,
- payload: boardItem,
- },
- ],
- [],
- );
- });
-
- it('should commit mutation REMOVE_BOARD_ITEM_FROM_SELECTION if item is on selection state', () => {
- return testAction(
- actions.toggleBoardItemMultiSelection,
- boardItem,
- { selectedBoardItems: [mockIssue] },
- [
- {
- type: types.REMOVE_BOARD_ITEM_FROM_SELECTION,
- payload: boardItem,
- },
- ],
- [],
- );
- });
-
- it('should additionally commit mutation ADD_BOARD_ITEM_TO_SELECTION for active issue and dispatch unsetActiveId', () => {
- return testAction(
- actions.toggleBoardItemMultiSelection,
- boardItem2,
- { activeId: mockActiveIssue.id, activeBoardItem: mockActiveIssue, selectedBoardItems: [] },
- [
- {
- type: types.ADD_BOARD_ITEM_TO_SELECTION,
- payload: mockActiveIssue,
- },
- {
- type: types.ADD_BOARD_ITEM_TO_SELECTION,
- payload: boardItem2,
- },
- ],
- [{ type: 'unsetActiveId' }],
- );
- });
-});
-
-describe('resetBoardItemMultiSelection', () => {
- it('should commit mutation RESET_BOARD_ITEM_SELECTION', () => {
- return testAction({
- action: actions.resetBoardItemMultiSelection,
- state: { selectedBoardItems: [mockIssue] },
- expectedMutations: [
- {
- type: types.RESET_BOARD_ITEM_SELECTION,
- },
- ],
- });
- });
-});
-
-describe('toggleBoardItem', () => {
- it('should dispatch resetBoardItemMultiSelection and unsetActiveId when boardItem is the active item', () => {
- return testAction({
- action: actions.toggleBoardItem,
- payload: { boardItem: mockIssue },
- state: {
- activeId: mockIssue.id,
- },
- expectedActions: [{ type: 'resetBoardItemMultiSelection' }, { type: 'unsetActiveId' }],
- });
- });
-
- it('should dispatch resetBoardItemMultiSelection and setActiveId when boardItem is not the active item', () => {
- return testAction({
- action: actions.toggleBoardItem,
- payload: { boardItem: mockIssue },
- state: {
- activeId: inactiveId,
- },
- expectedActions: [
- { type: 'resetBoardItemMultiSelection' },
- { type: 'setActiveId', payload: { id: mockIssue.id, sidebarType: ISSUABLE } },
- ],
- });
- });
-});
-
-describe('setError', () => {
- it('should commit mutation SET_ERROR', () => {
- return testAction({
- action: actions.setError,
- payload: { message: 'mayday' },
- expectedMutations: [
- {
- payload: 'mayday',
- type: types.SET_ERROR,
- },
- ],
- });
- });
-
- it('should capture error using Sentry when captureError is true', () => {
- jest.spyOn(Sentry, 'captureException');
-
- const mockError = new Error();
- actions.setError(
- { commit: () => {} },
- {
- message: 'mayday',
- error: mockError,
- captureError: true,
- },
- );
-
- expect(Sentry.captureException).toHaveBeenNthCalledWith(1, mockError);
- });
-});
-
-describe('unsetError', () => {
- it('should commit mutation SET_ERROR with undefined as payload', () => {
- return testAction({
- action: actions.unsetError,
- expectedMutations: [
- {
- payload: undefined,
- type: types.SET_ERROR,
- },
- ],
- });
- });
-});
diff --git a/spec/frontend/boards/stores/getters_spec.js b/spec/frontend/boards/stores/getters_spec.js
deleted file mode 100644
index 944a7493504..00000000000
--- a/spec/frontend/boards/stores/getters_spec.js
+++ /dev/null
@@ -1,203 +0,0 @@
-import { inactiveId } from '~/boards/constants';
-import getters from '~/boards/stores/getters';
-import {
- mockIssue,
- mockIssue2,
- mockIssues,
- mockIssuesByListId,
- issues,
- mockLists,
- mockGroupProject1,
- mockArchivedGroupProject,
-} from '../mock_data';
-
-describe('Boards - Getters', () => {
- describe('isSidebarOpen', () => {
- it('returns true when activeId is not equal to 0', () => {
- const state = {
- activeId: 1,
- };
-
- expect(getters.isSidebarOpen(state)).toBe(true);
- });
-
- it('returns false when activeId is equal to 0', () => {
- const state = {
- activeId: inactiveId,
- };
-
- expect(getters.isSidebarOpen(state)).toBe(false);
- });
- });
-
- describe('isSwimlanesOn', () => {
- it('returns false', () => {
- expect(getters.isSwimlanesOn()).toBe(false);
- });
- });
-
- describe('getBoardItemById', () => {
- const state = { boardItems: { 'gid://gitlab/Issue/1': 'issue' } };
-
- it.each`
- id | expected
- ${'gid://gitlab/Issue/1'} | ${'issue'}
- ${''} | ${{}}
- `('returns $expected when $id is passed to state', ({ id, expected }) => {
- expect(getters.getBoardItemById(state)(id)).toEqual(expected);
- });
- });
-
- describe('activeBoardItem', () => {
- it.each`
- id | expected
- ${'gid://gitlab/Issue/1'} | ${'issue'}
- ${''} | ${{ id: '', iid: '' }}
- `('returns $expected when $id is passed to state', ({ id, expected }) => {
- const state = { boardItems: { 'gid://gitlab/Issue/1': 'issue' }, activeId: id };
-
- expect(getters.activeBoardItem(state)).toEqual(expected);
- });
- });
-
- describe('groupPathByIssueId', () => {
- it('returns group path for the active issue', () => {
- const mockActiveIssue = {
- referencePath: 'gitlab-org/gitlab-test#1',
- };
- expect(getters.groupPathForActiveIssue({}, { activeBoardItem: mockActiveIssue })).toEqual(
- 'gitlab-org',
- );
- });
-
- it('returns group path of last subgroup for the active issue', () => {
- const mockActiveIssue = {
- referencePath: 'gitlab-org/subgroup/subsubgroup/gitlab-test#1',
- };
- expect(getters.groupPathForActiveIssue({}, { activeBoardItem: mockActiveIssue })).toEqual(
- 'gitlab-org/subgroup/subsubgroup',
- );
- });
-
- it('returns empty string as group path when active issue is an empty object', () => {
- const mockActiveIssue = {};
- expect(getters.groupPathForActiveIssue({}, { activeBoardItem: mockActiveIssue })).toEqual('');
- });
- });
-
- describe('projectPathByIssueId', () => {
- it('returns project path for the active issue', () => {
- const mockActiveIssue = {
- referencePath: 'gitlab-org/gitlab-test#1',
- };
- expect(getters.projectPathForActiveIssue({}, { activeBoardItem: mockActiveIssue })).toEqual(
- 'gitlab-org/gitlab-test',
- );
- });
-
- it('returns empty string as project path when active issue is an empty object', () => {
- const mockActiveIssue = {};
- expect(getters.projectPathForActiveIssue({}, { activeBoardItem: mockActiveIssue })).toEqual(
- '',
- );
- });
- });
-
- describe('getBoardItemsByList', () => {
- const boardsState = {
- boardItemsByListId: mockIssuesByListId,
- boardItems: issues,
- };
- it('returns issues for a given listId', () => {
- const getBoardItemById = (issueId) =>
- [mockIssue, mockIssue2].find(({ id }) => id === issueId);
-
- expect(
- getters.getBoardItemsByList(boardsState, { getBoardItemById })('gid://gitlab/List/2'),
- ).toEqual(mockIssues);
- });
- });
-
- const boardsState = {
- boardLists: {
- 'gid://gitlab/List/1': mockLists[0],
- 'gid://gitlab/List/2': mockLists[1],
- },
- };
-
- describe('getListByLabelId', () => {
- it('returns list for a given label id', () => {
- expect(getters.getListByLabelId(boardsState)('gid://gitlab/GroupLabel/121')).toEqual(
- mockLists[1],
- );
- });
- });
-
- describe('getListByTitle', () => {
- it('returns list for a given list title', () => {
- expect(getters.getListByTitle(boardsState)('To Do')).toEqual(mockLists[1]);
- });
- });
-
- describe('activeGroupProjects', () => {
- const state = {
- groupProjects: [mockGroupProject1, mockArchivedGroupProject],
- };
-
- it('returns only returns non-archived group projects', () => {
- expect(getters.activeGroupProjects(state)).toEqual([mockGroupProject1]);
- });
- });
-
- describe('isIssueBoard', () => {
- it.each`
- issuableType | expected
- ${'issue'} | ${true}
- ${'epic'} | ${false}
- `(
- 'returns $expected when issuableType on state is $issuableType',
- ({ issuableType, expected }) => {
- const state = {
- issuableType,
- };
-
- expect(getters.isIssueBoard(state)).toBe(expected);
- },
- );
- });
-
- describe('isEpicBoard', () => {
- it('returns false', () => {
- expect(getters.isEpicBoard()).toBe(false);
- });
- });
-
- describe('hasScope', () => {
- const boardConfig = {
- labels: [],
- assigneeId: null,
- iterationCadenceId: null,
- iterationId: null,
- milestoneId: null,
- weight: null,
- };
-
- it('returns false when boardConfig is empty', () => {
- const state = { boardConfig };
-
- expect(getters.hasScope(state)).toBe(false);
- });
-
- it('returns true when boardScope has a label', () => {
- const state = { boardConfig: { ...boardConfig, labels: ['foo'] } };
-
- expect(getters.hasScope(state)).toBe(true);
- });
-
- it('returns true when boardConfig has a value other than null', () => {
- const state = { boardConfig: { ...boardConfig, assigneeId: 3 } };
-
- expect(getters.hasScope(state)).toBe(true);
- });
- });
-});
diff --git a/spec/frontend/boards/stores/state_spec.js b/spec/frontend/boards/stores/state_spec.js
deleted file mode 100644
index 35490a63567..00000000000
--- a/spec/frontend/boards/stores/state_spec.js
+++ /dev/null
@@ -1,11 +0,0 @@
-import createState from '~/boards/stores/state';
-
-describe('createState', () => {
- it('is a function', () => {
- expect(createState).toEqual(expect.any(Function));
- });
-
- it('returns an object', () => {
- expect(createState()).toEqual(expect.any(Object));
- });
-});
diff --git a/spec/frontend/captcha/captcha_modal_spec.js b/spec/frontend/captcha/captcha_modal_spec.js
index 4bbed8ab3bb..977c685739f 100644
--- a/spec/frontend/captcha/captcha_modal_spec.js
+++ b/spec/frontend/captcha/captcha_modal_spec.js
@@ -1,6 +1,7 @@
import { GlModal } from '@gitlab/ui';
import { shallowMount } from '@vue/test-utils';
import { stubComponent } from 'helpers/stub_component';
+import waitForPromises from 'helpers/wait_for_promises';
import CaptchaModal from '~/captcha/captcha_modal.vue';
import { initRecaptchaScript } from '~/captcha/init_recaptcha_script';
@@ -36,6 +37,7 @@ describe('Captcha Modal', () => {
beforeEach(() => {
grecaptcha = {
render: jest.fn(),
+ reset: jest.fn(),
};
initRecaptchaScript.mockResolvedValue(grecaptcha);
@@ -156,4 +158,65 @@ describe('Captcha Modal', () => {
});
});
});
+
+ describe('when showModal is false', () => {
+ beforeEach(() => {
+ createComponent({ props: { showModal: false, needsCaptchaResponse: true } });
+ });
+
+ it('does not render the modal', () => {
+ expect(findGlModal().exists()).toBe(false);
+ });
+
+ it('renders captcha', () => {
+ expect(grecaptcha.render).toHaveBeenCalledWith(wrapper.vm.$refs.captcha, {
+ sitekey: captchaSiteKey,
+ callback: expect.any(Function),
+ });
+ });
+ });
+
+ describe('needsCaptchaResponse watcher', () => {
+ describe('when showModal is true', () => {
+ beforeEach(() => {
+ createComponent({ props: { showModal: true, needsCaptchaResponse: false } });
+ wrapper.setProps({ needsCaptchaResponse: true });
+ });
+
+ it('shows modal', () => {
+ expect(showSpy).toHaveBeenCalled();
+ });
+ });
+
+ describe('when showModal is false', () => {
+ beforeEach(() => {
+ createComponent({ props: { showModal: false, needsCaptchaResponse: false } });
+ wrapper.setProps({ needsCaptchaResponse: true });
+ });
+
+ it('does not render the modal', () => {
+ expect(findGlModal().exists()).toBe(false);
+ });
+
+ it('renders captcha', () => {
+ expect(grecaptcha.render).toHaveBeenCalledWith(wrapper.vm.$refs.captcha, {
+ sitekey: captchaSiteKey,
+ callback: expect.any(Function),
+ });
+ });
+ });
+ });
+
+ describe('resetSession watcher', () => {
+ beforeEach(() => {
+ createComponent({ props: { showModal: false, needsCaptchaResponse: true } });
+ });
+
+ it('calls reset when resetSession is true', async () => {
+ await waitForPromises();
+ await wrapper.setProps({ resetSession: true });
+
+ expect(grecaptcha.reset).toHaveBeenCalled();
+ });
+ });
});
diff --git a/spec/frontend/ci/catalog/components/details/ci_resource_about_spec.js b/spec/frontend/ci/catalog/components/details/ci_resource_about_spec.js
index 658a135534b..1c791857df9 100644
--- a/spec/frontend/ci/catalog/components/details/ci_resource_about_spec.js
+++ b/spec/frontend/ci/catalog/components/details/ci_resource_about_spec.js
@@ -12,8 +12,8 @@ describe('CiResourceAbout', () => {
openMergeRequestsCount: 9,
latestVersion: {
id: 1,
- tagName: 'v1.0.0',
- tagPath: 'path/to/release',
+ name: 'v1.0.0',
+ path: 'path/to/release',
releasedAt: '2022-08-23T17:19:09Z',
},
webPath: 'path/to/project',
diff --git a/spec/frontend/ci/catalog/components/details/ci_resource_components_spec.js b/spec/frontend/ci/catalog/components/details/ci_resource_components_spec.js
index 330163e9f39..f81344fa291 100644
--- a/spec/frontend/ci/catalog/components/details/ci_resource_components_spec.js
+++ b/spec/frontend/ci/catalog/components/details/ci_resource_components_spec.js
@@ -115,7 +115,7 @@ describe('CiResourceComponents', () => {
it('renders the component name and snippet', () => {
components.forEach((component) => {
expect(wrapper.text()).toContain(component.name);
- expect(wrapper.text()).toContain(component.path);
+ expect(wrapper.text()).toContain(component.includePath);
});
});
@@ -124,7 +124,7 @@ describe('CiResourceComponents', () => {
const button = findCopyToClipboardButton(i);
expect(button.props().icon).toBe('copy-to-clipboard');
- expect(button.attributes('data-clipboard-text')).toContain(component.path);
+ expect(button.attributes('data-clipboard-text')).toContain(component.includePath);
});
});
diff --git a/spec/frontend/ci/catalog/components/details/ci_resource_header_spec.js b/spec/frontend/ci/catalog/components/details/ci_resource_header_spec.js
index 6af9daabea0..b35c8a40744 100644
--- a/spec/frontend/ci/catalog/components/details/ci_resource_header_spec.js
+++ b/spec/frontend/ci/catalog/components/details/ci_resource_header_spec.js
@@ -113,7 +113,7 @@ describe('CiResourceHeader', () => {
createComponent({
props: {
pipelineStatus: status,
- latestVersion: { tagName: '1.0.0', tagPath: 'path/to/release' },
+ latestVersion: { name: '1.0.0', path: 'path/to/release' },
},
});
});
diff --git a/spec/frontend/ci/catalog/components/list/catalog_search_spec.js b/spec/frontend/ci/catalog/components/list/catalog_search_spec.js
index c6f8498f2fd..803deeb0d45 100644
--- a/spec/frontend/ci/catalog/components/list/catalog_search_spec.js
+++ b/spec/frontend/ci/catalog/components/list/catalog_search_spec.js
@@ -1,4 +1,4 @@
-import { GlSearchBoxByClick, GlSorting, GlSortingItem } from '@gitlab/ui';
+import { GlSearchBoxByClick, GlSorting } from '@gitlab/ui';
import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
import CatalogSearch from '~/ci/catalog/components/list/catalog_search.vue';
import { SORT_ASC, SORT_DESC, SORT_OPTION_CREATED } from '~/ci/catalog/constants';
@@ -8,7 +8,7 @@ describe('CatalogSearch', () => {
const findSearchBar = () => wrapper.findComponent(GlSearchBoxByClick);
const findSorting = () => wrapper.findComponent(GlSorting);
- const findAllSortingItems = () => wrapper.findAllComponents(GlSortingItem);
+ const findAllSortingItems = () => findSorting().props('sortOptions');
const createComponent = () => {
wrapper = shallowMountExtended(CatalogSearch, {});
@@ -23,13 +23,14 @@ describe('CatalogSearch', () => {
expect(findSearchBar().exists()).toBe(true);
});
- it('renders the sorting options', () => {
- expect(findSorting().exists()).toBe(true);
- expect(findAllSortingItems()).toHaveLength(1);
+ it('sets sorting options', () => {
+ const sortOptionsProp = findAllSortingItems();
+ expect(sortOptionsProp).toHaveLength(1);
+ expect(sortOptionsProp[0].text).toBe('Created at');
});
it('renders the `Created at` option as the default', () => {
- expect(findAllSortingItems().at(0).text()).toBe('Created at');
+ expect(findSorting().props('text')).toBe('Created at');
});
});
diff --git a/spec/frontend/ci/catalog/components/list/catalog_tabs_spec.js b/spec/frontend/ci/catalog/components/list/catalog_tabs_spec.js
new file mode 100644
index 00000000000..ea216300017
--- /dev/null
+++ b/spec/frontend/ci/catalog/components/list/catalog_tabs_spec.js
@@ -0,0 +1,71 @@
+import { GlTab, GlTabs, GlLoadingIcon } from '@gitlab/ui';
+import { shallowMount } from '@vue/test-utils';
+import { extendedWrapper } from 'helpers/vue_test_utils_helper';
+import { trimText } from 'helpers/text_helper';
+import CatalogTabs from '~/ci/catalog/components/list/catalog_tabs.vue';
+import { SCOPE } from '~/ci/catalog/constants';
+
+describe('Catalog Tabs', () => {
+ let wrapper;
+
+ const defaultProps = {
+ isLoading: false,
+ resourceCounts: {
+ all: 11,
+ namespaces: 4,
+ },
+ };
+
+ const findAllTab = () => wrapper.findByTestId('resources-all-tab');
+ const findYourResourcesTab = () => wrapper.findByTestId('resources-your-tab');
+ const findLoadingIcons = () => wrapper.findAllComponents(GlLoadingIcon);
+
+ const triggerTabChange = (index) => wrapper.findAllComponents(GlTab).at(index).vm.$emit('click');
+
+ const createComponent = (props = defaultProps) => {
+ wrapper = extendedWrapper(
+ shallowMount(CatalogTabs, {
+ propsData: {
+ ...props,
+ },
+ stubs: { GlTabs },
+ }),
+ );
+ };
+
+ describe('When count queries are loading', () => {
+ beforeEach(() => {
+ createComponent({ ...defaultProps, isLoading: true });
+ });
+
+ it('renders loading icons', () => {
+ expect(findLoadingIcons()).toHaveLength(2);
+ });
+ });
+
+ describe('When both tabs have resources', () => {
+ beforeEach(() => {
+ createComponent();
+ });
+
+ it('renders All tab with count', () => {
+ expect(trimText(findAllTab().text())).toBe(`All ${defaultProps.resourceCounts.all}`);
+ });
+
+ it('renders your resources tab with count', () => {
+ expect(trimText(findYourResourcesTab().text())).toBe(
+ `Your resources ${defaultProps.resourceCounts.namespaces}`,
+ );
+ });
+
+ it.each`
+ tabIndex | expectedScope
+ ${0} | ${SCOPE.all}
+ ${1} | ${SCOPE.namespaces}
+ `('emits setScope with $expectedScope on tab change', ({ tabIndex, expectedScope }) => {
+ triggerTabChange(tabIndex);
+
+ expect(wrapper.emitted()).toEqual({ setScope: [[expectedScope]] });
+ });
+ });
+});
diff --git a/spec/frontend/ci/catalog/components/list/ci_resources_list_item_spec.js b/spec/frontend/ci/catalog/components/list/ci_resources_list_item_spec.js
index d74b133f386..15add3f307f 100644
--- a/spec/frontend/ci/catalog/components/list/ci_resources_list_item_spec.js
+++ b/spec/frontend/ci/catalog/components/list/ci_resources_list_item_spec.js
@@ -21,7 +21,7 @@ describe('CiResourcesListItem', () => {
const release = {
author: { name: 'author', webUrl: '/user/1' },
releasedAt: Date.now(),
- tagName: '1.0.0',
+ name: '1.0.0',
};
const defaultProps = {
resource,
@@ -114,7 +114,7 @@ describe('CiResourcesListItem', () => {
it('renders the version badge', () => {
expect(findBadge().exists()).toBe(true);
- expect(findBadge().text()).toBe(release.tagName);
+ expect(findBadge().text()).toBe(release.name);
});
});
});
diff --git a/spec/frontend/ci/catalog/components/list/ci_resources_list_spec.js b/spec/frontend/ci/catalog/components/list/ci_resources_list_spec.js
index aca20a83979..41c6ccdd112 100644
--- a/spec/frontend/ci/catalog/components/list/ci_resources_list_spec.js
+++ b/spec/frontend/ci/catalog/components/list/ci_resources_list_spec.js
@@ -3,20 +3,19 @@ import { GlKeysetPagination } from '@gitlab/ui';
import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
import CiResourcesList from '~/ci/catalog/components/list/ci_resources_list.vue';
import CiResourcesListItem from '~/ci/catalog/components/list/ci_resources_list_item.vue';
-import { ciCatalogResourcesItemsCount } from '~/ci/catalog/graphql/settings';
import { catalogResponseBody, catalogSinglePageResponse } from '../../mock';
describe('CiResourcesList', () => {
let wrapper;
const createComponent = ({ props = {} } = {}) => {
- const { nodes, pageInfo, count } = catalogResponseBody.data.ciCatalogResources;
+ const { nodes, pageInfo } = catalogResponseBody.data.ciCatalogResources;
const defaultProps = {
currentPage: 1,
resources: nodes,
pageInfo,
- totalCount: count,
+ totalCount: 20,
};
wrapper = shallowMountExtended(CiResourcesList, {
@@ -36,11 +35,11 @@ describe('CiResourcesList', () => {
const findNextBtn = () => wrapper.findByTestId('nextButton');
describe('contains only one page', () => {
- const { nodes, pageInfo, count } = catalogSinglePageResponse.data.ciCatalogResources;
+ const { nodes, pageInfo } = catalogSinglePageResponse.data.ciCatalogResources;
beforeEach(async () => {
await createComponent({
- props: { currentPage: 1, resources: nodes, pageInfo, totalCount: count },
+ props: { currentPage: 1, resources: nodes, pageInfo, totalCount: nodes.length },
});
});
@@ -62,58 +61,56 @@ describe('CiResourcesList', () => {
});
describe.each`
- hasPreviousPage | hasNextPage | pageText | expectedTotal | currentPage
- ${false} | ${true} | ${'1 of 3'} | ${ciCatalogResourcesItemsCount} | ${1}
- ${true} | ${true} | ${'2 of 3'} | ${ciCatalogResourcesItemsCount} | ${2}
- ${true} | ${false} | ${'3 of 3'} | ${ciCatalogResourcesItemsCount} | ${3}
- `(
- 'when on page $pageText',
- ({ currentPage, expectedTotal, pageText, hasPreviousPage, hasNextPage }) => {
- const { nodes, pageInfo, count } = catalogResponseBody.data.ciCatalogResources;
-
- const previousPageState = hasPreviousPage ? 'enabled' : 'disabled';
- const nextPageState = hasNextPage ? 'enabled' : 'disabled';
-
- beforeEach(async () => {
- await createComponent({
- props: {
- currentPage,
- resources: nodes,
- pageInfo: { ...pageInfo, hasPreviousPage, hasNextPage },
- totalCount: count,
- },
- });
- });
+ hasPreviousPage | hasNextPage | pageText | currentPage
+ ${false} | ${true} | ${'1 of 3'} | ${1}
+ ${true} | ${true} | ${'2 of 3'} | ${2}
+ ${true} | ${false} | ${'3 of 3'} | ${3}
+ `('when on page $pageText', ({ currentPage, pageText, hasPreviousPage, hasNextPage }) => {
+ const { nodes, pageInfo } = catalogResponseBody.data.ciCatalogResources;
+ const count = 50; // We want 3 pages of data to test. There are 20 items per page.
+
+ const previousPageState = hasPreviousPage ? 'enabled' : 'disabled';
+ const nextPageState = hasNextPage ? 'enabled' : 'disabled';
- it('shows the right number of items', () => {
- expect(findResourcesListItems()).toHaveLength(expectedTotal);
+ beforeEach(async () => {
+ await createComponent({
+ props: {
+ currentPage,
+ resources: nodes,
+ pageInfo: { ...pageInfo, hasPreviousPage, hasNextPage },
+ totalCount: count,
+ },
});
+ });
- it(`shows the keyset control for previous page as ${previousPageState}`, () => {
- const disableAttr = findPrevBtn().attributes('disabled');
+ it('shows the right number of items', () => {
+ expect(findResourcesListItems()).toHaveLength(20);
+ });
- if (previousPageState === 'disabled') {
- expect(disableAttr).toBeDefined();
- } else {
- expect(disableAttr).toBeUndefined();
- }
- });
+ it(`shows the keyset control for previous page as ${previousPageState}`, () => {
+ const disableAttr = findPrevBtn().attributes('disabled');
- it(`shows the keyset control for next page as ${nextPageState}`, () => {
- const disableAttr = findNextBtn().attributes('disabled');
+ if (previousPageState === 'disabled') {
+ expect(disableAttr).toBeDefined();
+ } else {
+ expect(disableAttr).toBeUndefined();
+ }
+ });
- if (nextPageState === 'disabled') {
- expect(disableAttr).toBeDefined();
- } else {
- expect(disableAttr).toBeUndefined();
- }
- });
+ it(`shows the keyset control for next page as ${nextPageState}`, () => {
+ const disableAttr = findNextBtn().attributes('disabled');
- it('shows the correct count of current page', () => {
- expect(findPageCount().text()).toContain(pageText);
- });
- },
- );
+ if (nextPageState === 'disabled') {
+ expect(disableAttr).toBeDefined();
+ } else {
+ expect(disableAttr).toBeUndefined();
+ }
+ });
+
+ it('shows the correct count of current page', () => {
+ expect(findPageCount().text()).toContain(pageText);
+ });
+ });
describe('when there is an error getting the page count', () => {
beforeEach(() => {
diff --git a/spec/frontend/ci/catalog/components/pages/ci_resources_page_spec.js b/spec/frontend/ci/catalog/components/pages/ci_resources_page_spec.js
index e6fbd63f307..6fb5eed0d93 100644
--- a/spec/frontend/ci/catalog/components/pages/ci_resources_page_spec.js
+++ b/spec/frontend/ci/catalog/components/pages/ci_resources_page_spec.js
@@ -7,17 +7,25 @@ import createMockApollo from 'helpers/mock_apollo_helper';
import { createAlert } from '~/alert';
import CatalogHeader from '~/ci/catalog/components/list/catalog_header.vue';
-import CatalogSearch from '~/ci/catalog/components/list/catalog_search.vue';
import CiResourcesList from '~/ci/catalog/components/list/ci_resources_list.vue';
+import CiResourcesPage from '~/ci/catalog/components/pages/ci_resources_page.vue';
+import CatalogSearch from '~/ci/catalog/components/list/catalog_search.vue';
+import CatalogTabs from '~/ci/catalog/components/list/catalog_tabs.vue';
import CatalogListSkeletonLoader from '~/ci/catalog/components/list/catalog_list_skeleton_loader.vue';
import EmptyState from '~/ci/catalog/components/list/empty_state.vue';
+
import { cacheConfig, resolvers } from '~/ci/catalog/graphql/settings';
+import { DEFAULT_SORT_VALUE, SCOPE } from '~/ci/catalog/constants';
import typeDefs from '~/ci/catalog/graphql/typedefs.graphql';
-import ciResourcesPage from '~/ci/catalog/components/pages/ci_resources_page.vue';
import getCatalogResources from '~/ci/catalog/graphql/queries/get_ci_catalog_resources.query.graphql';
+import getCatalogResourcesCount from '~/ci/catalog/graphql/queries/get_ci_catalog_resources_count.query.graphql';
-import { emptyCatalogResponseBody, catalogResponseBody } from '../../mock';
+import {
+ emptyCatalogResponseBody,
+ catalogResponseBody,
+ catalogResourcesCountResponseBody,
+} from '../../mock';
Vue.use(VueApollo);
jest.mock('~/alert');
@@ -25,14 +33,23 @@ jest.mock('~/alert');
describe('CiResourcesPage', () => {
let wrapper;
let catalogResourcesResponse;
+ let catalogResourcesCountResponse;
- const defaultQueryVariables = { first: 20 };
+ const defaultQueryVariables = {
+ first: 20,
+ scope: SCOPE.all,
+ searchTerm: null,
+ sortValue: DEFAULT_SORT_VALUE,
+ };
const createComponent = () => {
- const handlers = [[getCatalogResources, catalogResourcesResponse]];
+ const handlers = [
+ [getCatalogResources, catalogResourcesResponse],
+ [getCatalogResourcesCount, catalogResourcesCountResponse],
+ ];
const mockApollo = createMockApollo(handlers, resolvers, { cacheConfig, typeDefs });
- wrapper = shallowMountExtended(ciResourcesPage, {
+ wrapper = shallowMountExtended(CiResourcesPage, {
apolloProvider: mockApollo,
});
@@ -41,12 +58,15 @@ describe('CiResourcesPage', () => {
const findCatalogHeader = () => wrapper.findComponent(CatalogHeader);
const findCatalogSearch = () => wrapper.findComponent(CatalogSearch);
+ const findCatalogTabs = () => wrapper.findComponent(CatalogTabs);
const findCiResourcesList = () => wrapper.findComponent(CiResourcesList);
const findLoadingState = () => wrapper.findComponent(CatalogListSkeletonLoader);
const findEmptyState = () => wrapper.findComponent(EmptyState);
beforeEach(() => {
catalogResourcesResponse = jest.fn();
+ catalogResourcesCountResponse = jest.fn();
+ catalogResourcesCountResponse.mockResolvedValue(catalogResourcesCountResponseBody);
});
describe('when initial queries are loading', () => {
@@ -83,31 +103,56 @@ describe('CiResourcesPage', () => {
expect(findCatalogSearch().exists()).toBe(true);
});
+ it('renders the tabs', () => {
+ expect(findCatalogTabs().exists()).toBe(true);
+ });
+
it('does not render the list', () => {
expect(findCiResourcesList().exists()).toBe(false);
});
});
describe('and there are resources', () => {
- const { nodes, pageInfo, count } = catalogResponseBody.data.ciCatalogResources;
+ const { nodes, pageInfo } = catalogResponseBody.data.ciCatalogResources;
beforeEach(async () => {
catalogResourcesResponse.mockResolvedValue(catalogResponseBody);
await createComponent();
});
+
it('renders the resources list', () => {
expect(findLoadingState().exists()).toBe(false);
expect(findEmptyState().exists()).toBe(false);
expect(findCiResourcesList().exists()).toBe(true);
});
+ it('renders the catalog tabs', () => {
+ expect(findCatalogTabs().exists()).toBe(true);
+ });
+
+ it('updates the scope after switching tabs', async () => {
+ await findCatalogTabs().vm.$emit('setScope', SCOPE.namespaces);
+
+ expect(catalogResourcesResponse).toHaveBeenCalledWith({
+ ...defaultQueryVariables,
+ scope: SCOPE.namespaces,
+ });
+
+ await findCatalogTabs().vm.$emit('setScope', SCOPE.all);
+
+ expect(catalogResourcesResponse).toHaveBeenCalledWith({
+ ...defaultQueryVariables,
+ scope: SCOPE.all,
+ });
+ });
+
it('passes down props to the resources list', () => {
expect(findCiResourcesList().props()).toMatchObject({
currentPage: 1,
resources: nodes,
pageInfo,
- totalCount: count,
+ totalCount: 0,
});
});
@@ -145,6 +190,7 @@ describe('CiResourcesPage', () => {
before: pageInfo.startCursor,
last: 20,
first: null,
+ scope: SCOPE.all,
});
}
});
@@ -190,10 +236,12 @@ describe('CiResourcesPage', () => {
beforeEach(async () => {
catalogResourcesResponse.mockResolvedValue(emptyCatalogResponseBody);
await createComponent();
- await findCatalogSearch().vm.$emit('update-search-term', newSearch);
});
- it('renders the empty state and passes down the search query', () => {
+ it('renders the empty state and passes down the search query', async () => {
+ await findCatalogSearch().vm.$emit('update-search-term', newSearch);
+ await waitForPromises();
+
expect(findEmptyState().exists()).toBe(true);
expect(findEmptyState().props().searchTerm).toBe(newSearch);
});
diff --git a/spec/frontend/ci/catalog/mock.js b/spec/frontend/ci/catalog/mock.js
index e370ac5054f..c9256435990 100644
--- a/spec/frontend/ci/catalog/mock.js
+++ b/spec/frontend/ci/catalog/mock.js
@@ -10,12 +10,26 @@ export const emptyCatalogResponseBody = {
hasPreviousPage: false,
__typename: 'PageInfo',
},
- count: 0,
nodes: [],
},
},
};
+export const catalogResourcesCountResponseBody = {
+ data: {
+ ciCatalogResources: {
+ all: {
+ count: 1,
+ __typename: 'CiCatalogResourceConnection',
+ },
+ namespaces: {
+ count: 7,
+ __typename: 'CiCatalogResourceConnection',
+ },
+ },
+ },
+};
+
export const catalogResponseBody = {
data: {
ciCatalogResources: {
@@ -28,7 +42,6 @@ export const catalogResponseBody = {
hasPreviousPage: false,
__typename: 'PageInfo',
},
- count: 41,
nodes: [
{
id: 'gid://gitlab/Ci::Catalog::Resource/129',
@@ -248,7 +261,6 @@ export const catalogSinglePageResponse = {
hasPreviousPage: false,
__typename: 'PageInfo',
},
- count: 3,
nodes: [
{
id: 'gid://gitlab/Ci::Catalog::Resource/132',
@@ -298,8 +310,8 @@ export const catalogSharedDataMock = {
latestVersion: {
__typename: 'Release',
id: '3',
- tagName: '1.0.0',
- tagPath: 'path/to/release',
+ name: '1.0.0',
+ path: 'path/to/release',
releasedAt: Date.now(),
author: { id: 1, webUrl: 'profile/1', name: 'username' },
},
@@ -344,7 +356,7 @@ export const catalogAdditionalDetailsMock = {
],
},
},
- tagName: 'v1.0.2',
+ name: 'v1.0.2',
releasedAt: '2022-08-23T17:19:09Z',
},
],
@@ -366,8 +378,8 @@ const generateResourcesNodes = (count = 20, startId = 0) => {
latestVersion: {
__typename: 'Release',
id: '3',
- tagName: '1.0.0',
- tagPath: 'path/to/release',
+ name: '1.0.0',
+ path: 'path/to/release',
releasedAt: Date.now(),
author: { id: 1, webUrl: 'profile/1', name: 'username' },
},
@@ -387,14 +399,14 @@ const componentsMockData = {
id: 'gid://gitlab/Ci::Component/1',
name: 'Ruby gal',
description: 'This is a pretty amazing component that does EVERYTHING ruby.',
- path: 'gitlab.com/gitlab-org/ruby-gal@~latest',
+ includePath: 'gitlab.com/gitlab-org/ruby-gal@~latest',
inputs: [{ name: 'version', default: '1.0.0', required: true }],
},
{
id: 'gid://gitlab/Ci::Component/2',
name: 'Javascript madness',
description: 'Adds some spice to your life.',
- path: 'gitlab.com/gitlab-org/javascript-madness@~latest',
+ includePath: 'gitlab.com/gitlab-org/javascript-madness@~latest',
inputs: [
{ name: 'isFun', default: 'true', required: true },
{ name: 'RandomNumber', default: '10', required: false },
@@ -404,7 +416,7 @@ const componentsMockData = {
id: 'gid://gitlab/Ci::Component/3',
name: 'Go go go',
description: 'When you write Go, you gotta go go go.',
- path: 'gitlab.com/gitlab-org/go-go-go@~latest',
+ includePath: 'gitlab.com/gitlab-org/go-go-go@~latest',
inputs: [{ name: 'version', default: '1.0.0', required: true }],
},
],
diff --git a/spec/frontend/ci/ci_variable_list/components/ci_environments_dropdown_spec.js b/spec/frontend/ci/ci_environments_dropdown/ci_environments_dropdown_spec.js
index 353b5fd3c47..d26827de57b 100644
--- a/spec/frontend/ci/ci_variable_list/components/ci_environments_dropdown_spec.js
+++ b/spec/frontend/ci/ci_environments_dropdown/ci_environments_dropdown_spec.js
@@ -1,14 +1,15 @@
import { GlListboxItem, GlCollapsibleListbox, GlDropdownDivider, GlIcon } from '@gitlab/ui';
import { mountExtended } from 'helpers/vue_test_utils_helper';
-import { allEnvironments, ENVIRONMENT_QUERY_LIMIT } from '~/ci/ci_variable_list/constants';
-import CiEnvironmentsDropdown from '~/ci/ci_variable_list/components/ci_environments_dropdown.vue';
+import CiEnvironmentsDropdown from '~/ci/common/private/ci_environments_dropdown';
describe('Ci environments dropdown', () => {
let wrapper;
const envs = ['dev', 'prod', 'staging'];
const defaultProps = {
+ isEnvironmentRequired: true,
areEnvironmentsLoading: false,
+ canCreateWildcard: true,
environments: envs,
selectedEnvironmentScope: '',
};
@@ -33,19 +34,43 @@ describe('Ci environments dropdown', () => {
findListbox().vm.$emit('search', searchTerm);
};
- describe('No environments found', () => {
- beforeEach(() => {
- createComponent({ searchTerm: 'stable' });
+ describe('create wildcard button', () => {
+ describe('when canCreateWildcard is true', () => {
+ beforeEach(() => {
+ createComponent({ props: { canCreateWildcard: true }, searchTerm: 'stable' });
+ });
+
+ it('renders create button during search', () => {
+ expect(findCreateWildcardButton().exists()).toBe(true);
+ });
});
- it('renders dropdown divider', () => {
- expect(findDropdownDivider().exists()).toBe(true);
+ describe('when canCreateWildcard is false', () => {
+ beforeEach(() => {
+ createComponent({ props: { canCreateWildcard: false }, searchTerm: 'stable' });
+ });
+
+ it('does not render create button during search', () => {
+ expect(findCreateWildcardButton().exists()).toBe(false);
+ });
});
+ });
- it('renders create button with search term if environments do not contain search term', () => {
- const button = findCreateWildcardButton();
- expect(button.exists()).toBe(true);
- expect(button.text()).toBe('Create wildcard: stable');
+ describe('No environments found', () => {
+ describe('default behavior', () => {
+ beforeEach(() => {
+ createComponent({ searchTerm: 'stable' });
+ });
+
+ it('renders dropdown divider', () => {
+ expect(findDropdownDivider().exists()).toBe(true);
+ });
+
+ it('renders create button with search term if environments do not contain search term', () => {
+ const button = findCreateWildcardButton();
+ expect(button.exists()).toBe(true);
+ expect(button.text()).toBe('Create wildcard: stable');
+ });
});
});
@@ -54,7 +79,7 @@ describe('Ci environments dropdown', () => {
createComponent({ props: { environments: envs } });
});
- it(`prepends * in listbox`, () => {
+ it('prepends * in listbox', () => {
expect(findListboxItemByIndex(0).text()).toBe('*');
});
@@ -67,6 +92,16 @@ describe('Ci environments dropdown', () => {
it('does not display active checkmark', () => {
expect(findActiveIconByIndex(0).classes('gl-visibility-hidden')).toBe(true);
});
+
+ describe('when isEnvironmentRequired is false', () => {
+ beforeEach(() => {
+ createComponent({ props: { isEnvironmentRequired: false, environments: envs } });
+ });
+
+ it('adds Not applicable as an option', () => {
+ expect(findListboxItemByIndex(1).text()).toBe('Not applicable');
+ });
+ });
});
describe('when `*` is the value of selectedEnvironmentScope props', () => {
@@ -77,7 +112,7 @@ describe('Ci environments dropdown', () => {
});
it('shows the `All environments` text and not the wildcard', () => {
- expect(findListboxText()).toContain(allEnvironments.text);
+ expect(findListboxText()).toContain('All (default)');
expect(findListboxText()).not.toContain(wildcardScope);
});
});
@@ -124,9 +159,9 @@ describe('Ci environments dropdown', () => {
expect(wrapper.emitted('search-environment-scope')[1]).toEqual([currentEnv]);
});
- it('displays note about max environments shown', () => {
+ it('displays note about max environments', () => {
expect(findMaxEnvNote().exists()).toBe(true);
- expect(findMaxEnvNote().text()).toContain(String(ENVIRONMENT_QUERY_LIMIT));
+ expect(findMaxEnvNote().text()).toContain('30');
});
});
diff --git a/spec/frontend/ci/ci_variable_list/utils_spec.js b/spec/frontend/ci/ci_environments_dropdown/utils_spec.js
index fbcf0e7c5a5..6da0d7cdbca 100644
--- a/spec/frontend/ci/ci_variable_list/utils_spec.js
+++ b/spec/frontend/ci/ci_environments_dropdown/utils_spec.js
@@ -1,13 +1,19 @@
-import { convertEnvironmentScope, mapEnvironmentNames } from '~/ci/ci_variable_list/utils';
-import { allEnvironments } from '~/ci/ci_variable_list/constants';
+import {
+ convertEnvironmentScope,
+ mapEnvironmentNames,
+} from '~/ci/common/private/ci_environments_dropdown';
describe('utils', () => {
describe('convertEnvironmentScope', () => {
it('converts the * to the `All environments` text', () => {
- expect(convertEnvironmentScope('*')).toBe(allEnvironments.text);
+ expect(convertEnvironmentScope('*')).toBe('All (default)');
});
- it('returns the environment as is if not the *', () => {
+  it('returns `Not applicable` unchanged', () => {
+ expect(convertEnvironmentScope('Not applicable')).toBe('Not applicable');
+ });
+
+ it('returns other environments as-is', () => {
expect(convertEnvironmentScope('prod')).toBe('prod');
});
});
diff --git a/spec/frontend/ci/ci_variable_list/components/ci_group_variables_spec.js b/spec/frontend/ci/ci_variable_list/components/ci_group_variables_spec.js
index 567a49d663c..0b5440d1bee 100644
--- a/spec/frontend/ci/ci_variable_list/components/ci_group_variables_spec.js
+++ b/spec/frontend/ci/ci_variable_list/components/ci_group_variables_spec.js
@@ -9,7 +9,7 @@ import {
DELETE_MUTATION_ACTION,
UPDATE_MUTATION_ACTION,
} from '~/ci/ci_variable_list/constants';
-import getGroupEnvironments from '~/ci/ci_variable_list/graphql/queries/group_environments.query.graphql';
+import { getGroupEnvironments } from '~/ci/common/private/ci_environments_dropdown';
import getGroupVariables from '~/ci/ci_variable_list/graphql/queries/group_variables.query.graphql';
import addGroupVariable from '~/ci/ci_variable_list/graphql/mutations/group_add_variable.mutation.graphql';
import deleteGroupVariable from '~/ci/ci_variable_list/graphql/mutations/group_delete_variable.mutation.graphql';
diff --git a/spec/frontend/ci/ci_variable_list/components/ci_project_variables_spec.js b/spec/frontend/ci/ci_variable_list/components/ci_project_variables_spec.js
index 69b0d4261b2..66a085f2661 100644
--- a/spec/frontend/ci/ci_variable_list/components/ci_project_variables_spec.js
+++ b/spec/frontend/ci/ci_variable_list/components/ci_project_variables_spec.js
@@ -9,7 +9,7 @@ import {
DELETE_MUTATION_ACTION,
UPDATE_MUTATION_ACTION,
} from '~/ci/ci_variable_list/constants';
-import getProjectEnvironments from '~/ci/ci_variable_list/graphql/queries/project_environments.query.graphql';
+import { getProjectEnvironments } from '~/ci/common/private/ci_environments_dropdown';
import getProjectVariables from '~/ci/ci_variable_list/graphql/queries/project_variables.query.graphql';
import addProjectVariable from '~/ci/ci_variable_list/graphql/mutations/project_add_variable.mutation.graphql';
import deleteProjectVariable from '~/ci/ci_variable_list/graphql/mutations/project_delete_variable.mutation.graphql';
diff --git a/spec/frontend/ci/ci_variable_list/components/ci_variable_drawer_spec.js b/spec/frontend/ci/ci_variable_list/components/ci_variable_drawer_spec.js
index 721e2b831fc..645aaf798d4 100644
--- a/spec/frontend/ci/ci_variable_list/components/ci_variable_drawer_spec.js
+++ b/spec/frontend/ci/ci_variable_list/components/ci_variable_drawer_spec.js
@@ -11,7 +11,7 @@ import {
} from '@gitlab/ui';
import { mountExtended, shallowMountExtended } from 'helpers/vue_test_utils_helper';
import { helpPagePath } from '~/helpers/help_page_helper';
-import CiEnvironmentsDropdown from '~/ci/ci_variable_list/components/ci_environments_dropdown.vue';
+import CiEnvironmentsDropdown from '~/ci/common/private/ci_environments_dropdown';
import CiVariableDrawer from '~/ci/ci_variable_list/components/ci_variable_drawer.vue';
import { awsTokenList } from '~/ci/ci_variable_list/components/ci_variable_autocomplete_tokens';
import {
@@ -113,6 +113,10 @@ describe('CI Variable Drawer', () => {
helpPagePath('ci/variables/index', { anchor: 'define-a-cicd-variable-in-the-ui' }),
);
});
+
+ it('value field is resizable', () => {
+ expect(findValueField().props('noResize')).toBe(false);
+ });
});
describe('validations', () => {
@@ -513,7 +517,7 @@ describe('CI Variable Drawer', () => {
it('title and confirm button renders the correct text', () => {
expect(findTitle().text()).toBe('Edit variable');
- expect(findConfirmBtn().text()).toBe('Edit variable');
+ expect(findConfirmBtn().text()).toBe('Save changes');
});
it('dispatches the edit-variable event', async () => {
diff --git a/spec/frontend/ci/ci_variable_list/components/ci_variable_settings_spec.js b/spec/frontend/ci/ci_variable_list/components/ci_variable_settings_spec.js
index 01d3cdf504d..078958fe44a 100644
--- a/spec/frontend/ci/ci_variable_list/components/ci_variable_settings_spec.js
+++ b/spec/frontend/ci/ci_variable_list/components/ci_variable_settings_spec.js
@@ -2,14 +2,12 @@ import { shallowMount } from '@vue/test-utils';
import CiVariableSettings from '~/ci/ci_variable_list/components/ci_variable_settings.vue';
import CiVariableTable from '~/ci/ci_variable_list/components/ci_variable_table.vue';
import CiVariableDrawer from '~/ci/ci_variable_list/components/ci_variable_drawer.vue';
-
import {
ADD_VARIABLE_ACTION,
EDIT_VARIABLE_ACTION,
projectString,
} from '~/ci/ci_variable_list/constants';
-import { mapEnvironmentNames } from '~/ci/ci_variable_list/utils';
-
+import { mapEnvironmentNames } from '~/ci/common/private/ci_environments_dropdown';
import { mockEnvs, mockVariablesWithScopes, newVariable } from '../mocks';
describe('Ci variable table', () => {
diff --git a/spec/frontend/ci/ci_variable_list/components/ci_variable_shared_spec.js b/spec/frontend/ci/ci_variable_list/components/ci_variable_shared_spec.js
index c90ff4cc682..f9c1cbe0d30 100644
--- a/spec/frontend/ci/ci_variable_list/components/ci_variable_shared_spec.js
+++ b/spec/frontend/ci/ci_variable_list/components/ci_variable_shared_spec.js
@@ -11,13 +11,11 @@ import { resolvers } from '~/ci/ci_variable_list/graphql/settings';
import ciVariableShared from '~/ci/ci_variable_list/components/ci_variable_shared.vue';
import ciVariableSettings from '~/ci/ci_variable_list/components/ci_variable_settings.vue';
import ciVariableTable from '~/ci/ci_variable_list/components/ci_variable_table.vue';
-import getProjectEnvironments from '~/ci/ci_variable_list/graphql/queries/project_environments.query.graphql';
+import { getProjectEnvironments } from '~/ci/common/private/ci_environments_dropdown';
import getAdminVariables from '~/ci/ci_variable_list/graphql/queries/variables.query.graphql';
import getGroupVariables from '~/ci/ci_variable_list/graphql/queries/group_variables.query.graphql';
import getProjectVariables from '~/ci/ci_variable_list/graphql/queries/project_variables.query.graphql';
-
import {
- ENVIRONMENT_QUERY_LIMIT,
environmentFetchErrorText,
genericMutationErrorText,
variableFetchErrorText,
@@ -230,7 +228,7 @@ describe('Ci Variable Shared Component', () => {
it('initial query is called with the correct variables', () => {
expect(mockEnvironments).toHaveBeenCalledWith({
- first: ENVIRONMENT_QUERY_LIMIT,
+ first: 30,
fullPath: '/namespace/project/',
search: '',
});
diff --git a/spec/frontend/ci/ci_variable_list/mocks.js b/spec/frontend/ci/ci_variable_list/mocks.js
index 9c9c99ad5ea..35bca408f17 100644
--- a/spec/frontend/ci/ci_variable_list/mocks.js
+++ b/spec/frontend/ci/ci_variable_list/mocks.js
@@ -20,7 +20,7 @@ import updateProjectVariable from '~/ci/ci_variable_list/graphql/mutations/proje
import getAdminVariables from '~/ci/ci_variable_list/graphql/queries/variables.query.graphql';
import getGroupVariables from '~/ci/ci_variable_list/graphql/queries/group_variables.query.graphql';
-import getProjectEnvironments from '~/ci/ci_variable_list/graphql/queries/project_environments.query.graphql';
+import { getProjectEnvironments } from '~/ci/common/private/ci_environments_dropdown';
import getProjectVariables from '~/ci/ci_variable_list/graphql/queries/project_variables.query.graphql';
export const devName = 'dev';
diff --git a/spec/frontend/ci/pipeline_details/test_reports/mock_data.js b/spec/frontend/ci/pipeline_details/test_reports/mock_data.js
index 7c9f9287c86..643863c9d24 100644
--- a/spec/frontend/ci/pipeline_details/test_reports/mock_data.js
+++ b/spec/frontend/ci/pipeline_details/test_reports/mock_data.js
@@ -1,4 +1,4 @@
-import { TestStatus } from '~/ci/pipeline_details/constants';
+import { testStatus } from '~/ci/pipeline_details/constants';
export default [
{
@@ -7,7 +7,7 @@ export default [
execution_time: 0,
name: 'Test#skipped text',
stack_trace: null,
- status: TestStatus.SKIPPED,
+ status: testStatus.SKIPPED,
system_output: null,
},
{
@@ -16,7 +16,7 @@ export default [
execution_time: 0,
name: 'Test#error text',
stack_trace: null,
- status: TestStatus.ERROR,
+ status: testStatus.ERROR,
system_output: null,
},
{
@@ -25,7 +25,7 @@ export default [
execution_time: 0,
name: 'Test#unknown text',
stack_trace: null,
- status: TestStatus.UNKNOWN,
+ status: testStatus.UNKNOWN,
system_output: null,
},
];
diff --git a/spec/frontend/ci/pipeline_details/test_reports/test_reports_spec.js b/spec/frontend/ci/pipeline_details/test_reports/test_reports_spec.js
index d318aa36bcf..836c35977b4 100644
--- a/spec/frontend/ci/pipeline_details/test_reports/test_reports_spec.js
+++ b/spec/frontend/ci/pipeline_details/test_reports/test_reports_spec.js
@@ -5,7 +5,12 @@ import Vue from 'vue';
import Vuex from 'vuex';
import testReports from 'test_fixtures/pipelines/test_report.json';
import { extendedWrapper } from 'helpers/vue_test_utils_helper';
-import { getParameterValues } from '~/lib/utils/url_utility';
+import {
+ getParameterValues,
+ updateHistory,
+ removeParams,
+ setUrlParams,
+} from '~/lib/utils/url_utility';
import EmptyState from '~/ci/pipeline_details/test_reports/empty_state.vue';
import TestReports from '~/ci/pipeline_details/test_reports/test_reports.vue';
import TestSummary from '~/ci/pipeline_details/test_reports/test_summary.vue';
@@ -17,6 +22,9 @@ Vue.use(Vuex);
jest.mock('~/lib/utils/url_utility', () => ({
...jest.requireActual('~/lib/utils/url_utility'),
getParameterValues: jest.fn().mockReturnValue([]),
+ updateHistory: jest.fn().mockName('updateHistory'),
+ removeParams: jest.fn().mockName('removeParams'),
+ setUrlParams: jest.fn().mockName('setUrlParams'),
}));
describe('Test reports app', () => {
@@ -36,7 +44,7 @@ describe('Test reports app', () => {
removeSelectedSuiteIndex: jest.fn(),
};
- const createComponent = ({ state = {} } = {}) => {
+ const createComponent = ({ state = {}, getterStubs = {} } = {}) => {
store = new Vuex.Store({
modules: {
testReports: {
@@ -48,7 +56,10 @@ describe('Test reports app', () => {
...state,
},
actions: actionSpies,
- getters,
+ getters: {
+ ...getters,
+ ...getterStubs,
+ },
},
},
});
@@ -124,24 +135,41 @@ describe('Test reports app', () => {
describe('when a suite is clicked', () => {
beforeEach(() => {
- createComponent({ state: { hasFullReport: true } });
+ document.title = 'Test reports';
+ createComponent({
+ state: { hasFullReport: true },
+      getterStubs: { getSelectedSuite: jest.fn().mockReturnValue({ name: 'test' }) },
+ });
testSummaryTable().vm.$emit('row-click', 0);
});
- it('should call setSelectedSuiteIndex and fetchTestSuite', () => {
- expect(actionSpies.setSelectedSuiteIndex).toHaveBeenCalled();
- expect(actionSpies.fetchTestSuite).toHaveBeenCalled();
+ it('should call setSelectedSuiteIndex, fetchTestSuite and updateHistory', () => {
+    expect(actionSpies.setSelectedSuiteIndex).toHaveBeenCalledWith(expect.any(Object), 0);
+    expect(actionSpies.fetchTestSuite).toHaveBeenCalledWith(expect.any(Object), 0);
+ expect(setUrlParams).toHaveBeenCalledWith({ job_name: undefined });
+ expect(updateHistory).toHaveBeenCalledWith({
+ replace: true,
+ title: 'Test reports',
+ url: undefined,
+ });
});
});
describe('when clicking back to summary', () => {
beforeEach(() => {
+ document.title = 'Test reports';
createComponent({ state: { selectedSuiteIndex: 0 } });
testSummary().vm.$emit('on-back-click');
});
- it('should call removeSelectedSuiteIndex', () => {
+ it('should call removeSelectedSuiteIndex and updateHistory', () => {
expect(actionSpies.removeSelectedSuiteIndex).toHaveBeenCalled();
+ expect(removeParams).toHaveBeenCalledWith(['job_name']);
+ expect(updateHistory).toHaveBeenCalledWith({
+ replace: true,
+ title: 'Test reports',
+ url: undefined,
+ });
});
});
});
diff --git a/spec/frontend/ci/pipeline_details/test_reports/test_suite_table_spec.js b/spec/frontend/ci/pipeline_details/test_reports/test_suite_table_spec.js
index 5bdea6bbcbf..181b8df31f4 100644
--- a/spec/frontend/ci/pipeline_details/test_reports/test_suite_table_spec.js
+++ b/spec/frontend/ci/pipeline_details/test_reports/test_suite_table_spec.js
@@ -5,7 +5,7 @@ import Vuex from 'vuex';
import testReports from 'test_fixtures/pipelines/test_report.json';
import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
import SuiteTable, { i18n } from '~/ci/pipeline_details/test_reports/test_suite_table.vue';
-import { TestStatus } from '~/ci/pipeline_details/constants';
+import { testStatus } from '~/ci/pipeline_details/constants';
import * as getters from '~/ci/pipeline_details/stores/test_reports/getters';
import { formatFilePath } from '~/ci/pipeline_details/stores/test_reports/utils';
import { ARTIFACTS_EXPIRED_ERROR_MESSAGE } from '~/ci/pipeline_details/stores/test_reports/constants';
@@ -92,10 +92,10 @@ describe('Test reports suite table', () => {
});
it.each([
- TestStatus.ERROR,
- TestStatus.FAILED,
- TestStatus.SKIPPED,
- TestStatus.SUCCESS,
+ testStatus.ERROR,
+ testStatus.FAILED,
+ testStatus.SKIPPED,
+ testStatus.SUCCESS,
'unknown',
])('renders the correct icon for test case with %s status', (status) => {
const test = testCases.findIndex((x) => x.status === status);
diff --git a/spec/frontend/ci/pipeline_editor/components/popovers/walkthrough_popover_spec.js b/spec/frontend/ci/pipeline_editor/components/popovers/walkthrough_popover_spec.js
index 37339b1c422..d379da390a4 100644
--- a/spec/frontend/ci/pipeline_editor/components/popovers/walkthrough_popover_spec.js
+++ b/spec/frontend/ci/pipeline_editor/components/popovers/walkthrough_popover_spec.js
@@ -1,24 +1,23 @@
-import { mount, shallowMount } from '@vue/test-utils';
-import Vue from 'vue';
+import { shallowMount } from '@vue/test-utils';
+import { GlButton } from '@gitlab/ui';
import WalkthroughPopover from '~/ci/pipeline_editor/components/popovers/walkthrough_popover.vue';
-import { extendedWrapper } from 'helpers/vue_test_utils_helper';
-
-Vue.config.ignoredElements = ['gl-emoji'];
describe('WalkthroughPopover component', () => {
let wrapper;
- const createComponent = (mountFn = shallowMount) => {
- return extendedWrapper(mountFn(WalkthroughPopover));
+ const createComponent = () => {
+ wrapper = shallowMount(WalkthroughPopover, {
+ components: {
+ GlEmoji: { template: '<img/>' },
+ },
+ });
};
describe('CTA button clicked', () => {
- beforeEach(async () => {
- wrapper = createComponent(mount);
- await wrapper.findByTestId('ctaBtn').trigger('click');
- });
-
it('emits "walkthrough-popover-cta-clicked" event', () => {
+      createComponent();
+ wrapper.findComponent(GlButton).vm.$emit('click');
+
expect(wrapper.emitted()['walkthrough-popover-cta-clicked']).toHaveLength(1);
});
});
diff --git a/spec/frontend/ci/pipelines_page/components/empty_state/pipelines_ci_templates_spec.js b/spec/frontend/ci/pipelines_page/components/empty_state/pipelines_ci_templates_spec.js
index f824dab9ae1..96de1d18aa2 100644
--- a/spec/frontend/ci/pipelines_page/components/empty_state/pipelines_ci_templates_spec.js
+++ b/spec/frontend/ci/pipelines_page/components/empty_state/pipelines_ci_templates_spec.js
@@ -18,6 +18,9 @@ describe('Pipelines CI Templates', () => {
showJenkinsCiPrompt: false,
...propsData,
},
+ components: {
+ GlEmoji: { template: '<img/>' },
+ },
stubs,
});
};
diff --git a/spec/frontend/ci/runner/components/runner_job_status_badge_spec.js b/spec/frontend/ci/runner/components/runner_job_status_badge_spec.js
index c4476d01386..adb07d4086d 100644
--- a/spec/frontend/ci/runner/components/runner_job_status_badge_spec.js
+++ b/spec/frontend/ci/runner/components/runner_job_status_badge_spec.js
@@ -35,8 +35,7 @@ describe('RunnerTypeBadge', () => {
expect(findBadge().classes().sort()).toEqual(
[
...classes,
- 'gl-border',
- 'gl-display-inline-block',
+ 'gl-inset-border-1-gray-400',
'gl-max-w-full',
'gl-text-truncate',
'gl-bg-transparent!',
diff --git a/spec/frontend/clusters/agents/components/show_spec.js b/spec/frontend/clusters/agents/components/show_spec.js
index 019f789d875..8a40c528c1d 100644
--- a/spec/frontend/clusters/agents/components/show_spec.js
+++ b/spec/frontend/clusters/agents/components/show_spec.js
@@ -76,6 +76,7 @@ describe('ClusterAgentShow', () => {
const findPaginationButtons = () => wrapper.findComponent(GlKeysetPagination);
const findTokenCount = () => wrapper.findByTestId('cluster-agent-token-count').text();
const findEESecurityTabSlot = () => wrapper.findByTestId('ee-security-tab');
+ const findEEWorkspacesTabSlot = () => wrapper.findByTestId('ee-workspaces-tab');
const findActivity = () => wrapper.findComponent(ActivityEvents);
const findIntegrationStatus = () => wrapper.findComponent(IntegrationStatus);
@@ -253,4 +254,23 @@ describe('ClusterAgentShow', () => {
expect(findEESecurityTabSlot().exists()).toBe(true);
});
});
+
+ describe('ee-workspaces-tab slot', () => {
+ it('does not display when a slot is not passed in', async () => {
+ createWrapperWithoutApollo({ clusterAgent: defaultClusterAgent });
+ await nextTick();
+ expect(findEEWorkspacesTabSlot().exists()).toBe(false);
+ });
+
+ it('does display when a slot is passed in', async () => {
+ createWrapperWithoutApollo({
+ clusterAgent: defaultClusterAgent,
+ slots: {
+ 'ee-workspaces-tab': `<gl-tab data-testid="ee-workspaces-tab">Workspaces Tab!</gl-tab>`,
+ },
+ });
+ await nextTick();
+ expect(findEEWorkspacesTabSlot().exists()).toBe(true);
+ });
+ });
});
diff --git a/spec/frontend/comment_templates/components/form_spec.js b/spec/frontend/comment_templates/components/form_spec.js
index b48feba5290..ab368a42483 100644
--- a/spec/frontend/comment_templates/components/form_spec.js
+++ b/spec/frontend/comment_templates/components/form_spec.js
@@ -74,7 +74,7 @@ describe('Comment templates form component', () => {
name: 'Test',
});
expect(trackingSpy).toHaveBeenCalledWith(
- expect.any(String),
+ undefined,
'i_code_review_saved_replies_create',
expect.any(Object),
);
@@ -135,6 +135,18 @@ describe('Comment templates form component', () => {
expect(findSubmitBtn().props('loading')).toBe(false);
});
+
+ it('shows markdown preview button', () => {
+ wrapper = createComponent();
+
+ expect(wrapper.text()).toContain('Preview');
+ });
+
+ it('allows switching to rich text editor', () => {
+ wrapper = createComponent();
+
+ expect(wrapper.text()).toContain('Switch to rich text editing');
+ });
});
describe('updates saved reply', () => {
diff --git a/spec/frontend/commit/components/signature_badge_spec.js b/spec/frontend/commit/components/signature_badge_spec.js
index d52ad2b43e2..4e8ad8e12f1 100644
--- a/spec/frontend/commit/components/signature_badge_spec.js
+++ b/spec/frontend/commit/components/signature_badge_spec.js
@@ -37,6 +37,7 @@ describe('Commit signature', () => {
describe.each`
signatureType | verificationStatus
${signatureTypes.GPG} | ${verificationStatuses.VERIFIED}
+ ${signatureTypes.GPG} | ${verificationStatuses.VERIFIED_SYSTEM}
${signatureTypes.GPG} | ${verificationStatuses.UNVERIFIED}
${signatureTypes.GPG} | ${verificationStatuses.UNVERIFIED_KEY}
${signatureTypes.GPG} | ${verificationStatuses.UNKNOWN_KEY}
diff --git a/spec/frontend/content_editor/components/wrappers/table_cell_base_spec.js b/spec/frontend/content_editor/components/wrappers/table_cell_base_spec.js
index 94628f2b2c5..9f233f2f412 100644
--- a/spec/frontend/content_editor/components/wrappers/table_cell_base_spec.js
+++ b/spec/frontend/content_editor/components/wrappers/table_cell_base_spec.js
@@ -149,6 +149,10 @@ describe('content/components/wrappers/table_cell_base', () => {
},
);
+ it('does not show alignment options for table cells', () => {
+ expect(findDropdown().text()).not.toContain('Align');
+ });
+
describe("when current row is the table's header", () => {
beforeEach(async () => {
// Remove 2 rows condition
@@ -180,6 +184,44 @@ describe('content/components/wrappers/table_cell_base', () => {
});
describe.each`
+ currentAlignment | visibleOptions | newAlignment | command
+ ${'left'} | ${['center', 'right']} | ${'center'} | ${'alignColumnCenter'}
+ ${'center'} | ${['left', 'right']} | ${'right'} | ${'alignColumnRight'}
+ ${'right'} | ${['left', 'center']} | ${'left'} | ${'alignColumnLeft'}
+ `(
+ 'when align=$currentAlignment',
+ ({ currentAlignment, visibleOptions, newAlignment, command }) => {
+ beforeEach(async () => {
+ Object.assign(node.attrs, { align: currentAlignment });
+
+ createWrapper({ cellType: 'th' });
+
+ await nextTick();
+ });
+
+ visibleOptions.forEach((alignment) => {
+ it(`shows "Align column ${alignment}" option`, () => {
+ expect(findDropdown().text()).toContain(`Align column ${alignment}`);
+ });
+ });
+
+ it(`does not show "Align column ${currentAlignment}" option`, () => {
+ expect(findDropdown().text()).not.toContain(`Align column ${currentAlignment}`);
+ });
+
+ it('allows changing alignment', async () => {
+ const mocks = mockChainedCommands(editor, [command, 'run']);
+
+ await wrapper
+ .findByRole('button', { name: `Align column ${newAlignment}` })
+ .trigger('click');
+
+ expect(mocks[command]).toHaveBeenCalled();
+ });
+ },
+ );
+
+ describe.each`
attrs | rect
${{ rowspan: 2 }} | ${{ top: 0, left: 0, bottom: 2, right: 1 }}
${{ colspan: 2 }} | ${{ top: 0, left: 0, bottom: 1, right: 2 }}
diff --git a/spec/frontend/content_editor/extensions/copy_paste_spec.js b/spec/frontend/content_editor/extensions/copy_paste_spec.js
index 6969f4985a1..801385422d7 100644
--- a/spec/frontend/content_editor/extensions/copy_paste_spec.js
+++ b/spec/frontend/content_editor/extensions/copy_paste_spec.js
@@ -92,7 +92,7 @@ describe('content_editor/extensions/copy_paste', () => {
return Object.assign(new Event(eventName), {
clipboardData: {
types,
- getData: jest.fn((type) => data[type] || defaultData[type]),
+ getData: jest.fn((type) => data[type] ?? defaultData[type]),
setData: jest.fn(),
clearData: jest.fn(),
},
@@ -190,6 +190,17 @@ describe('content_editor/extensions/copy_paste', () => {
});
});
+ it('does not handle pasting when textContent is empty (eg. images)', async () => {
+ expect(
+ await triggerPasteEventHandler(
+ buildClipboardEvent({
+ types: ['text/plain'],
+ data: { 'text/plain': '' },
+ }),
+ ),
+ ).toBe(false);
+ });
+
describe('when pasting raw markdown source', () => {
it('shows a loading indicator while markdown is being processed', async () => {
await triggerPasteEventHandler(buildClipboardEvent());
diff --git a/spec/frontend/content_editor/extensions/task_item_spec.js b/spec/frontend/content_editor/extensions/task_item_spec.js
new file mode 100644
index 00000000000..a38a68112cd
--- /dev/null
+++ b/spec/frontend/content_editor/extensions/task_item_spec.js
@@ -0,0 +1,115 @@
+import TaskList from '~/content_editor/extensions/task_list';
+import TaskItem from '~/content_editor/extensions/task_item';
+import { createTestEditor, createDocBuilder } from '../test_utils';
+
+describe('content_editor/extensions/task_item', () => {
+ let tiptapEditor;
+ let doc;
+ let p;
+ let taskList;
+ let taskItem;
+
+ beforeEach(() => {
+ tiptapEditor = createTestEditor({ extensions: [TaskList, TaskItem] });
+
+ ({
+ builders: { doc, p, taskList, taskItem },
+ } = createDocBuilder({
+ tiptapEditor,
+ names: {
+ taskItem: { nodeType: TaskItem.name },
+ taskList: { nodeType: TaskList.name },
+ },
+ }));
+ });
+
+ it('renders a regular task item for non-inapplicable items', () => {
+ const initialDoc = doc(taskList(taskItem(p('foo'))));
+
+ tiptapEditor.commands.setContent(initialDoc.toJSON());
+
+ expect(tiptapEditor.view.dom.querySelector('li')).toMatchInlineSnapshot(`
+ <li
+ data-checked="false"
+ dir="auto"
+ >
+ <label>
+ <input
+ type="checkbox"
+ />
+ <span />
+ </label>
+ <div>
+ <p
+ dir="auto"
+ >
+ foo
+ </p>
+ </div>
+ </li>
+ `);
+ });
+
+ it('renders task item as disabled if it is inapplicable', () => {
+ const initialDoc = doc(taskList(taskItem({ inapplicable: true }, p('foo'))));
+
+ tiptapEditor.commands.setContent(initialDoc.toJSON());
+
+ expect(tiptapEditor.view.dom.querySelector('li')).toMatchInlineSnapshot(`
+ <li
+ data-checked="false"
+ data-inapplicable="true"
+ dir="auto"
+ >
+ <label>
+ <input
+ disabled=""
+ type="checkbox"
+ />
+ <span />
+ </label>
+ <div>
+ <p
+ dir="auto"
+ >
+ foo
+ </p>
+ </div>
+ </li>
+ `);
+ });
+
+ it('ignores any <s> tags in the task item', () => {
+ tiptapEditor.commands.setContent(`
+ <ul dir="auto" class="task-list">
+ <li class="task-list-item inapplicable">
+ <input disabled="" data-inapplicable="" class="task-list-item-checkbox" type="checkbox">
+ <s>foo</s>
+ </li>
+ </ul>
+ `);
+
+ expect(tiptapEditor.view.dom.querySelector('li')).toMatchInlineSnapshot(`
+ <li
+ data-checked="false"
+ data-inapplicable="true"
+ dir="auto"
+ >
+ <label>
+ <input
+ disabled=""
+ type="checkbox"
+ />
+ <span />
+ </label>
+ <div>
+ <p
+ dir="auto"
+ >
+ foo
+ </p>
+ </div>
+ </li>
+ `);
+ });
+});
diff --git a/spec/frontend/content_editor/services/markdown_serializer_spec.js b/spec/frontend/content_editor/services/markdown_serializer_spec.js
index c329a12bcc4..4ae39f7a5a7 100644
--- a/spec/frontend/content_editor/services/markdown_serializer_spec.js
+++ b/spec/frontend/content_editor/services/markdown_serializer_spec.js
@@ -660,6 +660,24 @@ var a = 0;
);
});
+ it('correctly serializes a task list with inapplicable items', () => {
+ expect(
+ serialize(
+ taskList(
+ taskItem({ checked: true }, paragraph('list item 1')),
+ taskItem({ checked: true, inapplicable: true }, paragraph('list item 2')),
+ taskItem(paragraph('list item 3')),
+ ),
+ ),
+ ).toBe(
+ `
+* [x] list item 1
+* [~] list item 2
+* [ ] list item 3
+ `.trim(),
+ );
+ });
+
it('correctly serializes bullet task list with different bullet styles', () => {
expect(
serialize(
@@ -1080,6 +1098,38 @@ _An elephant at sunset_
);
});
+ it('correctly serializes a table with inline content with alignment', () => {
+ expect(
+ serialize(
+ table(
+ // each table cell must contain at least one paragraph
+ tableRow(
+ tableHeader({ align: 'center' }, paragraph('header')),
+ tableHeader({ align: 'right' }, paragraph('header')),
+ tableHeader({ align: 'left' }, paragraph('header')),
+ ),
+ tableRow(
+ tableCell(paragraph('cell')),
+ tableCell(paragraph('cell')),
+ tableCell(paragraph('cell')),
+ ),
+ tableRow(
+ tableCell(paragraph('cell')),
+ tableCell(paragraph('cell')),
+ tableCell(paragraph('cell')),
+ ),
+ ),
+ ).trim(),
+ ).toBe(
+ `
+| header | header | header |
+|:------:|-------:|--------|
+| cell | cell | cell |
+| cell | cell | cell |
+ `.trim(),
+ );
+ });
+
it('correctly serializes a table with a pipe in a cell', () => {
expect(
serialize(
diff --git a/spec/frontend/content_editor/services/markdown_sourcemap_spec.js b/spec/frontend/content_editor/services/markdown_sourcemap_spec.js
index 4428fa682e7..f904f138e85 100644
--- a/spec/frontend/content_editor/services/markdown_sourcemap_spec.js
+++ b/spec/frontend/content_editor/services/markdown_sourcemap_spec.js
@@ -20,6 +20,17 @@ const BULLET_LIST_HTML = `<ul data-sourcepos="1:1-3:24" dir="auto">
</li>
</ul>`;
+const MALFORMED_BULLET_LIST_HTML =
+ `<ul data-sourcepos="1:1-3:24" dir="auto">
+ <li data-sourcepos="1:1-1:13">list item 1</li>` +
+ // below line has malformed sourcepos
+ `<li data-sourcepos="5:1-5:24">list item 2
+ <ul data-sourcepos="3:3-3:24">
+ <li data-sourcepos="3:3-3:24">embedded list item 3</li>
+ </ul>
+ </li>
+</ul>`;
+
const BULLET_TASK_LIST_MARKDOWN = `- [ ] list item 1
+ [x] checked list item 2
+ [ ] embedded list item 1
@@ -85,6 +96,21 @@ const bulletListDoc = () =>
),
);
+const bulletListDocWithMalformedSourcepos = () =>
+ doc(
+ bulletList(
+ { bullet: '+', source: '+ list item 1\n+ list item 2\n - embedded list item 3' },
+ listItem({ source: '+ list item 1' }, paragraph('list item 1')),
+ listItem(
+ paragraph('list item 2'),
+ bulletList(
+ { bullet: '-', source: '- embedded list item 3' },
+ listItem({ source: '- embedded list item 3' }, paragraph('embedded list item 3')),
+ ),
+ ),
+ ),
+ );
+
const bulletTaskListDoc = () =>
doc(
taskList(
@@ -138,9 +164,10 @@ describe('content_editor/services/markdown_sourcemap', () => {
});
it.each`
- description | sourceMarkdown | sourceHTML | expectedDoc
- ${'bullet list'} | ${BULLET_LIST_MARKDOWN} | ${BULLET_LIST_HTML} | ${bulletListDoc}
- ${'bullet task list'} | ${BULLET_TASK_LIST_MARKDOWN} | ${BULLET_TASK_LIST_HTML} | ${bulletTaskListDoc}
+ description | sourceMarkdown | sourceHTML | expectedDoc
+ ${'bullet list'} | ${BULLET_LIST_MARKDOWN} | ${BULLET_LIST_HTML} | ${bulletListDoc}
+ ${'bullet list with malformed sourcepos'} | ${BULLET_LIST_MARKDOWN} | ${MALFORMED_BULLET_LIST_HTML} | ${bulletListDocWithMalformedSourcepos}
+ ${'bullet task list'} | ${BULLET_TASK_LIST_MARKDOWN} | ${BULLET_TASK_LIST_HTML} | ${bulletTaskListDoc}
`(
'gets markdown source for a rendered $description',
async ({ sourceMarkdown, sourceHTML, expectedDoc }) => {
diff --git a/spec/frontend/custom_emoji/components/__snapshots__/list_spec.js.snap b/spec/frontend/custom_emoji/components/__snapshots__/list_spec.js.snap
index c69547deb1c..a43b4aae586 100644
--- a/spec/frontend/custom_emoji/components/__snapshots__/list_spec.js.snap
+++ b/spec/frontend/custom_emoji/components/__snapshots__/list_spec.js.snap
@@ -141,7 +141,7 @@ exports[`Custom emoji settings list component renders table of custom emoji 1`]
class="gl-vertical-align-middle!"
role="cell"
>
- <gl-emoji
+ <div
data-fallback-src="https://gitlab.com/custom_emoji/custom_emoji/-/raw/main/img/confused_husky.gif"
data-name="confused_husky"
data-unicode-version="custom"
diff --git a/spec/frontend/custom_emoji/components/list_spec.js b/spec/frontend/custom_emoji/components/list_spec.js
index b5729d59464..4177aea2d33 100644
--- a/spec/frontend/custom_emoji/components/list_spec.js
+++ b/spec/frontend/custom_emoji/components/list_spec.js
@@ -21,6 +21,9 @@ function createComponent(propsData = {}) {
userPermissions: { createCustomEmoji: true },
...propsData,
},
+ stubs: {
+ GlEmoji: { template: '<div/>' },
+ },
});
}
diff --git a/spec/frontend/deploy_keys/components/action_btn_spec.js b/spec/frontend/deploy_keys/components/action_btn_spec.js
index c4c7a9aea2d..e94734da4ce 100644
--- a/spec/frontend/deploy_keys/components/action_btn_spec.js
+++ b/spec/frontend/deploy_keys/components/action_btn_spec.js
@@ -1,28 +1,44 @@
+import VueApollo from 'vue-apollo';
+import Vue, { nextTick } from 'vue';
import { GlButton } from '@gitlab/ui';
import { shallowMount } from '@vue/test-utils';
-import { nextTick } from 'vue';
import data from 'test_fixtures/deploy_keys/keys.json';
+import waitForPromises from 'helpers/wait_for_promises';
+import createMockApollo from 'helpers/mock_apollo_helper';
+import enableKeyMutation from '~/deploy_keys/graphql/mutations/enable_key.mutation.graphql';
import actionBtn from '~/deploy_keys/components/action_btn.vue';
-import eventHub from '~/deploy_keys/eventhub';
+
+Vue.use(VueApollo);
describe('Deploy keys action btn', () => {
const deployKey = data.enabled_keys[0];
let wrapper;
+ let enableKeyMock;
const findButton = () => wrapper.findComponent(GlButton);
beforeEach(() => {
+ enableKeyMock = jest.fn();
+
+ const mockResolvers = {
+ Mutation: {
+ enableKey: enableKeyMock,
+ },
+ };
+
+ const apolloProvider = createMockApollo([], mockResolvers);
wrapper = shallowMount(actionBtn, {
propsData: {
deployKey,
- type: 'enable',
category: 'primary',
variant: 'confirm',
icon: 'edit',
+ mutation: enableKeyMutation,
},
slots: {
default: 'Enable',
},
+ apolloProvider,
});
});
@@ -38,13 +54,26 @@ describe('Deploy keys action btn', () => {
});
});
- it('sends eventHub event with btn type', async () => {
- jest.spyOn(eventHub, '$emit').mockImplementation(() => {});
-
+ it('fires the passed mutation', async () => {
findButton().vm.$emit('click');
await nextTick();
- expect(eventHub.$emit).toHaveBeenCalledWith('enable.key', deployKey, expect.anything());
+ expect(enableKeyMock).toHaveBeenCalledWith(
+ expect.anything(),
+ { id: deployKey.id },
+ expect.anything(),
+ expect.anything(),
+ );
+ });
+
+ it('emits the mutation error', async () => {
+ const error = new Error('oops!');
+ enableKeyMock.mockRejectedValue(error);
+ findButton().vm.$emit('click');
+
+ await waitForPromises();
+
+ expect(wrapper.emitted('error')).toEqual([[error]]);
});
it('shows loading spinner after click', async () => {
diff --git a/spec/frontend/deploy_keys/components/app_spec.js b/spec/frontend/deploy_keys/components/app_spec.js
index de4112134ce..5e012bc1c51 100644
--- a/spec/frontend/deploy_keys/components/app_spec.js
+++ b/spec/frontend/deploy_keys/components/app_spec.js
@@ -1,28 +1,45 @@
+import VueApollo from 'vue-apollo';
+import Vue, { nextTick } from 'vue';
import { mount } from '@vue/test-utils';
import MockAdapter from 'axios-mock-adapter';
-import { nextTick } from 'vue';
-import data from 'test_fixtures/deploy_keys/keys.json';
+import { GlPagination } from '@gitlab/ui';
+import enabledKeys from 'test_fixtures/deploy_keys/enabled_keys.json';
+import createMockApollo from 'helpers/mock_apollo_helper';
import waitForPromises from 'helpers/wait_for_promises';
-import { TEST_HOST } from 'spec/test_constants';
+import { captureException } from '~/sentry/sentry_browser_wrapper';
+import { mapDeployKey } from '~/deploy_keys/graphql/resolvers';
+import deployKeysQuery from '~/deploy_keys/graphql/queries/deploy_keys.query.graphql';
import deployKeysApp from '~/deploy_keys/components/app.vue';
import ConfirmModal from '~/deploy_keys/components/confirm_modal.vue';
import NavigationTabs from '~/vue_shared/components/navigation_tabs.vue';
-import eventHub from '~/deploy_keys/eventhub';
import axios from '~/lib/utils/axios_utils';
-import { HTTP_STATUS_OK } from '~/lib/utils/http_status';
-const TEST_ENDPOINT = `${TEST_HOST}/dummy/`;
+jest.mock('~/sentry/sentry_browser_wrapper');
+
+Vue.use(VueApollo);
describe('Deploy keys app component', () => {
let wrapper;
let mock;
+ let deployKeyMock;
+ let currentPageMock;
+ let currentScopeMock;
+ let confirmRemoveKeyMock;
+ let pageInfoMock;
+ let pageMutationMock;
+ let scopeMutationMock;
+ let disableKeyMock;
+ let resolvers;
const mountComponent = () => {
+ const apolloProvider = createMockApollo([[deployKeysQuery, deployKeyMock]], resolvers);
+
wrapper = mount(deployKeysApp, {
propsData: {
- endpoint: TEST_ENDPOINT,
+ projectPath: 'test/project',
projectId: '8',
},
+ apolloProvider,
});
return waitForPromises();
@@ -30,7 +47,28 @@ describe('Deploy keys app component', () => {
beforeEach(() => {
mock = new MockAdapter(axios);
- mock.onGet(TEST_ENDPOINT).reply(HTTP_STATUS_OK, data);
+ deployKeyMock = jest.fn();
+ currentPageMock = jest.fn();
+ currentScopeMock = jest.fn();
+ confirmRemoveKeyMock = jest.fn();
+ pageInfoMock = jest.fn();
+ scopeMutationMock = jest.fn();
+ pageMutationMock = jest.fn();
+ disableKeyMock = jest.fn();
+
+ resolvers = {
+ Query: {
+ currentPage: currentPageMock,
+ currentScope: currentScopeMock,
+ deployKeyToRemove: confirmRemoveKeyMock,
+ pageInfo: pageInfoMock,
+ },
+ Mutation: {
+ currentPage: pageMutationMock,
+ currentScope: scopeMutationMock,
+ disableKey: disableKeyMock,
+ },
+ };
});
afterEach(() => {
@@ -43,8 +81,7 @@ describe('Deploy keys app component', () => {
const findNavigationTabs = () => wrapper.findComponent(NavigationTabs);
it('renders loading icon while waiting for request', async () => {
- mock.onGet(TEST_ENDPOINT).reply(() => new Promise());
-
+ deployKeyMock.mockReturnValue(new Promise(() => {}));
mountComponent();
await nextTick();
@@ -52,85 +89,190 @@ describe('Deploy keys app component', () => {
});
it('renders keys panels', async () => {
+ const deployKeys = enabledKeys.keys.map(mapDeployKey);
+ deployKeyMock.mockReturnValue({
+ data: {
+ project: { id: 1, deployKeys, __typename: 'Project' },
+ },
+ });
await mountComponent();
expect(findKeyPanels().length).toBe(3);
});
- it.each`
- selector
- ${'.js-deployKeys-tab-enabled_keys'}
- ${'.js-deployKeys-tab-available_project_keys'}
- ${'.js-deployKeys-tab-public_keys'}
- `('$selector title exists', ({ selector }) => {
- return mountComponent().then(() => {
+ describe.each`
+ scope
+ ${'enabledKeys'}
+ ${'availableProjectKeys'}
+ ${'availablePublicKeys'}
+ `('tab $scope', ({ scope }) => {
+ let selector;
+
+ beforeEach(async () => {
+ selector = `.js-deployKeys-tab-${scope}`;
+ const deployKeys = enabledKeys.keys.map(mapDeployKey);
+ deployKeyMock.mockReturnValue({
+ data: {
+ project: { id: 1, deployKeys, __typename: 'Project' },
+ },
+ });
+
+ await mountComponent();
+ });
+
+ it('displays the title', () => {
const element = wrapper.find(selector);
expect(element.exists()).toBe(true);
});
+
+ it('triggers changing the scope on click', async () => {
+ await findNavigationTabs().vm.$emit('onChangeTab', scope);
+
+ expect(scopeMutationMock).toHaveBeenCalledWith(
+ expect.anything(),
+ { scope },
+ expect.anything(),
+ expect.anything(),
+ );
+ });
});
- it('does not render key panels when keys object is empty', () => {
- mock.onGet(TEST_ENDPOINT).reply(HTTP_STATUS_OK, []);
+ it('captures a failed tab change', async () => {
+ const scope = 'fake scope';
+ const error = new Error('fail!');
- return mountComponent().then(() => {
- expect(findKeyPanels().length).toBe(0);
+ const deployKeys = enabledKeys.keys.map(mapDeployKey);
+ deployKeyMock.mockReturnValue({
+ data: {
+ project: { id: 1, deployKeys, __typename: 'Project' },
+ },
});
+
+ scopeMutationMock.mockRejectedValue(error);
+ await mountComponent();
+ await findNavigationTabs().vm.$emit('onChangeTab', scope);
+ await waitForPromises();
+
+ expect(captureException).toHaveBeenCalledWith(error, { tags: { deployKeyScope: scope } });
});
it('hasKeys returns true when there are keys', async () => {
+ const deployKeys = enabledKeys.keys.map(mapDeployKey);
+ deployKeyMock.mockReturnValue({
+ data: {
+ project: { id: 1, deployKeys, __typename: 'Project' },
+ },
+ });
await mountComponent();
expect(findNavigationTabs().exists()).toBe(true);
expect(findLoadingIcon().exists()).toBe(false);
});
- describe('enabling and disabling keys', () => {
- const key = data.public_keys[0];
- let getMethodMock;
- let putMethodMock;
+ describe('disabling keys', () => {
+ const key = mapDeployKey(enabledKeys.keys[0]);
+
+ beforeEach(() => {
+ deployKeyMock.mockReturnValue({
+ data: {
+ project: { id: 1, deployKeys: [key], __typename: 'Project' },
+ },
+ });
+ });
- const removeKey = async (keyEvent) => {
- eventHub.$emit(keyEvent, key, () => {});
+ it('re-fetches deploy keys when disabling a key', async () => {
+ confirmRemoveKeyMock.mockReturnValue(key);
+ await mountComponent();
+ expect(deployKeyMock).toHaveBeenCalledTimes(1);
await nextTick();
expect(findModal().props('visible')).toBe(true);
findModal().vm.$emit('remove');
- };
-
- beforeEach(() => {
- getMethodMock = jest.spyOn(axios, 'get');
- putMethodMock = jest.spyOn(axios, 'put');
+ await waitForPromises();
+ expect(deployKeyMock).toHaveBeenCalledTimes(2);
});
+ });
- afterEach(() => {
- getMethodMock.mockClear();
- putMethodMock.mockClear();
- });
+ describe('pagination', () => {
+ const key = mapDeployKey(enabledKeys.keys[0]);
+ let page;
+ let pageInfo;
+ let glPagination;
- it('re-fetches deploy keys when enabling a key', async () => {
- await mountComponent();
+ beforeEach(async () => {
+ page = 2;
+ pageInfo = {
+ total: 20,
+ perPage: 5,
+ nextPage: 3,
+ page,
+ previousPage: 1,
+ __typename: 'LocalPageInfo',
+ };
+ deployKeyMock.mockReturnValue({
+ data: {
+ project: { id: 1, deployKeys: [], __typename: 'Project' },
+ },
+ });
- eventHub.$emit('enable.key', key);
+ confirmRemoveKeyMock.mockReturnValue(key);
+ pageInfoMock.mockReturnValue(pageInfo);
+ currentPageMock.mockReturnValue(page);
+ await mountComponent();
+ glPagination = wrapper.findComponent(GlPagination);
+ });
- expect(putMethodMock).toHaveBeenCalledWith(`${TEST_ENDPOINT}/${key.id}/enable`);
- expect(getMethodMock).toHaveBeenCalled();
+ it('shows pagination with correct page info', () => {
+ expect(glPagination.exists()).toBe(true);
+ expect(glPagination.props()).toMatchObject({
+ totalItems: pageInfo.total,
+ perPage: pageInfo.perPage,
+ value: page,
+ });
});
- it('re-fetches deploy keys when disabling a key', async () => {
- await mountComponent();
+ it('moves back a page', async () => {
+ await glPagination.vm.$emit('previous');
- await removeKey('disable.key');
+ expect(pageMutationMock).toHaveBeenCalledWith(
+ expect.anything(),
+ { page: page - 1 },
+ expect.anything(),
+ expect.anything(),
+ );
+ });
+
+ it('moves forward a page', async () => {
+ await glPagination.vm.$emit('next');
- expect(putMethodMock).toHaveBeenCalledWith(`${TEST_ENDPOINT}/${key.id}/disable`);
- expect(getMethodMock).toHaveBeenCalled();
+ expect(pageMutationMock).toHaveBeenCalledWith(
+ expect.anything(),
+ { page: page + 1 },
+ expect.anything(),
+ expect.anything(),
+ );
});
- it('calls disableKey when removing a key', async () => {
- await mountComponent();
+ it('moves to specified page', async () => {
+ await glPagination.vm.$emit('input', 5);
+
+ expect(pageMutationMock).toHaveBeenCalledWith(
+ expect.anything(),
+ { page: 5 },
+ expect.anything(),
+ expect.anything(),
+ );
+ });
- await removeKey('remove.key');
+ it('moves a page back if there are no more keys on this page', async () => {
+ await findModal().vm.$emit('remove');
+ await waitForPromises();
- expect(putMethodMock).toHaveBeenCalledWith(`${TEST_ENDPOINT}/${key.id}/disable`);
- expect(getMethodMock).toHaveBeenCalled();
+ expect(pageMutationMock).toHaveBeenCalledWith(
+ expect.anything(),
+ { page: page - 1 },
+ expect.anything(),
+ expect.anything(),
+ );
});
});
});
diff --git a/spec/frontend/deploy_keys/components/key_spec.js b/spec/frontend/deploy_keys/components/key_spec.js
index e57da4df150..5410914da04 100644
--- a/spec/frontend/deploy_keys/components/key_spec.js
+++ b/spec/frontend/deploy_keys/components/key_spec.js
@@ -1,64 +1,85 @@
+import VueApollo from 'vue-apollo';
+import Vue, { nextTick } from 'vue';
import { mount } from '@vue/test-utils';
-import { nextTick } from 'vue';
-import data from 'test_fixtures/deploy_keys/keys.json';
+import enabledKeys from 'test_fixtures/deploy_keys/enabled_keys.json';
+import availablePublicKeys from 'test_fixtures/deploy_keys/available_public_keys.json';
+import { createAlert } from '~/alert';
+import { mapDeployKey } from '~/deploy_keys/graphql/resolvers';
+import createMockApollo from 'helpers/mock_apollo_helper';
+import waitForPromises from 'helpers/wait_for_promises';
import { createMockDirective, getBinding } from 'helpers/vue_mock_directive';
import key from '~/deploy_keys/components/key.vue';
-import DeployKeysStore from '~/deploy_keys/store';
+import ActionBtn from '~/deploy_keys/components/action_btn.vue';
import { getTimeago, localeDateFormat } from '~/lib/utils/datetime_utility';
+jest.mock('~/alert');
+
+Vue.use(VueApollo);
+
describe('Deploy keys key', () => {
let wrapper;
- let store;
+ let currentScopeMock;
const findTextAndTrim = (selector) => wrapper.find(selector).text().trim();
- const createComponent = (propsData) => {
+ const createComponent = async (propsData) => {
+ const resolvers = {
+ Query: {
+ currentScope: currentScopeMock,
+ },
+ };
+
+ const apolloProvider = createMockApollo([], resolvers);
wrapper = mount(key, {
propsData: {
- store,
endpoint: 'https://test.host/dummy/endpoint',
...propsData,
},
+ apolloProvider,
directives: {
GlTooltip: createMockDirective('gl-tooltip'),
},
});
+ await nextTick();
};
beforeEach(() => {
- store = new DeployKeysStore();
- store.keys = data;
+ currentScopeMock = jest.fn();
});
describe('enabled key', () => {
- const deployKey = data.enabled_keys[0];
+ const deployKey = mapDeployKey(enabledKeys.keys[0]);
- it('renders the keys title', () => {
- createComponent({ deployKey });
+ beforeEach(() => {
+ currentScopeMock.mockReturnValue('enabledKeys');
+ });
+
+ it('renders the keys title', async () => {
+ await createComponent({ deployKey });
expect(findTextAndTrim('.title')).toContain('My title');
});
- it('renders human friendly formatted created date', () => {
- createComponent({ deployKey });
+ it('renders human friendly formatted created date', async () => {
+ await createComponent({ deployKey });
expect(findTextAndTrim('.key-created-at')).toBe(
- `${getTimeago().format(deployKey.created_at)}`,
+ `${getTimeago().format(deployKey.createdAt)}`,
);
});
- it('renders human friendly expiration date', () => {
+ it('renders human friendly expiration date', async () => {
const expiresAt = new Date();
- createComponent({
- deployKey: { ...deployKey, expires_at: expiresAt },
+ await createComponent({
+ deployKey: { ...deployKey, expiresAt },
});
expect(findTextAndTrim('.key-expires-at')).toBe(`${getTimeago().format(expiresAt)}`);
});
- it('shows tooltip for expiration date', () => {
+ it('shows tooltip for expiration date', async () => {
const expiresAt = new Date();
- createComponent({
- deployKey: { ...deployKey, expires_at: expiresAt },
+ await createComponent({
+ deployKey: { ...deployKey, expiresAt },
});
const expiryComponent = wrapper.find('[data-testid="expires-at-tooltip"]');
@@ -68,55 +89,57 @@ describe('Deploy keys key', () => {
`${localeDateFormat.asDateTimeFull.format(expiresAt)}`,
);
});
- it('renders never when no expiration date', () => {
- createComponent({
- deployKey: { ...deployKey, expires_at: null },
+ it('renders never when no expiration date', async () => {
+ await createComponent({
+ deployKey: { ...deployKey, expiresAt: null },
});
expect(wrapper.find('[data-testid="expires-never"]').exists()).toBe(true);
});
- it('shows pencil button for editing', () => {
- createComponent({ deployKey });
+ it('shows pencil button for editing', async () => {
+ await createComponent({ deployKey });
expect(wrapper.find('.btn [data-testid="pencil-icon"]').exists()).toBe(true);
});
- it('shows disable button when the project is not deletable', () => {
- createComponent({ deployKey });
+ it('shows disable button when the project is not deletable', async () => {
+ await createComponent({ deployKey });
+ await waitForPromises();
expect(wrapper.find('.btn [data-testid="cancel-icon"]').exists()).toBe(true);
});
- it('shows remove button when the project is deletable', () => {
- createComponent({
- deployKey: { ...deployKey, destroyed_when_orphaned: true, almost_orphaned: true },
+ it('shows remove button when the project is deletable', async () => {
+ await createComponent({
+ deployKey: { ...deployKey, destroyedWhenOrphaned: true, almostOrphaned: true },
});
+ await waitForPromises();
expect(wrapper.find('.btn [data-testid="remove-icon"]').exists()).toBe(true);
});
});
describe('deploy key labels', () => {
- const deployKey = data.enabled_keys[0];
- const deployKeysProjects = [...deployKey.deploy_keys_projects];
- it('shows write access title when key has write access', () => {
- deployKeysProjects[0] = { ...deployKeysProjects[0], can_push: true };
- createComponent({ deployKey: { ...deployKey, deploy_keys_projects: deployKeysProjects } });
+ const deployKey = mapDeployKey(enabledKeys.keys[0]);
+ const deployKeysProjects = [...deployKey.deployKeysProjects];
+ it('shows write access title when key has write access', async () => {
+ deployKeysProjects[0] = { ...deployKeysProjects[0], canPush: true };
+ await createComponent({ deployKey: { ...deployKey, deployKeysProjects } });
expect(wrapper.find('.deploy-project-label').attributes('title')).toBe(
'Grant write permissions to this key',
);
});
- it('does not show write access title when key has write access', () => {
- deployKeysProjects[0] = { ...deployKeysProjects[0], can_push: false };
- createComponent({ deployKey: { ...deployKey, deploy_keys_projects: deployKeysProjects } });
+ it('does not show write access title when key has write access', async () => {
+ deployKeysProjects[0] = { ...deployKeysProjects[0], canPush: false };
+ await createComponent({ deployKey: { ...deployKey, deployKeysProjects } });
expect(wrapper.find('.deploy-project-label').attributes('title')).toBe('Read access only');
});
- it('shows expandable button if more than two projects', () => {
- createComponent({ deployKey });
+ it('shows expandable button if more than two projects', async () => {
+ await createComponent({ deployKey });
const labels = wrapper.findAll('.deploy-project-label');
expect(labels.length).toBe(2);
@@ -125,53 +148,68 @@ describe('Deploy keys key', () => {
});
it('expands all project labels after click', async () => {
- createComponent({ deployKey });
- const { length } = deployKey.deploy_keys_projects;
+ await createComponent({ deployKey });
+ const { length } = deployKey.deployKeysProjects;
wrapper.findAll('.deploy-project-label').at(1).trigger('click');
await nextTick();
const labels = wrapper.findAll('.deploy-project-label');
- expect(labels.length).toBe(length);
+ expect(labels).toHaveLength(length);
expect(labels.at(1).text()).not.toContain(`+${length} others`);
expect(labels.at(1).attributes('title')).not.toContain('Expand');
});
- it('shows two projects', () => {
- createComponent({
- deployKey: { ...deployKey, deploy_keys_projects: [...deployKeysProjects].slice(0, 2) },
+ it('shows two projects', async () => {
+ await createComponent({
+ deployKey: { ...deployKey, deployKeysProjects: [...deployKeysProjects].slice(0, 2) },
});
const labels = wrapper.findAll('.deploy-project-label');
expect(labels.length).toBe(2);
- expect(labels.at(1).text()).toContain(deployKey.deploy_keys_projects[1].project.full_name);
+ expect(labels.at(1).text()).toContain(deployKey.deployKeysProjects[1].project.fullName);
});
});
describe('public keys', () => {
- const deployKey = data.public_keys[0];
+ const deployKey = mapDeployKey(availablePublicKeys.keys[0]);
- it('renders deploy keys without any enabled projects', () => {
- createComponent({ deployKey: { ...deployKey, deploy_keys_projects: [] } });
+ it('renders deploy keys without any enabled projects', async () => {
+ await createComponent({ deployKey: { ...deployKey, deployKeysProjects: [] } });
expect(findTextAndTrim('.deploy-project-list')).toBe('None');
});
- it('shows enable button', () => {
- createComponent({ deployKey });
+ it('shows enable button', async () => {
+ await createComponent({ deployKey });
expect(findTextAndTrim('.btn')).toBe('Enable');
});
- it('shows pencil button for editing', () => {
- createComponent({ deployKey });
- expect(wrapper.find('.btn [data-testid="pencil-icon"]').exists()).toBe(true);
+ it('shows an error on enable failure', async () => {
+ await createComponent({ deployKey });
+
+ const error = new Error('oops!');
+ wrapper.findComponent(ActionBtn).vm.$emit('error', error);
+
+ await nextTick();
+
+ expect(createAlert).toHaveBeenCalledWith({
+ message: 'Error enabling deploy key',
+ captureError: true,
+ error,
+ });
});
- it('shows disable button when key is enabled', () => {
- store.keys.enabled_keys.push(deployKey);
+ it('shows pencil button for editing', async () => {
+ await createComponent({ deployKey });
+ expect(wrapper.find('.btn [data-testid="pencil-icon"]').exists()).toBe(true);
+ });
- createComponent({ deployKey });
+ it('shows disable button when key is enabled', async () => {
+ currentScopeMock.mockReturnValue('enabledKeys');
+ await createComponent({ deployKey });
+ await waitForPromises();
expect(wrapper.find('.btn [data-testid="cancel-icon"]').exists()).toBe(true);
});
diff --git a/spec/frontend/deploy_keys/components/keys_panel_spec.js b/spec/frontend/deploy_keys/components/keys_panel_spec.js
index e63b269fe23..6e653010d8f 100644
--- a/spec/frontend/deploy_keys/components/keys_panel_spec.js
+++ b/spec/frontend/deploy_keys/components/keys_panel_spec.js
@@ -1,7 +1,9 @@
import { mount } from '@vue/test-utils';
-import data from 'test_fixtures/deploy_keys/keys.json';
+import enabledKeys from 'test_fixtures/deploy_keys/enabled_keys.json';
import deployKeysPanel from '~/deploy_keys/components/keys_panel.vue';
-import DeployKeysStore from '~/deploy_keys/store';
+import { mapDeployKey } from '~/deploy_keys/graphql/resolvers';
+
+const keys = enabledKeys.keys.map(mapDeployKey);
describe('Deploy keys panel', () => {
let wrapper;
@@ -9,14 +11,11 @@ describe('Deploy keys panel', () => {
const findTableRowHeader = () => wrapper.find('.table-row-header');
const mountComponent = (props) => {
- const store = new DeployKeysStore();
- store.keys = data;
wrapper = mount(deployKeysPanel, {
propsData: {
title: 'test',
- keys: data.enabled_keys,
+ keys,
showHelpBox: true,
- store,
endpoint: 'https://test.host/dummy/endpoint',
...props,
},
@@ -25,7 +24,7 @@ describe('Deploy keys panel', () => {
it('renders list of keys', () => {
mountComponent();
- expect(wrapper.findAll('.deploy-key').length).toBe(wrapper.vm.keys.length);
+ expect(wrapper.findAll('.deploy-key').length).toBe(keys.length);
});
it('renders table header', () => {
diff --git a/spec/frontend/deploy_keys/graphql/resolvers_spec.js b/spec/frontend/deploy_keys/graphql/resolvers_spec.js
index 458232697cb..486cbc525d1 100644
--- a/spec/frontend/deploy_keys/graphql/resolvers_spec.js
+++ b/spec/frontend/deploy_keys/graphql/resolvers_spec.js
@@ -64,7 +64,7 @@ describe('~/deploy_keys/graphql/resolvers', () => {
const scope = 'enabledKeys';
const page = 2;
mock
- .onGet(ENDPOINTS.enabledKeysEndpoint, { params: { page } })
+ .onGet(ENDPOINTS.enabledKeysEndpoint, { params: { page, per_page: 5 } })
.reply(HTTP_STATUS_OK, { keys: [key] });
const keys = await mockResolvers.Project.deployKeys(null, { scope, page }, { client });
@@ -157,6 +157,11 @@ describe('~/deploy_keys/graphql/resolvers', () => {
data: { currentPage: 1 },
});
});
+
+ it('throws failure on bad scope', () => {
+ scope = 'bad scope';
+ expect(() => mockResolvers.Mutation.currentScope(null, { scope }, { client })).toThrow(scope);
+ });
});
describe('disableKey', () => {
diff --git a/spec/frontend/diffs/components/__snapshots__/tree_list_spec.js.snap b/spec/frontend/diffs/components/__snapshots__/tree_list_spec.js.snap
new file mode 100644
index 00000000000..605f6335b5c
--- /dev/null
+++ b/spec/frontend/diffs/components/__snapshots__/tree_list_spec.js.snap
@@ -0,0 +1,160 @@
+// Jest Snapshot v1, https://goo.gl/fbAQLP
+
+exports[`Diffs tree list component pinned file files in folders pins 1.rb file 1`] = `
+Array [
+ "📁folder/",
+ "──1.rb",
+ "📁folder",
+ "──📁sub-folder",
+ "────nested-1.rb",
+ "────nested-2.rb",
+ "────nested-3.rb",
+ "──2.rb",
+ "──3.rb",
+ "📁folder-single",
+ "──single.rb",
+ "root-first.rb",
+ "root-last.rb",
+]
+`;
+
+exports[`Diffs tree list component pinned file files in folders pins 2.rb file 1`] = `
+Array [
+ "📁folder/",
+ "──2.rb",
+ "📁folder",
+ "──📁sub-folder",
+ "────nested-1.rb",
+ "────nested-2.rb",
+ "────nested-3.rb",
+ "──1.rb",
+ "──3.rb",
+ "📁folder-single",
+ "──single.rb",
+ "root-first.rb",
+ "root-last.rb",
+]
+`;
+
+exports[`Diffs tree list component pinned file files in folders pins 3.rb file 1`] = `
+Array [
+ "📁folder/",
+ "──3.rb",
+ "📁folder",
+ "──📁sub-folder",
+ "────nested-1.rb",
+ "────nested-2.rb",
+ "────nested-3.rb",
+ "──1.rb",
+ "──2.rb",
+ "📁folder-single",
+ "──single.rb",
+ "root-first.rb",
+ "root-last.rb",
+]
+`;
+
+exports[`Diffs tree list component pinned file files in folders pins nested-1.rb file 1`] = `
+Array [
+ "📁folder/sub-folder/",
+ "──nested-1.rb",
+ "📁folder",
+ "──📁sub-folder",
+ "────nested-2.rb",
+ "────nested-3.rb",
+ "──1.rb",
+ "──2.rb",
+ "──3.rb",
+ "📁folder-single",
+ "──single.rb",
+ "root-first.rb",
+ "root-last.rb",
+]
+`;
+
+exports[`Diffs tree list component pinned file files in folders pins nested-2.rb file 1`] = `
+Array [
+ "📁folder/sub-folder/",
+ "──nested-2.rb",
+ "📁folder",
+ "──📁sub-folder",
+ "────nested-1.rb",
+ "────nested-3.rb",
+ "──1.rb",
+ "──2.rb",
+ "──3.rb",
+ "📁folder-single",
+ "──single.rb",
+ "root-first.rb",
+ "root-last.rb",
+]
+`;
+
+exports[`Diffs tree list component pinned file files in folders pins nested-3.rb file 1`] = `
+Array [
+ "📁folder/sub-folder/",
+ "──nested-3.rb",
+ "📁folder",
+ "──📁sub-folder",
+ "────nested-1.rb",
+ "────nested-2.rb",
+ "──1.rb",
+ "──2.rb",
+ "──3.rb",
+ "📁folder-single",
+ "──single.rb",
+ "root-first.rb",
+ "root-last.rb",
+]
+`;
+
+exports[`Diffs tree list component pinned file files in folders pins root-first.rb file 1`] = `
+Array [
+ "root-first.rb",
+ "📁folder",
+ "──📁sub-folder",
+ "────nested-1.rb",
+ "────nested-2.rb",
+ "────nested-3.rb",
+ "──1.rb",
+ "──2.rb",
+ "──3.rb",
+ "📁folder-single",
+ "──single.rb",
+ "root-last.rb",
+]
+`;
+
+exports[`Diffs tree list component pinned file files in folders pins root-last.rb file 1`] = `
+Array [
+ "root-last.rb",
+ "📁folder",
+ "──📁sub-folder",
+ "────nested-1.rb",
+ "────nested-2.rb",
+ "────nested-3.rb",
+ "──1.rb",
+ "──2.rb",
+ "──3.rb",
+ "📁folder-single",
+ "──single.rb",
+ "root-first.rb",
+]
+`;
+
+exports[`Diffs tree list component pinned file files in folders pins single.rb file 1`] = `
+Array [
+ "📁folder-single/",
+ "──single.rb",
+ "📁folder",
+ "──📁sub-folder",
+ "────nested-1.rb",
+ "────nested-2.rb",
+ "────nested-3.rb",
+ "──1.rb",
+ "──2.rb",
+ "──3.rb",
+ "root-first.rb",
+ "root-last.rb",
+]
+`;
diff --git a/spec/frontend/diffs/components/app_spec.js b/spec/frontend/diffs/components/app_spec.js
index 63d9a2471b6..813db12e83f 100644
--- a/spec/frontend/diffs/components/app_spec.js
+++ b/spec/frontend/diffs/components/app_spec.js
@@ -31,6 +31,9 @@ import * as urlUtils from '~/lib/utils/url_utility';
import * as commonUtils from '~/lib/utils/common_utils';
import { DEFAULT_DEBOUNCE_AND_THROTTLE_MS } from '~/lib/utils/constants';
import { stubPerformanceWebAPI } from 'helpers/performance';
+import { getDiffFileMock } from 'jest/diffs/mock_data/diff_file';
+import waitForPromises from 'helpers/wait_for_promises';
+import { diffMetadata } from 'jest/diffs/mock_data/diff_metadata';
import createDiffsStore from '../create_diffs_store';
import diffsMockData from '../mock_data/merge_request_diffs';
@@ -38,6 +41,8 @@ const mergeRequestDiff = { version_index: 1 };
const TEST_ENDPOINT = `${TEST_HOST}/diff/endpoint`;
const COMMIT_URL = `${TEST_HOST}/COMMIT/OLD`;
const UPDATED_COMMIT_URL = `${TEST_HOST}/COMMIT/NEW`;
+const ENDPOINT_BATCH_URL = `${TEST_HOST}/diff/endpointBatch`;
+const ENDPOINT_METADATA_URL = `${TEST_HOST}/diff/endpointMetadata`;
Vue.use(Vuex);
Vue.use(VueApollo);
@@ -77,8 +82,8 @@ describe('diffs/components/app', () => {
store.dispatch('diffs/setBaseConfig', {
endpoint: TEST_ENDPOINT,
- endpointMetadata: `${TEST_HOST}/diff/endpointMetadata`,
- endpointBatch: `${TEST_HOST}/diff/endpointBatch`,
+ endpointMetadata: ENDPOINT_METADATA_URL,
+ endpointBatch: ENDPOINT_BATCH_URL,
endpointDiffForPath: TEST_ENDPOINT,
projectPath: 'namespace/project',
dismissEndpoint: '',
@@ -126,7 +131,7 @@ describe('diffs/components/app', () => {
const fetchResolver = () => {
store.state.diffs.retrievingBatches = false;
store.state.notes.doneFetchingBatchDiscussions = true;
- store.state.notes.discussions = 'test';
+ store.state.notes.discussions = [];
return Promise.resolve({ real_size: 100 });
};
jest.spyOn(window, 'requestIdleCallback').mockImplementation((fn) => fn());
@@ -861,4 +866,32 @@ describe('diffs/components/app', () => {
expect(loadSpy).not.toHaveBeenCalledWith({ file: store.state.diffs.diffFiles[0] });
});
});
+
+ describe('pinned file', () => {
+ const pinnedFileUrl = 'http://localhost.test/pinned-file';
+ let pinnedFile;
+
+ beforeEach(() => {
+ pinnedFile = getDiffFileMock();
+ mock.onGet(pinnedFileUrl).reply(HTTP_STATUS_OK, { diff_files: [pinnedFile] });
+ mock
+ .onGet(new RegExp(ENDPOINT_BATCH_URL))
+ .reply(HTTP_STATUS_OK, { diff_files: [], pagination: {} });
+ mock.onGet(new RegExp(ENDPOINT_METADATA_URL)).reply(HTTP_STATUS_OK, diffMetadata);
+
+ createComponent({ shouldShow: true, pinnedFileUrl });
+ });
+
+ it('fetches and displays pinned file', async () => {
+ await waitForPromises();
+
+ expect(wrapper.findComponent({ name: 'DynamicScroller' }).props('items')[0].file_hash).toBe(
+ pinnedFile.file_hash,
+ );
+ });
+
+ it('shows a spinner during loading', () => {
+ expect(wrapper.findComponent(GlLoadingIcon).exists()).toBe(true);
+ });
+ });
});
diff --git a/spec/frontend/diffs/components/diff_file_header_spec.js b/spec/frontend/diffs/components/diff_file_header_spec.js
index d6539a5bffa..c02875963fd 100644
--- a/spec/frontend/diffs/components/diff_file_header_spec.js
+++ b/spec/frontend/diffs/components/diff_file_header_spec.js
@@ -1,8 +1,8 @@
-import { shallowMount } from '@vue/test-utils';
import Vue, { nextTick } from 'vue';
import { cloneDeep } from 'lodash';
// eslint-disable-next-line no-restricted-imports
import Vuex from 'vuex';
+import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
import { mockTracking, triggerEvent } from 'helpers/tracking_helper';
@@ -20,6 +20,7 @@ import { truncateSha } from '~/lib/utils/text_utility';
import { __, sprintf } from '~/locale';
import ClipboardButton from '~/vue_shared/components/clipboard_button.vue';
+import { TEST_HOST } from 'spec/test_constants';
import testAction from '../../__helpers__/vuex_action_helper';
import diffDiscussionsMockData from '../mock_data/diff_discussions';
@@ -73,6 +74,7 @@ describe('DiffFileHeader component', () => {
setFileCollapsedByUser: jest.fn(),
setFileForcedOpen: jest.fn(),
reviewFile: jest.fn(),
+ unpinFile: jest.fn(),
},
},
},
@@ -87,7 +89,7 @@ describe('DiffFileHeader component', () => {
});
const findHeader = () => wrapper.findComponent({ ref: 'header' });
- const findTitleLink = () => wrapper.findComponent({ ref: 'titleWrapper' });
+ const findTitleLink = () => wrapper.findByTestId('file-title');
const findExpandButton = () => wrapper.findComponent({ ref: 'expandDiffToFullFileButton' });
const findFileActions = () => wrapper.find('.file-actions');
const findModeChangedLine = () => wrapper.findComponent({ ref: 'fileMode' });
@@ -105,7 +107,7 @@ describe('DiffFileHeader component', () => {
mockStoreConfig = cloneDeep(defaultMockStoreConfig);
const store = new Vuex.Store({ ...mockStoreConfig, ...options.store });
- wrapper = shallowMount(DiffFileHeader, {
+ wrapper = shallowMountExtended(DiffFileHeader, {
propsData: {
diffFile,
canCurrentUserFork: false,
@@ -711,4 +713,23 @@ describe('DiffFileHeader component', () => {
expect(wrapper.find('[data-testid="comment-files-button"]').exists()).toEqual(true);
});
+
+ describe('pinned file', () => {
+ beforeEach(() => {
+ window.gon.features = { pinnedFile: true };
+ });
+
+ it('has pinned URL search param', () => {
+ createComponent();
+ const url = new URL(TEST_HOST + findTitleLink().attributes('href'));
+ expect(url.searchParams.get('pin')).toBe(diffFile.file_hash);
+ });
+
+ it('can unpin file', () => {
+ createComponent({ props: { addMergeRequestButtons: true, pinned: true } });
+ const unpinButton = wrapper.findComponentByTestId('unpin-button');
+ unpinButton.vm.$emit('click');
+ expect(mockStoreConfig.modules.diffs.actions.unpinFile).toHaveBeenCalled();
+ });
+ });
});
diff --git a/spec/frontend/diffs/components/diff_file_spec.js b/spec/frontend/diffs/components/diff_file_spec.js
index a9fbf4632ac..444f4102e26 100644
--- a/spec/frontend/diffs/components/diff_file_spec.js
+++ b/spec/frontend/diffs/components/diff_file_spec.js
@@ -29,6 +29,7 @@ import createNotesStore from '~/notes/stores/modules';
import diffsModule from '~/diffs/store/modules';
import { SOMETHING_WENT_WRONG, SAVING_THE_COMMENT_FAILED } from '~/diffs/i18n';
import diffLineNoteFormMixin from '~/notes/mixins/diff_line_note_form';
+import { SET_PINNED_FILE_HASH } from '~/diffs/store/mutation_types';
import { getDiffFileMock } from '../mock_data/diff_file';
import diffFileMockDataUnreadable from '../mock_data/diff_file_unreadable';
import diffsMockData from '../mock_data/merge_request_diffs';
@@ -90,49 +91,6 @@ function markFileToBeRendered(store, index = 0) {
});
}
-function createComponent({ file, first = false, last = false, options = {}, props = {} }) {
- const diffs = diffsModule();
- diffs.actions = {
- ...diffs.actions,
- prefetchFileNeighbors: prefetchFileNeighborsMock,
- saveDiffDiscussion: saveDiffDiscussionMock,
- };
-
- diffs.getters = {
- ...diffs.getters,
- diffCompareDropdownTargetVersions: () => [],
- diffCompareDropdownSourceVersions: () => [],
- };
-
- const store = new Vuex.Store({
- ...createNotesStore(),
- modules: { diffs },
- });
-
- store.state.diffs = {
- mergeRequestDiff: diffsMockData[0],
- diffFiles: [file],
- };
-
- const wrapper = shallowMountExtended(DiffFileComponent, {
- store,
- propsData: {
- file,
- canCurrentUserFork: false,
- viewDiffsFileByFile: false,
- isFirstFile: first,
- isLastFile: last,
- ...props,
- },
- ...options,
- });
-
- return {
- wrapper,
- store,
- };
-}
-
const findDiffHeader = (wrapper) => wrapper.findComponent(DiffFileHeaderComponent);
const findDiffContentArea = (wrapper) => wrapper.findByTestId('content-area');
const findLoader = (wrapper) => wrapper.findByTestId('loader-icon');
@@ -159,15 +117,58 @@ const triggerSaveDraftNote = (wrapper, note, parent, error) =>
findNoteForm(wrapper).vm.$emit('handleFormUpdateAddToReview', note, false, parent, error);
describe('DiffFile', () => {
- let readableFile;
let wrapper;
let store;
let axiosMock;
+ function createComponent({
+ file = getReadableFile(),
+ first = false,
+ last = false,
+ options = {},
+ props = {},
+ } = {}) {
+ const diffs = diffsModule();
+ diffs.actions = {
+ ...diffs.actions,
+ prefetchFileNeighbors: prefetchFileNeighborsMock,
+ saveDiffDiscussion: saveDiffDiscussionMock,
+ };
+
+ diffs.getters = {
+ ...diffs.getters,
+ diffCompareDropdownTargetVersions: () => [],
+ diffCompareDropdownSourceVersions: () => [],
+ };
+
+ store = new Vuex.Store({
+ ...createNotesStore(),
+ modules: { diffs },
+ });
+
+ store.state.diffs = {
+ ...store.state.diffs,
+ mergeRequestDiff: diffsMockData[0],
+ diffFiles: [file],
+ };
+
+ wrapper = shallowMountExtended(DiffFileComponent, {
+ store,
+ propsData: {
+ file,
+ canCurrentUserFork: false,
+ viewDiffsFileByFile: false,
+ isFirstFile: first,
+ isLastFile: last,
+ ...props,
+ },
+ ...options,
+ });
+ }
+
beforeEach(() => {
- readableFile = getReadableFile();
axiosMock = new MockAdapter(axios);
- ({ wrapper, store } = createComponent({ file: readableFile }));
+ createComponent();
});
afterEach(() => {
@@ -186,7 +187,6 @@ describe('DiffFile', () => {
`('$description', ({ fileByFile }) => {
createComponent({
props: { viewDiffsFileByFile: fileByFile },
- file: readableFile,
});
if (fileByFile) {
@@ -217,11 +217,11 @@ describe('DiffFile', () => {
forceHasDiff({ store, ...file });
}
- ({ wrapper, store } = createComponent({
+ createComponent({
file: store.state.diffs.diffFiles[0],
first,
last,
- }));
+ });
await nextTick();
@@ -233,14 +233,13 @@ describe('DiffFile', () => {
);
it('emits the "first file shown" and "files end" events when in File-by-File mode', async () => {
- ({ wrapper, store } = createComponent({
- file: getReadableFile(),
+ createComponent({
first: false,
last: false,
props: {
viewDiffsFileByFile: true,
},
- }));
+ });
await nextTick();
@@ -253,11 +252,11 @@ describe('DiffFile', () => {
describe('after loading the diff', () => {
it('indicates that it loaded the file', async () => {
forceHasDiff({ store, inlineLines: [], parallelLines: [], readableText: true });
- ({ wrapper, store } = createComponent({
+ createComponent({
file: store.state.diffs.diffFiles[0],
first: true,
last: true,
- }));
+ });
jest.spyOn(wrapper.vm, 'loadCollapsedDiff').mockResolvedValue(getReadableFile());
jest.spyOn(window, 'requestIdleCallback').mockImplementation((fn) => fn());
@@ -314,11 +313,11 @@ describe('DiffFile', () => {
`('should be $bool when { userIsLoggedIn: $loggedIn }', ({ loggedIn, bool }) => {
setLoggedIn(loggedIn);
- ({ wrapper } = createComponent({
+ createComponent({
props: {
file: store.state.diffs.diffFiles[0],
},
- }));
+ });
expect(wrapper.vm.showLocalFileReviews).toBe(bool);
});
@@ -556,7 +555,7 @@ describe('DiffFile', () => {
describe('general (other) collapsed', () => {
it('should be expandable for unreadable files', async () => {
- ({ wrapper, store } = createComponent({ file: getUnreadableFile() }));
+ createComponent({ file: getUnreadableFile() });
makeFileAutomaticallyCollapsed(store);
await nextTick();
@@ -622,7 +621,7 @@ describe('DiffFile', () => {
renderIt: true,
};
- ({ wrapper, store } = createComponent({ file }));
+ createComponent({ file });
expect(wrapper.findByTestId('conflictsAlert').exists()).toBe(false);
});
@@ -634,7 +633,7 @@ describe('DiffFile', () => {
renderIt: true,
};
- ({ wrapper, store } = createComponent({ file }));
+ createComponent({ file });
expect(wrapper.findByTestId('conflictsAlert').exists()).toBe(true);
});
@@ -656,9 +655,9 @@ describe('DiffFile', () => {
...extraProps,
};
- ({ wrapper, store } = createComponent({
+ createComponent({
file,
- }));
+ });
expect(wrapper.findByTestId('file-discussions').exists()).toEqual(exists);
},
@@ -676,9 +675,9 @@ describe('DiffFile', () => {
hasCommentForm,
};
- ({ wrapper, store } = createComponent({
+ createComponent({
file,
- }));
+ });
expect(findNoteForm(wrapper).exists()).toEqual(exists);
},
@@ -694,9 +693,9 @@ describe('DiffFile', () => {
discussions,
};
- ({ wrapper, store } = createComponent({
+ createComponent({
file,
- }));
+ });
expect(wrapper.findByTestId('diff-file-discussions').exists()).toEqual(exists);
});
@@ -712,10 +711,10 @@ describe('DiffFile', () => {
const errorCallback = jest.fn();
beforeEach(() => {
- ({ wrapper, store } = createComponent({
+ createComponent({
file,
options: { provide: { glFeatures: { commentOnFiles: true } } },
- }));
+ });
});
it('calls saveDiffDiscussionMock', () => {
@@ -771,10 +770,10 @@ describe('DiffFile', () => {
const errorCallback = jest.fn();
beforeEach(async () => {
- ({ wrapper, store } = createComponent({
+ createComponent({
file,
options: { provide: { glFeatures: { commentOnFiles: true } } },
- }));
+ });
triggerSaveDraftNote(wrapper, note, parentElement, errorCallback);
@@ -791,4 +790,13 @@ describe('DiffFile', () => {
});
});
});
+
+ describe('pinned file', () => {
+ it('passes down pinned prop', async () => {
+ createComponent();
+ store.commit(`diffs/${SET_PINNED_FILE_HASH}`, getReadableFile().file_hash);
+ await nextTick();
+ expect(wrapper.findComponent(DiffFileHeaderComponent).props('pinned')).toBe(true);
+ });
+ });
});
diff --git a/spec/frontend/diffs/components/diff_row_utils_spec.js b/spec/frontend/diffs/components/diff_row_utils_spec.js
index 6e9eb433924..bd9592e4f5e 100644
--- a/spec/frontend/diffs/components/diff_row_utils_spec.js
+++ b/spec/frontend/diffs/components/diff_row_utils_spec.js
@@ -6,6 +6,7 @@ import {
NEW_NO_NEW_LINE_TYPE,
EMPTY_CELL_TYPE,
} from '~/diffs/constants';
+import { getDiffFileMock } from 'jest/diffs/mock_data/diff_file';
const LINE_CODE = 'abc123';
@@ -108,15 +109,47 @@ describe('diff_row_utils', () => {
describe('lineHref', () => {
it(`should return #${LINE_CODE}`, () => {
- expect(utils.lineHref({ line_code: LINE_CODE })).toEqual(`#${LINE_CODE}`);
+ expect(utils.lineHref({ line_code: LINE_CODE }, {})).toEqual(`#${LINE_CODE}`);
});
it(`should return '#' if line is undefined`, () => {
- expect(utils.lineHref()).toEqual('#');
+ expect(utils.lineHref()).toEqual('');
});
it(`should return '#' if line_code is undefined`, () => {
- expect(utils.lineHref({})).toEqual('#');
+ expect(utils.lineHref({}, {})).toEqual('');
+ });
+
+ describe('pinned file', () => {
+ beforeEach(() => {
+ window.gon.features = { pinnedFile: true };
+ });
+
+ afterEach(() => {
+ delete window.gon.features;
+ });
+
+ it(`should return pinned file URL`, () => {
+ const diffFile = getDiffFileMock();
+ expect(utils.lineHref({ line_code: LINE_CODE }, { diffFile })).toEqual(
+ `?pin=${diffFile.file_hash}#${LINE_CODE}`,
+ );
+ });
+ });
+ });
+
+ describe('pinnedFileHref', () => {
+ beforeEach(() => {
+ window.gon.features = { pinnedFile: true };
+ });
+
+ afterEach(() => {
+ delete window.gon.features;
+ });
+
+ it(`should return pinned file URL`, () => {
+ const diffFile = getDiffFileMock();
+ expect(utils.pinnedFileHref(diffFile)).toEqual(`?pin=${diffFile.file_hash}`);
});
});
diff --git a/spec/frontend/diffs/components/tree_list_spec.js b/spec/frontend/diffs/components/tree_list_spec.js
index a54cf9b8bff..230839f0ecf 100644
--- a/spec/frontend/diffs/components/tree_list_spec.js
+++ b/spec/frontend/diffs/components/tree_list_spec.js
@@ -7,6 +7,9 @@ import batchComments from '~/batch_comments/stores/modules/batch_comments';
import DiffFileRow from '~/diffs/components//diff_file_row.vue';
import { stubComponent } from 'helpers/stub_component';
import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
+import { SET_PINNED_FILE_HASH, SET_TREE_DATA, SET_DIFF_FILES } from '~/diffs/store/mutation_types';
+import { generateTreeList } from '~/diffs/utils/tree_worker_utils';
+import { sortTree } from '~/ide/stores/utils';
describe('Diffs tree list component', () => {
let wrapper;
@@ -58,6 +61,14 @@ describe('Diffs tree list component', () => {
const setupFilesInState = () => {
const treeEntries = {
+ app: {
+ key: 'app',
+ path: 'app',
+ name: 'app',
+ type: 'tree',
+ tree: [],
+ opened: true,
+ },
'index.js': {
addedLines: 0,
changed: true,
@@ -71,6 +82,8 @@ describe('Diffs tree list component', () => {
type: 'blob',
parentPath: 'app',
tree: [],
+ file_path: 'app/index.js',
+ file_hash: 'app-index',
},
'test.rb': {
addedLines: 0,
@@ -85,20 +98,39 @@ describe('Diffs tree list component', () => {
type: 'blob',
parentPath: 'app',
tree: [],
+ file_path: 'app/test.rb',
+ file_hash: 'app-test',
},
- app: {
- key: 'app',
- path: 'app',
- name: 'app',
- type: 'tree',
+ LICENSE: {
+ addedLines: 0,
+ changed: true,
+ deleted: false,
+ fileHash: 'LICENSE',
+ key: 'LICENSE',
+ name: 'LICENSE',
+ path: 'LICENSE',
+ removedLines: 0,
+ tempFile: true,
+ type: 'blob',
+ parentPath: '/',
tree: [],
+ file_path: 'LICENSE',
+ file_hash: 'LICENSE',
},
};
Object.assign(store.state.diffs, {
treeEntries,
- tree: [treeEntries['index.js'], treeEntries.app],
+ tree: [
+ treeEntries.LICENSE,
+ {
+ ...treeEntries.app,
+ tree: [treeEntries['index.js'], treeEntries['test.rb']],
+ },
+ ],
});
+
+ return treeEntries;
};
describe('default', () => {
@@ -149,7 +181,7 @@ describe('Diffs tree list component', () => {
});
it('renders tree', () => {
- expect(getScroller().props('items')).toHaveLength(2);
+ expect(getScroller().props('items')).toHaveLength(4);
});
it('hides file stats', () => {
@@ -169,7 +201,7 @@ describe('Diffs tree list component', () => {
store.state.diffs.renderTreeList = false;
await nextTick();
- expect(getScroller().props('items')).toHaveLength(3);
+ expect(getScroller().props('items')).toHaveLength(5);
});
});
@@ -188,4 +220,59 @@ describe('Diffs tree list component', () => {
expect(getFileRow().props('viewedFiles')).toBe(viewedDiffFileIds);
});
});
+
+ describe('pinned file', () => {
+ const filePaths = [
+ ['nested-1.rb', 'folder/sub-folder/'],
+ ['nested-2.rb', 'folder/sub-folder/'],
+ ['nested-3.rb', 'folder/sub-folder/'],
+ ['1.rb', 'folder/'],
+ ['2.rb', 'folder/'],
+ ['3.rb', 'folder/'],
+ ['single.rb', 'folder-single/'],
+ ['root-first.rb'],
+ ['root-last.rb'],
+ ];
+
+ const pinFile = (fileHash) => {
+ store.commit(`diffs/${SET_PINNED_FILE_HASH}`, fileHash);
+ };
+
+ const setupFiles = (diffFiles) => {
+ const { treeEntries, tree } = generateTreeList(diffFiles);
+ store.commit(`diffs/${SET_DIFF_FILES}`, diffFiles);
+ store.commit(`diffs/${SET_TREE_DATA}`, {
+ treeEntries,
+ tree: sortTree(tree),
+ });
+ };
+
+ const createFile = (name, path = '') => ({
+ file_hash: name,
+ path: `${path}${name}`,
+ new_path: `${path}${name}`,
+ file_path: `${path}${name}`,
+ });
+
+ beforeEach(() => {
+ createComponent();
+ setupFiles(filePaths.map(([name, path]) => createFile(name, path)));
+ });
+
+ describe('files in folders', () => {
+ it.each(filePaths.map((path) => path[0]))('pins %s file', async (pinnedFile) => {
+ pinFile(pinnedFile);
+ await nextTick();
+ const items = getScroller().props('items');
+ expect(
+ items.map(
+ (item) =>
+ `${'─'.repeat(item.level * 2)}${item.type === 'tree' ? '📁' : ''}${
+ item.name || item.path
+ }`,
+ ),
+ ).toMatchSnapshot();
+ });
+ });
+ });
});
diff --git a/spec/frontend/diffs/store/actions_spec.js b/spec/frontend/diffs/store/actions_spec.js
index be3b30e8e7a..ceaaa32a0e8 100644
--- a/spec/frontend/diffs/store/actions_spec.js
+++ b/spec/frontend/diffs/store/actions_spec.js
@@ -11,7 +11,7 @@ import {
PARALLEL_DIFF_VIEW_TYPE,
EVT_MR_PREPARED,
} from '~/diffs/constants';
-import { LOAD_SINGLE_DIFF_FAILED, BUILDING_YOUR_MR, SOMETHING_WENT_WRONG } from '~/diffs/i18n';
+import { BUILDING_YOUR_MR, SOMETHING_WENT_WRONG } from '~/diffs/i18n';
import * as diffActions from '~/diffs/store/actions';
import * as types from '~/diffs/store/mutation_types';
import * as utils from '~/diffs/store/utils';
@@ -28,6 +28,8 @@ import {
import { mergeUrlParams } from '~/lib/utils/url_utility';
import eventHub from '~/notes/event_hub';
import diffsEventHub from '~/diffs/event_hub';
+import { handleLocationHash, historyPushState, scrollToElement } from '~/lib/utils/common_utils';
+import setWindowLocation from 'helpers/set_window_location_helper';
import { diffMetadata } from '../mock_data/diff_metadata';
jest.mock('~/alert');
@@ -37,6 +39,8 @@ jest.mock('~/lib/utils/secret_detection', () => ({
containsSensitiveToken: jest.requireActual('~/lib/utils/secret_detection').containsSensitiveToken,
}));
+const endpointDiffForPath = '/diffs/set/endpoint/path';
+
describe('DiffsStoreActions', () => {
let mock;
@@ -78,7 +82,6 @@ describe('DiffsStoreActions', () => {
const endpoint = '/diffs/set/endpoint';
const endpointMetadata = '/diffs/set/endpoint/metadata';
const endpointBatch = '/diffs/set/endpoint/batch';
- const endpointDiffForPath = '/diffs/set/endpoint/path';
const endpointCoverage = '/diffs/set/coverage_reports';
const projectPath = '/root/project';
const dismissEndpoint = '/-/user_callouts';
@@ -180,8 +183,8 @@ describe('DiffsStoreActions', () => {
new_path: 'new/123',
w: '1',
view: 'inline',
+ diff_head: true,
};
- const endpointDiffForPath = '/diffs/set/endpoint/path';
const diffForPath = mergeUrlParams(defaultParams, endpointDiffForPath);
const treeEntry = {
fileHash: 'e334a2a10f036c00151a04cea7938a5d4213a818',
@@ -256,7 +259,9 @@ describe('DiffsStoreActions', () => {
// wait for the mocked network request to return and start processing the .then
await waitForPromises();
- expect(mock.history.get[0].url).toEqual(finalPath);
+ expect(mock.history.get[0].url).toContain(
+ 'old_path=old%2F123&new_path=new%2F123&w=1&view=inline&commit_id=123',
+ );
});
describe('version parameters', () => {
@@ -285,6 +290,7 @@ describe('DiffsStoreActions', () => {
endpointDiffForPath,
);
state.mergeRequestDiff = { version_path: versionPath };
+ state.endpointBatch = versionPath;
mock.onGet(finalPath).reply(HTTP_STATUS_OK, fileResult);
diffActions.prefetchSingleFile({ state, getters, commit }, treeEntry);
@@ -349,8 +355,8 @@ describe('DiffsStoreActions', () => {
new_path: 'new/123',
w: '1',
view: 'inline',
+ diff_head: true,
};
- const endpointDiffForPath = '/diffs/set/endpoint/path';
const diffForPath = mergeUrlParams(defaultParams, endpointDiffForPath);
const treeEntry = {
fileHash: 'e334a2a10f036c00151a04cea7938a5d4213a818',
@@ -445,7 +451,9 @@ describe('DiffsStoreActions', () => {
// wait for the mocked network request to return and start processing the .then
await waitForPromises();
- expect(mock.history.get[0].url).toEqual(finalPath);
+ expect(mock.history.get[0].url).toContain(
+ 'old_path=old%2F123&new_path=new%2F123&w=1&view=inline&commit_id=123',
+ );
});
describe('version parameters', () => {
@@ -473,7 +481,7 @@ describe('DiffsStoreActions', () => {
{ ...defaultParams, diff_id, start_sha },
endpointDiffForPath,
);
- state.mergeRequestDiff = { version_path: versionPath };
+ state.endpointBatch = versionPath;
mock.onGet(finalPath).reply(HTTP_STATUS_OK, fileResult);
diffActions.fetchFileByFile({ state, getters, commit });
@@ -490,8 +498,8 @@ describe('DiffsStoreActions', () => {
describe('fetchDiffFilesBatch', () => {
it('should fetch batch diff files', () => {
const endpointBatch = '/fetch/diffs_batch';
- const res1 = { diff_files: [{ file_hash: 'test' }], pagination: { total_pages: 7 } };
- const res2 = { diff_files: [{ file_hash: 'test2' }], pagination: { total_pages: 7 } };
+ const res1 = { diff_files: [{ file_hash: 'test' }], pagination: { total_pages: 2 } };
+ const res2 = { diff_files: [{ file_hash: 'test2' }], pagination: { total_pages: 2 } };
mock
.onGet(
mergeUrlParams(
@@ -520,7 +528,7 @@ describe('DiffsStoreActions', () => {
return testAction(
diffActions.fetchDiffFilesBatch,
- {},
+ undefined,
{ endpointBatch, diffViewType: 'inline', diffFiles: [], perPage: 5 },
[
{ type: types.SET_BATCH_LOADING_STATE, payload: 'loading' },
@@ -532,7 +540,6 @@ describe('DiffsStoreActions', () => {
{ type: types.SET_BATCH_LOADING_STATE, payload: 'loaded' },
{ type: types.SET_CURRENT_DIFF_FILE, payload: 'test2' },
{ type: types.SET_RETRIEVING_BATCHES, payload: false },
- { type: types.SET_BATCH_LOADING_STATE, payload: 'error' },
],
[],
);
@@ -690,7 +697,7 @@ describe('DiffsStoreActions', () => {
describe('setHighlightedRow', () => {
it('should mark currently selected diff and set lineHash and fileHash of highlightedRow', () => {
- return testAction(diffActions.setHighlightedRow, 'ABC_123', {}, [
+ return testAction(diffActions.setHighlightedRow, { lineCode: 'ABC_123' }, {}, [
{ type: types.SET_HIGHLIGHTED_ROW, payload: 'ABC_123' },
{ type: types.SET_CURRENT_DIFF_FILE, payload: 'ABC' },
]);
@@ -1310,14 +1317,17 @@ describe('DiffsStoreActions', () => {
diffActions.goToFile({ state, dispatch, getters, commit }, file);
expect(commit).toHaveBeenCalledWith(types.SET_CURRENT_DIFF_FILE, fileHash);
- expect(dispatch).toHaveBeenCalledTimes(0);
+ expect(dispatch).not.toHaveBeenCalledWith('fetchFileByFile');
});
describe('when the tree entry has not been loaded', () => {
it('updates location hash', () => {
diffActions.goToFile({ state, commit, getters, dispatch }, file);
- expect(document.location.hash).toBe('#test');
+ expect(historyPushState).toHaveBeenCalledWith(new URL(`${TEST_HOST}#test`), {
+ skipScrolling: true,
+ });
+ expect(scrollToElement).toHaveBeenCalledWith('.diff-files-holder', { duration: 0 });
});
it('loads the file and then scrolls to it', async () => {
@@ -1333,21 +1343,12 @@ describe('DiffsStoreActions', () => {
expect(commonUtils.scrollToElement).toHaveBeenCalledWith('.diff-files-holder', {
duration: 0,
});
- expect(dispatch).toHaveBeenCalledTimes(1);
+ expect(dispatch).toHaveBeenCalledWith('fetchFileByFile');
});
- it('shows an alert when there was an error fetching the file', async () => {
- dispatch = jest.fn().mockRejectedValue();
-
+ it('unpins the file', () => {
diffActions.goToFile({ state, commit, getters, dispatch }, file);
-
- // Wait for the fetchFileByFile dispatch to return, to trigger the catch
- await waitForPromises();
-
- expect(createAlert).toHaveBeenCalledTimes(1);
- expect(createAlert).toHaveBeenCalledWith({
- message: expect.stringMatching(LOAD_SINGLE_DIFF_FAILED),
- });
+ expect(dispatch).toHaveBeenCalledWith('unpinFile');
});
});
});
@@ -1969,7 +1970,7 @@ describe('DiffsStoreActions', () => {
0,
{ flatBlobsList: [{ fileHash: '123' }] },
[{ type: types.SET_CURRENT_DIFF_FILE, payload: '123' }],
- [],
+ [{ type: 'unpinFile' }],
);
});
@@ -1979,7 +1980,7 @@ describe('DiffsStoreActions', () => {
0,
{ viewDiffsFileByFile: true, flatBlobsList: [{ fileHash: '123' }] },
[{ type: types.SET_CURRENT_DIFF_FILE, payload: '123' }],
- [{ type: 'fetchFileByFile' }],
+ [{ type: 'unpinFile' }, { type: 'fetchFileByFile' }],
);
});
});
@@ -2120,4 +2121,84 @@ describe('DiffsStoreActions', () => {
);
});
});
+
+ describe('fetchPinnedFile', () => {
+ it('fetches pinned file', async () => {
+ const pinnedFileHref = `${TEST_HOST}/pinned-file`;
+ const pinnedFile = getDiffFileMock();
+ const diffFiles = [pinnedFile];
+ const hubSpy = jest.spyOn(diffsEventHub, '$emit');
+ mock.onGet(new RegExp(pinnedFileHref)).reply(HTTP_STATUS_OK, { diff_files: diffFiles });
+
+ await testAction(
+ diffActions.fetchPinnedFile,
+ pinnedFileHref,
+ {},
+ [
+ { type: types.SET_BATCH_LOADING_STATE, payload: 'loading' },
+ { type: types.SET_RETRIEVING_BATCHES, payload: true },
+ {
+ type: types.SET_DIFF_DATA_BATCH,
+ payload: { diff_files: diffFiles, updatePosition: false },
+ },
+ { type: types.SET_PINNED_FILE_HASH, payload: pinnedFile.file_hash },
+ { type: types.SET_CURRENT_DIFF_FILE, payload: pinnedFile.file_hash },
+ { type: types.SET_BATCH_LOADING_STATE, payload: 'loaded' },
+ { type: types.SET_RETRIEVING_BATCHES, payload: false },
+ ],
+ [],
+ );
+
+ jest.runAllTimers();
+ expect(hubSpy).toHaveBeenCalledWith('diffFilesModified');
+ expect(handleLocationHash).toHaveBeenCalled();
+ });
+
+ it('handles load error', async () => {
+ const pinnedFileHref = `${TEST_HOST}/pinned-file`;
+ const hubSpy = jest.spyOn(diffsEventHub, '$emit');
+ mock.onGet(new RegExp(pinnedFileHref)).reply(HTTP_STATUS_INTERNAL_SERVER_ERROR);
+
+ try {
+ await testAction(
+ diffActions.fetchPinnedFile,
+ pinnedFileHref,
+ {},
+ [
+ { type: types.SET_BATCH_LOADING_STATE, payload: 'loading' },
+ { type: types.SET_RETRIEVING_BATCHES, payload: true },
+ { type: types.SET_BATCH_LOADING_STATE, payload: 'error' },
+ { type: types.SET_RETRIEVING_BATCHES, payload: false },
+ ],
+ [],
+ );
+ } catch (error) {
+ expect(error.response.status).toBe(HTTP_STATUS_INTERNAL_SERVER_ERROR);
+ }
+
+ jest.runAllTimers();
+ expect(hubSpy).not.toHaveBeenCalledWith('diffFilesModified');
+ expect(handleLocationHash).not.toHaveBeenCalled();
+ });
+ });
+
+ describe('unpinFile', () => {
+ it('unpins pinned file', () => {
+ const pinnedFile = getDiffFileMock();
+ setWindowLocation(`${TEST_HOST}/?pin=${pinnedFile.file_hash}#${pinnedFile.file_hash}_10_10`);
+ testAction(
+ diffActions.unpinFile,
+ undefined,
+ { pinnedFile },
+ [{ type: types.SET_PINNED_FILE_HASH, payload: null }],
+ [],
+ );
+ expect(window.location.hash).toBe('');
+ expect(window.location.search).toBe('');
+ });
+
+ it('does nothing when no pinned file present', () => {
+ testAction(diffActions.unpinFile, undefined, {}, [], []);
+ });
+ });
});
diff --git a/spec/frontend/diffs/store/getters_spec.js b/spec/frontend/diffs/store/getters_spec.js
index 8097f0976f6..cb0f40534fe 100644
--- a/spec/frontend/diffs/store/getters_spec.js
+++ b/spec/frontend/diffs/store/getters_spec.js
@@ -1,6 +1,7 @@
import { PARALLEL_DIFF_VIEW_TYPE, INLINE_DIFF_VIEW_TYPE } from '~/diffs/constants';
import * as getters from '~/diffs/store/getters';
import state from '~/diffs/store/modules/diff_state';
+import { getDiffFileMock } from 'jest/diffs/mock_data/diff_file';
import discussion from '../mock_data/diff_discussions';
describe('Diffs Module Getters', () => {
@@ -495,4 +496,35 @@ describe('Diffs Module Getters', () => {
},
);
});
+
+ describe('diffFiles', () => {
+ it('proxies diffFiles state', () => {
+ const diffFiles = [getDiffFileMock()];
+ expect(getters.diffFiles({ diffFiles }, {})).toBe(diffFiles);
+ });
+
+ it('pins the file', () => {
+ const pinnedFile = getDiffFileMock();
+ const regularFile = getDiffFileMock();
+ const diffFiles = [regularFile, pinnedFile];
+ expect(getters.diffFiles({ diffFiles }, { pinnedFile })).toStrictEqual([
+ pinnedFile,
+ regularFile,
+ ]);
+ });
+ });
+
+ describe('pinnedFile', () => {
+ it('returns pinnedFile', () => {
+ const pinnedFile = getDiffFileMock();
+ const diffFiles = [pinnedFile];
+ expect(getters.pinnedFile({ diffFiles, pinnedFileHash: pinnedFile.file_hash }, {})).toBe(
+ pinnedFile,
+ );
+ });
+
+ it('returns null if no pinned file is set', () => {
+ expect(getters.pinnedFile({}, {})).toBe(null);
+ });
+ });
});
diff --git a/spec/frontend/diffs/store/mutations_spec.js b/spec/frontend/diffs/store/mutations_spec.js
index a5be41aa69f..8d52cd39542 100644
--- a/spec/frontend/diffs/store/mutations_spec.js
+++ b/spec/frontend/diffs/store/mutations_spec.js
@@ -92,7 +92,7 @@ describe('DiffsStoreMutations', () => {
});
});
- describe('SET_DIFF_DATA_BATCH_DATA', () => {
+ describe('SET_DIFF_DATA_BATCH', () => {
it('should set diff data batch type properly', () => {
const mockFile = getDiffFileMock();
const state = {
@@ -108,6 +108,39 @@ describe('DiffsStoreMutations', () => {
expect(state.diffFiles[0].collapsed).toEqual(false);
expect(state.treeEntries[mockFile.file_path].diffLoaded).toBe(true);
});
+
+ it('should update diff position by default', () => {
+ const mockFile = getDiffFileMock();
+ const state = {
+ diffFiles: [mockFile, { ...mockFile, file_hash: 'foo', file_path: 'foo' }],
+ treeEntries: { [mockFile.file_path]: { fileHash: mockFile.file_hash } },
+ };
+ const diffMock = {
+ diff_files: [mockFile],
+ };
+
+ mutations[types.SET_DIFF_DATA_BATCH](state, diffMock);
+
+ expect(state.diffFiles[1].file_hash).toBe(mockFile.file_hash);
+ expect(state.treeEntries[mockFile.file_path].diffLoaded).toBe(true);
+ });
+
+ it('should not update diff position', () => {
+ const mockFile = getDiffFileMock();
+ const state = {
+ diffFiles: [mockFile, { ...mockFile, file_hash: 'foo', file_path: 'foo' }],
+ treeEntries: { [mockFile.file_path]: { fileHash: mockFile.file_hash } },
+ };
+ const diffMock = {
+ diff_files: [mockFile],
+ updatePosition: false,
+ };
+
+ mutations[types.SET_DIFF_DATA_BATCH](state, diffMock);
+
+ expect(state.diffFiles[0].file_hash).toBe(mockFile.file_hash);
+ expect(state.treeEntries[mockFile.file_path].diffLoaded).toBe(true);
+ });
});
describe('SET_COVERAGE_DATA', () => {
@@ -122,6 +155,17 @@ describe('DiffsStoreMutations', () => {
});
});
+ describe('SET_DIFF_TREE_ENTRY', () => {
+ it('should set tree entry', () => {
+ const file = getDiffFileMock();
+ const state = { treeEntries: { [file.file_path]: {} } };
+
+ mutations[types.SET_DIFF_TREE_ENTRY](state, file);
+
+ expect(state.treeEntries[file.file_path].diffLoaded).toBe(true);
+ });
+ });
+
describe('SET_DIFF_VIEW_TYPE', () => {
it('should set diff view type properly', () => {
const state = {};
@@ -1076,4 +1120,15 @@ describe('DiffsStoreMutations', () => {
expect(state.diffFiles[0].viewer.forceOpen).toBe(true);
});
});
+
+ describe('SET_PINNED_FILE_HASH', () => {
+ it('set pinned file hash', () => {
+ const state = {};
+ const file = getDiffFileMock();
+
+ mutations[types.SET_PINNED_FILE_HASH](state, file.file_hash);
+
+ expect(state.pinnedFileHash).toBe(file.file_hash);
+ });
+ });
});
diff --git a/spec/frontend/diffs/store/utils_spec.js b/spec/frontend/diffs/store/utils_spec.js
index 6331269d6e8..019ed663d82 100644
--- a/spec/frontend/diffs/store/utils_spec.js
+++ b/spec/frontend/diffs/store/utils_spec.js
@@ -476,6 +476,17 @@ describe('DiffsStoreUtils', () => {
expect(updatedFilesList).toEqual([mock, fakeNewFile]);
});
+ it('updates diff position', () => {
+ const priorFiles = [mock, { ...mock, file_hash: 'foo', file_path: 'foo' }];
+ const updatedFilesList = utils.prepareDiffData({
+ diff: { diff_files: [mock] },
+ priorFiles,
+ updatePosition: true,
+ });
+
+ expect(updatedFilesList[1].file_hash).toEqual(mock.file_hash);
+ });
+
it('completes an existing split diff without overwriting existing diffs', () => {
// The current state has a file that has only loaded inline lines
const priorFiles = [{ ...mock }];
diff --git a/spec/frontend/editor/schema/ci/ci_schema_spec.js b/spec/frontend/editor/schema/ci/ci_schema_spec.js
index 7986509074e..7a37f53c7a6 100644
--- a/spec/frontend/editor/schema/ci/ci_schema_spec.js
+++ b/spec/frontend/editor/schema/ci/ci_schema_spec.js
@@ -38,8 +38,10 @@ import SecretsYaml from './yaml_tests/positive_tests/secrets.yml';
import ServicesYaml from './yaml_tests/positive_tests/services.yml';
import NeedsParallelMatrixYaml from './yaml_tests/positive_tests/needs_parallel_matrix.yml';
import ScriptYaml from './yaml_tests/positive_tests/script.yml';
-import AutoCancelPipelineOnJobFailureAllYaml from './yaml_tests/positive_tests/auto_cancel_pipeline/on_job_failure/all.yml';
-import AutoCancelPipelineOnJobFailureNoneYaml from './yaml_tests/positive_tests/auto_cancel_pipeline/on_job_failure/none.yml';
+import WorkflowAutoCancelOnJobFailureYaml from './yaml_tests/positive_tests/workflow/auto_cancel/on_job_failure.yml';
+import WorkflowAutoCancelOnNewCommitYaml from './yaml_tests/positive_tests/workflow/auto_cancel/on_new_commit.yml';
+import WorkflowRulesAutoCancelOnJobFailureYaml from './yaml_tests/positive_tests/workflow/rules/auto_cancel/on_job_failure.yml';
+import WorkflowRulesAutoCancelOnNewCommitYaml from './yaml_tests/positive_tests/workflow/rules/auto_cancel/on_new_commit.yml';
// YAML NEGATIVE TEST
import ArtifactsNegativeYaml from './yaml_tests/negative_tests/artifacts.yml';
@@ -66,7 +68,10 @@ import NeedsParallelMatrixNumericYaml from './yaml_tests/negative_tests/needs/pa
import NeedsParallelMatrixWrongParallelValueYaml from './yaml_tests/negative_tests/needs/parallel_matrix/wrong_parallel_value.yml';
import NeedsParallelMatrixWrongMatrixValueYaml from './yaml_tests/negative_tests/needs/parallel_matrix/wrong_matrix_value.yml';
import ScriptNegativeYaml from './yaml_tests/negative_tests/script.yml';
-import AutoCancelPipelineNegativeYaml from './yaml_tests/negative_tests/auto_cancel_pipeline.yml';
+import WorkflowAutoCancelOnJobFailureNegativeYaml from './yaml_tests/negative_tests/workflow/auto_cancel/on_job_failure.yml';
+import WorkflowAutoCancelOnNewCommitNegativeYaml from './yaml_tests/negative_tests/workflow/auto_cancel/on_new_commit.yml';
+import WorkflowRulesAutoCancelOnJobFailureNegativeYaml from './yaml_tests/negative_tests/workflow/rules/auto_cancel/on_job_failure.yml';
+import WorkflowRulesAutoCancelOnNewCommitNegativeYaml from './yaml_tests/negative_tests/workflow/rules/auto_cancel/on_new_commit.yml';
const ajv = new Ajv({
strictTypes: false,
@@ -110,8 +115,10 @@ describe('positive tests', () => {
SecretsYaml,
NeedsParallelMatrixYaml,
ScriptYaml,
- AutoCancelPipelineOnJobFailureAllYaml,
- AutoCancelPipelineOnJobFailureNoneYaml,
+ WorkflowAutoCancelOnJobFailureYaml,
+ WorkflowAutoCancelOnNewCommitYaml,
+ WorkflowRulesAutoCancelOnJobFailureYaml,
+ WorkflowRulesAutoCancelOnNewCommitYaml,
}),
)('schema validates %s', (_, input) => {
// We construct a new "JSON" from each main key that is inside a
@@ -157,7 +164,10 @@ describe('negative tests', () => {
NeedsParallelMatrixWrongParallelValueYaml,
NeedsParallelMatrixWrongMatrixValueYaml,
ScriptNegativeYaml,
- AutoCancelPipelineNegativeYaml,
+ WorkflowAutoCancelOnJobFailureNegativeYaml,
+ WorkflowAutoCancelOnNewCommitNegativeYaml,
+ WorkflowRulesAutoCancelOnJobFailureNegativeYaml,
+ WorkflowRulesAutoCancelOnNewCommitNegativeYaml,
}),
)('schema validates %s', (_, input) => {
// We construct a new "JSON" from each main key that is inside a
diff --git a/spec/frontend/editor/schema/ci/yaml_tests/negative_tests/image.yml b/spec/frontend/editor/schema/ci/yaml_tests/negative_tests/image.yml
index ad37cd6c3ba..d6bc3cccf41 100644
--- a/spec/frontend/editor/schema/ci/yaml_tests/negative_tests/image.yml
+++ b/spec/frontend/editor/schema/ci/yaml_tests/negative_tests/image.yml
@@ -26,6 +26,17 @@ invalid_image_platform:
docker:
platform: ["arm64"] # The expected value is a string, not an array
+invalid_image_user:
+ image:
+ name: alpine:latest
+ docker:
+ user: ["dave"] # The expected value is a string, not an array
+
+empty_image_user:
+ image:
+ name: alpine:latest
+ docker:
+ user: ""
invalid_image_executor_opts:
image:
name: alpine:latest
diff --git a/spec/frontend/editor/schema/ci/yaml_tests/negative_tests/secrets.yml b/spec/frontend/editor/schema/ci/yaml_tests/negative_tests/secrets.yml
index 4baf4c6b850..23d667eeeff 100644
--- a/spec/frontend/editor/schema/ci/yaml_tests/negative_tests/secrets.yml
+++ b/spec/frontend/editor/schema/ci/yaml_tests/negative_tests/secrets.yml
@@ -71,3 +71,21 @@ job_with_secrets_with_missing_required_name_property:
azure_key_vault:
name:
version: latest
+
+job_with_gcp_secret_manager_secret_without_name:
+ script:
+ - echo $TEST_DB_PASSWORD
+ secrets:
+ TEST_DB_PASSWORD:
+ gcp_secret_manager:
+ version: latest
+ token: $TEST_TOKEN
+
+job_with_gcp_secret_manager_secret_without_token:
+ script:
+ - echo $TEST_DB_PASSWORD
+ secrets:
+ TEST_DB_PASSWORD:
+ gcp_secret_manager:
+ name: my-secret
+
diff --git a/spec/frontend/editor/schema/ci/yaml_tests/negative_tests/services.yml b/spec/frontend/editor/schema/ci/yaml_tests/negative_tests/services.yml
index e14ac9ca86e..fd05d2606e5 100644
--- a/spec/frontend/editor/schema/ci/yaml_tests/negative_tests/services.yml
+++ b/spec/frontend/editor/schema/ci/yaml_tests/negative_tests/services.yml
@@ -50,3 +50,17 @@ invalid_service_platform:
- name: mysql:5.7
docker:
platform: ["arm64"] # The expected value is a string, not an array
+
+invalid_service_user:
+ script: echo "Specifying user."
+ services:
+ - name: mysql:5.7
+ docker:
+ user: ["dave"] # The expected value is a string, not an array
+
+empty_service_user:
+ script: echo "Specifying user"
+ services:
+ - name: alpine:latest
+ docker:
+ user: ""
diff --git a/spec/frontend/editor/schema/ci/yaml_tests/negative_tests/auto_cancel_pipeline.yml b/spec/frontend/editor/schema/ci/yaml_tests/negative_tests/workflow/auto_cancel/on_job_failure.yml
index 0ba3e5632e3..2bf9effe1be 100644
--- a/spec/frontend/editor/schema/ci/yaml_tests/negative_tests/auto_cancel_pipeline.yml
+++ b/spec/frontend/editor/schema/ci/yaml_tests/negative_tests/workflow/auto_cancel/on_job_failure.yml
@@ -1,4 +1,3 @@
-# invalid workflow:auto-cancel:on-job-failure
workflow:
auto_cancel:
on_job_failure: unexpected_value
diff --git a/spec/frontend/editor/schema/ci/yaml_tests/negative_tests/workflow/auto_cancel/on_new_commit.yml b/spec/frontend/editor/schema/ci/yaml_tests/negative_tests/workflow/auto_cancel/on_new_commit.yml
new file mode 100644
index 00000000000..371662efd24
--- /dev/null
+++ b/spec/frontend/editor/schema/ci/yaml_tests/negative_tests/workflow/auto_cancel/on_new_commit.yml
@@ -0,0 +1,3 @@
+workflow:
+ auto_cancel:
+ on_new_commit: unexpected_value
diff --git a/spec/frontend/editor/schema/ci/yaml_tests/negative_tests/workflow/rules/auto_cancel/on_job_failure.yml b/spec/frontend/editor/schema/ci/yaml_tests/negative_tests/workflow/rules/auto_cancel/on_job_failure.yml
new file mode 100644
index 00000000000..11029a85189
--- /dev/null
+++ b/spec/frontend/editor/schema/ci/yaml_tests/negative_tests/workflow/rules/auto_cancel/on_job_failure.yml
@@ -0,0 +1,7 @@
+workflow:
+ auto_cancel:
+ on_job_failure: all
+ rules:
+ - if: $CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH
+ auto_cancel:
+ on_job_failure: unexpected_value
diff --git a/spec/frontend/editor/schema/ci/yaml_tests/negative_tests/workflow/rules/auto_cancel/on_new_commit.yml b/spec/frontend/editor/schema/ci/yaml_tests/negative_tests/workflow/rules/auto_cancel/on_new_commit.yml
new file mode 100644
index 00000000000..4c7e9be9018
--- /dev/null
+++ b/spec/frontend/editor/schema/ci/yaml_tests/negative_tests/workflow/rules/auto_cancel/on_new_commit.yml
@@ -0,0 +1,7 @@
+workflow:
+ auto_cancel:
+ on_new_commit: interruptible
+ rules:
+ - if: $CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH
+ auto_cancel:
+ on_new_commit: unexpected_value
diff --git a/spec/frontend/editor/schema/ci/yaml_tests/positive_tests/auto_cancel_pipeline/on_job_failure/none.yml b/spec/frontend/editor/schema/ci/yaml_tests/positive_tests/auto_cancel_pipeline/on_job_failure/none.yml
deleted file mode 100644
index b99eb50e962..00000000000
--- a/spec/frontend/editor/schema/ci/yaml_tests/positive_tests/auto_cancel_pipeline/on_job_failure/none.yml
+++ /dev/null
@@ -1,4 +0,0 @@
-# valid workflow:auto-cancel:on-job-failure
-workflow:
- auto_cancel:
- on_job_failure: none
diff --git a/spec/frontend/editor/schema/ci/yaml_tests/positive_tests/image.yml b/spec/frontend/editor/schema/ci/yaml_tests/positive_tests/image.yml
index 4c2559d0800..020cce80fd3 100644
--- a/spec/frontend/editor/schema/ci/yaml_tests/positive_tests/image.yml
+++ b/spec/frontend/editor/schema/ci/yaml_tests/positive_tests/image.yml
@@ -30,6 +30,19 @@ valid_image_with_docker:
docker:
platform: linux/amd64
+valid_image_with_docker_user:
+ image:
+ name: ubuntu:latest
+ docker:
+ user: ubuntu
+
+valid_image_with_docker_multiple_options:
+ image:
+ name: ubuntu:latest
+ docker:
+ platform: linux/arm64
+ user: ubuntu
+
valid_image_full:
image:
name: alpine:latest
diff --git a/spec/frontend/editor/schema/ci/yaml_tests/positive_tests/secrets.yml b/spec/frontend/editor/schema/ci/yaml_tests/positive_tests/secrets.yml
index af3107974b9..e615fa52dc5 100644
--- a/spec/frontend/editor/schema/ci/yaml_tests/positive_tests/secrets.yml
+++ b/spec/frontend/editor/schema/ci/yaml_tests/positive_tests/secrets.yml
@@ -43,3 +43,32 @@ valid_job_with_azure_key_vault_secrets_name_and_version:
azure_key_vault:
name: 'test'
version: 'version'
+
+valid_job_with_gcp_secret_manager_name:
+ script:
+ - echo $TEST_DB_PASSWORD
+ secrets:
+ TEST_DB_PASSWORD:
+ gcp_secret_manager:
+ name: 'test'
+ token: $TEST_TOKEN
+
+valid_job_with_gcp_secret_manager_name_and_numbered_version:
+ script:
+ - echo $TEST_DB_PASSWORD
+ secrets:
+ TEST_DB_PASSWORD:
+ gcp_secret_manager:
+ name: 'test'
+ version: 2
+ token: $TEST_TOKEN
+
+valid_job_with_gcp_secret_manager_name_and_string_version:
+ script:
+ - echo $TEST_DB_PASSWORD
+ secrets:
+ TEST_DB_PASSWORD:
+ gcp_secret_manager:
+ name: 'test'
+ version: 'latest'
+ token: $TEST_TOKEN
diff --git a/spec/frontend/editor/schema/ci/yaml_tests/positive_tests/services.yml b/spec/frontend/editor/schema/ci/yaml_tests/positive_tests/services.yml
index 1d19ee52cc3..0f45b075f53 100644
--- a/spec/frontend/editor/schema/ci/yaml_tests/positive_tests/services.yml
+++ b/spec/frontend/editor/schema/ci/yaml_tests/positive_tests/services.yml
@@ -36,3 +36,18 @@ services_platform_string:
- name: mysql:5.7
docker:
platform: arm64
+
+services_with_docker_user:
+ script: echo "Specifying platform."
+ services:
+ - name: mysql:5.7
+ docker:
+ user: ubuntu
+
+services_with_docker_multiple_options:
+ script: echo "Specifying platform."
+ services:
+ - name: mysql:5.7
+ docker:
+ platform: linux/arm64
+ user: ubuntu
diff --git a/spec/frontend/editor/schema/ci/yaml_tests/positive_tests/auto_cancel_pipeline/on_job_failure/all.yml b/spec/frontend/editor/schema/ci/yaml_tests/positive_tests/workflow/auto_cancel/on_job_failure.yml
index bf84ff16f42..79d18f40721 100644
--- a/spec/frontend/editor/schema/ci/yaml_tests/positive_tests/auto_cancel_pipeline/on_job_failure/all.yml
+++ b/spec/frontend/editor/schema/ci/yaml_tests/positive_tests/workflow/auto_cancel/on_job_failure.yml
@@ -1,4 +1,3 @@
-# valid workflow:auto-cancel:on-job-failure
workflow:
auto_cancel:
on_job_failure: all
diff --git a/spec/frontend/editor/schema/ci/yaml_tests/positive_tests/workflow/auto_cancel/on_new_commit.yml b/spec/frontend/editor/schema/ci/yaml_tests/positive_tests/workflow/auto_cancel/on_new_commit.yml
new file mode 100644
index 00000000000..a1641878e4d
--- /dev/null
+++ b/spec/frontend/editor/schema/ci/yaml_tests/positive_tests/workflow/auto_cancel/on_new_commit.yml
@@ -0,0 +1,3 @@
+workflow:
+ auto_cancel:
+ on_new_commit: conservative
diff --git a/spec/frontend/editor/schema/ci/yaml_tests/positive_tests/workflow/rules/auto_cancel/on_job_failure.yml b/spec/frontend/editor/schema/ci/yaml_tests/positive_tests/workflow/rules/auto_cancel/on_job_failure.yml
new file mode 100644
index 00000000000..9050566fbd3
--- /dev/null
+++ b/spec/frontend/editor/schema/ci/yaml_tests/positive_tests/workflow/rules/auto_cancel/on_job_failure.yml
@@ -0,0 +1,7 @@
+workflow:
+ auto_cancel:
+ on_job_failure: all
+ rules:
+ - if: $CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH
+ auto_cancel:
+ on_job_failure: none
diff --git a/spec/frontend/editor/schema/ci/yaml_tests/positive_tests/workflow/rules/auto_cancel/on_new_commit.yml b/spec/frontend/editor/schema/ci/yaml_tests/positive_tests/workflow/rules/auto_cancel/on_new_commit.yml
new file mode 100644
index 00000000000..c5ec387fe50
--- /dev/null
+++ b/spec/frontend/editor/schema/ci/yaml_tests/positive_tests/workflow/rules/auto_cancel/on_new_commit.yml
@@ -0,0 +1,7 @@
+workflow:
+ auto_cancel:
+ on_new_commit: interruptible
+ rules:
+ - if: $CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH
+ auto_cancel:
+ on_new_commit: none
diff --git a/spec/frontend/emoji/components/emoji_group_spec.js b/spec/frontend/emoji/components/emoji_group_spec.js
index a2a46bedd7b..a2e3643356f 100644
--- a/spec/frontend/emoji/components/emoji_group_spec.js
+++ b/spec/frontend/emoji/components/emoji_group_spec.js
@@ -13,6 +13,7 @@ function factory(propsData = {}) {
propsData,
stubs: {
GlButton,
+ GlEmoji: { template: '<div/>' },
},
}),
);
diff --git a/spec/frontend/environments/helpers/k8s_integration_helper_spec.js b/spec/frontend/environments/helpers/k8s_integration_helper_spec.js
index 97100557ef3..852b5318c77 100644
--- a/spec/frontend/environments/helpers/k8s_integration_helper_spec.js
+++ b/spec/frontend/environments/helpers/k8s_integration_helper_spec.js
@@ -1,5 +1,4 @@
import {
- generateServicePortsString,
getDeploymentsStatuses,
getDaemonSetStatuses,
getStatefulSetStatuses,
@@ -12,35 +11,6 @@ import {
import { CLUSTER_AGENT_ERROR_MESSAGES } from '~/environments/constants';
describe('k8s_integration_helper', () => {
- describe('generateServicePortsString', () => {
- const port = '8080';
- const protocol = 'TCP';
- const nodePort = '31732';
-
- it('returns empty string if no ports provided', () => {
- expect(generateServicePortsString([])).toBe('');
- });
-
- it('returns port and protocol when provided', () => {
- expect(generateServicePortsString([{ port, protocol }])).toBe(`${port}/${protocol}`);
- });
-
- it('returns port, protocol and nodePort when provided', () => {
- expect(generateServicePortsString([{ port, protocol, nodePort }])).toBe(
- `${port}:${nodePort}/${protocol}`,
- );
- });
-
- it('returns joined strings of ports if multiple are provided', () => {
- expect(
- generateServicePortsString([
- { port, protocol },
- { port, protocol, nodePort },
- ]),
- ).toBe(`${port}/${protocol}, ${port}:${nodePort}/${protocol}`);
- });
- });
-
describe('getDeploymentsStatuses', () => {
const pending = {
status: {
diff --git a/spec/frontend/environments/kubernetes_status_bar_spec.js b/spec/frontend/environments/kubernetes_status_bar_spec.js
index dcd628354e1..e4bf8f3ea07 100644
--- a/spec/frontend/environments/kubernetes_status_bar_spec.js
+++ b/spec/frontend/environments/kubernetes_status_bar_spec.js
@@ -10,7 +10,6 @@ import {
} from '~/environments/constants';
import waitForPromises from 'helpers/wait_for_promises';
import createMockApollo from 'helpers/mock_apollo_helper';
-import { s__ } from '~/locale';
import { mockKasTunnelUrl } from './mock_data';
Vue.use(VueApollo);
@@ -23,6 +22,8 @@ const configuration = {
},
};
const environmentName = 'environment_name';
+const kustomizationResourcePath =
+ 'kustomize.toolkit.fluxcd.io/v1beta1/namespaces/my-namespace/kustomizations/app';
describe('~/environments/components/kubernetes_status_bar.vue', () => {
let wrapper;
@@ -97,7 +98,7 @@ describe('~/environments/components/kubernetes_status_bar.vue', () => {
});
it('renders sync status as Unavailable', () => {
- expect(findSyncBadge().text()).toBe(s__('Deployment|Unavailable'));
+ expect(findSyncBadge().text()).toBe('Unavailable');
});
});
@@ -106,8 +107,7 @@ describe('~/environments/components/kubernetes_status_bar.vue', () => {
describe('if the provided resource is a Kustomization', () => {
beforeEach(() => {
- fluxResourcePath =
- 'kustomize.toolkit.fluxcd.io/v1beta1/namespaces/my-namespace/kustomizations/app';
+ fluxResourcePath = kustomizationResourcePath;
createWrapper({ fluxResourcePath });
});
@@ -178,6 +178,47 @@ describe('~/environments/components/kubernetes_status_bar.vue', () => {
});
});
+ describe('when receives data from the Flux', () => {
+ const createApolloProviderWithKustomizations = (result) => {
+ const mockResolvers = {
+ Query: {
+ fluxKustomizationStatus: jest.fn().mockReturnValue([result]),
+ fluxHelmReleaseStatus: fluxHelmReleaseStatusQuery,
+ },
+ };
+
+ return createMockApollo([], mockResolvers);
+ };
+ const message = 'Message from Flux';
+
+ it.each`
+ status | type | reason | statusText | statusPopover
+ ${'True'} | ${'Stalled'} | ${''} | ${'Stalled'} | ${message}
+ ${'True'} | ${'Reconciling'} | ${''} | ${'Reconciling'} | ${'Flux sync reconciling'}
+ ${'Unknown'} | ${'Ready'} | ${'Progressing'} | ${'Reconciling'} | ${message}
+ ${'True'} | ${'Ready'} | ${''} | ${'Reconciled'} | ${'Flux sync reconciled successfully'}
+ ${'False'} | ${'Ready'} | ${''} | ${'Failed'} | ${message}
+ ${'Unknown'} | ${'Ready'} | ${''} | ${'Unknown'} | ${'Unable to detect state. How are states detected?'}
+ `(
+ 'renders sync status as $statusText when status is $status, type is $type, and reason is $reason',
+ async ({ status, type, reason, statusText, statusPopover }) => {
+ createWrapper({
+ fluxResourcePath: kustomizationResourcePath,
+ apolloProvider: createApolloProviderWithKustomizations({
+ status,
+ type,
+ reason,
+ message,
+ }),
+ });
+ await waitForPromises();
+
+ expect(findSyncBadge().text()).toBe(statusText);
+ expect(findPopover().text()).toBe(statusPopover);
+ },
+ );
+ });
+
describe('when Flux API errored', () => {
const error = new Error('Error from the cluster_client API');
const createApolloProviderWithErrors = () => {
@@ -212,9 +253,7 @@ describe('~/environments/components/kubernetes_status_bar.vue', () => {
it('renders popover with an API error message', () => {
expect(findPopover().text()).toBe(error.message);
- expect(findPopover().props('title')).toBe(
- s__('Deployment|Flux sync status is unavailable'),
- );
+ expect(findPopover().props('title')).toBe('Flux sync status is unavailable');
});
});
});
diff --git a/spec/frontend/error_tracking/components/error_details_info_spec.js b/spec/frontend/error_tracking/components/error_details_info_spec.js
index a3f4b0e0dd8..f563fee0ec0 100644
--- a/spec/frontend/error_tracking/components/error_details_info_spec.js
+++ b/spec/frontend/error_tracking/components/error_details_info_spec.js
@@ -47,6 +47,13 @@ describe('ErrorDetails', () => {
expect(wrapper.findByTestId('user-count-card').text()).toMatchInterpolatedText('Users 2');
});
+ it('should not render a card with user counts if integrated error tracking', () => {
+ mountComponent({
+ integrated: true,
+ });
+ expect(wrapper.findByTestId('user-count-card').exists()).toBe(false);
+ });
+
describe('first seen card', () => {
it('if firstSeen is missing, does not render a card', () => {
mountComponent({
diff --git a/spec/frontend/error_tracking/components/error_tracking_list_spec.js b/spec/frontend/error_tracking/components/error_tracking_list_spec.js
index 823f7132fdd..91518002f0e 100644
--- a/spec/frontend/error_tracking/components/error_tracking_list_spec.js
+++ b/spec/frontend/error_tracking/components/error_tracking_list_spec.js
@@ -146,6 +146,28 @@ describe('ErrorTrackingList', () => {
expect(findErrorListRows().length).toEqual(store.state.list.errors.length);
});
+ describe('user count', () => {
+ it('shows user count', () => {
+ mountComponent({
+ integratedErrorTrackingEnabled: false,
+ stubs: {
+ GlTable: false,
+ },
+ });
+ expect(findErrorListTable().find('thead').text()).toContain('Users');
+ });
+
+ it('does not show user count', () => {
+ mountComponent({
+ integratedErrorTrackingEnabled: true,
+ stubs: {
+ GlTable: false,
+ },
+ });
+ expect(findErrorListTable().find('thead').text()).not.toContain('Users');
+ });
+ });
+
describe.each([
['/test-project/-/error_tracking'],
['/test-project/-/error_tracking/'], // handles leading '/' https://gitlab.com/gitlab-org/gitlab/-/issues/430211
diff --git a/spec/frontend/fixtures/static/oauth_remember_me.html b/spec/frontend/fixtures/static/oauth_remember_me.html
index 60277ecf66e..d7519dd695f 100644
--- a/spec/frontend/fixtures/static/oauth_remember_me.html
+++ b/spec/frontend/fixtures/static/oauth_remember_me.html
@@ -1,20 +1,20 @@
-<div id="oauth-container">
+<div class="js-oauth-login">
<input id="remember_me_omniauth" type="checkbox" />
<form method="post" action="http://example.com/">
- <button class="js-oauth-login twitter" type="submit">
+ <button class="twitter" type="submit">
<span>Twitter</span>
</button>
</form>
<form method="post" action="http://example.com/">
- <button class="js-oauth-login github" type="submit">
+ <button class="github" type="submit">
<span>GitHub</span>
</button>
</form>
<form method="post" action="http://example.com/?redirect_fragment=L1">
- <button class="js-oauth-login facebook" type="submit">
+ <button class="facebook" type="submit">
<span>Facebook</span>
</button>
</form>
diff --git a/spec/frontend/groups/components/app_spec.js b/spec/frontend/groups/components/app_spec.js
index 8ac410c87b1..027c1709e0b 100644
--- a/spec/frontend/groups/components/app_spec.js
+++ b/spec/frontend/groups/components/app_spec.js
@@ -58,6 +58,9 @@ describe('AppComponent', () => {
mocks: {
$toast,
},
+ provide: {
+ emptySearchIllustration: '/assets/illustrations/empty-state/empty-search-md.svg',
+ },
});
vm = wrapper.vm;
};
diff --git a/spec/frontend/groups/components/group_item_spec.js b/spec/frontend/groups/components/group_item_spec.js
index 94460de9dd6..26c97a7cb41 100644
--- a/spec/frontend/groups/components/group_item_spec.js
+++ b/spec/frontend/groups/components/group_item_spec.js
@@ -11,6 +11,8 @@ import {
VISIBILITY_LEVEL_PRIVATE_STRING,
VISIBILITY_LEVEL_INTERNAL_STRING,
VISIBILITY_LEVEL_PUBLIC_STRING,
+ GROUP_VISIBILITY_TYPE,
+ PROJECT_VISIBILITY_TYPE,
} from '~/visibility_level/constants';
import { helpPagePath } from '~/helpers/help_page_helper';
import { mountExtended, extendedWrapper } from 'helpers/vue_test_utils_helper';
@@ -24,7 +26,7 @@ const createComponent = (
) => {
return mountExtended(GroupItem, {
propsData,
- components: { GroupFolder },
+ components: { GroupFolder, GroupItem },
provide,
});
};
@@ -115,6 +117,51 @@ describe('GroupItemComponent', () => {
wrapper.destroy();
});
});
+
+ describe('visibilityTooltip', () => {
+ describe('if item represents group', () => {
+ it.each`
+ visibilityLevel | visibilityTooltip
+ ${VISIBILITY_LEVEL_PUBLIC_STRING} | ${GROUP_VISIBILITY_TYPE[VISIBILITY_LEVEL_PUBLIC_STRING]}
+ ${VISIBILITY_LEVEL_INTERNAL_STRING} | ${GROUP_VISIBILITY_TYPE[VISIBILITY_LEVEL_INTERNAL_STRING]}
+ ${VISIBILITY_LEVEL_PRIVATE_STRING} | ${GROUP_VISIBILITY_TYPE[VISIBILITY_LEVEL_PRIVATE_STRING]}
+ `(
+ 'should return corresponding text when visibility level is $visibilityLevel',
+ ({ visibilityLevel, visibilityTooltip }) => {
+ const group = { ...mockParentGroupItem };
+
+ group.type = 'group';
+ group.visibility = visibilityLevel;
+ wrapper = createComponent({ group });
+
+ expect(wrapper.vm.visibilityTooltip).toBe(visibilityTooltip);
+ wrapper.destroy();
+ },
+ );
+ });
+
+ describe('if item represents project', () => {
+ it.each`
+ visibilityLevel | visibilityTooltip
+ ${VISIBILITY_LEVEL_PUBLIC_STRING} | ${PROJECT_VISIBILITY_TYPE[VISIBILITY_LEVEL_PUBLIC_STRING]}
+ ${VISIBILITY_LEVEL_INTERNAL_STRING} | ${PROJECT_VISIBILITY_TYPE[VISIBILITY_LEVEL_INTERNAL_STRING]}
+ ${VISIBILITY_LEVEL_PRIVATE_STRING} | ${PROJECT_VISIBILITY_TYPE[VISIBILITY_LEVEL_PRIVATE_STRING]}
+ `(
+ 'should return corresponding text when visibility level is $visibilityLevel',
+ ({ visibilityLevel, visibilityTooltip }) => {
+ const group = { ...mockParentGroupItem };
+
+ group.type = 'project';
+ group.lastActivityAt = '2017-04-09T18:40:39.101Z';
+ group.visibility = visibilityLevel;
+ wrapper = createComponent({ group });
+
+ expect(wrapper.vm.visibilityTooltip).toBe(visibilityTooltip);
+ wrapper.destroy();
+ },
+ );
+ });
+ });
});
describe('methods', () => {
@@ -261,10 +308,9 @@ describe('GroupItemComponent', () => {
});
it.each`
- attr | value
- ${'itemscope'} | ${'itemscope'}
- ${'itemtype'} | ${'https://schema.org/Organization'}
- ${'itemprop'} | ${'subOrganization'}
+ attr | value
+ ${'itemtype'} | ${'https://schema.org/Organization'}
+ ${'itemprop'} | ${'subOrganization'}
`('does set correct $attr', ({ attr, value } = {}) => {
expect(wrapper.attributes(attr)).toBe(value);
});
@@ -281,7 +327,7 @@ describe('GroupItemComponent', () => {
});
describe('visibility warning popover', () => {
- const findPopover = () => extendedWrapper(wrapper.findComponent(GlPopover));
+ const findPopover = () => wrapper.findComponent(GlPopover);
const itDoesNotRenderVisibilityWarningPopover = () => {
it('does not render visibility warning popover', () => {
@@ -343,9 +389,10 @@ describe('GroupItemComponent', () => {
if (isPopoverShown) {
it('renders visibility warning popover with `Learn more` link', () => {
- const popover = findPopover();
+ const popover = extendedWrapper(findPopover());
expect(popover.exists()).toBe(true);
+
expect(
popover.findByRole('link', { name: GroupItem.i18n.learnMore }).attributes('href'),
).toBe(
diff --git a/spec/frontend/groups/components/group_name_and_path_spec.js b/spec/frontend/groups/components/group_name_and_path_spec.js
index 0a18e657c94..59c42e54af6 100644
--- a/spec/frontend/groups/components/group_name_and_path_spec.js
+++ b/spec/frontend/groups/components/group_name_and_path_spec.js
@@ -1,7 +1,7 @@
import Vue, { nextTick } from 'vue';
import VueApollo from 'vue-apollo';
import { merge } from 'lodash';
-import { GlAlert, GlDropdown, GlTruncate, GlDropdownItem } from '@gitlab/ui';
+import { GlDropdown, GlTruncate, GlDropdownItem } from '@gitlab/ui';
import { mountExtended, extendedWrapper } from 'helpers/vue_test_utils_helper';
import waitForPromises from 'helpers/wait_for_promises';
import createMockApollo from 'helpers/mock_apollo_helper';
@@ -74,7 +74,8 @@ describe('GroupNameAndPath', () => {
const findSubgroupNameField = () => wrapper.findByLabelText('Subgroup name');
const findSubgroupSlugField = () => wrapper.findByLabelText('Subgroup slug');
const findSelectedGroup = () => wrapper.findComponent(GlTruncate);
- const findAlert = () => extendedWrapper(wrapper.findComponent(GlAlert));
+ const findChangeUrlAlert = () => extendedWrapper(wrapper.findByTestId('changing-url-alert'));
+ const findDotInPathAlert = () => extendedWrapper(wrapper.findByTestId('dot-in-path-alert'));
const apiMockAvailablePath = () => {
getGroupPathAvailability.mockResolvedValueOnce({
@@ -181,6 +182,12 @@ describe('GroupNameAndPath', () => {
expectLoadingMessageExists();
});
+ it('shows warning alert on using dot in path', () => {
+ createComponentEditGroup();
+
+ expect(findDotInPathAlert().exists()).toBe(true);
+ });
+
describe('when path is available', () => {
it('does not update `Group URL` field', async () => {
apiMockAvailablePath();
@@ -396,8 +403,10 @@ describe('GroupNameAndPath', () => {
it('shows warning alert with `Learn more` link', () => {
createComponentEditGroup();
- expect(findAlert().exists()).toBe(true);
- expect(findAlert().findByRole('link', { name: 'Learn more' }).attributes('href')).toBe(
+ expect(findChangeUrlAlert().exists()).toBe(true);
+ expect(
+ findChangeUrlAlert().findByRole('link', { name: 'Learn more' }).attributes('href'),
+ ).toBe(
helpPagePath('user/group/manage', {
anchor: 'change-a-groups-path',
}),
diff --git a/spec/frontend/groups/components/overview_tabs_spec.js b/spec/frontend/groups/components/overview_tabs_spec.js
index 6bed744685f..8b80330c910 100644
--- a/spec/frontend/groups/components/overview_tabs_spec.js
+++ b/spec/frontend/groups/components/overview_tabs_spec.js
@@ -1,4 +1,4 @@
-import { GlSorting, GlSortingItem, GlTab } from '@gitlab/ui';
+import { GlSorting, GlTab } from '@gitlab/ui';
import Vue, { nextTick } from 'vue';
import AxiosMockAdapter from 'axios-mock-adapter';
import { mountExtended } from 'helpers/vue_test_utils_helper';
@@ -17,6 +17,7 @@ import {
ACTIVE_TAB_SUBGROUPS_AND_PROJECTS,
ACTIVE_TAB_SHARED,
ACTIVE_TAB_ARCHIVED,
+ OVERVIEW_TABS_SORTING_ITEMS,
SORTING_ITEM_NAME,
SORTING_ITEM_UPDATED,
SORTING_ITEM_STARS,
@@ -44,6 +45,7 @@ describe('OverviewTabs', () => {
newProjectIllustration: '',
emptyProjectsIllustration: '',
emptySubgroupIllustration: '',
+ emptySearchIllustration: '',
canCreateSubgroups: false,
canCreateProjects: false,
initialSort: 'name_asc',
@@ -73,6 +75,7 @@ describe('OverviewTabs', () => {
const findTab = (name) => wrapper.findByRole('tab', { name });
const findSelectedTab = () => wrapper.findByRole('tab', { selected: true });
const findSearchInput = () => wrapper.findByPlaceholderText(OverviewTabs.i18n.searchPlaceholder);
+ const findGlSorting = () => wrapper.findComponent(GlSorting);
beforeEach(() => {
axiosMock = new AxiosMockAdapter(axios);
@@ -300,7 +303,7 @@ describe('OverviewTabs', () => {
describe('when sort is changed', () => {
beforeEach(async () => {
await setup();
- wrapper.findAllComponents(GlSortingItem).at(2).vm.$emit('click');
+ findGlSorting().vm.$emit('sortByChange', SORTING_ITEM_UPDATED.label);
await nextTick();
});
@@ -402,12 +405,15 @@ describe('OverviewTabs', () => {
});
it('sets sort dropdown', () => {
- expect(wrapper.findComponent(GlSorting).props()).toMatchObject({
+ const expectedSortOptions = OVERVIEW_TABS_SORTING_ITEMS.map(({ label }) => {
+ return { value: label, text: label };
+ });
+ expect(findGlSorting().props()).toMatchObject({
text: SORTING_ITEM_UPDATED.label,
isAscending: false,
+ sortBy: SORTING_ITEM_UPDATED.label,
+ sortOptions: expectedSortOptions,
});
-
- expect(wrapper.findAllComponents(GlSortingItem).at(2).vm.$attrs.active).toBe(true);
});
});
});
diff --git a/spec/frontend/groups_projects/components/more_actions_dropdown_spec.js b/spec/frontend/groups_projects/components/more_actions_dropdown_spec.js
index 1bcff8a44be..777190149d1 100644
--- a/spec/frontend/groups_projects/components/more_actions_dropdown_spec.js
+++ b/spec/frontend/groups_projects/components/more_actions_dropdown_spec.js
@@ -1,4 +1,8 @@
-import { GlDisclosureDropdownItem, GlDisclosureDropdown } from '@gitlab/ui';
+import {
+ GlDisclosureDropdownItem,
+ GlDisclosureDropdown,
+ GlDisclosureDropdownGroup,
+} from '@gitlab/ui';
import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
import moreActionsDropdown from '~/groups_projects/components/more_actions_dropdown.vue';
@@ -28,6 +32,7 @@ describe('moreActionsDropdown', () => {
const showDropdown = () => {
findDropdown().vm.$emit('show');
};
+ const findDropdownGroup = () => wrapper.findComponent(GlDisclosureDropdownGroup);
describe('copy id', () => {
describe('project namespace type', () => {
@@ -72,6 +77,29 @@ describe('moreActionsDropdown', () => {
});
});
+ describe('dropdown group', () => {
+ it('is not rendered if no path is set', () => {
+ createComponent({
+ provideData: {
+ requestAccessPath: undefined,
+ leavePath: '',
+ withdrawPath: null,
+ },
+ });
+
+ expect(findDropdownGroup().exists()).toBe(false);
+ });
+
+ it('is rendered if path is set', () => {
+ createComponent({
+ provideData: {
+ requestAccessPath: 'path/to/request/access',
+ },
+ });
+ expect(findDropdownGroup().exists()).toBe(true);
+ });
+ });
+
describe('request access', () => {
it('does not render request access link', async () => {
createComponent();
diff --git a/spec/frontend/ide/lib/alerts/environment_spec.js b/spec/frontend/ide/lib/alerts/environment_spec.js
deleted file mode 100644
index d645209345c..00000000000
--- a/spec/frontend/ide/lib/alerts/environment_spec.js
+++ /dev/null
@@ -1,21 +0,0 @@
-import { GlLink } from '@gitlab/ui';
-import { mount } from '@vue/test-utils';
-import Environments from '~/ide/lib/alerts/environments.vue';
-
-describe('~/ide/lib/alerts/environment.vue', () => {
- let wrapper;
-
- beforeEach(() => {
- wrapper = mount(Environments);
- });
-
- it('shows a message regarding environments', () => {
- expect(wrapper.text()).toBe(
- "No deployments detected. Use environments to control your software's continuous deployment. Learn more about deployment jobs.",
- );
- });
-
- it('links to the help page on environments', () => {
- expect(wrapper.findComponent(GlLink).attributes('href')).toBe('/help/ci/environments/index.md');
- });
-});
diff --git a/spec/frontend/ide/services/index_spec.js b/spec/frontend/ide/services/index_spec.js
index cd099e60070..8e63b5801e8 100644
--- a/spec/frontend/ide/services/index_spec.js
+++ b/spec/frontend/ide/services/index_spec.js
@@ -2,12 +2,10 @@ import axios from 'axios';
import MockAdapter from 'axios-mock-adapter';
import getIdeProject from 'ee_else_ce/ide/queries/get_ide_project.query.graphql';
import Api from '~/api';
-import dismissUserCallout from '~/graphql_shared/mutations/dismiss_user_callout.mutation.graphql';
import services from '~/ide/services';
-import { query, mutate } from '~/ide/services/gql';
+import { query } from '~/ide/services/gql';
import { HTTP_STATUS_OK } from '~/lib/utils/http_status';
import { escapeFileUrl } from '~/lib/utils/url_utility';
-import ciConfig from '~/ci/pipeline_editor/graphql/queries/ci_config.query.graphql';
import { projectData } from '../mock_data';
jest.mock('~/api');
@@ -276,35 +274,6 @@ describe('IDE services', () => {
});
});
});
- describe('getCiConfig', () => {
- const TEST_PROJECT_PATH = 'foo/bar';
- const TEST_CI_CONFIG = 'test config';
-
- it('queries with the given CI config and project', () => {
- const result = { data: { ciConfig: { test: 'data' } } };
- query.mockResolvedValue(result);
- return services.getCiConfig(TEST_PROJECT_PATH, TEST_CI_CONFIG).then((data) => {
- expect(data).toEqual(result.data.ciConfig);
- expect(query).toHaveBeenCalledWith({
- query: ciConfig,
- variables: { projectPath: TEST_PROJECT_PATH, content: TEST_CI_CONFIG },
- });
- });
- });
- });
- describe('dismissUserCallout', () => {
- it('mutates the callout to dismiss', () => {
- const result = { data: { callouts: { test: 'data' } } };
- mutate.mockResolvedValue(result);
- return services.dismissUserCallout('test').then((data) => {
- expect(data).toEqual(result.data);
- expect(mutate).toHaveBeenCalledWith({
- mutation: dismissUserCallout,
- variables: { input: { featureName: 'test' } },
- });
- });
- });
- });
describe('getProjectPermissionsData', () => {
const TEST_PROJECT_PATH = 'foo/bar';
diff --git a/spec/frontend/ide/stores/actions/alert_spec.js b/spec/frontend/ide/stores/actions/alert_spec.js
deleted file mode 100644
index 1321c402ebb..00000000000
--- a/spec/frontend/ide/stores/actions/alert_spec.js
+++ /dev/null
@@ -1,46 +0,0 @@
-import testAction from 'helpers/vuex_action_helper';
-import service from '~/ide/services';
-import {
- detectEnvironmentsGuidance,
- dismissEnvironmentsGuidance,
-} from '~/ide/stores/actions/alert';
-import * as types from '~/ide/stores/mutation_types';
-
-jest.mock('~/ide/services');
-
-describe('~/ide/stores/actions/alert', () => {
- describe('detectEnvironmentsGuidance', () => {
- it('should try to fetch CI info', () => {
- const stages = ['a', 'b', 'c'];
- service.getCiConfig.mockResolvedValue({ stages });
-
- return testAction(
- detectEnvironmentsGuidance,
- 'the content',
- { currentProjectId: 'gitlab/test' },
- [{ type: types.DETECT_ENVIRONMENTS_GUIDANCE_ALERT, payload: stages }],
- [],
- () => expect(service.getCiConfig).toHaveBeenCalledWith('gitlab/test', 'the content'),
- );
- });
- });
- describe('dismissCallout', () => {
- it('should try to dismiss the given callout', () => {
- const callout = { featureName: 'test', dismissedAt: 'now' };
-
- service.dismissUserCallout.mockResolvedValue({ userCalloutCreate: { userCallout: callout } });
-
- return testAction(
- dismissEnvironmentsGuidance,
- undefined,
- {},
- [{ type: types.DISMISS_ENVIRONMENTS_GUIDANCE_ALERT }],
- [],
- () =>
- expect(service.dismissUserCallout).toHaveBeenCalledWith(
- 'web_ide_ci_environments_guidance',
- ),
- );
- });
- });
-});
diff --git a/spec/frontend/ide/stores/getters/alert_spec.js b/spec/frontend/ide/stores/getters/alert_spec.js
deleted file mode 100644
index 7068b8e637f..00000000000
--- a/spec/frontend/ide/stores/getters/alert_spec.js
+++ /dev/null
@@ -1,46 +0,0 @@
-import { getAlert } from '~/ide/lib/alerts';
-import EnvironmentsMessage from '~/ide/lib/alerts/environments.vue';
-import { createStore } from '~/ide/stores';
-import * as getters from '~/ide/stores/getters/alert';
-import { file } from '../../helpers';
-
-describe('IDE store alert getters', () => {
- let localState;
- let localStore;
-
- beforeEach(() => {
- localStore = createStore();
- localState = localStore.state;
- });
-
- describe('alerts', () => {
- describe('shows an alert about environments', () => {
- let alert;
-
- beforeEach(() => {
- const f = file('.gitlab-ci.yml');
- localState.openFiles.push(f);
- localState.currentActivityView = 'repo-commit-section';
- localState.environmentsGuidanceAlertDetected = true;
- localState.environmentsGuidanceAlertDismissed = false;
-
- const alertKey = getters.getAlert(localState)(f);
- alert = getAlert(alertKey);
- });
-
- it('has a message suggesting to use environments', () => {
- expect(alert.message).toEqual(EnvironmentsMessage);
- });
-
- it('dispatches to dismiss the callout on dismiss', () => {
- jest.spyOn(localStore, 'dispatch').mockImplementation();
- alert.dismiss(localStore);
- expect(localStore.dispatch).toHaveBeenCalledWith('dismissEnvironmentsGuidance');
- });
-
- it('should be a tip alert', () => {
- expect(alert.props).toEqual({ variant: 'tip' });
- });
- });
- });
-});
diff --git a/spec/frontend/ide/stores/mutations/alert_spec.js b/spec/frontend/ide/stores/mutations/alert_spec.js
deleted file mode 100644
index 2840ec4ebb7..00000000000
--- a/spec/frontend/ide/stores/mutations/alert_spec.js
+++ /dev/null
@@ -1,26 +0,0 @@
-import * as types from '~/ide/stores/mutation_types';
-import mutations from '~/ide/stores/mutations/alert';
-
-describe('~/ide/stores/mutations/alert', () => {
- const state = {};
-
- describe(types.DETECT_ENVIRONMENTS_GUIDANCE_ALERT, () => {
- it('checks the stages for any that configure environments', () => {
- mutations[types.DETECT_ENVIRONMENTS_GUIDANCE_ALERT](state, {
- nodes: [{ groups: { nodes: [{ jobs: { nodes: [{}] } }] } }],
- });
- expect(state.environmentsGuidanceAlertDetected).toBe(true);
- mutations[types.DETECT_ENVIRONMENTS_GUIDANCE_ALERT](state, {
- nodes: [{ groups: { nodes: [{ jobs: { nodes: [{ environment: {} }] } }] } }],
- });
- expect(state.environmentsGuidanceAlertDetected).toBe(false);
- });
- });
-
- describe(types.DISMISS_ENVIRONMENTS_GUIDANCE_ALERT, () => {
- it('stops environments guidance', () => {
- mutations[types.DISMISS_ENVIRONMENTS_GUIDANCE_ALERT](state);
- expect(state.environmentsGuidanceAlertDismissed).toBe(true);
- });
- });
-});
diff --git a/spec/frontend/import_entities/import_groups/components/import_status_spec.js b/spec/frontend/import_entities/import_groups/components/import_status_spec.js
index 8d055d45dd8..e0cabb86dcf 100644
--- a/spec/frontend/import_entities/import_groups/components/import_status_spec.js
+++ b/spec/frontend/import_entities/import_groups/components/import_status_spec.js
@@ -88,7 +88,6 @@ describe('Group import status component', () => {
id: 2,
entityId: 11,
hasFailures: true,
- showDetailsLink: true,
status: STATUSES.FINISHED,
},
});
diff --git a/spec/frontend/invite_members/components/invite_modal_base_spec.js b/spec/frontend/invite_members/components/invite_modal_base_spec.js
index c26d1d921a5..4f4288196ab 100644
--- a/spec/frontend/invite_members/components/invite_modal_base_spec.js
+++ b/spec/frontend/invite_members/components/invite_modal_base_spec.js
@@ -70,6 +70,7 @@ describe('InviteModalBase', () => {
const findDisabledInput = () => wrapper.findByTestId('disabled-input');
const findCancelButton = () => wrapper.findByTestId('invite-modal-cancel');
const findActionButton = () => wrapper.findByTestId('invite-modal-submit');
+ const findModal = () => wrapper.findComponent(GlModal);
describe('rendering the modal', () => {
let trackingSpy;
@@ -82,7 +83,7 @@ describe('InviteModalBase', () => {
});
it('renders the modal with the correct title', () => {
- expect(wrapper.findComponent(GlModal).props('title')).toBe(propsData.modalTitle);
+ expect(findModal().props('title')).toBe(propsData.modalTitle);
});
it('displays the introText', () => {
@@ -200,9 +201,7 @@ describe('InviteModalBase', () => {
});
trackingSpy = mockTracking(undefined, wrapper.element, jest.spyOn);
- const modal = wrapper.findComponent(GlModal);
-
- modal.vm.$emit('shown');
+ findModal().vm.$emit('shown');
expectTracking('render', ON_SHOW_TRACK_LABEL, 'default');
unmockTracking();
@@ -280,4 +279,14 @@ describe('InviteModalBase', () => {
state: false,
});
});
+
+ it('emits the shown event when the modal is shown', () => {
+ createComponent();
+ // Verify that the shown event isn't emitted when the component is first created.
+ expect(wrapper.emitted('shown')).toBeUndefined();
+
+ findModal().vm.$emit('shown');
+
+ expect(wrapper.emitted('shown')).toHaveLength(1);
+ });
});
diff --git a/spec/frontend/invite_members/utils/member_utils_spec.js b/spec/frontend/invite_members/utils/member_utils_spec.js
index 4d71a35ff99..abae43c3dbb 100644
--- a/spec/frontend/invite_members/utils/member_utils_spec.js
+++ b/spec/frontend/invite_members/utils/member_utils_spec.js
@@ -1,8 +1,4 @@
-import {
- memberName,
- triggerExternalAlert,
- inviteMembersTrackingOptions,
-} from '~/invite_members/utils/member_utils';
+import { memberName, triggerExternalAlert } from '~/invite_members/utils/member_utils';
jest.mock('~/lib/utils/url_utility');
@@ -22,13 +18,3 @@ describe('Trigger External Alert', () => {
expect(triggerExternalAlert()).toBe(false);
});
});
-
-describe('inviteMembersTrackingOptions', () => {
- it('returns options with a label', () => {
- expect(inviteMembersTrackingOptions({ label: '_label_' })).toEqual({ label: '_label_' });
- });
-
- it('handles options that has no label', () => {
- expect(inviteMembersTrackingOptions({})).toEqual({ label: undefined });
- });
-});
diff --git a/spec/frontend/jira_connect/branches/components/project_dropdown_spec.js b/spec/frontend/jira_connect/branches/components/project_dropdown_spec.js
index f4f4936a134..b81bdc6ac74 100644
--- a/spec/frontend/jira_connect/branches/components/project_dropdown_spec.js
+++ b/spec/frontend/jira_connect/branches/components/project_dropdown_spec.js
@@ -8,13 +8,15 @@ import ProjectDropdown from '~/jira_connect/branches/components/project_dropdown
import { PROJECTS_PER_PAGE } from '~/jira_connect/branches/constants';
import getProjectsQuery from '~/jira_connect/branches/graphql/queries/get_projects.query.graphql';
-import { mockProjects } from '../mock_data';
+import { mockProjects, mockProjects2 } from '../mock_data';
const mockProjectsQueryResponse = {
data: {
projects: {
+ __typename: 'ProjectsConnection',
nodes: mockProjects,
pageInfo: {
+ __typename: 'PageInfo',
hasNextPage: false,
hasPreviousPage: false,
startCursor: '',
@@ -121,6 +123,80 @@ describe('ProjectDropdown', () => {
});
});
+ describe('when projects query succeeds and has pagination', () => {
+ const mockProjectsWithPaginationQueryResponse = {
+ data: {
+ projects: {
+ __typename: 'ProjectsConnection',
+ nodes: mockProjects2,
+ pageInfo: {
+ __typename: 'PageInfo',
+ hasNextPage: true,
+ hasPreviousPage: false,
+ startCursor: '',
+ endCursor: 'end123',
+ },
+ },
+ },
+ };
+ const mockGetProjectsQuery = jest.fn();
+
+ beforeEach(async () => {
+ mockGetProjectsQuery
+ .mockResolvedValueOnce(mockProjectsWithPaginationQueryResponse)
+ .mockResolvedValueOnce(mockProjectsQueryResponse);
+
+ createComponent({
+ mockApollo: createMockApolloProvider({
+ mockGetProjectsQuery,
+ }),
+ });
+ await waitForPromises();
+ });
+
+ afterEach(() => {
+ mockGetProjectsQuery.mockReset();
+ });
+
+ it('uses infinite-scroll', () => {
+ expect(findDropdown().props()).toMatchObject({
+ infiniteScroll: true,
+ infiniteScrollLoading: false,
+ });
+ });
+
+ describe('when "bottom-reached" event is emitted', () => {
+ beforeEach(() => {
+ findDropdown().vm.$emit('bottom-reached');
+ });
+
+ it('sets infinite-scroll-loading to true', () => {
+ expect(findDropdown().props('infiniteScrollLoading')).toBe(true);
+ });
+
+ it('calls fetchMore to get next page', () => {
+ expect(mockGetProjectsQuery).toHaveBeenCalledTimes(2);
+ expect(mockGetProjectsQuery).toHaveBeenCalledWith(
+ expect.objectContaining({
+ after: 'end123',
+ }),
+ );
+ });
+
+ it('appends query results to "items"', async () => {
+ const allProjects = [...mockProjects2, ...mockProjects];
+
+ await waitForPromises();
+
+ expect(findDropdown().props('infiniteScrollLoading')).toBe(false);
+
+ const dropdownItems = findDropdown().props('items');
+ expect(dropdownItems).toHaveLength(allProjects.length);
+ expect(dropdownItems).toMatchObject(allProjects);
+ });
+ });
+ });
+
describe('when projects query fails', () => {
beforeEach(async () => {
createComponent({
diff --git a/spec/frontend/jira_connect/branches/mock_data.js b/spec/frontend/jira_connect/branches/mock_data.js
index 1720e0118c8..a9e7182cb86 100644
--- a/spec/frontend/jira_connect/branches/mock_data.js
+++ b/spec/frontend/jira_connect/branches/mock_data.js
@@ -31,6 +31,36 @@ export const mockProjects = [
},
},
];
+export const mockProjects2 = [
+ {
+ id: 'gitlab-test',
+ name: 'gitlab-test',
+ nameWithNamespace: 'gitlab-test',
+ avatarUrl: 'https://gitlab.com',
+ path: 'gitlab-test-path',
+ fullPath: 'gitlab-test-path',
+ repository: {
+ empty: false,
+ },
+ userPermissions: {
+ pushCode: true,
+ },
+ },
+ {
+ id: 'gitlab-shell',
+ name: 'GitLab Shell',
+ nameWithNamespace: 'gitlab-org/gitlab-shell',
+ avatarUrl: 'https://gitlab.com',
+ path: 'gitlab-shell',
+ fullPath: 'gitlab-org/gitlab-shell',
+ repository: {
+ empty: false,
+ },
+ userPermissions: {
+ pushCode: true,
+ },
+ },
+];
export const mockProjectQueryResponse = (branchNames = mockBranchNames) => ({
data: {
diff --git a/spec/frontend/kubernetes_dashboard/components/workload_table_spec.js b/spec/frontend/kubernetes_dashboard/components/workload_table_spec.js
index 369b8f32c2d..e873da07a2a 100644
--- a/spec/frontend/kubernetes_dashboard/components/workload_table_spec.js
+++ b/spec/frontend/kubernetes_dashboard/components/workload_table_spec.js
@@ -1,7 +1,7 @@
import { mount } from '@vue/test-utils';
import { GlTable, GlBadge, GlPagination } from '@gitlab/ui';
import WorkloadTable from '~/kubernetes_dashboard/components/workload_table.vue';
-import { TABLE_HEADING_CLASSES, PAGE_SIZE } from '~/kubernetes_dashboard/constants';
+import { PAGE_SIZE } from '~/kubernetes_dashboard/constants';
import { mockPodsTableItems } from '../graphql/mock_data';
let wrapper;
@@ -26,25 +26,24 @@ describe('Workload table component', () => {
{
key: 'name',
label: 'Name',
- thClass: TABLE_HEADING_CLASSES,
sortable: true,
+ tdClass: 'gl-md-w-half gl-lg-w-40p gl-word-break-word',
},
{
key: 'status',
label: 'Status',
- thClass: TABLE_HEADING_CLASSES,
sortable: true,
+ tdClass: 'gl-md-w-15',
},
{
key: 'namespace',
label: 'Namespace',
- thClass: TABLE_HEADING_CLASSES,
sortable: true,
+ tdClass: 'gl-md-w-30p gl-lg-w-40p gl-word-break-word',
},
{
key: 'age',
label: 'Age',
- thClass: TABLE_HEADING_CLASSES,
sortable: true,
},
];
@@ -57,13 +56,11 @@ describe('Workload table component', () => {
{
key: 'field-1',
label: 'Field-1',
- thClass: TABLE_HEADING_CLASSES,
sortable: true,
},
{
key: 'field-2',
label: 'Field-2',
- thClass: TABLE_HEADING_CLASSES,
sortable: true,
},
];
diff --git a/spec/frontend/kubernetes_dashboard/graphql/mock_data.js b/spec/frontend/kubernetes_dashboard/graphql/mock_data.js
index 674425a5bc9..8f733d382b2 100644
--- a/spec/frontend/kubernetes_dashboard/graphql/mock_data.js
+++ b/spec/frontend/kubernetes_dashboard/graphql/mock_data.js
@@ -351,3 +351,249 @@ export const mockDaemonSetsTableItems = [
];
export const k8sDaemonSetsMock = [readyDaemonSet, failedDaemonSet];
+
+const completedJob = {
+ status: { failed: 0, succeeded: 1 },
+ spec: { completions: 1 },
+ metadata: {
+ name: 'job-1',
+ namespace: 'default',
+ creationTimestamp: '2023-07-31T11:50:17Z',
+ labels: {},
+ annotations: {},
+ },
+};
+
+const failedJob = {
+ status: { failed: 1, succeeded: 1 },
+ spec: { completions: 2 },
+ metadata: {
+ name: 'job-2',
+ namespace: 'default',
+ creationTimestamp: '2023-11-21T11:50:59Z',
+ labels: {},
+ annotations: {},
+ },
+};
+
+const anotherFailedJob = {
+ status: { failed: 0, succeeded: 1 },
+ spec: { completions: 2 },
+ metadata: {
+ name: 'job-3',
+ namespace: 'default',
+ creationTimestamp: '2023-11-21T11:50:59Z',
+ labels: {},
+ annotations: {},
+ },
+};
+
+export const mockJobsStats = [
+ {
+ title: 'Completed',
+ value: 1,
+ },
+ {
+ title: 'Failed',
+ value: 2,
+ },
+];
+
+export const mockJobsTableItems = [
+ {
+ name: 'job-1',
+ namespace: 'default',
+ status: 'Completed',
+ age: '114d',
+ labels: {},
+ annotations: {},
+ kind: 'Job',
+ },
+ {
+ name: 'job-2',
+ namespace: 'default',
+ status: 'Failed',
+ age: '1d',
+ labels: {},
+ annotations: {},
+ kind: 'Job',
+ },
+ {
+ name: 'job-3',
+ namespace: 'default',
+ status: 'Failed',
+ age: '1d',
+ labels: {},
+ annotations: {},
+ kind: 'Job',
+ },
+];
+
+export const k8sJobsMock = [completedJob, failedJob, anotherFailedJob];
+
+const readyCronJob = {
+ status: { active: 0, lastScheduleTime: '2023-07-31T11:50:17Z' },
+ spec: { suspend: 0 },
+ metadata: {
+ name: 'cronJob-1',
+ namespace: 'default',
+ creationTimestamp: '2023-07-31T11:50:17Z',
+ labels: {},
+ annotations: {},
+ },
+};
+
+const suspendedCronJob = {
+ status: { active: 0, lastScheduleTime: null },
+ spec: { suspend: 1 },
+ metadata: {
+ name: 'cronJob-2',
+ namespace: 'default',
+ creationTimestamp: '2023-11-21T11:50:59Z',
+ labels: {},
+ annotations: {},
+ },
+};
+
+const failedCronJob = {
+ status: { active: 1, lastScheduleTime: null },
+ spec: { suspend: 0 },
+ metadata: {
+ name: 'cronJob-3',
+ namespace: 'default',
+ creationTimestamp: '2023-11-21T11:50:59Z',
+ labels: {},
+ annotations: {},
+ },
+};
+
+export const mockCronJobsStats = [
+ {
+ title: 'Ready',
+ value: 1,
+ },
+ {
+ title: 'Failed',
+ value: 1,
+ },
+ {
+ title: 'Suspended',
+ value: 1,
+ },
+];
+
+export const mockCronJobsTableItems = [
+ {
+ name: 'cronJob-1',
+ namespace: 'default',
+ status: 'Ready',
+ age: '114d',
+ labels: {},
+ annotations: {},
+ kind: 'CronJob',
+ },
+ {
+ name: 'cronJob-2',
+ namespace: 'default',
+ status: 'Suspended',
+ age: '1d',
+ labels: {},
+ annotations: {},
+ kind: 'CronJob',
+ },
+ {
+ name: 'cronJob-3',
+ namespace: 'default',
+ status: 'Failed',
+ age: '1d',
+ labels: {},
+ annotations: {},
+ kind: 'CronJob',
+ },
+];
+
+export const k8sCronJobsMock = [readyCronJob, suspendedCronJob, failedCronJob];
+
+export const k8sServicesMock = [
+ {
+ metadata: {
+ name: 'my-first-service',
+ namespace: 'default',
+ creationTimestamp: '2023-07-31T11:50:17Z',
+ labels: {},
+ annotations: {},
+ },
+ spec: {
+ ports: [
+ {
+ name: 'https',
+ protocol: 'TCP',
+ port: 443,
+ targetPort: 8443,
+ },
+ ],
+ clusterIP: '10.96.0.1',
+ externalIP: '-',
+ type: 'ClusterIP',
+ },
+ },
+ {
+ metadata: {
+ name: 'my-second-service',
+ namespace: 'default',
+ creationTimestamp: '2023-11-21T11:50:59Z',
+ labels: {},
+ annotations: {},
+ },
+ spec: {
+ ports: [
+ {
+ name: 'http',
+ protocol: 'TCP',
+ appProtocol: 'http',
+ port: 80,
+ targetPort: 'http',
+ nodePort: 31989,
+ },
+ {
+ name: 'https',
+ protocol: 'TCP',
+ appProtocol: 'https',
+ port: 443,
+ targetPort: 'https',
+ nodePort: 32679,
+ },
+ ],
+ clusterIP: '10.105.219.238',
+ externalIP: '-',
+ type: 'NodePort',
+ },
+ },
+];
+
+export const mockServicesTableItems = [
+ {
+ name: 'my-first-service',
+ namespace: 'default',
+ type: 'ClusterIP',
+ clusterIP: '10.96.0.1',
+ externalIP: '-',
+ ports: '443/TCP',
+ age: '114d',
+ labels: {},
+ annotations: {},
+ kind: 'Service',
+ },
+ {
+ name: 'my-second-service',
+ namespace: 'default',
+ type: 'NodePort',
+ clusterIP: '10.105.219.238',
+ externalIP: '-',
+ ports: '80:31989/TCP, 443:32679/TCP',
+ age: '1d',
+ labels: {},
+ annotations: {},
+ kind: 'Service',
+ },
+];
diff --git a/spec/frontend/kubernetes_dashboard/graphql/resolvers/kubernetes_spec.js b/spec/frontend/kubernetes_dashboard/graphql/resolvers/kubernetes_spec.js
index 516d91af947..01e2c3d2716 100644
--- a/spec/frontend/kubernetes_dashboard/graphql/resolvers/kubernetes_spec.js
+++ b/spec/frontend/kubernetes_dashboard/graphql/resolvers/kubernetes_spec.js
@@ -1,16 +1,22 @@
-import { CoreV1Api, WatchApi, AppsV1Api } from '@gitlab/cluster-client';
+import { CoreV1Api, WatchApi, AppsV1Api, BatchV1Api } from '@gitlab/cluster-client';
import { resolvers } from '~/kubernetes_dashboard/graphql/resolvers';
import k8sDashboardPodsQuery from '~/kubernetes_dashboard/graphql/queries/k8s_dashboard_pods.query.graphql';
import k8sDashboardDeploymentsQuery from '~/kubernetes_dashboard/graphql/queries/k8s_dashboard_deployments.query.graphql';
import k8sDashboardStatefulSetsQuery from '~/kubernetes_dashboard/graphql/queries/k8s_dashboard_stateful_sets.query.graphql';
import k8sDashboardReplicaSetsQuery from '~/kubernetes_dashboard/graphql/queries/k8s_dashboard_replica_sets.query.graphql';
import k8sDashboardDaemonSetsQuery from '~/kubernetes_dashboard/graphql/queries/k8s_dashboard_daemon_sets.query.graphql';
+import k8sDashboardJobsQuery from '~/kubernetes_dashboard/graphql/queries/k8s_dashboard_jobs.query.graphql';
+import k8sDashboardCronJobsQuery from '~/kubernetes_dashboard/graphql/queries/k8s_dashboard_cron_jobs.query.graphql';
+import k8sDashboardServicesQuery from '~/kubernetes_dashboard/graphql/queries/k8s_dashboard_services.query.graphql';
import {
k8sPodsMock,
k8sDeploymentsMock,
k8sStatefulSetsMock,
k8sReplicaSetsMock,
k8sDaemonSetsMock,
+ k8sJobsMock,
+ k8sCronJobsMock,
+ k8sServicesMock,
} from '../mock_data';
describe('~/frontend/environments/graphql/resolvers', () => {
@@ -456,4 +462,250 @@ describe('~/frontend/environments/graphql/resolvers', () => {
).rejects.toThrow('API error');
});
});
+
+ describe('k8sJobs', () => {
+ const client = { writeQuery: jest.fn() };
+
+ const mockWatcher = WatchApi.prototype;
+ const mockJobsListWatcherFn = jest.fn().mockImplementation(() => {
+ return Promise.resolve(mockWatcher);
+ });
+
+ const mockOnDataFn = jest.fn().mockImplementation((eventName, callback) => {
+ if (eventName === 'data') {
+ callback([]);
+ }
+ });
+
+ const mockJobsListFn = jest.fn().mockImplementation(() => {
+ return Promise.resolve({
+ items: k8sJobsMock,
+ });
+ });
+
+ const mockAllJobsListFn = jest.fn().mockImplementation(mockJobsListFn);
+
+ describe('when the Jobs data is present', () => {
+ beforeEach(() => {
+ jest
+ .spyOn(BatchV1Api.prototype, 'listBatchV1JobForAllNamespaces')
+ .mockImplementation(mockAllJobsListFn);
+ jest.spyOn(mockWatcher, 'subscribeToStream').mockImplementation(mockJobsListWatcherFn);
+ jest.spyOn(mockWatcher, 'on').mockImplementation(mockOnDataFn);
+ });
+
+ it('should request all Jobs from the cluster_client library and watch the events', async () => {
+ const Jobs = await mockResolvers.Query.k8sJobs(
+ null,
+ {
+ configuration,
+ },
+ { client },
+ );
+
+ expect(mockAllJobsListFn).toHaveBeenCalled();
+ expect(mockJobsListWatcherFn).toHaveBeenCalled();
+
+ expect(Jobs).toEqual(k8sJobsMock);
+ });
+
+ it('should update cache with the new data when received from the library', async () => {
+ await mockResolvers.Query.k8sJobs(null, { configuration, namespace: '' }, { client });
+
+ expect(client.writeQuery).toHaveBeenCalledWith({
+ query: k8sDashboardJobsQuery,
+ variables: { configuration, namespace: '' },
+ data: { k8sJobs: [] },
+ });
+ });
+ });
+
+ it('should not watch Jobs from the cluster_client library when the Jobs data is not present', async () => {
+ jest.spyOn(BatchV1Api.prototype, 'listBatchV1JobForAllNamespaces').mockImplementation(
+ jest.fn().mockImplementation(() => {
+ return Promise.resolve({
+ items: [],
+ });
+ }),
+ );
+
+ await mockResolvers.Query.k8sJobs(null, { configuration }, { client });
+
+ expect(mockJobsListWatcherFn).not.toHaveBeenCalled();
+ });
+
+ it('should throw an error if the API call fails', async () => {
+ jest
+ .spyOn(BatchV1Api.prototype, 'listBatchV1JobForAllNamespaces')
+ .mockRejectedValue(new Error('API error'));
+
+ await expect(
+ mockResolvers.Query.k8sJobs(null, { configuration }, { client }),
+ ).rejects.toThrow('API error');
+ });
+ });
+
+ describe('k8sCronJobs', () => {
+ const client = { writeQuery: jest.fn() };
+
+ const mockWatcher = WatchApi.prototype;
+ const mockCronJobsListWatcherFn = jest.fn().mockImplementation(() => {
+ return Promise.resolve(mockWatcher);
+ });
+
+ const mockOnDataFn = jest.fn().mockImplementation((eventName, callback) => {
+ if (eventName === 'data') {
+ callback([]);
+ }
+ });
+
+ const mockCronJobsListFn = jest.fn().mockImplementation(() => {
+ return Promise.resolve({
+ items: k8sCronJobsMock,
+ });
+ });
+
+ const mockAllCronJobsListFn = jest.fn().mockImplementation(mockCronJobsListFn);
+
+ describe('when the CronJobs data is present', () => {
+ beforeEach(() => {
+ jest
+ .spyOn(BatchV1Api.prototype, 'listBatchV1CronJobForAllNamespaces')
+ .mockImplementation(mockAllCronJobsListFn);
+ jest.spyOn(mockWatcher, 'subscribeToStream').mockImplementation(mockCronJobsListWatcherFn);
+ jest.spyOn(mockWatcher, 'on').mockImplementation(mockOnDataFn);
+ });
+
+ it('should request all CronJobs from the cluster_client library and watch the events', async () => {
+ const CronJobs = await mockResolvers.Query.k8sCronJobs(
+ null,
+ {
+ configuration,
+ },
+ { client },
+ );
+
+ expect(mockAllCronJobsListFn).toHaveBeenCalled();
+ expect(mockCronJobsListWatcherFn).toHaveBeenCalled();
+
+ expect(CronJobs).toEqual(k8sCronJobsMock);
+ });
+
+ it('should update cache with the new data when received from the library', async () => {
+ await mockResolvers.Query.k8sCronJobs(null, { configuration, namespace: '' }, { client });
+
+ expect(client.writeQuery).toHaveBeenCalledWith({
+ query: k8sDashboardCronJobsQuery,
+ variables: { configuration, namespace: '' },
+ data: { k8sCronJobs: [] },
+ });
+ });
+ });
+
+ it('should not watch CronJobs from the cluster_client library when the CronJobs data is not present', async () => {
+ jest.spyOn(BatchV1Api.prototype, 'listBatchV1CronJobForAllNamespaces').mockImplementation(
+ jest.fn().mockImplementation(() => {
+ return Promise.resolve({
+ items: [],
+ });
+ }),
+ );
+
+ await mockResolvers.Query.k8sCronJobs(null, { configuration }, { client });
+
+ expect(mockCronJobsListWatcherFn).not.toHaveBeenCalled();
+ });
+
+ it('should throw an error if the API call fails', async () => {
+ jest
+ .spyOn(BatchV1Api.prototype, 'listBatchV1CronJobForAllNamespaces')
+ .mockRejectedValue(new Error('API error'));
+
+ await expect(
+ mockResolvers.Query.k8sCronJobs(null, { configuration }, { client }),
+ ).rejects.toThrow('API error');
+ });
+ });
+
+ describe('k8sServices', () => {
+ const client = { writeQuery: jest.fn() };
+
+ const mockWatcher = WatchApi.prototype;
+ const mockServicesListWatcherFn = jest.fn().mockImplementation(() => {
+ return Promise.resolve(mockWatcher);
+ });
+
+ const mockOnDataFn = jest.fn().mockImplementation((eventName, callback) => {
+ if (eventName === 'data') {
+ callback([]);
+ }
+ });
+
+ const mockServicesListFn = jest.fn().mockImplementation(() => {
+ return Promise.resolve({
+ items: k8sServicesMock,
+ });
+ });
+
+ const mockAllServicesListFn = jest.fn().mockImplementation(mockServicesListFn);
+
+ describe('when the Services data is present', () => {
+ beforeEach(() => {
+ jest
+ .spyOn(CoreV1Api.prototype, 'listCoreV1ServiceForAllNamespaces')
+ .mockImplementation(mockAllServicesListFn);
+ jest.spyOn(mockWatcher, 'subscribeToStream').mockImplementation(mockServicesListWatcherFn);
+ jest.spyOn(mockWatcher, 'on').mockImplementation(mockOnDataFn);
+ });
+
+ it('should request all Services from the cluster_client library and watch the events', async () => {
+ const Services = await mockResolvers.Query.k8sServices(
+ null,
+ {
+ configuration,
+ },
+ { client },
+ );
+
+ expect(mockAllServicesListFn).toHaveBeenCalled();
+ expect(mockServicesListWatcherFn).toHaveBeenCalled();
+
+ expect(Services).toEqual(k8sServicesMock);
+ });
+
+ it('should update cache with the new data when received from the library', async () => {
+ await mockResolvers.Query.k8sServices(null, { configuration, namespace: '' }, { client });
+
+ expect(client.writeQuery).toHaveBeenCalledWith({
+ query: k8sDashboardServicesQuery,
+ variables: { configuration, namespace: '' },
+ data: { k8sServices: [] },
+ });
+ });
+ });
+
+ it('should not watch Services from the cluster_client library when the Services data is not present', async () => {
+ jest.spyOn(CoreV1Api.prototype, 'listCoreV1ServiceForAllNamespaces').mockImplementation(
+ jest.fn().mockImplementation(() => {
+ return Promise.resolve({
+ items: [],
+ });
+ }),
+ );
+
+ await mockResolvers.Query.k8sServices(null, { configuration }, { client });
+
+ expect(mockServicesListWatcherFn).not.toHaveBeenCalled();
+ });
+
+ it('should throw an error if the API call fails', async () => {
+ jest
+ .spyOn(CoreV1Api.prototype, 'listCoreV1ServiceForAllNamespaces')
+ .mockRejectedValue(new Error('API error'));
+
+ await expect(
+ mockResolvers.Query.k8sServices(null, { configuration }, { client }),
+ ).rejects.toThrow('API error');
+ });
+ });
});
diff --git a/spec/frontend/kubernetes_dashboard/helpers/k8s_integration_helper_spec.js b/spec/frontend/kubernetes_dashboard/helpers/k8s_integration_helper_spec.js
index 2892d657aea..1fd89e67e79 100644
--- a/spec/frontend/kubernetes_dashboard/helpers/k8s_integration_helper_spec.js
+++ b/spec/frontend/kubernetes_dashboard/helpers/k8s_integration_helper_spec.js
@@ -3,6 +3,9 @@ import {
calculateDeploymentStatus,
calculateStatefulSetStatus,
calculateDaemonSetStatus,
+ calculateJobStatus,
+ calculateCronJobStatus,
+ generateServicePortsString,
} from '~/kubernetes_dashboard/helpers/k8s_integration_helper';
import { useFakeDate } from 'helpers/fake_date';
@@ -90,4 +93,81 @@ describe('k8s_integration_helper', () => {
expect(calculateDaemonSetStatus(item)).toBe(expected);
});
});
+
+ describe('calculateJobStatus', () => {
+ const completed = {
+ status: { failed: 0, succeeded: 2 },
+ spec: { completions: 2 },
+ };
+ const failed = {
+ status: { failed: 1, succeeded: 1 },
+ spec: { completions: 2 },
+ };
+ const anotherFailed = {
+ status: { failed: 0, succeeded: 1 },
+ spec: { completions: 2 },
+ };
+
+ it.each`
+ condition | item | expected
+ ${'there are no failed and succeeded amount is equal to completions number'} | ${completed} | ${'Completed'}
+ ${'there are some failed statuses'} | ${failed} | ${'Failed'}
+ ${'there are some failed and succeeded amount is not equal to completions number'} | ${anotherFailed} | ${'Failed'}
+ `('returns status as $expected when $condition', ({ item, expected }) => {
+ expect(calculateJobStatus(item)).toBe(expected);
+ });
+ });
+
+ describe('calculateCronJobStatus', () => {
+ const ready = {
+ status: { active: 0, lastScheduleTime: '2023-11-21T11:50:59Z' },
+ spec: { suspend: 0 },
+ };
+ const failed = {
+ status: { active: 1, lastScheduleTime: null },
+ spec: { suspend: 0 },
+ };
+ const suspended = {
+ status: { active: 0, lastScheduleTime: '2023-11-21T11:50:59Z' },
+ spec: { suspend: 1 },
+ };
+
+ it.each`
+ condition | item | expected
+ ${'there are no active and the lastScheduleTime is present'} | ${ready} | ${'Ready'}
+ ${'there are some active and the lastScheduleTime is not present'} | ${failed} | ${'Failed'}
+ ${'there are some suspend in the spec'} | ${suspended} | ${'Suspended'}
+ `('returns status as $expected when $condition', ({ item, expected }) => {
+ expect(calculateCronJobStatus(item)).toBe(expected);
+ });
+ });
+
+ describe('generateServicePortsString', () => {
+ const port = '8080';
+ const protocol = 'TCP';
+ const nodePort = '31732';
+
+ it('returns empty string if no ports provided', () => {
+ expect(generateServicePortsString([])).toBe('');
+ });
+
+ it('returns port and protocol when provided', () => {
+ expect(generateServicePortsString([{ port, protocol }])).toBe(`${port}/${protocol}`);
+ });
+
+ it('returns port, protocol and nodePort when provided', () => {
+ expect(generateServicePortsString([{ port, protocol, nodePort }])).toBe(
+ `${port}:${nodePort}/${protocol}`,
+ );
+ });
+
+ it('returns joined strings of ports if multiple are provided', () => {
+ expect(
+ generateServicePortsString([
+ { port, protocol },
+ { port, protocol, nodePort },
+ ]),
+ ).toBe(`${port}/${protocol}, ${port}:${nodePort}/${protocol}`);
+ });
+ });
});
diff --git a/spec/frontend/kubernetes_dashboard/pages/cron_jobs_page_spec.js b/spec/frontend/kubernetes_dashboard/pages/cron_jobs_page_spec.js
new file mode 100644
index 00000000000..3d5eadf920a
--- /dev/null
+++ b/spec/frontend/kubernetes_dashboard/pages/cron_jobs_page_spec.js
@@ -0,0 +1,102 @@
+import Vue from 'vue';
+import VueApollo from 'vue-apollo';
+import { shallowMount } from '@vue/test-utils';
+import waitForPromises from 'helpers/wait_for_promises';
+import createMockApollo from 'helpers/mock_apollo_helper';
+import CronJobsPage from '~/kubernetes_dashboard/pages/cron_jobs_page.vue';
+import WorkloadLayout from '~/kubernetes_dashboard/components/workload_layout.vue';
+import { useFakeDate } from 'helpers/fake_date';
+import { k8sCronJobsMock, mockCronJobsStats, mockCronJobsTableItems } from '../graphql/mock_data';
+
+Vue.use(VueApollo);
+
+describe('Kubernetes dashboard cronJobs page', () => {
+ let wrapper;
+
+ const configuration = {
+ basePath: 'kas/tunnel/url',
+ baseOptions: {
+ headers: { 'GitLab-Agent-Id': '1' },
+ },
+ };
+
+ const findWorkloadLayout = () => wrapper.findComponent(WorkloadLayout);
+
+ const createApolloProvider = () => {
+ const mockResolvers = {
+ Query: {
+ k8sCronJobs: jest.fn().mockReturnValue(k8sCronJobsMock),
+ },
+ };
+
+ return createMockApollo([], mockResolvers);
+ };
+
+ const createWrapper = (apolloProvider = createApolloProvider()) => {
+ wrapper = shallowMount(CronJobsPage, {
+ provide: { configuration },
+ apolloProvider,
+ });
+ };
+
+ describe('mounted', () => {
+ it('renders WorkloadLayout component', () => {
+ createWrapper();
+
+ expect(findWorkloadLayout().exists()).toBe(true);
+ });
+
+ it('sets loading prop for the WorkloadLayout', () => {
+ createWrapper();
+
+ expect(findWorkloadLayout().props('loading')).toBe(true);
+ });
+
+ it('removes loading prop from the WorkloadLayout when the list of cronJobs loaded', async () => {
+ createWrapper();
+ await waitForPromises();
+
+ expect(findWorkloadLayout().props('loading')).toBe(false);
+ });
+ });
+
+ describe('when gets cronJobs data', () => {
+ useFakeDate(2023, 10, 23, 10, 10);
+
+ it('sets correct stats object for the WorkloadLayout', async () => {
+ createWrapper();
+ await waitForPromises();
+
+ expect(findWorkloadLayout().props('stats')).toEqual(mockCronJobsStats);
+ });
+
+ it('sets correct table items object for the WorkloadLayout', async () => {
+ createWrapper();
+ await waitForPromises();
+
+ expect(findWorkloadLayout().props('items')).toMatchObject(mockCronJobsTableItems);
+ });
+ });
+
+ describe('when gets an error from the cluster_client API', () => {
+ const error = new Error('Error from the cluster_client API');
+ const createErroredApolloProvider = () => {
+ const mockResolvers = {
+ Query: {
+ k8sCronJobs: jest.fn().mockRejectedValueOnce(error),
+ },
+ };
+
+ return createMockApollo([], mockResolvers);
+ };
+
+ beforeEach(async () => {
+ createWrapper(createErroredApolloProvider());
+ await waitForPromises();
+ });
+
+ it('sets errorMessage prop for the WorkloadLayout', () => {
+ expect(findWorkloadLayout().props('errorMessage')).toBe(error.message);
+ });
+ });
+});
diff --git a/spec/frontend/kubernetes_dashboard/pages/jobs_page_spec.js b/spec/frontend/kubernetes_dashboard/pages/jobs_page_spec.js
new file mode 100644
index 00000000000..a7148ae2394
--- /dev/null
+++ b/spec/frontend/kubernetes_dashboard/pages/jobs_page_spec.js
@@ -0,0 +1,102 @@
+import Vue from 'vue';
+import VueApollo from 'vue-apollo';
+import { shallowMount } from '@vue/test-utils';
+import waitForPromises from 'helpers/wait_for_promises';
+import createMockApollo from 'helpers/mock_apollo_helper';
+import JobsPage from '~/kubernetes_dashboard/pages/jobs_page.vue';
+import WorkloadLayout from '~/kubernetes_dashboard/components/workload_layout.vue';
+import { useFakeDate } from 'helpers/fake_date';
+import { k8sJobsMock, mockJobsStats, mockJobsTableItems } from '../graphql/mock_data';
+
+Vue.use(VueApollo);
+
+describe('Kubernetes dashboard jobs page', () => {
+ let wrapper;
+
+ const configuration = {
+ basePath: 'kas/tunnel/url',
+ baseOptions: {
+ headers: { 'GitLab-Agent-Id': '1' },
+ },
+ };
+
+ const findWorkloadLayout = () => wrapper.findComponent(WorkloadLayout);
+
+ const createApolloProvider = () => {
+ const mockResolvers = {
+ Query: {
+ k8sJobs: jest.fn().mockReturnValue(k8sJobsMock),
+ },
+ };
+
+ return createMockApollo([], mockResolvers);
+ };
+
+ const createWrapper = (apolloProvider = createApolloProvider()) => {
+ wrapper = shallowMount(JobsPage, {
+ provide: { configuration },
+ apolloProvider,
+ });
+ };
+
+ describe('mounted', () => {
+ it('renders WorkloadLayout component', () => {
+ createWrapper();
+
+ expect(findWorkloadLayout().exists()).toBe(true);
+ });
+
+ it('sets loading prop for the WorkloadLayout', () => {
+ createWrapper();
+
+ expect(findWorkloadLayout().props('loading')).toBe(true);
+ });
+
+ it('removes loading prop from the WorkloadLayout when the list of jobs loaded', async () => {
+ createWrapper();
+ await waitForPromises();
+
+ expect(findWorkloadLayout().props('loading')).toBe(false);
+ });
+ });
+
+ describe('when gets jobs data', () => {
+ useFakeDate(2023, 10, 23, 10, 10);
+
+ it('sets correct stats object for the WorkloadLayout', async () => {
+ createWrapper();
+ await waitForPromises();
+
+ expect(findWorkloadLayout().props('stats')).toEqual(mockJobsStats);
+ });
+
+ it('sets correct table items object for the WorkloadLayout', async () => {
+ createWrapper();
+ await waitForPromises();
+
+ expect(findWorkloadLayout().props('items')).toMatchObject(mockJobsTableItems);
+ });
+ });
+
+ describe('when gets an error from the cluster_client API', () => {
+ const error = new Error('Error from the cluster_client API');
+ const createErroredApolloProvider = () => {
+ const mockResolvers = {
+ Query: {
+ k8sJobs: jest.fn().mockRejectedValueOnce(error),
+ },
+ };
+
+ return createMockApollo([], mockResolvers);
+ };
+
+ beforeEach(async () => {
+ createWrapper(createErroredApolloProvider());
+ await waitForPromises();
+ });
+
+ it('sets errorMessage prop for the WorkloadLayout', () => {
+ expect(findWorkloadLayout().props('errorMessage')).toBe(error.message);
+ });
+ });
+});
diff --git a/spec/frontend/kubernetes_dashboard/pages/services_page_spec.js b/spec/frontend/kubernetes_dashboard/pages/services_page_spec.js
new file mode 100644
index 00000000000..c76f4330cd6
--- /dev/null
+++ b/spec/frontend/kubernetes_dashboard/pages/services_page_spec.js
@@ -0,0 +1,104 @@
+import Vue from 'vue';
+import VueApollo from 'vue-apollo';
+import { shallowMount } from '@vue/test-utils';
+import waitForPromises from 'helpers/wait_for_promises';
+import createMockApollo from 'helpers/mock_apollo_helper';
+import ServicesPage from '~/kubernetes_dashboard/pages/services_page.vue';
+import WorkloadLayout from '~/kubernetes_dashboard/components/workload_layout.vue';
+import { SERVICES_TABLE_FIELDS } from '~/kubernetes_dashboard/constants';
+import { useFakeDate } from 'helpers/fake_date';
+import { k8sServicesMock, mockServicesTableItems } from '../graphql/mock_data';
+
+Vue.use(VueApollo);
+
+describe('Kubernetes dashboard services page', () => {
+ let wrapper;
+
+ const configuration = {
+ basePath: 'kas/tunnel/url',
+ baseOptions: {
+ headers: { 'GitLab-Agent-Id': '1' },
+ },
+ };
+
+ const findWorkloadLayout = () => wrapper.findComponent(WorkloadLayout);
+
+ const createApolloProvider = () => {
+ const mockResolvers = {
+ Query: {
+ k8sServices: jest.fn().mockReturnValue(k8sServicesMock),
+ },
+ };
+
+ return createMockApollo([], mockResolvers);
+ };
+
+ const createWrapper = (apolloProvider = createApolloProvider()) => {
+ wrapper = shallowMount(ServicesPage, {
+ provide: { configuration },
+ apolloProvider,
+ });
+ };
+
+ describe('mounted', () => {
+ it('renders WorkloadLayout component', () => {
+ createWrapper();
+
+ expect(findWorkloadLayout().exists()).toBe(true);
+ });
+
+ it('sets loading prop for the WorkloadLayout', () => {
+ createWrapper();
+
+ expect(findWorkloadLayout().props('loading')).toBe(true);
+ });
+
+ it('removes loading prop from the WorkloadLayout when the list of services loaded', async () => {
+ createWrapper();
+ await waitForPromises();
+
+ expect(findWorkloadLayout().props('loading')).toBe(false);
+ });
+ });
+
+ describe('when gets services data', () => {
+ useFakeDate(2023, 10, 23, 10, 10);
+
+ it('sets correct stats object for the WorkloadLayout', async () => {
+ createWrapper();
+ await waitForPromises();
+
+ expect(findWorkloadLayout().props('stats')).toEqual([]);
+ });
+
+ it('sets correct table items object for the WorkloadLayout', async () => {
+ createWrapper();
+ await waitForPromises();
+
+ expect(findWorkloadLayout().props('items')).toMatchObject(mockServicesTableItems);
+ expect(findWorkloadLayout().props('fields')).toMatchObject(SERVICES_TABLE_FIELDS);
+ });
+ });
+
+ describe('when gets an error from the cluster_client API', () => {
+ const error = new Error('Error from the cluster_client API');
+ const createErroredApolloProvider = () => {
+ const mockResolvers = {
+ Query: {
+ k8sServices: jest.fn().mockRejectedValueOnce(error),
+ },
+ };
+
+ return createMockApollo([], mockResolvers);
+ };
+
+ beforeEach(async () => {
+ createWrapper(createErroredApolloProvider());
+ await waitForPromises();
+ });
+
+ it('sets errorMessage prop for the WorkloadLayout', () => {
+ expect(findWorkloadLayout().props('errorMessage')).toBe(error.message);
+ });
+ });
+});
diff --git a/spec/frontend/lib/utils/number_utility_spec.js b/spec/frontend/lib/utils/number_utils_spec.js
index 07e3e2f0422..831a2f975b6 100644
--- a/spec/frontend/lib/utils/number_utility_spec.js
+++ b/spec/frontend/lib/utils/number_utils_spec.js
@@ -14,12 +14,6 @@ import {
isNumeric,
isPositiveInteger,
} from '~/lib/utils/number_utils';
-import {
- BYTES_FORMAT_BYTES,
- BYTES_FORMAT_KIB,
- BYTES_FORMAT_MIB,
- BYTES_FORMAT_GIB,
-} from '~/lib/utils/constants';
describe('Number Utils', () => {
describe('formatRelevantDigits', () => {
@@ -87,23 +81,23 @@ describe('Number Utils', () => {
describe('numberToHumanSizeSplit', () => {
it('should return bytes', () => {
- expect(numberToHumanSizeSplit(654)).toEqual(['654', BYTES_FORMAT_BYTES]);
- expect(numberToHumanSizeSplit(-654)).toEqual(['-654', BYTES_FORMAT_BYTES]);
+ expect(numberToHumanSizeSplit(654)).toEqual(['654', 'B']);
+ expect(numberToHumanSizeSplit(-654)).toEqual(['-654', 'B']);
});
it('should return KiB', () => {
- expect(numberToHumanSizeSplit(1079)).toEqual(['1.05', BYTES_FORMAT_KIB]);
- expect(numberToHumanSizeSplit(-1079)).toEqual(['-1.05', BYTES_FORMAT_KIB]);
+ expect(numberToHumanSizeSplit(1079)).toEqual(['1.05', 'KiB']);
+ expect(numberToHumanSizeSplit(-1079)).toEqual(['-1.05', 'KiB']);
});
it('should return MiB', () => {
- expect(numberToHumanSizeSplit(10485764)).toEqual(['10.00', BYTES_FORMAT_MIB]);
- expect(numberToHumanSizeSplit(-10485764)).toEqual(['-10.00', BYTES_FORMAT_MIB]);
+ expect(numberToHumanSizeSplit(10485764)).toEqual(['10.00', 'MiB']);
+ expect(numberToHumanSizeSplit(-10485764)).toEqual(['-10.00', 'MiB']);
});
it('should return GiB', () => {
- expect(numberToHumanSizeSplit(10737418240)).toEqual(['10.00', BYTES_FORMAT_GIB]);
- expect(numberToHumanSizeSplit(-10737418240)).toEqual(['-10.00', BYTES_FORMAT_GIB]);
+ expect(numberToHumanSizeSplit(10737418240)).toEqual(['10.00', 'GiB']);
+ expect(numberToHumanSizeSplit(-10737418240)).toEqual(['-10.00', 'GiB']);
});
});
diff --git a/spec/frontend/lib/utils/secret_detection_spec.js b/spec/frontend/lib/utils/secret_detection_spec.js
index a8da6e8969f..b97827208d6 100644
--- a/spec/frontend/lib/utils/secret_detection_spec.js
+++ b/spec/frontend/lib/utils/secret_detection_spec.js
@@ -14,6 +14,8 @@ describe('containsSensitiveToken', () => {
'1234567890',
'!@#$%^&*()_+',
'https://example.com',
+ 'Some tokens are prefixed with glpat- or glcbt-, for example.',
+ 'glpat-FAKE',
];
it.each(nonSensitiveMessages)('returns false for message: %s', (message) => {
@@ -32,6 +34,9 @@ describe('containsSensitiveToken', () => {
'https://example.com/feed?feed_token=123456789_abcdefghij',
'glpat-1234567890 and feed_token=ABCDEFGHIJKLMNOPQRSTUVWXYZ',
'token: gldt-cgyKc1k_AsnEpmP-5fRL',
+ 'curl "https://gitlab.example.com/api/v4/groups/33/scim/identities" --header "PRIVATE-TOKEN: glsoat-cgyKc1k_AsnEpmP-5fRL',
+ 'CI_JOB_TOKEN=glcbt-FFFF_cgyKc1k_AsnEpmP-5fRL',
+ 'Use this secret job token: glcbt-1_cgyKc1k_AsnEpmP-5fRL',
];
it.each(sensitiveMessages)('returns true for message: %s', (message) => {
diff --git a/spec/frontend/lib/utils/text_utility_spec.js b/spec/frontend/lib/utils/text_utility_spec.js
index 6821ed56857..692beac7ed3 100644
--- a/spec/frontend/lib/utils/text_utility_spec.js
+++ b/spec/frontend/lib/utils/text_utility_spec.js
@@ -42,6 +42,14 @@ describe('text_utility', () => {
it('returns string with first letter capitalized', () => {
expect(textUtils.capitalizeFirstCharacter('gitlab')).toEqual('Gitlab');
});
+
+ it('returns empty string when given string is empty', () => {
+ expect(textUtils.capitalizeFirstCharacter('')).toEqual('');
+ });
+
+ it('returns empty string when given string is invalid', () => {
+ expect(textUtils.capitalizeFirstCharacter(undefined)).toEqual('');
+ });
});
describe('slugify', () => {
diff --git a/spec/frontend/logo_spec.js b/spec/frontend/logo_spec.js
index 8e39e75bd3b..51f47fb89ba 100644
--- a/spec/frontend/logo_spec.js
+++ b/spec/frontend/logo_spec.js
@@ -10,7 +10,7 @@ describe('initPortraitLogoDetection', () => {
};
beforeEach(() => {
- setHTMLFixture('<img class="gl-visibility-hidden gl-h-9 js-portrait-logo-detection" />');
+ setHTMLFixture('<img class="gl-visibility-hidden gl-h-10 js-portrait-logo-detection" />');
initPortraitLogoDetection();
img = document.querySelector('img');
});
@@ -27,12 +27,12 @@ describe('initPortraitLogoDetection', () => {
it('removes gl-visibility-hidden', () => {
expect(img.classList).toContain('gl-visibility-hidden');
- expect(img.classList).toContain('gl-h-9');
+ expect(img.classList).toContain('gl-h-10');
loadImage();
expect(img.classList).not.toContain('gl-visibility-hidden');
- expect(img.classList).toContain('gl-h-9');
+ expect(img.classList).toContain('gl-h-10');
});
});
@@ -44,7 +44,7 @@ describe('initPortraitLogoDetection', () => {
it('removes gl-visibility-hidden', () => {
expect(img.classList).toContain('gl-visibility-hidden');
- expect(img.classList).toContain('gl-h-9');
+ expect(img.classList).toContain('gl-h-10');
loadImage();
diff --git a/spec/frontend/ml/model_registry/apps/index_ml_models_spec.js b/spec/frontend/ml/model_registry/apps/index_ml_models_spec.js
index 66a447e73d3..07d8b4b8b3d 100644
--- a/spec/frontend/ml/model_registry/apps/index_ml_models_spec.js
+++ b/spec/frontend/ml/model_registry/apps/index_ml_models_spec.js
@@ -1,4 +1,4 @@
-import { GlBadge } from '@gitlab/ui';
+import { GlBadge, GlButton } from '@gitlab/ui';
import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
import { IndexMlModels } from '~/ml/model_registry/apps';
import ModelRow from '~/ml/model_registry/components/model_row.vue';
@@ -8,13 +8,22 @@ import { BASE_SORT_FIELDS, MODEL_ENTITIES } from '~/ml/model_registry/constants'
import TitleArea from '~/vue_shared/components/registry/title_area.vue';
import MetadataItem from '~/vue_shared/components/registry/metadata_item.vue';
import EmptyState from '~/ml/model_registry/components/empty_state.vue';
+import ActionsDropdown from '~/ml/model_registry/components/actions_dropdown.vue';
import { mockModels, startCursor, defaultPageInfo } from '../mock_data';
let wrapper;
-const createWrapper = (
- propsData = { models: mockModels, pageInfo: defaultPageInfo, modelCount: 2 },
-) => {
- wrapper = shallowMountExtended(IndexMlModels, { propsData });
+
+const createWrapper = (propsData = {}) => {
+ wrapper = shallowMountExtended(IndexMlModels, {
+ propsData: {
+ models: mockModels,
+ pageInfo: defaultPageInfo,
+ modelCount: 2,
+ createModelPath: 'path/to/create',
+ canWriteModelRegistry: false,
+ ...propsData,
+ },
+ });
};
const findModelRow = (index) => wrapper.findAllComponents(ModelRow).at(index);
@@ -24,8 +33,10 @@ const findSearchBar = () => wrapper.findComponent(SearchBar);
const findTitleArea = () => wrapper.findComponent(TitleArea);
const findModelCountMetadataItem = () => findTitleArea().findComponent(MetadataItem);
const findBadge = () => wrapper.findComponent(GlBadge);
+const findCreateButton = () => findTitleArea().findComponent(GlButton);
+const findActionsDropdown = () => wrapper.findComponent(ActionsDropdown);
-describe('MlModelsIndex', () => {
+describe('ml/model_registry/apps/index_ml_models', () => {
describe('empty state', () => {
beforeEach(() => createWrapper({ models: [], pageInfo: defaultPageInfo }));
@@ -40,6 +51,28 @@ describe('MlModelsIndex', () => {
it('does not show search bar', () => {
expect(findSearchBar().exists()).toBe(false);
});
+
+ it('renders the extra actions button', () => {
+ expect(findActionsDropdown().exists()).toBe(true);
+ });
+ });
+
+ describe('create button', () => {
+ describe('when user has no permission to write model registry', () => {
+ it('does not display create button', () => {
+ createWrapper();
+
+ expect(findCreateButton().exists()).toBe(false);
+ });
+ });
+
+ describe('when user has permission to write model registry', () => {
+ it('displays create button', () => {
+ createWrapper({ canWriteModelRegistry: true });
+
+ expect(findCreateButton().attributes().href).toBe('path/to/create');
+ });
+ });
});
describe('with data', () => {
diff --git a/spec/frontend/ml/model_registry/apps/new_ml_model_spec.js b/spec/frontend/ml/model_registry/apps/new_ml_model_spec.js
new file mode 100644
index 00000000000..204c021c080
--- /dev/null
+++ b/spec/frontend/ml/model_registry/apps/new_ml_model_spec.js
@@ -0,0 +1,119 @@
+import {
+ GlAlert,
+ GlButton,
+ GlFormInput,
+ GlFormTextarea,
+ GlForm,
+ GlSprintf,
+ GlLink,
+} from '@gitlab/ui';
+import Vue from 'vue';
+import VueApollo from 'vue-apollo';
+import NewMlModel from '~/ml/model_registry/apps/new_ml_model.vue';
+import createModelMutation from '~/ml/model_registry/graphql/mutations/create_model.mutation.graphql';
+import createMockApollo from 'helpers/mock_apollo_helper';
+import { visitUrl } from '~/lib/utils/url_utility';
+import * as Sentry from '~/sentry/sentry_browser_wrapper';
+import waitForPromises from 'helpers/wait_for_promises';
+import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
+import { createModelResponses } from '../graphql_mock_data';
+
+jest.mock('~/lib/utils/url_utility', () => ({
+ ...jest.requireActual('~/lib/utils/url_utility'),
+ visitUrl: jest.fn(),
+}));
+
+describe('ml/model_registry/apps/new_ml_model.vue', () => {
+ let wrapper;
+ let apolloProvider;
+
+ Vue.use(VueApollo);
+
+ beforeEach(() => {
+ jest.spyOn(Sentry, 'captureException').mockImplementation();
+ });
+
+ const mountComponent = (resolver = jest.fn().mockResolvedValue(createModelResponses.success)) => {
+ const requestHandlers = [[createModelMutation, resolver]];
+ apolloProvider = createMockApollo(requestHandlers);
+
+ wrapper = shallowMountExtended(NewMlModel, {
+ apolloProvider,
+ propsData: { projectPath: 'project/path' },
+ stubs: { GlSprintf },
+ });
+ };
+
+ const findButton = () => wrapper.findComponent(GlButton);
+ const findInput = () => wrapper.findComponent(GlFormInput);
+ const findTextarea = () => wrapper.findComponent(GlFormTextarea);
+ const findForm = () => wrapper.findComponent(GlForm);
+ const findDocAlert = () => wrapper.findComponent(GlAlert);
+ const findDocLink = () => findDocAlert().findComponent(GlLink);
+ const findErrorAlert = () => wrapper.findByTestId('new-model-errors');
+
+ const submitForm = async () => {
+ findForm().vm.$emit('submit', { preventDefault: () => {} });
+ await waitForPromises();
+ };
+
+ it('renders the button', () => {
+ mountComponent();
+
+ expect(findButton().text()).toBe('Create model');
+ });
+
+ it('shows link to docs', () => {
+ mountComponent();
+
+ expect(findDocAlert().text()).toBe(
+ 'Creating models is also possible through the MLflow client. Follow the documentation to learn more.',
+ );
+ expect(findDocLink().attributes().href).toBe('/help/user/project/ml/model_registry/index.md');
+ });
+
+ it('submits the query with correct parameters', async () => {
+ const resolver = jest.fn().mockResolvedValue(createModelResponses.success);
+ mountComponent(resolver);
+
+ findInput().vm.$emit('input', 'model_name');
+ findTextarea().vm.$emit('input', 'A description');
+
+ await submitForm();
+
+ expect(resolver).toHaveBeenLastCalledWith(
+ expect.objectContaining({
+ projectPath: 'project/path',
+ name: 'model_name',
+ description: 'A description',
+ }),
+ );
+ });
+
+ it('navigates to the new page when result is successful', async () => {
+ mountComponent();
+
+ await submitForm();
+
+ expect(visitUrl).toHaveBeenCalledWith('/some/project/-/ml/models/1');
+ });
+
+ it('shows errors when result is a top level error', async () => {
+ const error = new Error('Failure!');
+ mountComponent(jest.fn().mockRejectedValue({ error }));
+
+ await submitForm();
+
+ expect(findErrorAlert().text()).toBe('An error has occurred when saving the model.');
+ expect(visitUrl).not.toHaveBeenCalled();
+ });
+
+ it('shows errors when result is a validation error', async () => {
+ mountComponent(jest.fn().mockResolvedValue(createModelResponses.validationFailure));
+
+ await submitForm();
+
+ expect(findErrorAlert().text()).toBe("Name is invalid, Name can't be blank");
+ expect(visitUrl).not.toHaveBeenCalled();
+ });
+});
diff --git a/spec/frontend/ml/model_registry/apps/show_ml_model_spec.js b/spec/frontend/ml/model_registry/apps/show_ml_model_spec.js
index 1fe0f5f88b3..7e991687496 100644
--- a/spec/frontend/ml/model_registry/apps/show_ml_model_spec.js
+++ b/spec/frontend/ml/model_registry/apps/show_ml_model_spec.js
@@ -1,7 +1,7 @@
import { GlBadge, GlTab } from '@gitlab/ui';
-import { shallowMount } from '@vue/test-utils';
import Vue from 'vue';
import VueApollo from 'vue-apollo';
+import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
import { ShowMlModel } from '~/ml/model_registry/apps';
import ModelVersionList from '~/ml/model_registry/components/model_version_list.vue';
import CandidateList from '~/ml/model_registry/components/candidate_list.vue';
@@ -19,7 +19,7 @@ let wrapper;
Vue.use(VueApollo);
const createWrapper = (model = MODEL) => {
- wrapper = shallowMount(ShowMlModel, {
+ wrapper = shallowMountExtended(ShowMlModel, {
apolloProvider,
propsData: { model },
stubs: { GlTab },
@@ -37,6 +37,7 @@ const findCandidatesCountBadge = () => findCandidateTab().findComponent(GlBadge)
const findTitleArea = () => wrapper.findComponent(TitleArea);
const findEmptyState = () => wrapper.findComponent(EmptyState);
const findVersionCountMetadataItem = () => findTitleArea().findComponent(MetadataItem);
+const findVersionLink = () => wrapper.findByTestId('model-version-link');
describe('ShowMlModel', () => {
describe('Title', () => {
@@ -67,8 +68,10 @@ describe('ShowMlModel', () => {
expect(findModelVersionDetail().props('modelVersion')).toBe(MODEL.latestVersion);
});
- it('displays the title', () => {
- expect(findDetailTab().text()).toContain('Latest version: 1.2.3');
+ it('displays a link to latest version', () => {
+ expect(findDetailTab().text()).toContain('Latest version:');
+ expect(findVersionLink().attributes('href')).toBe(MODEL.latestVersion.path);
+ expect(findVersionLink().text()).toBe('1.2.3');
});
});
diff --git a/spec/frontend/ml/model_registry/components/actions_dropdown_spec.js b/spec/frontend/ml/model_registry/components/actions_dropdown_spec.js
new file mode 100644
index 00000000000..6285d7360c7
--- /dev/null
+++ b/spec/frontend/ml/model_registry/components/actions_dropdown_spec.js
@@ -0,0 +1,39 @@
+import { mount } from '@vue/test-utils';
+import { GlDisclosureDropdownItem } from '@gitlab/ui';
+import ActionsDropdown from '~/ml/model_registry/components/actions_dropdown.vue';
+
+describe('ml/model_registry/components/actions_dropdown', () => {
+ let wrapper;
+
+ const showToast = jest.fn();
+
+ const createWrapper = () => {
+ wrapper = mount(ActionsDropdown, {
+ mocks: {
+ $toast: {
+ show: showToast,
+ },
+ },
+ provide: {
+ mlflowTrackingUrl: 'path/to/mlflow',
+ },
+ });
+ };
+
+ const findCopyLinkDropdownItem = () => wrapper.findComponent(GlDisclosureDropdownItem);
+
+ it('has data-clipboard-text set to the correct url', () => {
+ createWrapper();
+
+ expect(findCopyLinkDropdownItem().text()).toBe('Copy MLflow tracking URL');
+ expect(findCopyLinkDropdownItem().attributes()['data-clipboard-text']).toBe('path/to/mlflow');
+ });
+
+ it('shows a success toast after copying the url to the clipboard', () => {
+ createWrapper();
+
+ findCopyLinkDropdownItem().find('button').trigger('click');
+
+ expect(showToast).toHaveBeenCalledWith('Copied MLflow tracking URL to clipboard');
+ });
+});
diff --git a/spec/frontend/ml/model_registry/components/candidate_list_spec.js b/spec/frontend/ml/model_registry/components/candidate_list_spec.js
index c10222a99fd..8491c7be16f 100644
--- a/spec/frontend/ml/model_registry/components/candidate_list_spec.js
+++ b/spec/frontend/ml/model_registry/components/candidate_list_spec.js
@@ -1,13 +1,11 @@
import Vue from 'vue';
import VueApollo from 'vue-apollo';
-import { GlAlert } from '@gitlab/ui';
-import { shallowMount } from '@vue/test-utils';
+import { mount } from '@vue/test-utils';
import * as Sentry from '~/sentry/sentry_browser_wrapper';
import createMockApollo from 'helpers/mock_apollo_helper';
import waitForPromises from 'helpers/wait_for_promises';
import CandidateList from '~/ml/model_registry/components/candidate_list.vue';
-import PackagesListLoader from '~/packages_and_registries/shared/components/packages_list_loader.vue';
-import RegistryList from '~/packages_and_registries/shared/components/registry_list.vue';
+import SearchableList from '~/ml/model_registry/components/searchable_list.vue';
import CandidateListRow from '~/ml/model_registry/components/candidate_list_row.vue';
import getModelCandidatesQuery from '~/ml/model_registry/graphql/queries/get_model_candidates.query.graphql';
import { GRAPHQL_PAGE_SIZE } from '~/ml/model_registry/constants';
@@ -24,10 +22,7 @@ describe('ml/model_registry/components/candidate_list.vue', () => {
let wrapper;
let apolloProvider;
- const findAlert = () => wrapper.findComponent(GlAlert);
- const findLoader = () => wrapper.findComponent(PackagesListLoader);
- const findRegistryList = () => wrapper.findComponent(RegistryList);
- const findListRow = () => wrapper.findComponent(CandidateListRow);
+ const findSearchableList = () => wrapper.findComponent(SearchableList);
const findAllRows = () => wrapper.findAllComponents(CandidateListRow);
const mountComponent = ({
@@ -37,15 +32,12 @@ describe('ml/model_registry/components/candidate_list.vue', () => {
const requestHandlers = [[getModelCandidatesQuery, resolver]];
apolloProvider = createMockApollo(requestHandlers);
- wrapper = shallowMount(CandidateList, {
+ wrapper = mount(CandidateList, {
apolloProvider,
propsData: {
modelId: 2,
...props,
},
- stubs: {
- RegistryList,
- },
});
};
@@ -60,25 +52,9 @@ describe('ml/model_registry/components/candidate_list.vue', () => {
await waitForPromises();
});
- it('displays empty slot message', () => {
+ it('shows empty state', () => {
expect(wrapper.text()).toContain('This model has no candidates');
});
-
- it('does not display loader', () => {
- expect(findLoader().exists()).toBe(false);
- });
-
- it('does not display rows', () => {
- expect(findListRow().exists()).toBe(false);
- });
-
- it('does not display registry list', () => {
- expect(findRegistryList().exists()).toBe(false);
- });
-
- it('does not display alert', () => {
- expect(findAlert().exists()).toBe(false);
- });
});
describe('if load fails, alert', () => {
@@ -90,19 +66,9 @@ describe('ml/model_registry/components/candidate_list.vue', () => {
});
it('is displayed', () => {
- expect(findAlert().exists()).toBe(true);
- });
-
- it('shows error message', () => {
- expect(findAlert().text()).toContain('Failed to load model candidates with error: Failure!');
- });
-
- it('is not dismissible', () => {
- expect(findAlert().props('dismissible')).toBe(false);
- });
-
- it('is of variant danger', () => {
- expect(findAlert().attributes('variant')).toBe('danger');
+ expect(findSearchableList().props('errorMessage')).toBe(
+ 'Failed to load model candidates with error: Failure!',
+ );
});
it('error is logged in sentry', () => {
@@ -116,21 +82,11 @@ describe('ml/model_registry/components/candidate_list.vue', () => {
await waitForPromises();
});
- it('displays package registry list', () => {
- expect(findRegistryList().exists()).toEqual(true);
+ it('Passes items to list', () => {
+ expect(findSearchableList().props('items')).toEqual(graphqlCandidates);
});
- it('binds the right props', () => {
- expect(findRegistryList().props()).toMatchObject({
- items: graphqlCandidates,
- pagination: {},
- isLoading: false,
- hiddenDelete: true,
- });
- });
-
- it('displays candidate rows', () => {
- expect(findAllRows().exists()).toEqual(true);
+ it('displays package version rows', () => {
expect(findAllRows()).toHaveLength(graphqlCandidates.length);
});
@@ -143,17 +99,9 @@ describe('ml/model_registry/components/candidate_list.vue', () => {
candidate: expect.objectContaining(graphqlCandidates[1]),
});
});
-
- it('does not display loader', () => {
- expect(findLoader().exists()).toBe(false);
- });
-
- it('does not display empty message', () => {
- expect(findAlert().exists()).toBe(false);
- });
});
- describe('when user interacts with pagination', () => {
+ describe('when list requests update', () => {
const resolver = jest.fn().mockResolvedValue(modelCandidatesQuery());
beforeEach(async () => {
@@ -161,21 +109,17 @@ describe('ml/model_registry/components/candidate_list.vue', () => {
await waitForPromises();
});
- it('when list emits next-page fetches the next set of records', async () => {
- findRegistryList().vm.$emit('next-page');
- await waitForPromises();
-
- expect(resolver).toHaveBeenLastCalledWith(
- expect.objectContaining({ after: graphqlPageInfo.endCursor, first: GRAPHQL_PAGE_SIZE }),
- );
- });
+ it('when list emits fetch-page fetches the next set of records', async () => {
+ findSearchableList().vm.$emit('fetch-page', {
+ after: 'eyJpZCI6IjIifQ',
+ first: 30,
+ id: 'gid://gitlab/Ml::Model/2',
+ });
- it('when list emits prev-page fetches the prev set of records', async () => {
- findRegistryList().vm.$emit('prev-page');
await waitForPromises();
expect(resolver).toHaveBeenLastCalledWith(
- expect.objectContaining({ before: graphqlPageInfo.startCursor, last: GRAPHQL_PAGE_SIZE }),
+ expect.objectContaining({ after: graphqlPageInfo.endCursor, first: GRAPHQL_PAGE_SIZE }),
);
});
});
diff --git a/spec/frontend/ml/model_registry/components/model_version_list_spec.js b/spec/frontend/ml/model_registry/components/model_version_list_spec.js
index 41f7e71c543..f5d6acf3bae 100644
--- a/spec/frontend/ml/model_registry/components/model_version_list_spec.js
+++ b/spec/frontend/ml/model_registry/components/model_version_list_spec.js
@@ -1,13 +1,11 @@
import Vue from 'vue';
import VueApollo from 'vue-apollo';
-import { GlAlert } from '@gitlab/ui';
import * as Sentry from '~/sentry/sentry_browser_wrapper';
-import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
+import { mountExtended } from 'helpers/vue_test_utils_helper';
import createMockApollo from 'helpers/mock_apollo_helper';
import waitForPromises from 'helpers/wait_for_promises';
import ModelVersionList from '~/ml/model_registry/components/model_version_list.vue';
-import PackagesListLoader from '~/packages_and_registries/shared/components/packages_list_loader.vue';
-import RegistryList from '~/packages_and_registries/shared/components/registry_list.vue';
+import SearchableList from '~/ml/model_registry/components/searchable_list.vue';
import ModelVersionRow from '~/ml/model_registry/components/model_version_row.vue';
import getModelVersionsQuery from '~/ml/model_registry/graphql/queries/get_model_versions.query.graphql';
import EmptyState from '~/ml/model_registry/components/empty_state.vue';
@@ -25,11 +23,8 @@ describe('ModelVersionList', () => {
let wrapper;
let apolloProvider;
- const findAlert = () => wrapper.findComponent(GlAlert);
- const findLoader = () => wrapper.findComponent(PackagesListLoader);
- const findRegistryList = () => wrapper.findComponent(RegistryList);
+ const findSearchableList = () => wrapper.findComponent(SearchableList);
const findEmptyState = () => wrapper.findComponent(EmptyState);
- const findListRow = () => wrapper.findComponent(ModelVersionRow);
const findAllRows = () => wrapper.findAllComponents(ModelVersionRow);
const mountComponent = ({
@@ -39,15 +34,12 @@ describe('ModelVersionList', () => {
const requestHandlers = [[getModelVersionsQuery, resolver]];
apolloProvider = createMockApollo(requestHandlers);
- wrapper = shallowMountExtended(ModelVersionList, {
+ wrapper = mountExtended(ModelVersionList, {
apolloProvider,
propsData: {
modelId: 2,
...props,
},
- stubs: {
- RegistryList,
- },
});
};
@@ -65,22 +57,6 @@ describe('ModelVersionList', () => {
it('shows empty state', () => {
expect(findEmptyState().props('entityType')).toBe(MODEL_ENTITIES.modelVersion);
});
-
- it('does not display loader', () => {
- expect(findLoader().exists()).toBe(false);
- });
-
- it('does not display rows', () => {
- expect(findListRow().exists()).toBe(false);
- });
-
- it('does not display registry list', () => {
- expect(findRegistryList().exists()).toBe(false);
- });
-
- it('does not display alert', () => {
- expect(findAlert().exists()).toBe(false);
- });
});
describe('if load fails, alert', () => {
@@ -92,19 +68,9 @@ describe('ModelVersionList', () => {
});
it('is displayed', () => {
- expect(findAlert().exists()).toBe(true);
- });
-
- it('shows error message', () => {
- expect(findAlert().text()).toContain('Failed to load model versions with error: Failure!');
- });
-
- it('is not dismissible', () => {
- expect(findAlert().props('dismissible')).toBe(false);
- });
-
- it('is of variant danger', () => {
- expect(findAlert().attributes('variant')).toBe('danger');
+ expect(findSearchableList().props('errorMessage')).toBe(
+ 'Failed to load model versions with error: Failure!',
+ );
});
it('error is logged in sentry', () => {
@@ -118,21 +84,11 @@ describe('ModelVersionList', () => {
await waitForPromises();
});
- it('displays package registry list', () => {
- expect(findRegistryList().exists()).toEqual(true);
- });
-
- it('binds the right props', () => {
- expect(findRegistryList().props()).toMatchObject({
- items: graphqlModelVersions,
- pagination: {},
- isLoading: false,
- hiddenDelete: true,
- });
+ it('Passes items to list', () => {
+ expect(findSearchableList().props('items')).toEqual(graphqlModelVersions);
});
it('displays package version rows', () => {
- expect(findAllRows().exists()).toEqual(true);
expect(findAllRows()).toHaveLength(graphqlModelVersions.length);
});
@@ -145,17 +101,9 @@ describe('ModelVersionList', () => {
modelVersion: expect.objectContaining(graphqlModelVersions[1]),
});
});
-
- it('does not display loader', () => {
- expect(findLoader().exists()).toBe(false);
- });
-
- it('does not display empty state', () => {
- expect(findEmptyState().exists()).toBe(false);
- });
});
- describe('when user interacts with pagination', () => {
+ describe('when list requests update', () => {
const resolver = jest.fn().mockResolvedValue(modelVersionsQuery());
beforeEach(async () => {
@@ -163,21 +111,17 @@ describe('ModelVersionList', () => {
await waitForPromises();
});
- it('when list emits next-page fetches the next set of records', async () => {
- findRegistryList().vm.$emit('next-page');
- await waitForPromises();
-
- expect(resolver).toHaveBeenLastCalledWith(
- expect.objectContaining({ after: graphqlPageInfo.endCursor, first: GRAPHQL_PAGE_SIZE }),
- );
- });
+ it('when list emits fetch-page fetches the next set of records', async () => {
+ findSearchableList().vm.$emit('fetch-page', {
+ after: 'eyJpZCI6IjIifQ',
+ first: 30,
+ id: 'gid://gitlab/Ml::Model/2',
+ });
- it('when list emits prev-page fetches the prev set of records', async () => {
- findRegistryList().vm.$emit('prev-page');
await waitForPromises();
expect(resolver).toHaveBeenLastCalledWith(
- expect.objectContaining({ before: graphqlPageInfo.startCursor, last: GRAPHQL_PAGE_SIZE }),
+ expect.objectContaining({ after: graphqlPageInfo.endCursor, first: GRAPHQL_PAGE_SIZE }),
);
});
});
diff --git a/spec/frontend/ml/model_registry/components/searchable_list_spec.js b/spec/frontend/ml/model_registry/components/searchable_list_spec.js
new file mode 100644
index 00000000000..ea58a9a830a
--- /dev/null
+++ b/spec/frontend/ml/model_registry/components/searchable_list_spec.js
@@ -0,0 +1,170 @@
+import { GlAlert } from '@gitlab/ui';
+import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
+import SearchableList from '~/ml/model_registry/components/searchable_list.vue';
+import PackagesListLoader from '~/packages_and_registries/shared/components/packages_list_loader.vue';
+import RegistryList from '~/packages_and_registries/shared/components/registry_list.vue';
+import { defaultPageInfo } from '../mock_data';
+
+describe('ml/model_registry/components/searchable_list.vue', () => {
+ let wrapper;
+
+ const findAlert = () => wrapper.findComponent(GlAlert);
+ const findLoader = () => wrapper.findComponent(PackagesListLoader);
+ const findRegistryList = () => wrapper.findComponent(RegistryList);
+ const findEmptyState = () => wrapper.findByTestId('empty-state-slot');
+ const findFirstRow = () => wrapper.findByTestId('element');
+ const findRows = () => wrapper.findAllByTestId('element');
+
+ const defaultProps = {
+ items: ['a', 'b', 'c'],
+ pageInfo: defaultPageInfo,
+ isLoading: false,
+ errorMessage: '',
+ };
+
+ const mountComponent = (props = {}) => {
+ wrapper = shallowMountExtended(SearchableList, {
+ propsData: {
+ ...defaultProps,
+ ...props,
+ },
+ stubs: {
+ RegistryList,
+ },
+ slots: {
+ 'empty-state': '<div data-testid="empty-state-slot">This is empty</div>',
+ item: '<div data-testid="element"></div>',
+ },
+ });
+ };
+
+ describe('when list is loaded and has no data', () => {
+ beforeEach(() => mountComponent({ items: [] }));
+
+ it('shows empty state', () => {
+ expect(findEmptyState().text()).toBe('This is empty');
+ });
+
+ it('does not display loader', () => {
+ expect(findLoader().exists()).toBe(false);
+ });
+
+ it('does not display rows', () => {
+ expect(findFirstRow().exists()).toBe(false);
+ });
+
+ it('does not display registry list', () => {
+ expect(findRegistryList().exists()).toBe(false);
+ });
+
+ it('does not display alert', () => {
+ expect(findAlert().exists()).toBe(false);
+ });
+ });
+
+ describe('if errorMessage', () => {
+ beforeEach(() => mountComponent({ errorMessage: 'Failure!' }));
+
+ it('shows error message', () => {
+ expect(findAlert().text()).toContain('Failure!');
+ });
+
+ it('is not dismissible', () => {
+ expect(findAlert().props('dismissible')).toBe(false);
+ });
+
+ it('is of variant danger', () => {
+ expect(findAlert().attributes('variant')).toBe('danger');
+ });
+
+ it('hides loader', () => {
+ expect(findLoader().exists()).toBe(false);
+ });
+
+ it('hides registry list', () => {
+ expect(findRegistryList().exists()).toBe(false);
+ });
+
+ it('hides empty state', () => {
+ expect(findEmptyState().exists()).toBe(false);
+ });
+ });
+
+ describe('if loading', () => {
+ beforeEach(() => mountComponent({ isLoading: true }));
+
+ it('shows loader', () => {
+ expect(findLoader().exists()).toBe(true);
+ });
+
+ it('hides error message', () => {
+ expect(findAlert().exists()).toBe(false);
+ });
+
+ it('hides registry list', () => {
+ expect(findRegistryList().exists()).toBe(false);
+ });
+
+ it('hides empty state', () => {
+ expect(findEmptyState().exists()).toBe(false);
+ });
+ });
+
+ describe('when list is loaded with data', () => {
+ beforeEach(() => mountComponent());
+
+ it('displays package registry list', () => {
+ expect(findRegistryList().exists()).toEqual(true);
+ });
+
+ it('binds the right props', () => {
+ expect(findRegistryList().props()).toMatchObject({
+ items: ['a', 'b', 'c'],
+ isLoading: false,
+ pagination: defaultPageInfo,
+ hiddenDelete: true,
+ });
+ });
+
+ it('displays package version rows', () => {
+ expect(findRows().exists()).toEqual(true);
+ expect(findRows()).toHaveLength(3);
+ });
+
+ it('does not display loader', () => {
+ expect(findLoader().exists()).toBe(false);
+ });
+
+ it('does not display empty state', () => {
+ expect(findEmptyState().exists()).toBe(false);
+ });
+ });
+
+ describe('when user interacts with pagination', () => {
+ beforeEach(() => mountComponent());
+
+ it('when list emits next-page emits fetchPage with correct pageInfo', () => {
+ findRegistryList().vm.$emit('next-page');
+
+ const expectedNewPageInfo = {
+ after: 'eyJpZCI6IjIifQ',
+ first: 30,
+ last: null,
+ };
+
+ expect(wrapper.emitted('fetch-page')).toEqual([[expectedNewPageInfo]]);
+ });
+
+ it('when list emits prev-page emits fetchPage with correct pageInfo', () => {
+ findRegistryList().vm.$emit('prev-page');
+
+ const expectedNewPageInfo = {
+ before: 'eyJpZCI6IjE2In0',
+ first: null,
+ last: 30,
+ };
+
+ expect(wrapper.emitted('fetch-page')).toEqual([[expectedNewPageInfo]]);
+ });
+ });
+});
diff --git a/spec/frontend/ml/model_registry/graphql_mock_data.js b/spec/frontend/ml/model_registry/graphql_mock_data.js
index 1c31ee4627f..27424fbf0df 100644
--- a/spec/frontend/ml/model_registry/graphql_mock_data.js
+++ b/spec/frontend/ml/model_registry/graphql_mock_data.js
@@ -114,3 +114,27 @@ export const emptyCandidateQuery = {
},
},
};
+
+export const createModelResponses = {
+ success: {
+ data: {
+ mlModelCreate: {
+ model: {
+ id: 'gid://gitlab/Ml::Model/1',
+ _links: {
+ showPath: '/some/project/-/ml/models/1',
+ },
+ },
+ errors: [],
+ },
+ },
+ },
+ validationFailure: {
+ data: {
+ mlModelCreate: {
+ model: null,
+ errors: ['Name is invalid', "Name can't be blank"],
+ },
+ },
+ },
+};
diff --git a/spec/frontend/ml/model_registry/mock_data.js b/spec/frontend/ml/model_registry/mock_data.js
index 4399df38990..d8bb6a8eedb 100644
--- a/spec/frontend/ml/model_registry/mock_data.js
+++ b/spec/frontend/ml/model_registry/mock_data.js
@@ -42,6 +42,7 @@ export const newCandidate = () => ({
const LATEST_VERSION = {
version: '1.2.3',
+ path: 'path/to/modelversion',
};
export const makeModel = ({ latestVersion } = { latestVersion: LATEST_VERSION }) => ({
diff --git a/spec/frontend/oauth_remember_me_spec.js b/spec/frontend/oauth_remember_me_spec.js
index 33295d46fea..4fea216302f 100644
--- a/spec/frontend/oauth_remember_me_spec.js
+++ b/spec/frontend/oauth_remember_me_spec.js
@@ -5,13 +5,13 @@ import OAuthRememberMe from '~/pages/sessions/new/oauth_remember_me';
describe('OAuthRememberMe', () => {
const findFormAction = (selector) => {
- return $(`#oauth-container .js-oauth-login${selector}`).parent('form').attr('action');
+ return $(`.js-oauth-login ${selector}`).parent('form').attr('action');
};
beforeEach(() => {
setHTMLFixture(htmlOauthRememberMe);
- new OAuthRememberMe({ container: $('#oauth-container') }).bindEvents();
+ new OAuthRememberMe({ container: $('.js-oauth-login') }).bindEvents();
});
afterEach(() => {
@@ -19,7 +19,7 @@ describe('OAuthRememberMe', () => {
});
it('adds and removes the "remember_me" query parameter from all OAuth login buttons', () => {
- $('#oauth-container #remember_me_omniauth').click();
+ $('.js-oauth-login #remember_me_omniauth').click();
expect(findFormAction('.twitter')).toBe('http://example.com/?remember_me=1');
expect(findFormAction('.github')).toBe('http://example.com/?remember_me=1');
@@ -27,7 +27,7 @@ describe('OAuthRememberMe', () => {
'http://example.com/?redirect_fragment=L1&remember_me=1',
);
- $('#oauth-container #remember_me_omniauth').click();
+ $('.js-oauth-login #remember_me_omniauth').click();
expect(findFormAction('.twitter')).toBe('http://example.com/');
expect(findFormAction('.github')).toBe('http://example.com/');
diff --git a/spec/frontend/observability/client_spec.js b/spec/frontend/observability/client_spec.js
index e7b68a2346e..0a852d9e000 100644
--- a/spec/frontend/observability/client_spec.js
+++ b/spec/frontend/observability/client_spec.js
@@ -279,6 +279,38 @@ describe('buildClient', () => {
'&attr_name=name1&attr_value=value1',
);
});
+ describe('date range time filter', () => {
+ it('handles custom date range period filter', async () => {
+ await client.fetchTraces({
+ filters: {
+ period: [{ operator: '=', value: '2023-01-01 - 2023-02-01' }],
+ },
+ });
+ expect(getQueryParam()).not.toContain('period=');
+ expect(getQueryParam()).toContain(
+ 'start_time=2023-01-01T00:00:00.000Z&end_time=2023-02-01T00:00:00.000Z',
+ );
+ });
+
+ it.each([
+ 'invalid - 2023-02-01',
+ '2023-02-01 - invalid',
+ 'invalid - invalid',
+ '2023-01-01 / 2023-02-01',
+ '2023-01-01 2023-02-01',
+ '2023-01-01 - 2023-02-01 - 2023-02-01',
+ ])('ignore invalid values', async (val) => {
+ await client.fetchTraces({
+ filters: {
+ period: [{ operator: '=', value: val }],
+ },
+ });
+
+ expect(getQueryParam()).not.toContain('start_time=');
+ expect(getQueryParam()).not.toContain('end_time=');
+ expect(getQueryParam()).not.toContain('period=');
+ });
+ });
it('handles repeated params', async () => {
await client.fetchTraces({
diff --git a/spec/frontend/organizations/new/components/app_spec.js b/spec/frontend/organizations/new/components/app_spec.js
index 4f31baedbf6..e3e1c5b9684 100644
--- a/spec/frontend/organizations/new/components/app_spec.js
+++ b/spec/frontend/organizations/new/components/app_spec.js
@@ -24,10 +24,14 @@ describe('OrganizationNewApp', () => {
let wrapper;
let mockApollo;
+ const file = new File(['foo'], 'foo.jpg', {
+ type: 'text/plain',
+ });
+
+ const successfulResponseHandler = jest.fn().mockResolvedValue(organizationCreateResponse);
+
const createComponent = ({
- handlers = [
- [organizationCreateMutation, jest.fn().mockResolvedValue(organizationCreateResponse)],
- ],
+ handlers = [[organizationCreateMutation, successfulResponseHandler]],
} = {}) => {
mockApollo = createMockApollo(handlers);
@@ -36,7 +40,12 @@ describe('OrganizationNewApp', () => {
const findForm = () => wrapper.findComponent(NewEditForm);
const submitForm = async () => {
- findForm().vm.$emit('submit', { name: 'Foo bar', path: 'foo-bar' });
+ findForm().vm.$emit('submit', {
+ name: 'Foo bar',
+ path: 'foo-bar',
+ description: 'Foo bar description',
+ avatar: file,
+ });
await nextTick();
};
@@ -74,7 +83,15 @@ describe('OrganizationNewApp', () => {
await waitForPromises();
});
- it('redirects user to organization web url', () => {
+ it('calls mutation with correct variables and redirects user to organization web url', () => {
+ expect(successfulResponseHandler).toHaveBeenCalledWith({
+ input: {
+ name: 'Foo bar',
+ path: 'foo-bar',
+ description: 'Foo bar description',
+ avatar: file,
+ },
+ });
expect(visitUrlWithAlerts).toHaveBeenCalledWith(
organizationCreateResponse.data.organizationCreate.organization.webUrl,
[
diff --git a/spec/frontend/organizations/settings/general/components/organization_settings_spec.js b/spec/frontend/organizations/settings/general/components/organization_settings_spec.js
index d1c637331a8..52e81d7fb5d 100644
--- a/spec/frontend/organizations/settings/general/components/organization_settings_spec.js
+++ b/spec/frontend/organizations/settings/general/components/organization_settings_spec.js
@@ -5,7 +5,12 @@ import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
import OrganizationSettings from '~/organizations/settings/general/components/organization_settings.vue';
import SettingsBlock from '~/vue_shared/components/settings/settings_block.vue';
import NewEditForm from '~/organizations/shared/components/new_edit_form.vue';
-import { FORM_FIELD_NAME, FORM_FIELD_ID } from '~/organizations/shared/constants';
+import {
+ FORM_FIELD_NAME,
+ FORM_FIELD_ID,
+ FORM_FIELD_AVATAR,
+ FORM_FIELD_DESCRIPTION,
+} from '~/organizations/shared/constants';
import organizationUpdateMutation from '~/organizations/settings/general/graphql/mutations/organization_update.mutation.graphql';
import {
organizationUpdateResponse,
@@ -38,22 +43,33 @@ describe('OrganizationSettings', () => {
},
};
+ const file = new File(['foo'], 'foo.jpg', {
+ type: 'text/plain',
+ });
+
const successfulResponseHandler = jest.fn().mockResolvedValue(organizationUpdateResponse);
const createComponent = ({
handlers = [[organizationUpdateMutation, successfulResponseHandler]],
+ provide = {},
} = {}) => {
mockApollo = createMockApollo(handlers);
wrapper = shallowMountExtended(OrganizationSettings, {
- provide: defaultProvide,
+ provide: { ...defaultProvide, ...provide },
apolloProvider: mockApollo,
});
};
const findForm = () => wrapper.findComponent(NewEditForm);
- const submitForm = async () => {
- findForm().vm.$emit('submit', { name: 'Foo bar', path: 'foo-bar' });
+ const submitForm = async (data = {}) => {
+ findForm().vm.$emit('submit', {
+ name: 'Foo bar',
+ path: 'foo-bar',
+ description: 'Foo bar description',
+ avatar: file,
+ ...data,
+ });
await nextTick();
};
@@ -75,7 +91,7 @@ describe('OrganizationSettings', () => {
expect(findForm().props()).toMatchObject({
loading: false,
initialFormValues: defaultProvide.organization,
- fieldsToRender: [FORM_FIELD_NAME, FORM_FIELD_ID],
+ fieldsToRender: [FORM_FIELD_NAME, FORM_FIELD_ID, FORM_FIELD_DESCRIPTION, FORM_FIELD_AVATAR],
});
});
@@ -108,6 +124,8 @@ describe('OrganizationSettings', () => {
input: {
id: 'gid://gitlab/Organizations::Organization/1',
name: 'Foo bar',
+ description: 'Foo bar description',
+ avatar: file,
},
});
expect(visitUrlWithAlerts).toHaveBeenCalledWith(window.location.href, [
@@ -162,5 +180,48 @@ describe('OrganizationSettings', () => {
});
});
});
+
+ describe('when organization has avatar', () => {
+ beforeEach(() => {
+ createComponent({
+ provide: { organization: { ...defaultProvide.organization, avatar: 'avatar.jpg' } },
+ });
+ });
+
+ describe('when avatar is explicitly removed', () => {
+ beforeEach(async () => {
+ await submitForm({ avatar: null });
+ await waitForPromises();
+ });
+
+ it('sets `avatar` argument to `null`', () => {
+ expect(successfulResponseHandler).toHaveBeenCalledWith({
+ input: {
+ id: 'gid://gitlab/Organizations::Organization/1',
+ name: 'Foo bar',
+ description: 'Foo bar description',
+ avatar: null,
+ },
+ });
+ });
+ });
+
+ describe('when avatar is not changed', () => {
+ beforeEach(async () => {
+ await submitForm({ avatar: 'avatar.jpg' });
+ await waitForPromises();
+ });
+
+ it('does not pass `avatar` argument', () => {
+ expect(successfulResponseHandler).toHaveBeenCalledWith({
+ input: {
+ id: 'gid://gitlab/Organizations::Organization/1',
+ name: 'Foo bar',
+ description: 'Foo bar description',
+ },
+ });
+ });
+ });
+ });
});
});
diff --git a/spec/frontend/organizations/shared/components/groups_view_spec.js b/spec/frontend/organizations/shared/components/groups_view_spec.js
index 8d6ea60ffd2..e51d6a98743 100644
--- a/spec/frontend/organizations/shared/components/groups_view_spec.js
+++ b/spec/frontend/organizations/shared/components/groups_view_spec.js
@@ -25,13 +25,20 @@ describe('GroupsView', () => {
newGroupPath: '/groups/new',
};
+ const defaultPropsData = {
+ listItemClass: 'gl-px-5',
+ };
+
const createComponent = ({ mockResolvers = resolvers, propsData = {} } = {}) => {
mockApollo = createMockApollo([], mockResolvers);
wrapper = shallowMountExtended(GroupsView, {
apolloProvider: mockApollo,
provide: defaultProvide,
- propsData,
+ propsData: {
+ ...defaultPropsData,
+ ...propsData,
+ },
});
};
@@ -115,6 +122,7 @@ describe('GroupsView', () => {
expect(wrapper.findComponent(GroupsList).props()).toEqual({
groups: formatGroups(organizationGroups.nodes),
showGroupIcon: true,
+ listItemClass: defaultPropsData.listItemClass,
});
});
});
diff --git a/spec/frontend/organizations/shared/components/new_edit_form_spec.js b/spec/frontend/organizations/shared/components/new_edit_form_spec.js
index 1fcfc20bf1a..5be26ef7cc3 100644
--- a/spec/frontend/organizations/shared/components/new_edit_form_spec.js
+++ b/spec/frontend/organizations/shared/components/new_edit_form_spec.js
@@ -1,9 +1,16 @@
-import { GlButton } from '@gitlab/ui';
import { nextTick } from 'vue';
import NewEditForm from '~/organizations/shared/components/new_edit_form.vue';
import OrganizationUrlField from '~/organizations/shared/components/organization_url_field.vue';
-import { FORM_FIELD_NAME, FORM_FIELD_ID, FORM_FIELD_PATH } from '~/organizations/shared/constants';
+import AvatarUploadDropzone from '~/vue_shared/components/upload_dropzone/avatar_upload_dropzone.vue';
+import MarkdownField from '~/vue_shared/components/markdown/field.vue';
+import { helpPagePath } from '~/helpers/help_page_helper';
+import {
+ FORM_FIELD_NAME,
+ FORM_FIELD_ID,
+ FORM_FIELD_PATH,
+ FORM_FIELD_AVATAR,
+} from '~/organizations/shared/constants';
import { mountExtended } from 'helpers/vue_test_utils_helper';
describe('NewEditForm', () => {
@@ -12,6 +19,7 @@ describe('NewEditForm', () => {
const defaultProvide = {
organizationsPath: '/-/organizations',
rootUrl: 'http://127.0.0.1:3000/',
+ previewMarkdownPath: '/-/organizations/preview_markdown',
};
const defaultPropsData = {
@@ -32,6 +40,8 @@ describe('NewEditForm', () => {
const findNameField = () => wrapper.findByLabelText('Organization name');
const findIdField = () => wrapper.findByLabelText('Organization ID');
const findUrlField = () => wrapper.findComponent(OrganizationUrlField);
+ const findDescriptionField = () => wrapper.findByLabelText('Organization description (optional)');
+ const findAvatarField = () => wrapper.findComponent(AvatarUploadDropzone);
const setUrlFieldValue = async (value) => {
findUrlField().vm.$emit('input', value);
@@ -53,6 +63,56 @@ describe('NewEditForm', () => {
expect(findUrlField().exists()).toBe(true);
});
+ it('renders `Organization avatar` field', () => {
+ createComponent();
+
+ expect(findAvatarField().props()).toMatchObject({
+ value: null,
+ entity: { [FORM_FIELD_NAME]: '', [FORM_FIELD_PATH]: '', [FORM_FIELD_AVATAR]: null },
+ label: 'Organization avatar',
+ });
+ });
+
+ it('renders `Organization description` field as markdown editor', () => {
+ createComponent();
+
+ expect(findDescriptionField().exists()).toBe(true);
+ expect(wrapper.findComponent(MarkdownField).props()).toMatchObject({
+ markdownPreviewPath: defaultProvide.previewMarkdownPath,
+ markdownDocsPath: helpPagePath('user/organization/index', {
+ anchor: 'organization-description-supported-markdown',
+ }),
+ textareaValue: '',
+ restrictedToolBarItems: [
+ 'code',
+ 'quote',
+ 'bullet-list',
+ 'numbered-list',
+ 'task-list',
+ 'collapsible-section',
+ 'table',
+ 'attach-file',
+ 'full-screen',
+ ],
+ });
+ });
+
+ describe('when `Organization avatar` field is changed', () => {
+ const file = new File(['foo'], 'foo.jpg', {
+ type: 'text/plain',
+ });
+
+ beforeEach(() => {
+ window.URL.revokeObjectURL = jest.fn();
+ createComponent();
+ findAvatarField().vm.$emit('input', file);
+ });
+
+ it('updates `value` prop', () => {
+ expect(findAvatarField().props('value')).toEqual(file);
+ });
+ });
+
it('requires `Organization URL` field to be a minimum of two characters', async () => {
createComponent();
@@ -121,11 +181,14 @@ describe('NewEditForm', () => {
await findNameField().setValue('Foo bar');
await setUrlFieldValue('foo-bar');
+ await findDescriptionField().setValue('Foo bar description');
await submitForm();
});
it('emits `submit` event with form values', () => {
- expect(wrapper.emitted('submit')).toEqual([[{ name: 'Foo bar', path: 'foo-bar' }]]);
+ expect(wrapper.emitted('submit')).toEqual([
+ [{ name: 'Foo bar', path: 'foo-bar', description: 'Foo bar description', avatar: null }],
+ ]);
});
});
@@ -186,7 +249,7 @@ describe('NewEditForm', () => {
});
it('shows button with loading icon', () => {
- expect(wrapper.findComponent(GlButton).props('loading')).toBe(true);
+ expect(wrapper.findByTestId('submit-button').props('loading')).toBe(true);
});
});
diff --git a/spec/frontend/organizations/shared/components/projects_view_spec.js b/spec/frontend/organizations/shared/components/projects_view_spec.js
index 490b0c89348..3cc71927bfa 100644
--- a/spec/frontend/organizations/shared/components/projects_view_spec.js
+++ b/spec/frontend/organizations/shared/components/projects_view_spec.js
@@ -25,13 +25,20 @@ describe('ProjectsView', () => {
newProjectPath: '/projects/new',
};
+ const defaultPropsData = {
+ listItemClass: 'gl-px-5',
+ };
+
const createComponent = ({ mockResolvers = resolvers, propsData = {} } = {}) => {
mockApollo = createMockApollo([], mockResolvers);
wrapper = shallowMountExtended(ProjectsView, {
apolloProvider: mockApollo,
provide: defaultProvide,
- propsData,
+ propsData: {
+ ...defaultPropsData,
+ ...propsData,
+ },
});
};
@@ -115,6 +122,7 @@ describe('ProjectsView', () => {
expect(wrapper.findComponent(ProjectsList).props()).toEqual({
projects: formatProjects(organizationProjects.nodes),
showProjectIcon: true,
+ listItemClass: defaultPropsData.listItemClass,
});
});
});
diff --git a/spec/frontend/organizations/show/components/app_spec.js b/spec/frontend/organizations/show/components/app_spec.js
index 46496e40bdd..6cf8845bdbe 100644
--- a/spec/frontend/organizations/show/components/app_spec.js
+++ b/spec/frontend/organizations/show/components/app_spec.js
@@ -1,6 +1,7 @@
import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
import App from '~/organizations/show/components/app.vue';
import OrganizationAvatar from '~/organizations/show/components/organization_avatar.vue';
+import OrganizationDescription from '~/organizations/show/components/organization_description.vue';
import GroupsAndProjects from '~/organizations/show/components/groups_and_projects.vue';
import AssociationCount from '~/organizations/show/components/association_counts.vue';
@@ -34,6 +35,12 @@ describe('OrganizationShowApp', () => {
);
});
+ it('renders organization description and passes organization prop', () => {
+ expect(wrapper.findComponent(OrganizationDescription).props('organization')).toEqual(
+ defaultPropsData.organization,
+ );
+ });
+
it('renders groups and projects component and passes `groupsAndProjectsOrganizationPath` prop', () => {
expect(
wrapper.findComponent(GroupsAndProjects).props('groupsAndProjectsOrganizationPath'),
diff --git a/spec/frontend/organizations/show/components/organization_description_spec.js b/spec/frontend/organizations/show/components/organization_description_spec.js
new file mode 100644
index 00000000000..2aaf6f24a72
--- /dev/null
+++ b/spec/frontend/organizations/show/components/organization_description_spec.js
@@ -0,0 +1,46 @@
+import { mountExtended } from 'helpers/vue_test_utils_helper';
+import OrganizationDescription from '~/organizations/show/components/organization_description.vue';
+
+describe('OrganizationDescription', () => {
+ let wrapper;
+
+ const defaultPropsData = {
+ organization: {
+ id: 1,
+ name: 'GitLab',
+ description_html: '<h1>Foo bar description</h1><script>alert("foo")</script>',
+ },
+ };
+
+ const createComponent = ({ propsData = {} } = {}) => {
+ wrapper = mountExtended(OrganizationDescription, {
+ propsData: { ...defaultPropsData, ...propsData },
+ });
+ };
+
+ beforeEach(() => {
+ createComponent();
+ });
+
+ describe('when organization has description', () => {
+ beforeEach(() => {
+ createComponent();
+ });
+
+ it('renders description as safe HTML', () => {
+ expect(wrapper.element.innerHTML).toBe('<h1>Foo bar description</h1>');
+ });
+ });
+
+ describe('when organization does not have description', () => {
+ beforeEach(() => {
+ createComponent({
+ propsData: { organization: { ...defaultPropsData.organization, description_html: '' } },
+ });
+ });
+
+ it('renders nothing', () => {
+ expect(wrapper.html()).toBe('');
+ });
+ });
+});
diff --git a/spec/frontend/packages_and_registries/container_registry/explorer/components/details_page/details_header_spec.js b/spec/frontend/packages_and_registries/container_registry/explorer/components/details_page/details_header_spec.js
index 500fb0d7598..6fee0d1b825 100644
--- a/spec/frontend/packages_and_registries/container_registry/explorer/components/details_page/details_header_spec.js
+++ b/spec/frontend/packages_and_registries/container_registry/explorer/components/details_page/details_header_spec.js
@@ -207,6 +207,7 @@ describe('Details Header', () => {
expect(findSize().props()).toMatchObject({
icon: 'disk',
text: numberToHumanSize(size),
+ textTooltip: 'Includes both tagged and untagged images',
});
});
});
diff --git a/spec/frontend/packages_and_registries/package_registry/components/list/__snapshots__/package_list_row_spec.js.snap b/spec/frontend/packages_and_registries/package_registry/components/list/__snapshots__/package_list_row_spec.js.snap
index cbf2184d879..78d7f4183b7 100644
--- a/spec/frontend/packages_and_registries/package_registry/components/list/__snapshots__/package_list_row_spec.js.snap
+++ b/spec/frontend/packages_and_registries/package_registry/components/list/__snapshots__/package_list_row_spec.js.snap
@@ -28,19 +28,15 @@ exports[`packages_list_row renders 1`] = `
<div
class="gl-align-items-center gl-display-flex gl-min-w-0 gl-mr-3"
>
- <router-link-stub
- ariacurrentvalue="page"
+ <a
class="gl-min-w-0 gl-text-body"
data-testid="details-link"
- event="click"
- tag="a"
- to="[object Object]"
>
<gl-truncate-stub
position="end"
text="@gitlab-org/package-15"
/>
- </router-link-stub>
+ </a>
</div>
</div>
<div
diff --git a/spec/frontend/packages_and_registries/package_registry/components/list/package_list_row_spec.js b/spec/frontend/packages_and_registries/package_registry/components/list/package_list_row_spec.js
index 9f8fd4e28e7..afcb1798878 100644
--- a/spec/frontend/packages_and_registries/package_registry/components/list/package_list_row_spec.js
+++ b/spec/frontend/packages_and_registries/package_registry/components/list/package_list_row_spec.js
@@ -1,6 +1,7 @@
import { GlFormCheckbox, GlSprintf, GlTruncate } from '@gitlab/ui';
import Vue from 'vue';
import VueRouter from 'vue-router';
+import { RouterLinkStub } from '@vue/test-utils';
import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
import { createMockDirective, getBinding } from 'helpers/vue_mock_directive';
@@ -58,6 +59,7 @@ describe('packages_list_row', () => {
ListItem,
GlSprintf,
TimeagoTooltip,
+ RouterLink: RouterLinkStub,
},
propsData: {
packageEntity,
diff --git a/spec/frontend/packages_and_registries/settings/group/components/group_settings_app_spec.js b/spec/frontend/packages_and_registries/settings/group/components/group_settings_app_spec.js
index 3ce8e91d43d..32ddc087b32 100644
--- a/spec/frontend/packages_and_registries/settings/group/components/group_settings_app_spec.js
+++ b/spec/frontend/packages_and_registries/settings/group/components/group_settings_app_spec.js
@@ -73,11 +73,11 @@ describe('Group Settings App', () => {
};
describe.each`
- finder | entitySpecificProps | successMessage | errorMessage
- ${findPackageSettings} | ${packageSettingsProps} | ${'Settings saved successfully'} | ${'An error occurred while saving the settings'}
- ${findPackageForwardingSettings} | ${packageForwardingSettingsProps} | ${'Settings saved successfully'} | ${'An error occurred while saving the settings'}
- ${findDependencyProxySettings} | ${dependencyProxyProps} | ${'Setting saved successfully'} | ${'An error occurred while saving the setting'}
- `('settings blocks', ({ finder, entitySpecificProps, successMessage, errorMessage }) => {
+ finder | entitySpecificProps
+ ${findPackageSettings} | ${packageSettingsProps}
+ ${findPackageForwardingSettings} | ${packageForwardingSettingsProps}
+ ${findDependencyProxySettings} | ${dependencyProxyProps}
+ `('settings blocks', ({ finder, entitySpecificProps }) => {
beforeEach(() => {
mountComponent();
return waitForApolloQueryAndRender();
@@ -94,7 +94,7 @@ describe('Group Settings App', () => {
describe('success event', () => {
it('shows a success toast', () => {
finder().vm.$emit('success');
- expect(show).toHaveBeenCalledWith(successMessage);
+ expect(show).toHaveBeenCalledWith('Settings saved successfully.');
});
it('hides the error alert', async () => {
@@ -121,7 +121,7 @@ describe('Group Settings App', () => {
});
it('alert has the right text', () => {
- expect(findAlert().text()).toBe(errorMessage);
+ expect(findAlert().text()).toBe('An error occurred while saving the settings.');
});
it('dismissing the alert removes it', async () => {
diff --git a/spec/frontend/packages_and_registries/settings/project/settings/components/packages_protection_rules_spec.js b/spec/frontend/packages_and_registries/settings/project/settings/components/packages_protection_rules_spec.js
new file mode 100644
index 00000000000..bdb3db7a1b9
--- /dev/null
+++ b/spec/frontend/packages_and_registries/settings/project/settings/components/packages_protection_rules_spec.js
@@ -0,0 +1,97 @@
+import { GlTable, GlLoadingIcon } from '@gitlab/ui';
+import { shallowMount, mount } from '@vue/test-utils';
+import Vue from 'vue';
+import VueApollo from 'vue-apollo';
+import createMockApollo from 'helpers/mock_apollo_helper';
+import waitForPromises from 'helpers/wait_for_promises';
+import PackagesProtectionRules from '~/packages_and_registries/settings/project/components/packages_protection_rules.vue';
+import SettingsBlock from '~/vue_shared/components/settings/settings_block.vue';
+import packagesProtectionRuleQuery from '~/packages_and_registries/settings/project/graphql/queries/get_packages_protection_rules.query.graphql';
+
+import { packagesProtectionRuleQueryPayload, packagesProtectionRulesData } from '../mock_data';
+
+Vue.use(VueApollo);
+
+describe('Packages protection rules project settings', () => {
+ let wrapper;
+ let fakeApollo;
+
+ const defaultProvidedValues = {
+ projectPath: 'path',
+ };
+ const findSettingsBlock = () => wrapper.findComponent(SettingsBlock);
+ const findTable = () => wrapper.findComponent(GlTable);
+ const findTableLoadingIcon = () => wrapper.findComponent(GlLoadingIcon);
+ const findTableRows = () => findTable().find('tbody').findAll('tr');
+
+ const mountComponent = (mountFn = shallowMount, provide = defaultProvidedValues, config) => {
+ wrapper = mountFn(PackagesProtectionRules, {
+ stubs: {
+ SettingsBlock,
+ },
+ provide,
+ ...config,
+ });
+ };
+
+ const createComponent = ({
+ mountFn = shallowMount,
+ provide = defaultProvidedValues,
+ resolver = jest.fn().mockResolvedValue(packagesProtectionRuleQueryPayload()),
+ } = {}) => {
+ const requestHandlers = [[packagesProtectionRuleQuery, resolver]];
+
+ fakeApollo = createMockApollo(requestHandlers);
+
+ mountComponent(mountFn, provide, {
+ apolloProvider: fakeApollo,
+ });
+ };
+
+ it('renders the setting block with table', async () => {
+ createComponent();
+
+ await waitForPromises();
+
+ expect(findSettingsBlock().exists()).toBe(true);
+ expect(findTable().exists()).toBe(true);
+ });
+
+ describe('table package protection rules', () => {
+ it('renders table with packages protection rules', async () => {
+ createComponent({ mountFn: mount });
+
+ await waitForPromises();
+
+ expect(findTable().exists()).toBe(true);
+
+ packagesProtectionRulesData.forEach((protectionRule, i) => {
+ expect(findTableRows().at(i).text()).toContain(protectionRule.packageNamePattern);
+ expect(findTableRows().at(i).text()).toContain(protectionRule.packageType);
+ expect(findTableRows().at(i).text()).toContain(protectionRule.pushProtectedUpToAccessLevel);
+ });
+ });
+
+ it('displays table in busy state and shows loading icon inside table', async () => {
+ createComponent({ mountFn: mount });
+
+ expect(findTableLoadingIcon().exists()).toBe(true);
+ expect(findTableLoadingIcon().attributes('aria-label')).toBe('Loading');
+
+ expect(findTable().attributes('aria-busy')).toBe('true');
+
+ await waitForPromises();
+
+ expect(findTableLoadingIcon().exists()).toBe(false);
+ expect(findTable().attributes('aria-busy')).toBe('false');
+ });
+
+ it('renders table', async () => {
+ createComponent();
+
+ await waitForPromises();
+
+ expect(findTable().exists()).toBe(true);
+ });
+ });
+});
diff --git a/spec/frontend/packages_and_registries/settings/project/settings/components/registry_settings_app_spec.js b/spec/frontend/packages_and_registries/settings/project/settings/components/registry_settings_app_spec.js
index dfcabd14489..1afc9b62ba2 100644
--- a/spec/frontend/packages_and_registries/settings/project/settings/components/registry_settings_app_spec.js
+++ b/spec/frontend/packages_and_registries/settings/project/settings/components/registry_settings_app_spec.js
@@ -6,6 +6,7 @@ import * as commonUtils from '~/lib/utils/common_utils';
import component from '~/packages_and_registries/settings/project/components/registry_settings_app.vue';
import ContainerExpirationPolicy from '~/packages_and_registries/settings/project/components/container_expiration_policy.vue';
import PackagesCleanupPolicy from '~/packages_and_registries/settings/project/components/packages_cleanup_policy.vue';
+import PackagesProtectionRules from '~/packages_and_registries/settings/project/components/packages_protection_rules.vue';
import DependencyProxyPackagesSettings from 'ee_component/packages_and_registries/settings/project/components/dependency_proxy_packages_settings.vue';
import {
SHOW_SETUP_SUCCESS_ALERT,
@@ -19,6 +20,7 @@ describe('Registry Settings app', () => {
const findContainerExpirationPolicy = () => wrapper.findComponent(ContainerExpirationPolicy);
const findPackagesCleanupPolicy = () => wrapper.findComponent(PackagesCleanupPolicy);
+ const findPackagesProtectionRules = () => wrapper.findComponent(PackagesProtectionRules);
const findDependencyProxyPackagesSettings = () =>
wrapper.findComponent(DependencyProxyPackagesSettings);
const findAlert = () => wrapper.findComponent(GlAlert);
@@ -29,6 +31,7 @@ describe('Registry Settings app', () => {
showPackageRegistrySettings: true,
showDependencyProxySettings: false,
...(IS_EE && { showDependencyProxySettings: true }),
+ glFeatures: { packagesProtectedPackages: true },
};
const mountComponent = (provide = defaultProvide) => {
@@ -95,6 +98,7 @@ describe('Registry Settings app', () => {
expect(findContainerExpirationPolicy().exists()).toBe(showContainerRegistrySettings);
expect(findPackagesCleanupPolicy().exists()).toBe(showPackageRegistrySettings);
+ expect(findPackagesProtectionRules().exists()).toBe(showPackageRegistrySettings);
},
);
@@ -108,5 +112,20 @@ describe('Registry Settings app', () => {
expect(findDependencyProxyPackagesSettings().exists()).toBe(value);
});
}
+
+ describe('when feature flag "packagesProtectedPackages" is disabled', () => {
+ it.each([true, false])(
+ 'package protection rules settings is hidden if showPackageRegistrySettings is %s',
+ (showPackageRegistrySettings) => {
+ mountComponent({
+ ...defaultProvide,
+ showPackageRegistrySettings,
+ glFeatures: { packagesProtectedPackages: false },
+ });
+
+ expect(findPackagesProtectionRules().exists()).toBe(false);
+ },
+ );
+ });
});
});
diff --git a/spec/frontend/packages_and_registries/settings/project/settings/mock_data.js b/spec/frontend/packages_and_registries/settings/project/settings/mock_data.js
index 3204ca01f99..5c546289b14 100644
--- a/spec/frontend/packages_and_registries/settings/project/settings/mock_data.js
+++ b/spec/frontend/packages_and_registries/settings/project/settings/mock_data.js
@@ -79,3 +79,36 @@ export const packagesCleanupPolicyMutationPayload = ({ override, errors = [] } =
},
},
});
+
+export const packagesProtectionRulesData = [
+ {
+ id: `gid://gitlab/Packages::Protection::Rule/14`,
+ packageNamePattern: `@flight/flight-maintainer-14-*`,
+ packageType: 'NPM',
+ pushProtectedUpToAccessLevel: 'MAINTAINER',
+ },
+ {
+ id: `gid://gitlab/Packages::Protection::Rule/15`,
+ packageNamePattern: `@flight/flight-maintainer-15-*`,
+ packageType: 'NPM',
+ pushProtectedUpToAccessLevel: 'MAINTAINER',
+ },
+ {
+ id: 'gid://gitlab/Packages::Protection::Rule/16',
+ packageNamePattern: '@flight/flight-owner-16-*',
+ packageType: 'NPM',
+ pushProtectedUpToAccessLevel: 'OWNER',
+ },
+];
+
+export const packagesProtectionRuleQueryPayload = ({ override, errors = [] } = {}) => ({
+ data: {
+ project: {
+ id: '1',
+ packagesProtectionRules: {
+ nodes: override || packagesProtectionRulesData,
+ },
+ errors,
+ },
+ },
+});
diff --git a/spec/frontend/pages/import/bulk_imports/history/components/bulk_imports_history_app_spec.js b/spec/frontend/pages/import/bulk_imports/history/components/bulk_imports_history_app_spec.js
index 3db77469d6b..1c9d8f17210 100644
--- a/spec/frontend/pages/import/bulk_imports/history/components/bulk_imports_history_app_spec.js
+++ b/spec/frontend/pages/import/bulk_imports/history/components/bulk_imports_history_app_spec.js
@@ -1,13 +1,13 @@
import { GlEmptyState, GlLoadingIcon, GlTableLite } from '@gitlab/ui';
import { mount, shallowMount } from '@vue/test-utils';
import MockAdapter from 'axios-mock-adapter';
-import { extendedWrapper } from 'helpers/vue_test_utils_helper';
import axios from '~/lib/utils/axios_utils';
import waitForPromises from 'helpers/wait_for_promises';
import { HTTP_STATUS_OK } from '~/lib/utils/http_status';
import { getParameterValues } from '~/lib/utils/url_utility';
import BulkImportsHistoryApp from '~/pages/import/bulk_imports/history/components/bulk_imports_history_app.vue';
+import ImportStatus from '~/import_entities/import_groups/components/import_status.vue';
import PaginationBar from '~/vue_shared/components/pagination_bar/pagination_bar.vue';
import LocalStorageSync from '~/vue_shared/components/local_storage_sync.vue';
@@ -39,6 +39,7 @@ describe('BulkImportsHistoryApp', () => {
destination_slug: 'top-level-group-12',
destination_namespace: 'h5bp',
created_at: '2021-07-08T10:03:44.743Z',
+ has_failures: false,
failures: [],
},
{
@@ -56,6 +57,7 @@ describe('BulkImportsHistoryApp', () => {
project_id: null,
created_at: '2021-07-13T12:52:26.664Z',
updated_at: '2021-07-13T13:34:49.403Z',
+ has_failures: true,
failures: [
{
pipeline_class: 'BulkImports::Groups::Pipelines::GroupPipeline',
@@ -72,15 +74,19 @@ describe('BulkImportsHistoryApp', () => {
let mock;
const mockRealtimeChangesPath = '/import/realtime_changes.json';
- function createComponent({ shallow = true } = {}) {
+ function createComponent({ shallow = true, provide } = {}) {
const mountFn = shallow ? shallowMount : mount;
wrapper = mountFn(BulkImportsHistoryApp, {
- provide: { realtimeChangesPath: mockRealtimeChangesPath },
+ provide: {
+ realtimeChangesPath: mockRealtimeChangesPath,
+ ...provide,
+ },
});
}
const findLocalStorageSync = () => wrapper.findComponent(LocalStorageSync);
const findPaginationBar = () => wrapper.findComponent(PaginationBar);
+ const findImportStatusAt = (index) => wrapper.findAllComponents(ImportStatus).at(index);
beforeEach(() => {
gon.api_version = 'v4';
@@ -201,77 +207,59 @@ describe('BulkImportsHistoryApp', () => {
expect(findLocalStorageSync().props('value')).toBe(NEW_PAGE_SIZE);
});
- it('renders link to destination_full_path for destination group', async () => {
- createComponent({ shallow: false });
- await waitForPromises();
-
- expect(wrapper.find('tbody tr a').attributes().href).toBe(
- `/${DUMMY_RESPONSE[0].destination_full_path}`,
- );
- });
-
- it('renders destination as text when destination_full_path is not defined', async () => {
- const RESPONSE = [{ ...DUMMY_RESPONSE[0], destination_full_path: null }];
-
- mock.onGet(BULK_IMPORTS_API_URL).reply(HTTP_STATUS_OK, RESPONSE, DEFAULT_HEADERS);
- createComponent({ shallow: false });
- await waitForPromises();
-
- expect(wrapper.find('tbody tr a').exists()).toBe(false);
- expect(wrapper.find('tbody tr span').text()).toBe(
- `${DUMMY_RESPONSE[0].destination_namespace}/${DUMMY_RESPONSE[0].destination_slug}/`,
- );
- });
-
- it('adds slash to group urls', async () => {
- createComponent({ shallow: false });
- await waitForPromises();
-
- expect(wrapper.find('tbody tr a').text()).toBe(`${DUMMY_RESPONSE[0].destination_full_path}/`);
- });
+ describe('table rendering', () => {
+ beforeEach(async () => {
+ createComponent({ shallow: false });
+ await waitForPromises();
+ });
- it('does not prefixes project urls with slash', async () => {
- createComponent({ shallow: false });
- await waitForPromises();
+ it('renders link to destination_full_path for destination group', () => {
+ expect(wrapper.find('tbody tr a').attributes().href).toBe(
+ `/${DUMMY_RESPONSE[0].destination_full_path}`,
+ );
+ });
- expect(wrapper.findAll('tbody tr a').at(1).text()).toBe(
- DUMMY_RESPONSE[1].destination_full_path,
- );
- });
+ it('renders destination as text when destination_full_path is not defined', async () => {
+ const RESPONSE = [{ ...DUMMY_RESPONSE[0], destination_full_path: null }];
- describe('details button', () => {
- beforeEach(() => {
- mock.onGet(BULK_IMPORTS_API_URL).reply(HTTP_STATUS_OK, DUMMY_RESPONSE, DEFAULT_HEADERS);
+ mock.onGet(BULK_IMPORTS_API_URL).reply(HTTP_STATUS_OK, RESPONSE, DEFAULT_HEADERS);
createComponent({ shallow: false });
- return waitForPromises();
+ await waitForPromises();
+
+ expect(wrapper.find('tbody tr a').exists()).toBe(false);
+ expect(wrapper.find('tbody tr span').text()).toBe(
+ `${DUMMY_RESPONSE[0].destination_namespace}/${DUMMY_RESPONSE[0].destination_slug}/`,
+ );
});
- it('renders details button if relevant item has failures', () => {
- expect(
- extendedWrapper(wrapper.find('tbody').findAll('tr').at(1)).findByText('Details').exists(),
- ).toBe(true);
+ it('adds slash to group urls', () => {
+ expect(wrapper.find('tbody tr a').text()).toBe(`${DUMMY_RESPONSE[0].destination_full_path}/`);
});
- it('does not render details button if relevant item has no failures', () => {
- expect(
- extendedWrapper(wrapper.find('tbody').findAll('tr').at(0)).findByText('Details').exists(),
- ).toBe(false);
+ it('does not prefix project urls with slash', () => {
+ expect(wrapper.findAll('tbody tr a').at(1).text()).toBe(
+ DUMMY_RESPONSE[1].destination_full_path,
+ );
});
- it('expands details when details button is clicked', async () => {
- const ORIGINAL_ROW_INDEX = 1;
- await extendedWrapper(wrapper.find('tbody').findAll('tr').at(ORIGINAL_ROW_INDEX))
- .findByText('Details')
- .trigger('click');
+ it('renders finished import status', () => {
+ expect(findImportStatusAt(0).text()).toBe('Complete');
+ });
- const detailsRowContent = wrapper
- .find('tbody')
- .findAll('tr')
- .at(ORIGINAL_ROW_INDEX + 1)
- .find('pre');
+ it('renders failed import status with details link', async () => {
+ createComponent({
+ shallow: false,
+ provide: {
+ detailsPath: '/mock-details',
+ },
+ });
+ await waitForPromises();
- expect(detailsRowContent.exists()).toBe(true);
- expect(JSON.parse(detailsRowContent.text())).toStrictEqual(DUMMY_RESPONSE[1].failures);
+ const failedImportStatus = findImportStatusAt(1);
+ const failedImportStatusLink = failedImportStatus.find('a');
+ expect(failedImportStatus.text()).toContain('Failed');
+ expect(failedImportStatusLink.text()).toBe('See failures');
+ expect(failedImportStatusLink.attributes('href')).toContain('/mock-details');
});
});
diff --git a/spec/frontend/pages/projects/shared/permissions/components/ci_catalog_settings_spec.js b/spec/frontend/pages/projects/shared/permissions/components/ci_catalog_settings_spec.js
index 8145eb6fbd4..d64a05c93d2 100644
--- a/spec/frontend/pages/projects/shared/permissions/components/ci_catalog_settings_spec.js
+++ b/spec/frontend/pages/projects/shared/permissions/components/ci_catalog_settings_spec.js
@@ -8,9 +8,9 @@ import waitForPromises from 'helpers/wait_for_promises';
import createMockApollo from 'helpers/mock_apollo_helper';
import BetaBadge from '~/vue_shared/components/badges/beta_badge.vue';
-import catalogResourcesCreate from '~/pages/projects/shared/permissions/graphql/mutations/catalog_resources_create.mutation.graphql';
-import catalogResourcesDestroy from '~/pages/projects/shared/permissions/graphql/mutations/catalog_resources_destroy.mutation.graphql';
-import getCiCatalogSettingsQuery from '~/pages/projects/shared/permissions/graphql/queries/get_ci_catalog_settings.query.graphql';
+import catalogResourcesCreate from '~/ci/catalog/graphql/mutations/catalog_resources_create.mutation.graphql';
+import catalogResourcesDestroy from '~/ci/catalog/graphql/mutations/catalog_resources_destroy.mutation.graphql';
+import getCiCatalogSettingsQuery from '~/ci/catalog/graphql/queries/get_ci_catalog_settings.query.graphql';
import CiCatalogSettings from '~/pages/projects/shared/permissions/components/ci_catalog_settings.vue';
import { generateCatalogSettingsResponse } from './mock_data';
diff --git a/spec/frontend/pages/sessions/new/preserve_url_fragment_spec.js b/spec/frontend/pages/sessions/new/preserve_url_fragment_spec.js
index 6ff2bb42d8d..7607381a981 100644
--- a/spec/frontend/pages/sessions/new/preserve_url_fragment_spec.js
+++ b/spec/frontend/pages/sessions/new/preserve_url_fragment_spec.js
@@ -5,7 +5,7 @@ import preserveUrlFragment from '~/pages/sessions/new/preserve_url_fragment';
describe('preserve_url_fragment', () => {
const findFormAction = (selector) => {
- return $(`.omniauth-container ${selector}`).parent('form').attr('action');
+ return $(`.js-oauth-login ${selector}`).parent('form').attr('action');
};
beforeEach(() => {
@@ -44,9 +44,7 @@ describe('preserve_url_fragment', () => {
});
it('when "remember-me" is present', () => {
- $('.js-oauth-login')
- .parent('form')
- .attr('action', (i, href) => `${href}?remember_me=1`);
+ $('.js-oauth-login form').attr('action', (i, href) => `${href}?remember_me=1`);
preserveUrlFragment('#L65');
diff --git a/spec/frontend/performance_bar/components/performance_bar_app_spec.js b/spec/frontend/performance_bar/components/performance_bar_app_spec.js
index 7a018236314..1ccb56a0697 100644
--- a/spec/frontend/performance_bar/components/performance_bar_app_spec.js
+++ b/spec/frontend/performance_bar/components/performance_bar_app_spec.js
@@ -17,6 +17,9 @@ describe('performance bar app', () => {
statsUrl: 'https://log.gprd.gitlab.net/app/dashboards#/view/',
peekUrl: '/-/peek/results',
},
+ stubs: {
+ GlEmoji: { template: '<div/>' },
+ },
});
};
diff --git a/spec/frontend/performance_bar/components/request_warning_spec.js b/spec/frontend/performance_bar/components/request_warning_spec.js
index a4f0d388e33..a85f83e9da7 100644
--- a/spec/frontend/performance_bar/components/request_warning_spec.js
+++ b/spec/frontend/performance_bar/components/request_warning_spec.js
@@ -1,5 +1,5 @@
import Vue from 'vue';
-import { shallowMount } from '@vue/test-utils';
+import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
import RequestWarning from '~/performance_bar/components/request_warning.vue';
Vue.config.ignoredElements = ['gl-emoji'];
@@ -8,9 +8,20 @@ describe('request warning', () => {
let wrapper;
const htmlId = 'request-123';
+ const createComponent = ({ propsData = {} } = {}) => {
+ wrapper = shallowMountExtended(RequestWarning, {
+ propsData,
+ stubs: {
+ GlEmoji: { template: `<div id="${htmlId}" />` },
+ },
+ });
+ };
+
+ const findEmoji = () => wrapper.findByTestId('warning');
+
describe('when the request has warnings', () => {
beforeEach(() => {
- wrapper = shallowMount(RequestWarning, {
+ createComponent({
propsData: {
htmlId,
warnings: ['gitaly calls: 30 over 10', 'gitaly duration: 1500 over 1000'],
@@ -19,14 +30,14 @@ describe('request warning', () => {
});
it('adds a warning emoji with the correct ID', () => {
- expect(wrapper.find('span gl-emoji[id]').attributes('id')).toEqual(htmlId);
- expect(wrapper.find('span gl-emoji[id]').element.dataset.name).toEqual('warning');
+ expect(findEmoji().attributes('id')).toEqual(htmlId);
+ expect(findEmoji().element.dataset.name).toEqual('warning');
});
});
describe('when the request does not have warnings', () => {
beforeEach(() => {
- wrapper = shallowMount(RequestWarning, {
+ createComponent({
propsData: {
htmlId,
warnings: [],
@@ -35,7 +46,7 @@ describe('request warning', () => {
});
it('does nothing', () => {
- expect(wrapper.html()).toBe('');
+ expect(findEmoji().exists()).toBe(false);
});
});
});
diff --git a/spec/frontend/profile/preferences/components/profile_preferences_spec.js b/spec/frontend/profile/preferences/components/profile_preferences_spec.js
index 144d9e76869..83c4a7435a8 100644
--- a/spec/frontend/profile/preferences/components/profile_preferences_spec.js
+++ b/spec/frontend/profile/preferences/components/profile_preferences_spec.js
@@ -3,7 +3,7 @@ import { shallowMount } from '@vue/test-utils';
import { nextTick } from 'vue';
import { useMockLocationHelper } from 'helpers/mock_window_location_helper';
import { extendedWrapper } from 'helpers/vue_test_utils_helper';
-import { createAlert, VARIANT_DANGER, VARIANT_INFO } from '~/alert';
+import { createAlert, VARIANT_DANGER } from '~/alert';
import IntegrationView from '~/profile/preferences/components/integration_view.vue';
import ProfilePreferences from '~/profile/preferences/components/profile_preferences.vue';
import { i18n } from '~/profile/preferences/constants';
@@ -32,11 +32,17 @@ describe('ProfilePreferences component', () => {
profilePreferencesPath: '/update-profile',
formEl: document.createElement('form'),
};
+ const showToast = jest.fn();
function createComponent(options = {}) {
const { props = {}, provide = {}, attachTo } = options;
return extendedWrapper(
shallowMount(ProfilePreferences, {
+ mocks: {
+ $toast: {
+ show: showToast,
+ },
+ },
provide: {
...defaultProvide,
...provide,
@@ -136,10 +142,7 @@ describe('ProfilePreferences component', () => {
const successEvent = new CustomEvent('ajax:success');
form.dispatchEvent(successEvent);
- expect(createAlert).toHaveBeenCalledWith({
- message: i18n.defaultSuccess,
- variant: VARIANT_INFO,
- });
+ expect(showToast).toHaveBeenCalledWith(i18n.defaultSuccess);
});
it('displays the custom success message', () => {
@@ -147,7 +150,7 @@ describe('ProfilePreferences component', () => {
const successEvent = new CustomEvent('ajax:success', { detail: [{ message }] });
form.dispatchEvent(successEvent);
- expect(createAlert).toHaveBeenCalledWith({ message, variant: VARIANT_INFO });
+ expect(showToast).toHaveBeenCalledWith(message);
});
it('displays the default error message', () => {
diff --git a/spec/frontend/projects/commit/components/commit_comments_button_spec.js b/spec/frontend/projects/commit/components/commit_comments_button_spec.js
deleted file mode 100644
index 873270c5be1..00000000000
--- a/spec/frontend/projects/commit/components/commit_comments_button_spec.js
+++ /dev/null
@@ -1,42 +0,0 @@
-import { shallowMount } from '@vue/test-utils';
-import { extendedWrapper } from 'helpers/vue_test_utils_helper';
-import CommitCommentsButton from '~/projects/commit/components/commit_comments_button.vue';
-
-describe('CommitCommentsButton', () => {
- let wrapper;
-
- const createComponent = (props = {}) => {
- wrapper = extendedWrapper(
- shallowMount(CommitCommentsButton, {
- propsData: {
- commentsCount: 1,
- ...props,
- },
- }),
- );
- };
-
- const tooltip = () => wrapper.findByTestId('comment-button-wrapper');
-
- describe('Comment Button', () => {
- it('has proper tooltip and button attributes for 1 comment', () => {
- createComponent();
-
- expect(tooltip().attributes('title')).toBe('1 comment on this commit');
- expect(tooltip().text()).toBe('1');
- });
-
- it('has proper tooltip and button attributes for multiple comments', () => {
- createComponent({ commentsCount: 2 });
-
- expect(tooltip().attributes('title')).toBe('2 comments on this commit');
- expect(tooltip().text()).toBe('2');
- });
-
- it('does not show when there are no comments', () => {
- createComponent({ commentsCount: 0 });
-
- expect(tooltip().exists()).toBe(false);
- });
- });
-});
diff --git a/spec/frontend/projects/new/components/new_project_url_select_spec.js b/spec/frontend/projects/new/components/new_project_url_select_spec.js
index ceac4435282..be923c1f643 100644
--- a/spec/frontend/projects/new/components/new_project_url_select_spec.js
+++ b/spec/frontend/projects/new/components/new_project_url_select_spec.js
@@ -1,13 +1,12 @@
import {
GlButton,
- GlDropdown,
- GlDropdownItem,
- GlDropdownSectionHeader,
+ GlCollapsibleListbox,
+ GlListboxItem,
GlTruncate,
GlSearchBoxByType,
} from '@gitlab/ui';
import { mount, shallowMount } from '@vue/test-utils';
-import Vue, { nextTick } from 'vue';
+import Vue from 'vue';
import VueApollo from 'vue-apollo';
import createMockApollo from 'helpers/mock_apollo_helper';
import waitForPromises from 'helpers/wait_for_promises';
@@ -99,16 +98,17 @@ describe('NewProjectUrlSelect component', () => {
};
const findButtonLabel = () => wrapper.findComponent(GlButton);
- const findDropdown = () => wrapper.findComponent(GlDropdown);
+ const findDropdown = () => wrapper.findComponent(GlCollapsibleListbox);
const findSelectedPath = () => wrapper.findComponent(GlTruncate);
const findHiddenNamespaceInput = () => wrapper.find(`[name="${defaultProvide.inputName}`);
+ const findAllListboxItems = () => wrapper.findAllComponents(GlListboxItem);
+ const findToggleButton = () => findDropdown().findComponent(GlButton);
const findHiddenSelectedNamespaceInput = () =>
wrapper.find('[name="project[selected_namespace_id]"]');
const clickDropdownItem = async () => {
- wrapper.findComponent(GlDropdownItem).vm.$emit('click');
- await nextTick();
+ await findAllListboxItems().at(0).trigger('click');
};
const showDropdown = async () => {
@@ -135,7 +135,7 @@ describe('NewProjectUrlSelect component', () => {
});
it('renders a dropdown without the class', () => {
- expect(findDropdown().props('toggleClass')).not.toContain('gl-text-gray-500!');
+ expect(findToggleButton().classes()).not.toContain('gl-text-gray-500!');
});
it('renders a hidden input with the given namespace id', () => {
@@ -165,7 +165,7 @@ describe('NewProjectUrlSelect component', () => {
});
it('renders a dropdown with the class', () => {
- expect(findDropdown().props('toggleClass')).toContain('gl-text-gray-500!');
+ expect(findToggleButton().classes()).toContain('gl-text-gray-500!');
});
it("renders a hidden input with the user's namespace id", () => {
@@ -179,28 +179,22 @@ describe('NewProjectUrlSelect component', () => {
});
});
- it('focuses on the input when the dropdown is opened', async () => {
- wrapper = mountComponent();
-
- await showDropdown();
-
- expect(focusInputSpy).toHaveBeenCalledTimes(1);
- });
-
it('renders expected dropdown items', async () => {
wrapper = mountComponent({ mountFn: mount });
await showDropdown();
- const listItems = wrapper.findAll('li');
-
- expect(listItems).toHaveLength(6);
- expect(listItems.at(0).findComponent(GlDropdownSectionHeader).text()).toBe('Groups');
- expect(listItems.at(1).text()).toBe(data.currentUser.groups.nodes[0].fullPath);
- expect(listItems.at(2).text()).toBe(data.currentUser.groups.nodes[1].fullPath);
- expect(listItems.at(3).text()).toBe(data.currentUser.groups.nodes[2].fullPath);
- expect(listItems.at(4).findComponent(GlDropdownSectionHeader).text()).toBe('Users');
- expect(listItems.at(5).text()).toBe(data.currentUser.namespace.fullPath);
+ const { fullPath: text, id: value } = data.currentUser.namespace;
+ const userOptions = [{ text, value }];
+ const groupOptions = data.currentUser.groups.nodes.map((node) => ({
+ text: node.fullPath,
+ value: node.id,
+ }));
+
+ expect(findDropdown().props('items')).toEqual([
+ { text: 'Groups', options: groupOptions },
+ { text: 'Users', options: userOptions },
+ ]);
});
it('does not render users section when user namespace id is not provided', async () => {
@@ -211,8 +205,12 @@ describe('NewProjectUrlSelect component', () => {
await showDropdown();
- expect(wrapper.findAllComponents(GlDropdownSectionHeader)).toHaveLength(1);
- expect(wrapper.findAllComponents(GlDropdownSectionHeader).at(0).text()).toBe('Groups');
+ const groupOptions = data.currentUser.groups.nodes.map((node) => ({
+ text: node.fullPath,
+ value: node.id,
+ }));
+
+ expect(findDropdown().props('items')).toEqual([{ text: 'Groups', options: groupOptions }]);
});
describe('query fetching', () => {
@@ -248,12 +246,15 @@ describe('NewProjectUrlSelect component', () => {
});
it('filters the dropdown items to the selected group and children', () => {
- const listItems = wrapper.findAll('li');
+ const filteredgroupOptions = data.currentUser.groups.nodes.filter((group) =>
+ group.fullPath.startsWith(fullPath),
+ );
+ const groupOptions = filteredgroupOptions.map((node) => ({
+ text: node.fullPath,
+ value: node.id,
+ }));
- expect(listItems).toHaveLength(3);
- expect(listItems.at(0).findComponent(GlDropdownSectionHeader).text()).toBe('Groups');
- expect(listItems.at(1).text()).toBe(data.currentUser.groups.nodes[1].fullPath);
- expect(listItems.at(2).text()).toBe(data.currentUser.groups.nodes[2].fullPath);
+ expect(findDropdown().props('items')).toEqual([{ text: 'Groups', options: groupOptions }]);
});
it('sets the selection to the group', () => {
@@ -278,7 +279,7 @@ describe('NewProjectUrlSelect component', () => {
wrapper = mountComponent({ search: 'no matches', queryResponse, mountFn: mount });
await waitForPromises();
- expect(wrapper.find('li').text()).toBe('No matches found');
+ expect(wrapper.find('[data-testid="listbox-no-results-text"]').text()).toBe('No matches found');
});
it('emits `update-visibility` event to update the visibility radio options', async () => {
diff --git a/spec/frontend/projects/settings/components/new_access_dropdown_spec.js b/spec/frontend/projects/settings/components/new_access_dropdown_spec.js
index 4e3554131c6..75b239d2d94 100644
--- a/spec/frontend/projects/settings/components/new_access_dropdown_spec.js
+++ b/spec/frontend/projects/settings/components/new_access_dropdown_spec.js
@@ -74,20 +74,14 @@ describe('Access Level Dropdown', () => {
const createComponent = ({
accessLevelsData = mockAccessLevelsData,
accessLevel = ACCESS_LEVELS.PUSH,
- hasLicense,
- label,
- disabled,
- preselectedItems,
stubs = {},
+ ...optionalProps
} = {}) => {
wrapper = shallowMountExtended(AccessDropdown, {
propsData: {
accessLevelsData,
accessLevel,
- hasLicense,
- label,
- disabled,
- preselectedItems,
+ ...optionalProps,
},
stubs: {
GlSprintf,
@@ -114,10 +108,19 @@ describe('Access Level Dropdown', () => {
it('should make an api call for users, groups && deployKeys when user has a license', () => {
createComponent();
expect(getUsers).toHaveBeenCalled();
- expect(getGroups).toHaveBeenCalled();
+ expect(getGroups).toHaveBeenCalledWith({ withProjectAccess: false });
expect(getDeployKeys).toHaveBeenCalled();
});
+ describe('withProjectAccess', () => {
+ it('should make an api call for users, groups && deployKeys when user has a license', () => {
+ createComponent({ groupsWithProjectAccess: true });
+ expect(getUsers).toHaveBeenCalled();
+ expect(getGroups).toHaveBeenCalledWith({ withProjectAccess: true });
+ expect(getDeployKeys).toHaveBeenCalled();
+ });
+ });
+
it('should make an api call for deployKeys but not for users or groups when user does not have a license', () => {
createComponent({ hasLicense: false });
expect(getUsers).not.toHaveBeenCalled();
@@ -132,7 +135,7 @@ describe('Access Level Dropdown', () => {
findSearchBox().vm.$emit('input', query);
await nextTick();
expect(getUsers).toHaveBeenCalledWith(query);
- expect(getGroups).toHaveBeenCalled();
+ expect(getGroups).toHaveBeenCalledWith({ withProjectAccess: false });
expect(getDeployKeys).toHaveBeenCalledWith(query);
});
});
diff --git a/spec/frontend/projects/settings/repository/branch_rules/app_spec.js b/spec/frontend/projects/settings/repository/branch_rules/app_spec.js
index dd534bec25d..e86759ec6ca 100644
--- a/spec/frontend/projects/settings/repository/branch_rules/app_spec.js
+++ b/spec/frontend/projects/settings/repository/branch_rules/app_spec.js
@@ -1,16 +1,21 @@
-import Vue from 'vue';
+import Vue, { nextTick } from 'vue';
import VueApollo from 'vue-apollo';
-import { GlModal } from '@gitlab/ui';
+import { GlModal, GlCollapsibleListbox, GlDisclosureDropdown } from '@gitlab/ui';
+import setWindowLocation from 'helpers/set_window_location_helper';
+import { TEST_HOST } from 'helpers/test_constants';
import createMockApollo from 'helpers/mock_apollo_helper';
import waitForPromises from 'helpers/wait_for_promises';
import { mountExtended } from 'helpers/vue_test_utils_helper';
import BranchRules from '~/projects/settings/repository/branch_rules/app.vue';
import BranchRule from '~/projects/settings/repository/branch_rules/components/branch_rule.vue';
import branchRulesQuery from 'ee_else_ce/projects/settings/repository/branch_rules/graphql/queries/branch_rules.query.graphql';
+import createBranchRuleMutation from '~/projects/settings/repository/branch_rules/graphql/mutations/create_branch_rule.mutation.graphql';
+
import { createAlert } from '~/alert';
import {
branchRulesMockResponse,
appProvideMock,
+ createBranchRuleMockResponse,
} from 'ee_else_ce_jest/projects/settings/repository/branch_rules/mock_data';
import {
I18N,
@@ -31,16 +36,33 @@ Vue.use(VueApollo);
describe('Branch rules app', () => {
let wrapper;
let fakeApollo;
-
+ const openBranches = [
+ { text: 'branch1', id: 'branch1', title: 'branch1' },
+ { text: 'branch2', id: 'branch2', title: 'branch2' },
+ ];
const branchRulesQuerySuccessHandler = jest.fn().mockResolvedValue(branchRulesMockResponse);
-
- const createComponent = async ({ queryHandler = branchRulesQuerySuccessHandler } = {}) => {
- fakeApollo = createMockApollo([[branchRulesQuery, queryHandler]]);
+ const addRuleMutationSuccessHandler = jest.fn().mockResolvedValue(createBranchRuleMockResponse);
+
+ const createComponent = async ({
+ glFeatures = { addBranchRule: true },
+ queryHandler = branchRulesQuerySuccessHandler,
+ mutationHandler = addRuleMutationSuccessHandler,
+ } = {}) => {
+ fakeApollo = createMockApollo([
+ [branchRulesQuery, queryHandler],
+ [createBranchRuleMutation, mutationHandler],
+ ]);
wrapper = mountExtended(BranchRules, {
apolloProvider: fakeApollo,
- provide: appProvideMock,
- stubs: { GlModal: stubComponent(GlModal, { template: RENDER_ALL_SLOTS_TEMPLATE }) },
+ provide: {
+ ...appProvideMock,
+ glFeatures,
+ },
+ stubs: {
+ GlDisclosureDropdown,
+ GlModal: stubComponent(GlModal, { template: RENDER_ALL_SLOTS_TEMPLATE }),
+ },
directives: { GlModal: createMockDirective('gl-modal') },
});
@@ -51,9 +73,32 @@ describe('Branch rules app', () => {
const findEmptyState = () => wrapper.findByTestId('empty');
const findAddBranchRuleButton = () => wrapper.findByRole('button', I18N.addBranchRule);
const findModal = () => wrapper.findComponent(GlModal);
+ const findAddBranchRuleDropdown = () => wrapper.findComponent(GlDisclosureDropdown);
+ const findCreateBranchRuleListbox = () => wrapper.findComponent(GlCollapsibleListbox);
+
+ beforeEach(() => {
+ window.gon = {
+ open_branches: openBranches,
+ };
+ setWindowLocation(TEST_HOST);
+ });
beforeEach(() => createComponent());
+ it('renders branch rules', () => {
+ const { nodes } = branchRulesMockResponse.data.project.branchRules;
+
+ expect(findAllBranchRules().length).toBe(nodes.length);
+
+ expect(findAllBranchRules().at(0).props('name')).toBe(nodes[0].name);
+
+ expect(findAllBranchRules().at(0).props('branchProtection')).toEqual(nodes[0].branchProtection);
+
+ expect(findAllBranchRules().at(1).props('name')).toBe(nodes[1].name);
+
+ expect(findAllBranchRules().at(1).props('branchProtection')).toEqual(nodes[1].branchProtection);
+ });
+
it('displays an error if branch rules query fails', async () => {
await createComponent({ queryHandler: jest.fn().mockRejectedValue() });
expect(createAlert).toHaveBeenCalledWith({ message: I18N.queryError });
@@ -64,21 +109,65 @@ describe('Branch rules app', () => {
expect(findEmptyState().text()).toBe(I18N.emptyState);
});
- it('renders branch rules', () => {
- const { nodes } = branchRulesMockResponse.data.project.branchRules;
-
- expect(findAllBranchRules().length).toBe(nodes.length);
+ describe('Add branch rule', () => {
+ it('renders an Add branch rule dropdown', () => {
+ expect(findAddBranchRuleDropdown().props('toggleText')).toBe('Add branch rule');
+ });
- expect(findAllBranchRules().at(0).props('name')).toBe(nodes[0].name);
+ it('renders a modal with correct props/attributes', () => {
+ expect(findModal().props()).toMatchObject({
+ title: I18N.createBranchRule,
+ modalId: BRANCH_PROTECTION_MODAL_ID,
+ actionCancel: {
+ text: 'Create branch rule',
+ },
+ actionPrimary: {
+ attributes: {
+ disabled: true,
+ variant: 'confirm',
+ },
+ text: 'Create protected branch',
+ },
+ });
+ });
- expect(findAllBranchRules().at(0).props('branchProtection')).toEqual(nodes[0].branchProtection);
+ it('renders listbox with branch names', () => {
+ expect(findCreateBranchRuleListbox().exists()).toBe(true);
+ expect(findCreateBranchRuleListbox().props('items')).toHaveLength(openBranches.length);
+ expect(findCreateBranchRuleListbox().props('toggleText')).toBe(
+ 'Select Branch or create wildcard',
+ );
+ });
- expect(findAllBranchRules().at(1).props('name')).toBe(nodes[1].name);
+ it('when the primary modal action is clicked it calls create rule mutation', async () => {
+ findCreateBranchRuleListbox().vm.$emit('select', openBranches[0].text);
+ await nextTick();
+ findModal().vm.$emit('primary');
+ await nextTick();
+ await nextTick();
+ expect(addRuleMutationSuccessHandler).toHaveBeenCalledWith({
+ name: 'branch1',
+ projectPath: 'some/project/path',
+ });
+ });
- expect(findAllBranchRules().at(1).props('branchProtection')).toEqual(nodes[1].branchProtection);
+ it('shows alert when mutation fails', async () => {
+ createComponent({ mutationHandler: jest.fn().mockRejectedValue() });
+ findCreateBranchRuleListbox().vm.$emit('select', openBranches[0].text);
+ await nextTick();
+ findModal().vm.$emit('primary');
+ await waitForPromises();
+ expect(createAlert).toHaveBeenCalledWith({
+ message: 'Something went wrong while creating branch rule.',
+ });
+ });
});
- describe('Add branch rule', () => {
+ describe('Add branch rule when addBranchRule FF disabled', () => {
+ beforeEach(() => {
+ window.gon.open_branches = openBranches;
+ createComponent({ glFeatures: { addBranchRule: false } });
+ });
it('renders an Add branch rule button', () => {
expect(findAddBranchRuleButton().exists()).toBe(true);
});
diff --git a/spec/frontend/projects/settings/repository/branch_rules/mock_data.js b/spec/frontend/projects/settings/repository/branch_rules/mock_data.js
index d169397241d..5981647ce38 100644
--- a/spec/frontend/projects/settings/repository/branch_rules/mock_data.js
+++ b/spec/frontend/projects/settings/repository/branch_rules/mock_data.js
@@ -65,8 +65,22 @@ export const branchRulesMockResponse = {
},
};
+export const createBranchRuleMockResponse = {
+ data: {
+ branchRuleCreate: {
+ errors: [],
+ branchRule: {
+ name: '*dkd',
+ __typename: 'BranchRule',
+ },
+ __typename: 'BranchRuleCreatePayload',
+ },
+ },
+};
+
export const appProvideMock = {
projectPath: 'some/project/path',
+ branchRulesPath: 'settings/repository/branch_rules',
};
export const branchRuleProvideMock = {
diff --git a/spec/frontend/releases/__snapshots__/util_spec.js.snap b/spec/frontend/releases/__snapshots__/util_spec.js.snap
index c02c1bb959c..983db8846c6 100644
--- a/spec/frontend/releases/__snapshots__/util_spec.js.snap
+++ b/spec/frontend/releases/__snapshots__/util_spec.js.snap
@@ -165,7 +165,9 @@ Object {
dir="auto"
>
Best. Release.
- <strong>
+ <strong
+ data-sourcepos="1:16-1:24"
+ >
Ever.
</strong>
<gl-emoji
@@ -400,7 +402,9 @@ Object {
dir="auto"
>
Best. Release.
- <strong>
+ <strong
+ data-sourcepos="1:16-1:24"
+ >
Ever.
</strong>
<gl-emoji
diff --git a/spec/frontend/releases/components/app_edit_new_spec.js b/spec/frontend/releases/components/app_edit_new_spec.js
index 15436832be8..90f31dca232 100644
--- a/spec/frontend/releases/components/app_edit_new_spec.js
+++ b/spec/frontend/releases/components/app_edit_new_spec.js
@@ -319,6 +319,25 @@ describe('Release edit/new component', () => {
expect(actions.saveRelease).not.toHaveBeenCalled();
});
});
+
+ describe('when tag notes are loading', () => {
+ beforeEach(async () => {
+ await factory({
+ store: {
+ modules: {
+ editNew: {
+ state: {
+ isFetchingTagNotes: true,
+ },
+ },
+ },
+ },
+ });
+ });
+ it('renders the submit button as disabled', () => {
+ expect(findSubmitButton().attributes('disabled')).toBeDefined();
+ });
+ });
});
describe('delete', () => {
diff --git a/spec/frontend/releases/components/app_index_spec.js b/spec/frontend/releases/components/app_index_spec.js
index b8507dc5fb4..4417dc67dc4 100644
--- a/spec/frontend/releases/components/app_index_spec.js
+++ b/spec/frontend/releases/components/app_index_spec.js
@@ -1,11 +1,13 @@
import { cloneDeep } from 'lodash';
import Vue, { nextTick } from 'vue';
import VueApollo from 'vue-apollo';
+import { GlAlert } from '@gitlab/ui';
import originalAllReleasesQueryResponse from 'test_fixtures/graphql/releases/graphql/queries/all_releases.query.graphql.json';
import createMockApollo from 'helpers/mock_apollo_helper';
import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
import waitForPromises from 'helpers/wait_for_promises';
import allReleasesQuery from '~/releases/graphql/queries/all_releases.query.graphql';
+import getCiCatalogSettingsQuery from '~/ci/catalog/graphql/queries/get_ci_catalog_settings.query.graphql';
import { createAlert, VARIANT_SUCCESS } from '~/alert';
import { historyPushState } from '~/lib/utils/common_utils';
import ReleasesIndexApp from '~/releases/components/app_index.vue';
@@ -16,6 +18,7 @@ import ReleasesPagination from '~/releases/components/releases_pagination.vue';
import ReleasesSort from '~/releases/components/releases_sort.vue';
import { PAGE_SIZE, CREATED_ASC, DEFAULT_SORT } from '~/releases/constants';
import { deleteReleaseSessionKey } from '~/releases/release_notification_service';
+import { generateCatalogSettingsResponse } from '../mock_data';
Vue.use(VueApollo);
@@ -46,19 +49,22 @@ describe('app_index.vue', () => {
let noReleases;
let queryMock;
let toast;
+ let ciCatalogSettingsResponse;
const createComponent = ({
singleResponse = Promise.resolve(singleRelease),
fullResponse = Promise.resolve(allReleases),
} = {}) => {
- const apolloProvider = createMockApollo([
+ const handlers = [
[
allReleasesQuery,
queryMock.mockImplementation((vars) => {
return vars.first === 1 ? singleResponse : fullResponse;
}),
],
- ]);
+ [getCiCatalogSettingsQuery, ciCatalogSettingsResponse],
+ ];
+ const apolloProvider = createMockApollo(handlers);
toast = jest.fn();
@@ -98,6 +104,7 @@ describe('app_index.vue', () => {
const findAllReleaseBlocks = () => wrapper.findAllComponents(ReleaseBlock);
const findPagination = () => wrapper.findComponent(ReleasesPagination);
const findSort = () => wrapper.findComponent(ReleasesSort);
+ const findCatalogAlert = () => wrapper.findComponent(GlAlert);
// Tests
describe('component states', () => {
@@ -162,7 +169,9 @@ describe('app_index.vue', () => {
error: expect.any(Error),
});
} else {
- expect(createAlert).not.toHaveBeenCalled();
+ expect(createAlert).not.toHaveBeenCalledWith({
+ error: expect.any(Error),
+ });
}
});
@@ -412,7 +421,6 @@ describe('app_index.vue', () => {
});
it('shows a toast', () => {
- expect(createAlert).toHaveBeenCalledTimes(1);
expect(createAlert).toHaveBeenCalledWith({
message: `Release ${release} has been successfully deleted.`,
variant: VARIANT_SUCCESS,
@@ -423,4 +431,32 @@ describe('app_index.vue', () => {
expect(window.sessionStorage.getItem(key)).toBe(null);
});
});
+
+ describe('CI/CD Catalog Alert', () => {
+ beforeEach(() => {
+ ciCatalogSettingsResponse = jest.fn();
+ });
+
+ describe('when the project is a catalog resource', () => {
+ beforeEach(async () => {
+ ciCatalogSettingsResponse.mockResolvedValue(generateCatalogSettingsResponse(true));
+ await createComponent();
+ });
+
+ it('renders the CI/CD Catalog alert', () => {
+ expect(findCatalogAlert().exists()).toBe(true);
+ });
+ });
+
+ describe('when the project is not a catalog resource', () => {
+ beforeEach(async () => {
+ ciCatalogSettingsResponse.mockResolvedValue(generateCatalogSettingsResponse(false));
+ await createComponent();
+ });
+
+ it('does not render the CI/CD Catalog alert', () => {
+ expect(findCatalogAlert().exists()).toBe(false);
+ });
+ });
+ });
});
diff --git a/spec/frontend/releases/mock_data.js b/spec/frontend/releases/mock_data.js
index c89182faa44..d0ed883fb5b 100644
--- a/spec/frontend/releases/mock_data.js
+++ b/spec/frontend/releases/mock_data.js
@@ -15,3 +15,14 @@ export const pageInfoHeadersWithPagination = {
'X-TOTAL': '21',
'X-TOTAL-PAGES': '2',
};
+
+export const generateCatalogSettingsResponse = (isCatalogResource = false) => {
+ return {
+ data: {
+ project: {
+ id: 'gid://gitlab/Project/149',
+ isCatalogResource,
+ },
+ },
+ };
+};
diff --git a/spec/frontend/releases/stores/modules/detail/actions_spec.js b/spec/frontend/releases/stores/modules/detail/actions_spec.js
index a55b6cdef92..4dc55c12464 100644
--- a/spec/frontend/releases/stores/modules/detail/actions_spec.js
+++ b/spec/frontend/releases/stores/modules/detail/actions_spec.js
@@ -4,6 +4,7 @@ import testAction from 'helpers/vuex_action_helper';
import { useLocalStorageSpy } from 'helpers/local_storage_helper';
import { getTag } from '~/api/tags_api';
import { createAlert } from '~/alert';
+import { HTTP_STATUS_NOT_FOUND } from '~/lib/utils/http_status';
import { redirectTo } from '~/lib/utils/url_utility'; // eslint-disable-line import/no-deprecated
import AccessorUtilities from '~/lib/utils/accessor';
import { s__ } from '~/locale';
@@ -128,6 +129,38 @@ describe('Release edit/new actions', () => {
{ type: types.INITIALIZE_RELEASE, payload: release },
{ type: types.UPDATE_CREATE_FROM, payload: createFrom },
],
+ expectedActions: [{ type: 'fetchTagNotes', payload: release.tagName }],
+ });
+ });
+
+ it('with no tag name, does not fetch tag information', () => {
+ const release = {
+ tagName: '',
+ tagMessage: 'hello',
+ name: '',
+ description: '',
+ milestones: [],
+ groupMilestones: [],
+ releasedAt: new Date(),
+ assets: {
+ links: [],
+ },
+ };
+ const createFrom = 'main';
+
+ window.localStorage.setItem(`${state.projectPath}/release/new`, JSON.stringify(release));
+ window.localStorage.setItem(
+ `${state.projectPath}/release/new/createFrom`,
+ JSON.stringify(createFrom),
+ );
+
+ return testAction({
+ action: actions.loadDraftRelease,
+ state,
+ expectedMutations: [
+ { type: types.INITIALIZE_RELEASE, payload: release },
+ { type: types.UPDATE_CREATE_FROM, payload: createFrom },
+ ],
});
});
});
@@ -988,6 +1021,7 @@ describe('Release edit/new actions', () => {
expect(getTag).toHaveBeenCalledWith(state.projectId, tagName);
});
+
it('creates an alert on error', async () => {
error = new Error();
getTag.mockRejectedValue(error);
@@ -1007,5 +1041,23 @@ describe('Release edit/new actions', () => {
});
expect(getTag).toHaveBeenCalledWith(state.projectId, tagName);
});
+
+ it('assumes creating a tag on 404', async () => {
+ error = { response: { status: HTTP_STATUS_NOT_FOUND } };
+ getTag.mockRejectedValue(error);
+
+ await testAction({
+ action: actions.fetchTagNotes,
+ payload: tagName,
+ state,
+ expectedMutations: [
+ { type: types.REQUEST_TAG_NOTES },
+ { type: types.RECEIVE_TAG_NOTES_SUCCESS, payload: {} },
+ ],
+ expectedActions: [{ type: 'setNewTag' }, { type: 'setCreating' }],
+ });
+
+ expect(getTag).toHaveBeenCalledWith(state.projectId, tagName);
+ });
});
});
diff --git a/spec/frontend/releases/stores/modules/detail/getters_spec.js b/spec/frontend/releases/stores/modules/detail/getters_spec.js
index 24490e19296..30a3c78641c 100644
--- a/spec/frontend/releases/stores/modules/detail/getters_spec.js
+++ b/spec/frontend/releases/stores/modules/detail/getters_spec.js
@@ -424,7 +424,7 @@ describe('Release edit/new getters', () => {
describe('formattedReleaseNotes', () => {
it.each`
- description | includeTagNotes | tagNotes | included | showCreateFrom
+ description | includeTagNotes | tagNotes | included | isNewTag
${'release notes'} | ${true} | ${'tag notes'} | ${true} | ${false}
${'release notes'} | ${true} | ${''} | ${false} | ${false}
${'release notes'} | ${false} | ${'tag notes'} | ${false} | ${false}
@@ -432,25 +432,24 @@ describe('Release edit/new getters', () => {
${'release notes'} | ${true} | ${''} | ${false} | ${true}
${'release notes'} | ${false} | ${'tag notes'} | ${false} | ${true}
`(
- 'should include tag notes=$included when includeTagNotes=$includeTagNotes and tagNotes=$tagNotes and showCreateFrom=$showCreateFrom',
- ({ description, includeTagNotes, tagNotes, included, showCreateFrom }) => {
+ 'should include tag notes=$included when includeTagNotes=$includeTagNotes and tagNotes=$tagNotes and isNewTag=$isNewTag',
+ ({ description, includeTagNotes, tagNotes, included, isNewTag }) => {
let state;
- if (showCreateFrom) {
+ if (isNewTag) {
state = {
release: { description, tagMessage: tagNotes },
includeTagNotes,
- showCreateFrom,
};
} else {
- state = { release: { description }, includeTagNotes, tagNotes, showCreateFrom };
+ state = { release: { description }, includeTagNotes, tagNotes };
}
const text = `### ${s__('Releases|Tag message')}\n\n${tagNotes}\n`;
if (included) {
- expect(getters.formattedReleaseNotes(state)).toContain(text);
+ expect(getters.formattedReleaseNotes(state, { isNewTag })).toContain(text);
} else {
- expect(getters.formattedReleaseNotes(state)).not.toContain(text);
+ expect(getters.formattedReleaseNotes(state, { isNewTag })).not.toContain(text);
}
},
);
diff --git a/spec/frontend/search/store/actions_spec.js b/spec/frontend/search/store/actions_spec.js
index 889260fc478..1c70bfcf0ef 100644
--- a/spec/frontend/search/store/actions_spec.js
+++ b/spec/frontend/search/store/actions_spec.js
@@ -40,7 +40,15 @@ jest.mock('~/lib/utils/url_utility', () => ({
setUrlParams: jest.fn(),
joinPaths: jest.fn().mockReturnValue(''),
visitUrl: jest.fn(),
+ queryToObject: jest.fn().mockReturnValue({ scope: 'projects', search: '' }),
+ objectToQuery: jest.fn((params) =>
+ Object.keys(params)
+ .map((k) => `${encodeURIComponent(k)}=${encodeURIComponent(params[k])}`)
+ .join('&'),
+ ),
+ getBaseURL: jest.fn().mockReturnValue('http://gdk.test:3000'),
}));
+
jest.mock('~/lib/logger', () => ({
logError: jest.fn(),
}));
@@ -328,6 +336,23 @@ describe('Global Search Store Actions', () => {
});
});
+ describe('fetchSidebarCount uses wild card seach', () => {
+ beforeEach(() => {
+ state.navigation = mapValues(MOCK_NAVIGATION_DATA, (navItem) => ({
+ ...navItem,
+ count_link: '/search/count?scope=projects&search=',
+ }));
+ state.urlQuery.search = '';
+ });
+
+ it('should use wild card', async () => {
+ await testAction({ action: actions.fetchSidebarCount, state, expectedMutations: [] });
+ expect(mock.history.get[0].url).toBe(
+ 'http://gdk.test:3000/search/count?scope=projects&search=*',
+ );
+ });
+ });
+
describe.each`
action | axiosMock | type | expectedMutations | errorLogs
${actions.fetchAllAggregation} | ${{ method: 'onGet', code: HTTP_STATUS_OK }} | ${'success'} | ${MOCK_RECEIVE_AGGREGATIONS_SUCCESS_MUTATION} | ${0}
diff --git a/spec/frontend/security_configuration/components/feature_card_spec.js b/spec/frontend/security_configuration/components/feature_card_spec.js
index 9efee2a409a..f1826e0e138 100644
--- a/spec/frontend/security_configuration/components/feature_card_spec.js
+++ b/spec/frontend/security_configuration/components/feature_card_spec.js
@@ -1,7 +1,7 @@
import { GlIcon } from '@gitlab/ui';
import { mount } from '@vue/test-utils';
import { extendedWrapper } from 'helpers/vue_test_utils_helper';
-import { securityFeatures } from '~/security_configuration/constants';
+import { securityFeatures } from 'jest/security_configuration/mock_data';
import FeatureCard from '~/security_configuration/components/feature_card.vue';
import FeatureCardBadge from '~/security_configuration/components/feature_card_badge.vue';
import ManageViaMr from '~/vue_shared/security_configuration/components/manage_via_mr.vue';
diff --git a/spec/frontend/security_configuration/mock_data.js b/spec/frontend/security_configuration/mock_data.js
index 208256afdbd..f47d4f69cd0 100644
--- a/spec/frontend/security_configuration/mock_data.js
+++ b/spec/frontend/security_configuration/mock_data.js
@@ -1,11 +1,17 @@
import {
SAST_NAME,
SAST_SHORT_NAME,
- SAST_DESCRIPTION,
- SAST_HELP_PATH,
- SAST_CONFIG_HELP_PATH,
+ SAST_IAC_NAME,
+ SAST_IAC_SHORT_NAME,
} from '~/security_configuration/constants';
-import { REPORT_TYPE_SAST } from '~/vue_shared/security_reports/constants';
+import { __, s__ } from '~/locale';
+import { helpPagePath } from '~/helpers/help_page_helper';
+
+import {
+ REPORT_TYPE_SAST,
+ REPORT_TYPE_BREACH_AND_ATTACK_SIMULATION,
+ REPORT_TYPE_SAST_IAC,
+} from '~/vue_shared/security_reports/constants';
export const testProjectPath = 'foo/bar';
export const testProviderIds = [101, 102, 103];
@@ -16,6 +22,71 @@ export const testTrainingUrls = [
'https://www.vendornamethree.com/url',
];
+const SAST_DESCRIPTION = __('Analyze your source code for known vulnerabilities.');
+const SAST_HELP_PATH = helpPagePath('user/application_security/sast/index');
+const SAST_CONFIG_HELP_PATH = helpPagePath('user/application_security/sast/index', {
+ anchor: 'configuration',
+});
+
+const BAS_BADGE_TEXT = s__('SecurityConfiguration|Incubating feature');
+const BAS_BADGE_TOOLTIP = s__(
+ 'SecurityConfiguration|Breach and Attack Simulation is an incubating feature extending existing security testing by simulating adversary activity.',
+);
+const BAS_DESCRIPTION = s__(
+ 'SecurityConfiguration|Simulate breach and attack scenarios against your running application by attempting to detect and exploit known vulnerabilities.',
+);
+const BAS_HELP_PATH = helpPagePath('user/application_security/breach_and_attack_simulation/index');
+const BAS_NAME = s__('SecurityConfiguration|Breach and Attack Simulation (BAS)');
+const BAS_SHORT_NAME = s__('SecurityConfiguration|BAS');
+const BAS_DAST_FEATURE_FLAG_DESCRIPTION = s__(
+ 'SecurityConfiguration|Enable incubating Breach and Attack Simulation focused features such as callback attacks in your DAST scans.',
+);
+const BAS_DAST_FEATURE_FLAG_HELP_PATH = helpPagePath(
+ 'user/application_security/breach_and_attack_simulation/index',
+ { anchor: 'extend-dynamic-application-security-testing-dast' },
+);
+const BAS_DAST_FEATURE_FLAG_NAME = s__(
+ 'SecurityConfiguration|Out-of-Band Application Security Testing (OAST)',
+);
+
+const SAST_IAC_DESCRIPTION = __(
+ 'Analyze your infrastructure as code configuration files for known vulnerabilities.',
+);
+const SAST_IAC_HELP_PATH = helpPagePath('user/application_security/iac_scanning/index');
+const SAST_IAC_CONFIG_HELP_PATH = helpPagePath('user/application_security/iac_scanning/index', {
+ anchor: 'configuration',
+});
+
+export const securityFeatures = [
+ {
+ anchor: 'bas',
+ badge: {
+ alwaysDisplay: true,
+ text: BAS_BADGE_TEXT,
+ tooltipText: BAS_BADGE_TOOLTIP,
+ variant: 'info',
+ },
+ description: BAS_DESCRIPTION,
+ name: BAS_NAME,
+ helpPath: BAS_HELP_PATH,
+ secondary: {
+ configurationHelpPath: BAS_DAST_FEATURE_FLAG_HELP_PATH,
+ description: BAS_DAST_FEATURE_FLAG_DESCRIPTION,
+ name: BAS_DAST_FEATURE_FLAG_NAME,
+ },
+ shortName: BAS_SHORT_NAME,
+ type: REPORT_TYPE_BREACH_AND_ATTACK_SIMULATION,
+ },
+ {
+ name: SAST_IAC_NAME,
+ shortName: SAST_IAC_SHORT_NAME,
+ description: SAST_IAC_DESCRIPTION,
+ helpPath: SAST_IAC_HELP_PATH,
+ configurationHelpPath: SAST_IAC_CONFIG_HELP_PATH,
+ type: REPORT_TYPE_SAST_IAC,
+ },
+];
+
const createSecurityTrainingProviders = ({ providerOverrides = {} }) => [
{
id: testProviderIds[0],
diff --git a/spec/frontend/security_configuration/utils_spec.js b/spec/frontend/security_configuration/utils_spec.js
index 3c6d4baa30f..f2eeaca8987 100644
--- a/spec/frontend/security_configuration/utils_spec.js
+++ b/spec/frontend/security_configuration/utils_spec.js
@@ -6,6 +6,46 @@ describe('augmentFeatures', () => {
{
name: 'SAST',
type: 'SAST',
+ security_features: {
+ type: 'SAST',
+ },
+ },
+ ];
+
+ const expectedMockSecurityFeatures = [
+ {
+ name: 'SAST',
+ type: 'SAST',
+ securityFeatures: {
+ type: 'SAST',
+ },
+ },
+ ];
+
+ const expectedInvalidMockSecurityFeatures = [
+ {
+ foo: 'bar',
+ name: 'SAST',
+ type: 'SAST',
+ securityFeatures: {
+ type: 'SAST',
+ },
+ },
+ ];
+
+ const expectedSecondarymockSecurityFeatures = [
+ {
+ name: 'DAST',
+ type: 'DAST',
+ helpPath: '/help/user/application_security/dast/index',
+ secondary: {
+ type: 'DAST PROFILES',
+ name: 'DAST PROFILES',
+ },
+ securityFeatures: {
+ type: 'DAST',
+ helpPath: '/help/user/application_security/dast/index',
+ },
},
];
@@ -17,6 +57,10 @@ describe('augmentFeatures', () => {
type: 'DAST PROFILES',
name: 'DAST PROFILES',
},
+ security_features: {
+ type: 'DAST',
+ help_path: '/help/user/application_security/dast/index',
+ },
},
];
@@ -31,6 +75,9 @@ describe('augmentFeatures', () => {
name: 'SAST',
type: 'SAST',
customField: 'customvalue',
+ securityFeatures: {
+ type: 'SAST',
+ },
},
];
@@ -38,6 +85,9 @@ describe('augmentFeatures', () => {
{
name: 'DAST',
type: 'dast',
+ security_features: {
+ type: 'DAST',
+ },
},
];
@@ -48,6 +98,9 @@ describe('augmentFeatures', () => {
customField: 'customvalue',
onDemandAvailable: false,
badge: {},
+ security_features: {
+ type: 'dast',
+ },
},
];
@@ -58,6 +111,9 @@ describe('augmentFeatures', () => {
customField: 'customvalue',
onDemandAvailable: true,
badge: {},
+ security_features: {
+ type: 'dast',
+ },
},
];
@@ -70,11 +126,15 @@ describe('augmentFeatures', () => {
];
const expectedOutputDefault = {
- augmentedSecurityFeatures: mockSecurityFeatures,
+ augmentedSecurityFeatures: expectedMockSecurityFeatures,
+ };
+
+ const expectedInvalidOutputDefault = {
+ augmentedSecurityFeatures: expectedInvalidMockSecurityFeatures,
};
const expectedOutputSecondary = {
- augmentedSecurityFeatures: mockSecurityFeatures,
+ augmentedSecurityFeatures: expectedSecondarymockSecurityFeatures,
};
const expectedOutputCustomFeature = {
@@ -88,6 +148,9 @@ describe('augmentFeatures', () => {
type: 'dast',
customField: 'customvalue',
onDemandAvailable: false,
+ securityFeatures: {
+ type: 'dast',
+ },
},
],
};
@@ -100,52 +163,62 @@ describe('augmentFeatures', () => {
customField: 'customvalue',
onDemandAvailable: true,
badge: {},
+ securityFeatures: {
+ type: 'dast',
+ },
},
],
};
describe('returns an object with augmentedSecurityFeatures when', () => {
- it('given an empty array', () => {
- expect(augmentFeatures(mockSecurityFeatures, [])).toEqual(expectedOutputDefault);
+ it('given an properly formatted array', () => {
+ expect(augmentFeatures(mockSecurityFeatures)).toEqual(expectedOutputDefault);
});
it('given an invalid populated array', () => {
- expect(augmentFeatures(mockSecurityFeatures, mockInvalidCustomFeature)).toEqual(
- expectedOutputDefault,
- );
+ expect(
+ augmentFeatures([{ ...mockSecurityFeatures[0], ...mockInvalidCustomFeature[0] }]),
+ ).toEqual(expectedInvalidOutputDefault);
});
it('features have secondary key', () => {
- expect(augmentFeatures(mockSecurityFeatures, mockFeaturesWithSecondary, [])).toEqual(
- expectedOutputSecondary,
- );
+ expect(
+ augmentFeatures([{ ...mockSecurityFeatures[0], ...mockFeaturesWithSecondary[0] }]),
+ ).toEqual(expectedOutputSecondary);
});
it('given a valid populated array', () => {
- expect(augmentFeatures(mockSecurityFeatures, mockValidCustomFeature)).toEqual(
- expectedOutputCustomFeature,
- );
+ expect(
+ augmentFeatures([{ ...mockSecurityFeatures[0], ...mockValidCustomFeature[0] }]),
+ ).toEqual(expectedOutputCustomFeature);
});
});
describe('returns an object with camelcased keys', () => {
it('given a customfeature in snakecase', () => {
- expect(augmentFeatures(mockSecurityFeatures, mockValidCustomFeatureSnakeCase)).toEqual(
- expectedOutputCustomFeature,
- );
+ expect(
+ augmentFeatures([{ ...mockSecurityFeatures[0], ...mockValidCustomFeatureSnakeCase[0] }]),
+ ).toEqual(expectedOutputCustomFeature);
});
});
describe('follows onDemandAvailable', () => {
it('deletes badge when false', () => {
expect(
- augmentFeatures(mockSecurityFeaturesDast, mockValidCustomFeatureWithOnDemandAvailableFalse),
+ augmentFeatures([
+ {
+ ...mockSecurityFeaturesDast[0],
+ ...mockValidCustomFeatureWithOnDemandAvailableFalse[0],
+ },
+ ]),
).toEqual(expectedOutputCustomFeatureWithOnDemandAvailableFalse);
});
it('keeps badge when true', () => {
expect(
- augmentFeatures(mockSecurityFeaturesDast, mockValidCustomFeatureWithOnDemandAvailableTrue),
+ augmentFeatures([
+ { ...mockSecurityFeaturesDast[0], ...mockValidCustomFeatureWithOnDemandAvailableTrue[0] },
+ ]),
).toEqual(expectedOutputCustomFeatureWithOnDemandAvailableTrue);
});
});
diff --git a/spec/frontend/sidebar/components/labels/labels_select_widget/dropdown_contents_create_view_spec.js b/spec/frontend/sidebar/components/labels/labels_select_widget/dropdown_contents_create_view_spec.js
index 5e2ff73878f..7180e10e7b1 100644
--- a/spec/frontend/sidebar/components/labels/labels_select_widget/dropdown_contents_create_view_spec.js
+++ b/spec/frontend/sidebar/components/labels/labels_select_widget/dropdown_contents_create_view_spec.js
@@ -1,4 +1,4 @@
-import { GlAlert, GlLoadingIcon, GlLink } from '@gitlab/ui';
+import { GlAlert, GlLoadingIcon } from '@gitlab/ui';
import { shallowMount } from '@vue/test-utils';
import Vue, { nextTick } from 'vue';
import VueApollo from 'vue-apollo';
@@ -7,6 +7,7 @@ import waitForPromises from 'helpers/wait_for_promises';
import { createAlert } from '~/alert';
import { workspaceLabelsQueries, workspaceCreateLabelMutation } from '~/sidebar/queries/constants';
import DropdownContentsCreateView from '~/sidebar/components/labels/labels_select_widget/dropdown_contents_create_view.vue';
+import SibebarColorPicker from '~/sidebar/components/sidebar_color_picker.vue';
import { DEFAULT_LABEL_COLOR } from '~/sidebar/components/labels/labels_select_widget/constants';
import {
mockCreateLabelResponse as createAbuseReportLabelSuccessfulResponse,
@@ -14,7 +15,6 @@ import {
} from '../../../../admin/abuse_report/mock_data';
import {
mockRegularLabel,
- mockSuggestedColors,
createLabelSuccessfulResponse,
workspaceLabelsQueryResponse,
workspaceLabelsQueryEmptyResponse,
@@ -22,8 +22,6 @@ import {
jest.mock('~/alert');
-const colors = Object.keys(mockSuggestedColors);
-
Vue.use(VueApollo);
const userRecoverableError = {
@@ -51,9 +49,7 @@ const createLabelErrorHandler = jest.fn().mockRejectedValue('Houston, we have a
describe('DropdownContentsCreateView', () => {
let wrapper;
- const findAllColors = () => wrapper.findAllComponents(GlLink);
- const findSelectedColor = () => wrapper.find('[data-testid="selected-color"]');
- const findSelectedColorText = () => wrapper.find('[data-testid="selected-color-text"]');
+ const findSibebarColorPicker = () => wrapper.findComponent(SibebarColorPicker);
const findCreateButton = () => wrapper.find('[data-testid="create-button"]');
const findCancelButton = () => wrapper.find('[data-testid="cancel-button"]');
const findLabelTitleInput = () => wrapper.find('[data-testid="label-title-input"]');
@@ -62,7 +58,7 @@ describe('DropdownContentsCreateView', () => {
const fillLabelAttributes = () => {
findLabelTitleInput().vm.$emit('input', 'Test title');
- findAllColors().at(0).vm.$emit('click', new Event('mouseclick'));
+ findSibebarColorPicker().vm.$emit('input', '#009966');
};
const createComponent = ({
@@ -94,38 +90,9 @@ describe('DropdownContentsCreateView', () => {
});
};
- beforeEach(() => {
- gon.suggested_label_colors = mockSuggestedColors;
- });
-
- it('renders a palette of 21 colors', () => {
- createComponent();
- expect(findAllColors()).toHaveLength(21);
- });
-
- it('selects a color after clicking on colored block', async () => {
- createComponent();
- expect(findSelectedColorText().attributes('value')).toBe(DEFAULT_LABEL_COLOR);
-
- findAllColors().at(0).vm.$emit('click', new Event('mouseclick'));
- await nextTick();
-
- expect(findSelectedColor().attributes('value')).toBe('#009966');
- });
-
- it('shows correct color hex code after selecting a color', async () => {
- createComponent();
- expect(findSelectedColorText().attributes('value')).toBe(DEFAULT_LABEL_COLOR);
-
- findAllColors().at(0).vm.$emit('click', new Event('mouseclick'));
- await nextTick();
-
- expect(findSelectedColorText().attributes('value')).toBe(colors[0]);
- });
-
it('disables a Create button if label title is not set', async () => {
createComponent();
- findAllColors().at(0).vm.$emit('click', new Event('mouseclick'));
+ findSibebarColorPicker().vm.$emit('input', '#fff');
await nextTick();
expect(findCreateButton().props('disabled')).toBe(true);
@@ -134,7 +101,7 @@ describe('DropdownContentsCreateView', () => {
it('disables a Create button if color is not set', async () => {
createComponent();
findLabelTitleInput().vm.$emit('input', 'Test title');
- findSelectedColorText().vm.$emit('input', '');
+ findSibebarColorPicker().vm.$emit('input', '');
await nextTick();
expect(findCreateButton().props('disabled')).toBe(true);
diff --git a/spec/frontend/sidebar/components/labels/labels_select_widget/mock_data.js b/spec/frontend/sidebar/components/labels/labels_select_widget/mock_data.js
index 5039f00fe4b..eb7ab2953c6 100644
--- a/spec/frontend/sidebar/components/labels/labels_select_widget/mock_data.js
+++ b/spec/frontend/sidebar/components/labels/labels_select_widget/mock_data.js
@@ -58,30 +58,6 @@ export const mockConfig = {
attrWorkspacePath: 'test',
};
-export const mockSuggestedColors = {
- '#009966': 'Green-cyan',
- '#8fbc8f': 'Dark sea green',
- '#3cb371': 'Medium sea green',
- '#00b140': 'Green screen',
- '#013220': 'Dark green',
- '#6699cc': 'Blue-gray',
- '#0000ff': 'Blue',
- '#e6e6fa': 'Lavender',
- '#9400d3': 'Dark violet',
- '#330066': 'Deep violet',
- '#808080': 'Gray',
- '#36454f': 'Charcoal grey',
- '#f7e7ce': 'Champagne',
- '#c21e56': 'Rose red',
- '#cc338b': 'Magenta-pink',
- '#dc143c': 'Crimson',
- '#ff0000': 'Red',
- '#cd5b45': 'Dark coral',
- '#eee600': 'Titanium yellow',
- '#ed9121': 'Carrot orange',
- '#c39953': 'Aztec Gold',
-};
-
export const createLabelSuccessfulResponse = {
data: {
labelCreate: {
diff --git a/spec/frontend/sidebar/components/mock_data.js b/spec/frontend/sidebar/components/mock_data.js
index a9a00b3cfdf..b1b52674eb5 100644
--- a/spec/frontend/sidebar/components/mock_data.js
+++ b/spec/frontend/sidebar/components/mock_data.js
@@ -56,3 +56,27 @@ export const issueCrmContactsUpdateResponse = {
},
},
};
+
+export const mockSuggestedColors = {
+ '#009966': 'Green-cyan',
+ '#8fbc8f': 'Dark sea green',
+ '#3cb371': 'Medium sea green',
+ '#00b140': 'Green screen',
+ '#013220': 'Dark green',
+ '#6699cc': 'Blue-gray',
+ '#0000ff': 'Blue',
+ '#e6e6fa': 'Lavender',
+ '#9400d3': 'Dark violet',
+ '#330066': 'Deep violet',
+ '#808080': 'Gray',
+ '#36454f': 'Charcoal grey',
+ '#f7e7ce': 'Champagne',
+ '#c21e56': 'Rose red',
+ '#cc338b': 'Magenta-pink',
+ '#dc143c': 'Crimson',
+ '#ff0000': 'Red',
+ '#cd5b45': 'Dark coral',
+ '#eee600': 'Titanium yellow',
+ '#ed9121': 'Carrot orange',
+ '#c39953': 'Aztec Gold',
+};
diff --git a/spec/frontend/sidebar/components/sidebar_color_picker_spec.js b/spec/frontend/sidebar/components/sidebar_color_picker_spec.js
new file mode 100644
index 00000000000..7ce556fe368
--- /dev/null
+++ b/spec/frontend/sidebar/components/sidebar_color_picker_spec.js
@@ -0,0 +1,58 @@
+import { GlFormInput, GlLink } from '@gitlab/ui';
+import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
+import SibebarColorPicker from '~/sidebar/components/sidebar_color_picker.vue';
+import { mockSuggestedColors } from './mock_data';
+
+describe('SibebarColorPicker', () => {
+ let wrapper;
+ const findAllColors = () => wrapper.findAllComponents(GlLink);
+ const findFirstColor = () => findAllColors().at(0);
+ const findColorPicker = () => wrapper.findComponent(GlFormInput);
+ const findColorPickerText = () => wrapper.findByTestId('selected-color-text');
+
+ const createComponent = ({ value = '' } = {}) => {
+ wrapper = shallowMountExtended(SibebarColorPicker, {
+ propsData: {
+ value,
+ },
+ });
+ };
+
+ beforeEach(() => {
+ gon.suggested_label_colors = mockSuggestedColors;
+ });
+
+ it('renders a palette of 21 colors', () => {
+ createComponent();
+ expect(findAllColors()).toHaveLength(21);
+ });
+
+ it('renders value of the color in textbox', () => {
+ createComponent({ value: '#343434' });
+ expect(findColorPickerText().attributes('value')).toBe('#343434');
+ });
+
+ describe('color picker', () => {
+ beforeEach(() => {
+ createComponent();
+ });
+
+ it('emits color on click of suggested color link', () => {
+ findFirstColor().vm.$emit('click', new Event('mouseclick'));
+
+ expect(wrapper.emitted('input')).toEqual([['#009966']]);
+ });
+
+ it('emits color on selecting color from picker', () => {
+ findColorPicker().vm.$emit('input', '#ffffff');
+
+ expect(wrapper.emitted('input')).toEqual([['#ffffff']]);
+ });
+
+ it('emits color on typing the hex code in the input', () => {
+ findColorPickerText().vm.$emit('input', '#000000');
+
+ expect(wrapper.emitted('input')).toEqual([['#000000']]);
+ });
+ });
+});
diff --git a/spec/frontend/super_sidebar/components/create_menu_spec.js b/spec/frontend/super_sidebar/components/create_menu_spec.js
index ffbc789d220..c2f608b4f52 100644
--- a/spec/frontend/super_sidebar/components/create_menu_spec.js
+++ b/spec/frontend/super_sidebar/components/create_menu_spec.js
@@ -31,6 +31,7 @@ describe('CreateMenu component', () => {
stubs: {
InviteMembersTrigger,
GlDisclosureDropdown,
+ GlEmoji: { template: '<div/>' },
},
directives: {
GlTooltip: createMockDirective('gl-tooltip'),
diff --git a/spec/frontend/super_sidebar/components/user_menu_spec.js b/spec/frontend/super_sidebar/components/user_menu_spec.js
index 4af3247693b..7d50a2b3441 100644
--- a/spec/frontend/super_sidebar/components/user_menu_spec.js
+++ b/spec/frontend/super_sidebar/components/user_menu_spec.js
@@ -506,6 +506,64 @@ describe('UserMenu component', () => {
});
});
+ describe('Admin Mode items', () => {
+ const findEnterAdminModeItem = () => wrapper.findByTestId('enter-admin-mode-item');
+ const findLeaveAdminModeItem = () => wrapper.findByTestId('leave-admin-mode-item');
+
+ describe('when user is not admin', () => {
+ it('should not render', () => {
+ createWrapper({
+ admin_mode: {
+ user_is_admin: false,
+ },
+ });
+ expect(findEnterAdminModeItem().exists()).toBe(false);
+ expect(findLeaveAdminModeItem().exists()).toBe(false);
+ });
+ });
+
+ describe('when user is admin but admin mode feature is not enabled', () => {
+ it('should not render', () => {
+ createWrapper({
+ admin_mode: {
+ user_is_admin: true,
+ admin_mode_feature_enabled: false,
+ },
+ });
+ expect(findEnterAdminModeItem().exists()).toBe(false);
+ expect(findLeaveAdminModeItem().exists()).toBe(false);
+ });
+ });
+
+ describe('when user is admin, admin mode feature is enabled but inactive', () => {
+ it('should render only "enter admin mode" item', () => {
+ createWrapper({
+ admin_mode: {
+ user_is_admin: true,
+ admin_mode_feature_enabled: true,
+ admin_mode_active: false,
+ },
+ });
+ expect(findEnterAdminModeItem().exists()).toBe(true);
+ expect(findLeaveAdminModeItem().exists()).toBe(false);
+ });
+ });
+
+ describe('when user is admin, admin mode feature is enabled and active', () => {
+ it('should render only "leave admin mode" item', () => {
+ createWrapper({
+ admin_mode: {
+ user_is_admin: true,
+ admin_mode_feature_enabled: true,
+ admin_mode_active: true,
+ },
+ });
+ expect(findEnterAdminModeItem().exists()).toBe(false);
+ expect(findLeaveAdminModeItem().exists()).toBe(true);
+ });
+ });
+ });
+
describe('Sign out group', () => {
const findSignOutGroup = () => wrapper.findByTestId('sign-out-group');
diff --git a/spec/frontend/super_sidebar/mock_data.js b/spec/frontend/super_sidebar/mock_data.js
index fc264ad5e0a..067caec5ff4 100644
--- a/spec/frontend/super_sidebar/mock_data.js
+++ b/spec/frontend/super_sidebar/mock_data.js
@@ -175,6 +175,11 @@ export const userMenuMockPipelineMinutes = {
export const userMenuMockData = {
name: 'Orange Fox',
username: 'thefox',
+ admin_mode: {
+ user_is_admin: false,
+ admin_mode_feature_enabled: false,
+ admin_mode_active: false,
+ },
avatar_url: invalidUrl,
has_link_to_profile: true,
link_to_profile: '/thefox',
@@ -210,102 +215,6 @@ export const frecentProjectsMock = [
},
];
-export const cachedFrequentProjects = JSON.stringify([
- {
- id: 1,
- name: 'Cached project 1',
- namespace: 'Cached Namespace 1 / Cached project 1',
- webUrl: '/cached-namespace-1/cached-project-1',
- avatarUrl: '/uploads/-/avatar1.png',
- lastAccessedOn: 1676325329054,
- frequency: 10,
- },
- {
- id: 2,
- name: 'Cached project 2',
- namespace: 'Cached Namespace 2 / Cached project 2',
- webUrl: '/cached-namespace-2/cached-project-2',
- avatarUrl: '/uploads/-/avatar2.png',
- lastAccessedOn: 1674314684308,
- frequency: 8,
- },
- {
- id: 3,
- name: 'Cached project 3',
- namespace: 'Cached Namespace 3 / Cached project 3',
- webUrl: '/cached-namespace-3/cached-project-3',
- avatarUrl: '/uploads/-/avatar3.png',
- lastAccessedOn: 1664977333191,
- frequency: 12,
- },
- {
- id: 4,
- name: 'Cached project 4',
- namespace: 'Cached Namespace 4 / Cached project 4',
- webUrl: '/cached-namespace-4/cached-project-4',
- avatarUrl: '/uploads/-/avatar4.png',
- lastAccessedOn: 1674315407569,
- frequency: 3,
- },
- {
- id: 5,
- name: 'Cached project 5',
- namespace: 'Cached Namespace 5 / Cached project 5',
- webUrl: '/cached-namespace-5/cached-project-5',
- avatarUrl: '/uploads/-/avatar5.png',
- lastAccessedOn: 1677084729436,
- frequency: 21,
- },
- {
- id: 6,
- name: 'Cached project 6',
- namespace: 'Cached Namespace 6 / Cached project 6',
- webUrl: '/cached-namespace-6/cached-project-6',
- avatarUrl: '/uploads/-/avatar6.png',
- lastAccessedOn: 1676325329679,
- frequency: 5,
- },
-]);
-
-export const cachedFrequentGroups = JSON.stringify([
- {
- id: 1,
- name: 'Cached group 1',
- namespace: 'Cached Namespace 1',
- webUrl: '/cached-namespace-1/cached-group-1',
- avatarUrl: '/uploads/-/avatar1.png',
- lastAccessedOn: 1676325329054,
- frequency: 10,
- },
- {
- id: 2,
- name: 'Cached group 2',
- namespace: 'Cached Namespace 2',
- webUrl: '/cached-namespace-2/cached-group-2',
- avatarUrl: '/uploads/-/avatar2.png',
- lastAccessedOn: 1674314684308,
- frequency: 8,
- },
- {
- id: 3,
- name: 'Cached group 3',
- namespace: 'Cached Namespace 3',
- webUrl: '/cached-namespace-3/cached-group-3',
- avatarUrl: '/uploads/-/avatar3.png',
- lastAccessedOn: 1664977333191,
- frequency: 12,
- },
- {
- id: 4,
- name: 'Cached group 4',
- namespace: 'Cached Namespace 4',
- webUrl: '/cached-namespace-4/cached-group-4',
- avatarUrl: '/uploads/-/avatar4.png',
- lastAccessedOn: 1674315407569,
- frequency: 3,
- },
-]);
-
export const unsortedFrequentItems = [
{ id: 1, frequency: 12, lastAccessedOn: 1491400843391 },
{ id: 2, frequency: 14, lastAccessedOn: 1488240890738 },
diff --git a/spec/frontend/usage_quotas/storage/components/namespace_storage_app_spec.js b/spec/frontend/usage_quotas/storage/components/namespace_storage_app_spec.js
new file mode 100644
index 00000000000..e4f99d401a2
--- /dev/null
+++ b/spec/frontend/usage_quotas/storage/components/namespace_storage_app_spec.js
@@ -0,0 +1,51 @@
+import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
+import NamespaceStorageApp from '~/usage_quotas/storage/components/namespace_storage_app.vue';
+import StorageUsageStatistics from '~/usage_quotas/storage/components/storage_usage_statistics.vue';
+import { defaultNamespaceProvideValues } from '../mock_data';
+
+const defaultProps = {
+ namespaceLoadingError: false,
+ projectsLoadingError: false,
+ isNamespaceStorageStatisticsLoading: false,
+ // hardcoding object until we move test_fixtures from ee/ to here
+ namespace: {
+ rootStorageStatistics: {
+ storageSize: 1234,
+ },
+ },
+};
+
+describe('NamespaceStorageApp', () => {
+ /** @type {import('helpers/vue_test_utils_helper').ExtendedWrapper} */
+ let wrapper;
+
+ const findStorageUsageStatistics = () => wrapper.findComponent(StorageUsageStatistics);
+
+ const createComponent = ({ provide = {}, props = {} } = {}) => {
+ wrapper = shallowMountExtended(NamespaceStorageApp, {
+ provide: {
+ ...defaultNamespaceProvideValues,
+ ...provide,
+ },
+ propsData: {
+ ...defaultProps,
+ ...props,
+ },
+ });
+ };
+
+ describe('Namespace usage overview', () => {
+ describe('StorageUsageStatistics', () => {
+ beforeEach(() => {
+ createComponent();
+ });
+
+ it('passes the correct props to StorageUsageStatistics', () => {
+ expect(findStorageUsageStatistics().props()).toMatchObject({
+ usedStorage: defaultProps.namespace.rootStorageStatistics.storageSize,
+ loading: false,
+ });
+ });
+ });
+ });
+});
diff --git a/spec/frontend/usage_quotas/storage/components/storage_usage_overview_card_spec.js b/spec/frontend/usage_quotas/storage/components/storage_usage_overview_card_spec.js
new file mode 100644
index 00000000000..c79b6b94ac1
--- /dev/null
+++ b/spec/frontend/usage_quotas/storage/components/storage_usage_overview_card_spec.js
@@ -0,0 +1,44 @@
+import { GlSkeletonLoader } from '@gitlab/ui';
+import { numberToHumanSize } from '~/lib/utils/number_utils';
+import StorageUsageOverviewCard from '~/usage_quotas/storage/components/storage_usage_overview_card.vue';
+import NumberToHumanSize from '~/vue_shared/components/number_to_human_size/number_to_human_size.vue';
+import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
+
+describe('StorageUsageOverviewCard', () => {
+ /** @type {import('helpers/vue_test_utils_helper').ExtendedWrapper} */
+ let wrapper;
+ const defaultProps = {
+ purchasedStorage: 0,
+ // hardcoding value until we move test_fixtures from ee/ to here
+ usedStorage: 1234,
+ loading: false,
+ };
+
+ const createComponent = ({ props = {} } = {}) => {
+ wrapper = shallowMountExtended(StorageUsageOverviewCard, {
+ propsData: { ...defaultProps, ...props },
+ stubs: {
+ NumberToHumanSize,
+ },
+ });
+ };
+
+ const findSkeletonLoader = () => wrapper.findComponent(GlSkeletonLoader);
+
+ it('displays the used storage value', () => {
+ createComponent();
+ expect(wrapper.text()).toContain(numberToHumanSize(defaultProps.usedStorage, 1));
+ });
+
+ describe('skeleton loader', () => {
+ it('renders skeleton loader when loading prop is true', () => {
+ createComponent({ props: { loading: true } });
+ expect(findSkeletonLoader().exists()).toBe(true);
+ });
+
+ it('does not render skeleton loader when loading prop is false', () => {
+ createComponent({ props: { loading: false } });
+ expect(findSkeletonLoader().exists()).toBe(false);
+ });
+ });
+});
diff --git a/spec/frontend/usage_quotas/storage/components/storage_usage_statistics_spec.js b/spec/frontend/usage_quotas/storage/components/storage_usage_statistics_spec.js
new file mode 100644
index 00000000000..73d02dc273f
--- /dev/null
+++ b/spec/frontend/usage_quotas/storage/components/storage_usage_statistics_spec.js
@@ -0,0 +1,43 @@
+import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
+import StorageUsageStatistics from '~/usage_quotas/storage/components/storage_usage_statistics.vue';
+import StorageUsageOverviewCard from '~/usage_quotas/storage/components/storage_usage_overview_card.vue';
+
+const defaultProps = {
+ // hardcoding value until we move test_fixtures from ee/ to here
+ usedStorage: 1234,
+ loading: false,
+};
+
+describe('StorageUsageStatistics', () => {
+ /** @type {import('helpers/vue_test_utils_helper').ExtendedWrapper} */
+ let wrapper;
+
+ const createComponent = ({ props = {} } = {}) => {
+ wrapper = shallowMountExtended(StorageUsageStatistics, {
+ propsData: {
+ ...defaultProps,
+ ...props,
+ },
+ });
+ };
+
+ const findOverviewSubtitle = () => wrapper.findByTestId('overview-subtitle');
+ const findStorageUsageOverviewCard = () => wrapper.findComponent(StorageUsageOverviewCard);
+
+ beforeEach(() => {
+ createComponent();
+ });
+
+ it('shows the namespace storage overview subtitle', () => {
+ expect(findOverviewSubtitle().text()).toBe('Namespace overview');
+ });
+
+ describe('StorageStatisticsCard', () => {
+ it('passes the correct props to StorageUsageOverviewCard', () => {
+ expect(findStorageUsageOverviewCard().props()).toEqual({
+ usedStorage: defaultProps.usedStorage,
+ loading: false,
+ });
+ });
+ });
+});
diff --git a/spec/frontend/usage_quotas/storage/mock_data.js b/spec/frontend/usage_quotas/storage/mock_data.js
index 16c03a13028..266c1150815 100644
--- a/spec/frontend/usage_quotas/storage/mock_data.js
+++ b/spec/frontend/usage_quotas/storage/mock_data.js
@@ -6,3 +6,5 @@ export const mockEmptyResponse = { data: { project: null } };
export const defaultProjectProvideValues = {
projectPath: '/project-path',
};
+
+export const defaultNamespaceProvideValues = {};
diff --git a/spec/frontend/vue_merge_request_widget/components/mr_widget_rebase_spec.js b/spec/frontend/vue_merge_request_widget/components/mr_widget_rebase_spec.js
index 9bd46267daa..88ee9375180 100644
--- a/spec/frontend/vue_merge_request_widget/components/mr_widget_rebase_spec.js
+++ b/spec/frontend/vue_merge_request_widget/components/mr_widget_rebase_spec.js
@@ -398,4 +398,20 @@ describe('Merge request widget rebase component', () => {
expect(toast).toHaveBeenCalledWith('Rebase completed');
});
});
+
+ // This may happen when the session of a user is expired.
+ // see https://gitlab.com/gitlab-org/gitlab/-/issues/413627
+ describe('with empty project', () => {
+ it('does not throw any error', async () => {
+ const fn = async () => {
+ createWrapper({
+ handler: jest.fn().mockResolvedValue({ data: { project: null } }),
+ });
+
+ await waitForPromises();
+ };
+
+ await expect(fn()).resolves.not.toThrow();
+ });
+ });
});
diff --git a/spec/frontend/vue_merge_request_widget/components/states/mr_widget_ready_to_merge_spec.js b/spec/frontend/vue_merge_request_widget/components/states/mr_widget_ready_to_merge_spec.js
index 1b7338744e8..c9cc34e2cfc 100644
--- a/spec/frontend/vue_merge_request_widget/components/states/mr_widget_ready_to_merge_spec.js
+++ b/spec/frontend/vue_merge_request_widget/components/states/mr_widget_ready_to_merge_spec.js
@@ -212,6 +212,19 @@ describe('ReadyToMerge', () => {
expect(findMergeButton().text()).toBe('Set to auto-merge');
expect(findMergeHelperText().text()).toBe('Merge when pipeline succeeds');
});
+
+ it('should show merge help text when pipeline has failed and has an auto merge strategy', () => {
+ createComponent({
+ mr: {
+ pipeline: { status: 'FAILED' },
+ availableAutoMergeStrategies: MWPS_MERGE_STRATEGY,
+ hasCI: true,
+ },
+ });
+
+ expect(findMergeButton().text()).toBe('Set to auto-merge');
+ expect(findMergeHelperText().text()).toBe('Merge when pipeline succeeds');
+ });
});
describe('merge immediately dropdown', () => {
@@ -858,6 +871,42 @@ describe('ReadyToMerge', () => {
});
});
+ describe('only allow merge if pipeline succeeds', () => {
+ beforeEach(() => {
+ const response = JSON.parse(JSON.stringify(readyToMergeResponse));
+ response.data.project.onlyAllowMergeIfPipelineSucceeds = true;
+ response.data.project.mergeRequest.headPipeline = {
+ id: 1,
+ active: true,
+ status: '',
+ path: '',
+ };
+
+ readyToMergeResponseSpy = jest.fn().mockResolvedValueOnce(response);
+ });
+
+ it('hides merge immediately dropdown when subscription returns', async () => {
+ createComponent({ mr: { id: 1 } });
+
+ await waitForPromises();
+
+ expect(findMergeImmediatelyDropdown().exists()).toBe(false);
+
+ mockedSubscription.next({
+ data: {
+ mergeRequestMergeStatusUpdated: {
+ ...readyToMergeResponse.data.project.mergeRequest,
+ headPipeline: { id: 1, active: true, status: '', path: '' },
+ },
+ },
+ });
+
+ await waitForPromises();
+
+ expect(findMergeImmediatelyDropdown().exists()).toBe(false);
+ });
+ });
+
describe('commit message', () => {
it('updates commit message from subscription', async () => {
createComponent({ mr: { id: 1 } });
diff --git a/spec/frontend/vue_merge_request_widget/mr_widget_options_spec.js b/spec/frontend/vue_merge_request_widget/mr_widget_options_spec.js
index 9296e548081..85166549771 100644
--- a/spec/frontend/vue_merge_request_widget/mr_widget_options_spec.js
+++ b/spec/frontend/vue_merge_request_widget/mr_widget_options_spec.js
@@ -264,7 +264,7 @@ describe('MrWidgetOptions', () => {
expect(findMergePipelineForkAlert().exists()).toBe(false);
});
- it('hides the alert when merge pipelines are not enabled', async () => {
+ it('hides the alert when merged results pipelines are not enabled', async () => {
createComponent({
updatedMrData: {
source_project_id: 1,
@@ -275,7 +275,7 @@ describe('MrWidgetOptions', () => {
expect(findMergePipelineForkAlert().exists()).toBe(false);
});
- it('shows the alert when merge pipelines are enabled and the source project and target project are different', async () => {
+ it('shows the alert when merged results pipelines are enabled and the source project and target project are different', async () => {
createComponent({
updatedMrData: {
source_project_id: 1,
diff --git a/spec/frontend/vue_shared/components/file_row_spec.js b/spec/frontend/vue_shared/components/file_row_spec.js
index 976866af27c..d063db1e34b 100644
--- a/spec/frontend/vue_shared/components/file_row_spec.js
+++ b/spec/frontend/vue_shared/components/file_row_spec.js
@@ -1,4 +1,5 @@
import { shallowMount } from '@vue/test-utils';
+import { GlIcon } from '@gitlab/ui';
import { nextTick } from 'vue';
import { file } from 'jest/ide/helpers';
import { escapeFileUrl } from '~/lib/utils/url_utility';
@@ -153,4 +154,16 @@ describe('File row component', () => {
expect(wrapper.findComponent(FileIcon).props('submodule')).toBe(submodule);
});
+
+ it('renders pinned icon', () => {
+ createComponent({
+ file: {
+ ...file(),
+ pinned: true,
+ },
+ level: 0,
+ });
+
+ expect(wrapper.findComponent(GlIcon).props('name')).toBe('thumbtack');
+ });
});
diff --git a/spec/frontend/vue_shared/components/filtered_search_bar/tokens/daterange_token_spec.js b/spec/frontend/vue_shared/components/filtered_search_bar/tokens/daterange_token_spec.js
new file mode 100644
index 00000000000..ef0e3dbbb8e
--- /dev/null
+++ b/spec/frontend/vue_shared/components/filtered_search_bar/tokens/daterange_token_spec.js
@@ -0,0 +1,170 @@
+import {
+ GlDaterangePicker,
+ GlFilteredSearchSuggestion,
+ GlFilteredSearchSuggestionList,
+ GlFilteredSearchToken,
+} from '@gitlab/ui';
+import { mountExtended } from 'helpers/vue_test_utils_helper';
+import DaterangeToken from '~/vue_shared/components/filtered_search_bar/tokens/daterange_token.vue';
+import { OPERATORS_IS } from '~/vue_shared/components/filtered_search_bar/constants';
+
+const CUSTOM_DATE = 'custom-date';
+
+describe('DaterangeToken', () => {
+ let wrapper;
+
+ const defaultProps = {
+ active: true,
+ value: {
+ data: '',
+ },
+ config: {
+ operators: OPERATORS_IS,
+ options: [
+ {
+ value: 'last_week',
+ title: 'Last week',
+ },
+ {
+ value: 'last_month',
+ title: 'Last month',
+ },
+ ],
+ },
+ };
+
+ function createComponent(props = {}) {
+ return mountExtended(DaterangeToken, {
+ propsData: { ...defaultProps, ...props },
+ stubs: {
+ Portal: true,
+ },
+ provide: {
+ portalName: 'fake target',
+ alignSuggestions: function fakeAlignSuggestions() {},
+ suggestionsListClass: () => 'custom-class',
+ termsAsTokens: () => false,
+ },
+ });
+ }
+
+ const findGlFilteredSearchToken = () => wrapper.findComponent(GlFilteredSearchToken);
+ const findDateRangePicker = () => wrapper.findComponent(GlDaterangePicker);
+ const findAllSuggestions = () => wrapper.findAllComponents(GlFilteredSearchSuggestion);
+ const selectSuggestion = (suggestion) =>
+ wrapper.findComponent(GlFilteredSearchSuggestionList).vm.$emit('suggestion', suggestion);
+
+ beforeEach(() => {
+ wrapper = createComponent();
+ });
+
+ it('renders a filtered search token', () => {
+ expect(findGlFilteredSearchToken().exists()).toBe(true);
+ });
+
+ it('remove the options from the token config', () => {
+ expect(findGlFilteredSearchToken().props('config').options).toBeUndefined();
+ });
+
+ it('does not set the token as view-only', () => {
+ expect(findGlFilteredSearchToken().props('viewOnly')).toBe(false);
+ });
+
+ it('does not show the date picker by default', () => {
+ expect(findDateRangePicker().exists()).toBe(false);
+ });
+
+ it('does not re-activate the token', async () => {
+ await wrapper.setProps({ active: false });
+ expect(findGlFilteredSearchToken().props('active')).toBe(false);
+ });
+
+ it('does not override the value', async () => {
+ await wrapper.setProps({ value: { data: 'value' } });
+ expect(findGlFilteredSearchToken().props('value')).toEqual({ data: 'value' });
+ });
+
+ it('renders a list of suggestions as specified by the config', () => {
+ const suggestions = findAllSuggestions();
+ expect(suggestions.exists()).toBe(true);
+ expect(suggestions).toHaveLength(defaultProps.config.options.length + 1);
+ [...defaultProps.config.options, { value: CUSTOM_DATE, title: 'Custom' }].forEach(
+ (option, i) => {
+ expect(suggestions.at(i).props('value')).toBe(option.value);
+ expect(suggestions.at(i).text()).toBe(option.title);
+ },
+ );
+ });
+
+ it('sets the dataSegmentInputAttributes', () => {
+ expect(findGlFilteredSearchToken().props('dataSegmentInputAttributes')).toEqual({
+ id: 'time_range_data_segment_input',
+ });
+ });
+
+ describe('when a default option is selected', () => {
+ const option = defaultProps.config.options[0].value;
+ beforeEach(async () => {
+ await selectSuggestion(option);
+ });
+ it('does not show the date picker if default option is selected', () => {
+ expect(findDateRangePicker().exists()).toBe(false);
+ });
+
+ it('sets the value', () => {
+ expect(findGlFilteredSearchToken().emitted().select).toEqual([[option]]);
+ expect(findGlFilteredSearchToken().emitted().complete).toEqual([[option]]);
+ });
+ });
+
+ describe('when custom-date option is selected', () => {
+ beforeEach(async () => {
+ await selectSuggestion(CUSTOM_DATE);
+ });
+
+ it('sets the token as view-only', () => {
+ expect(findGlFilteredSearchToken().props('viewOnly')).toBe(true);
+ });
+
+ it('shows the date picker', () => {
+ expect(findDateRangePicker().exists()).toBe(true);
+ const today = new Date();
+ expect(findDateRangePicker().props('defaultStartDate')).toEqual(today);
+ expect(findDateRangePicker().props('startOpened')).toBe(true);
+ });
+
+ it('re-activate the token while the date picker is open', async () => {
+ await wrapper.setProps({ active: false });
+ expect(findGlFilteredSearchToken().props('active')).toBe(true);
+ });
+
+ it('overrides the value', async () => {
+ await wrapper.setProps({ value: { data: 'value' } });
+ expect(findGlFilteredSearchToken().props('value')).toEqual({ data: '' });
+ });
+
+ it('sets the dataSegmentInputAttributes', () => {
+ expect(findGlFilteredSearchToken().props('dataSegmentInputAttributes')).toEqual({
+ id: 'time_range_data_segment_input',
+ placeholder: 'YYYY-MM-DD - YYYY-MM-DD',
+ style: 'padding-left: 23px;',
+ });
+ });
+
+ it('sets the date range and hides the picker upon selection', async () => {
+ await findDateRangePicker().vm.$emit('input', {
+ startDate: new Date('October 13, 2014 11:13:00'),
+ endDate: new Date('October 13, 2014 11:13:00'),
+ });
+ expect(findGlFilteredSearchToken().emitted().complete).toEqual([
+ [CUSTOM_DATE],
+ [`"2014-10-13 - 2014-10-13"`],
+ ]);
+ expect(findGlFilteredSearchToken().emitted().select).toEqual([
+ [CUSTOM_DATE],
+ [`"2014-10-13 - 2014-10-13"`],
+ ]);
+ expect(findDateRangePicker().exists()).toBe(false);
+ });
+ });
+});
diff --git a/spec/frontend/vue_shared/components/gl_countdown_spec.js b/spec/frontend/vue_shared/components/gl_countdown_spec.js
index 38d54eff872..a755f35332f 100644
--- a/spec/frontend/vue_shared/components/gl_countdown_spec.js
+++ b/spec/frontend/vue_shared/components/gl_countdown_spec.js
@@ -44,6 +44,10 @@ describe('GlCountdown', () => {
it('displays 00:00:00', () => {
expect(wrapper.text()).toContain('00:00:00');
});
+
+ it('emits `timer-expired` event', () => {
+ expect(wrapper.emitted('timer-expired')).toStrictEqual([[]]);
+ });
});
describe('when an invalid date is passed', () => {
diff --git a/spec/frontend/vue_shared/components/groups_list/groups_list_item_spec.js b/spec/frontend/vue_shared/components/groups_list/groups_list_item_spec.js
index cba9f78790d..f0b33284125 100644
--- a/spec/frontend/vue_shared/components/groups_list/groups_list_item_spec.js
+++ b/spec/frontend/vue_shared/components/groups_list/groups_list_item_spec.js
@@ -1,4 +1,4 @@
-import { GlAvatarLabeled, GlIcon } from '@gitlab/ui';
+import { GlAvatarLabeled, GlIcon, GlBadge } from '@gitlab/ui';
import { mountExtended } from 'helpers/vue_test_utils_helper';
import GroupsListItem from '~/vue_shared/components/groups_list/groups_list_item.vue';
import { createMockDirective, getBinding } from 'helpers/vue_mock_directive';
@@ -7,7 +7,6 @@ import {
VISIBILITY_LEVEL_INTERNAL_STRING,
GROUP_VISIBILITY_TYPE,
} from '~/visibility_level/constants';
-import UserAccessRoleBadge from '~/vue_shared/components/user_access_role_badge.vue';
import { ACCESS_LEVEL_LABELS } from '~/access_level/constants';
import ListActions from '~/vue_shared/components/list_actions/list_actions.vue';
import { ACTION_EDIT, ACTION_DELETE } from '~/vue_shared/components/list_actions/constants';
@@ -112,7 +111,7 @@ describe('GroupsListItem', () => {
it('renders access role badge', () => {
createComponent();
- expect(findAvatarLabeled().findComponent(UserAccessRoleBadge).text()).toBe(
+ expect(findAvatarLabeled().findComponent(GlBadge).text()).toBe(
ACCESS_LEVEL_LABELS[group.accessLevel.integerValue],
);
});
diff --git a/spec/frontend/vue_shared/components/groups_list/groups_list_spec.js b/spec/frontend/vue_shared/components/groups_list/groups_list_spec.js
index ec6a1dc9576..072b27b4807 100644
--- a/spec/frontend/vue_shared/components/groups_list/groups_list_spec.js
+++ b/spec/frontend/vue_shared/components/groups_list/groups_list_spec.js
@@ -8,6 +8,7 @@ describe('GroupsList', () => {
const defaultPropsData = {
groups,
+ listItemClass: 'gl-px-5',
};
const createComponent = () => {
@@ -23,6 +24,9 @@ describe('GroupsList', () => {
const expectedProps = groupsListItemWrappers.map((groupsListItemWrapper) =>
groupsListItemWrapper.props(),
);
+ const expectedClasses = groupsListItemWrappers.map((groupsListItemWrapper) =>
+ groupsListItemWrapper.classes(),
+ );
expect(expectedProps).toEqual(
defaultPropsData.groups.map((group) => ({
@@ -30,6 +34,9 @@ describe('GroupsList', () => {
showGroupIcon: false,
})),
);
+ expect(expectedClasses).toEqual(
+ defaultPropsData.groups.map(() => [defaultPropsData.listItemClass]),
+ );
});
describe('when `GroupsListItem` emits `delete` event', () => {
diff --git a/spec/frontend/vue_shared/components/help_page_link/help_page_link_spec.js b/spec/frontend/vue_shared/components/help_page_link/help_page_link_spec.js
new file mode 100644
index 00000000000..5c17558b9cf
--- /dev/null
+++ b/spec/frontend/vue_shared/components/help_page_link/help_page_link_spec.js
@@ -0,0 +1,51 @@
+import { shallowMount, Wrapper } from '@vue/test-utils'; // eslint-disable-line no-unused-vars
+import { GlLink } from '@gitlab/ui';
+import HelpPageLink from '~/vue_shared/components/help_page_link/help_page_link.vue';
+import { helpPagePath } from '~/helpers/help_page_helper';
+
+/** @type { Wrapper } */
+let wrapper;
+
+const createComponent = (props = {}, slots = {}) => {
+ wrapper = shallowMount(HelpPageLink, {
+ propsData: {
+ ...props,
+ },
+ slots,
+ stubs: {
+ GlLink: true,
+ },
+ });
+};
+
+const findGlLink = () => wrapper.findComponent(GlLink);
+
+describe('HelpPageLink', () => {
+ it('renders a link', () => {
+ const href = 'user/usage_quotas';
+ createComponent({ href });
+
+ const link = findGlLink();
+ const expectedHref = helpPagePath(href, { anchor: null });
+ expect(link.attributes().href).toBe(expectedHref);
+ });
+
+ it('adds the anchor', () => {
+ const href = 'user/usage_quotas';
+ const anchor = 'namespace-storage-limit';
+ createComponent({ href, anchor });
+
+ const link = findGlLink();
+ const expectedHref = helpPagePath(href, { anchor });
+ expect(link.attributes().href).toBe(expectedHref);
+ });
+
+ it('renders slot content', () => {
+ const href = 'user/usage_quotas';
+ const slotContent = 'slot content';
+ createComponent({ href }, { default: slotContent });
+
+ const link = findGlLink();
+ expect(link.text()).toBe(slotContent);
+ });
+});
diff --git a/spec/frontend/vue_shared/components/markdown/comment_templates_dropdown_spec.js b/spec/frontend/vue_shared/components/markdown/comment_templates_dropdown_spec.js
index 11c57fc5768..01122fe1103 100644
--- a/spec/frontend/vue_shared/components/markdown/comment_templates_dropdown_spec.js
+++ b/spec/frontend/vue_shared/components/markdown/comment_templates_dropdown_spec.js
@@ -98,7 +98,7 @@ describe('Comment templates dropdown', () => {
await selectSavedReply();
expect(trackingSpy).toHaveBeenCalledWith(
- expect.any(String),
+ undefined,
TRACKING_SAVED_REPLIES_USE,
expect.any(Object),
);
@@ -111,7 +111,7 @@ describe('Comment templates dropdown', () => {
await selectSavedReply();
expect(trackingSpy).toHaveBeenCalledWith(
- expect.any(String),
+ undefined,
TRACKING_SAVED_REPLIES_USE_IN_MR,
expect.any(Object),
);
@@ -137,7 +137,7 @@ describe('Comment templates dropdown', () => {
await selectSavedReply();
expect(trackingSpy).toHaveBeenCalledWith(
- expect.any(String),
+ undefined,
TRACKING_SAVED_REPLIES_USE_IN_OTHER,
expect.any(Object),
);
diff --git a/spec/frontend/vue_shared/components/markdown/markdown_editor_spec.js b/spec/frontend/vue_shared/components/markdown/markdown_editor_spec.js
index edb11bd581b..3b8422d8351 100644
--- a/spec/frontend/vue_shared/components/markdown/markdown_editor_spec.js
+++ b/spec/frontend/vue_shared/components/markdown/markdown_editor_spec.js
@@ -142,23 +142,6 @@ describe('vue_shared/component/markdown/markdown_editor', () => {
);
});
- describe('if gitlab is installed under a relative url', () => {
- beforeEach(() => {
- window.gon = { relative_url_root: '/gitlab' };
- });
-
- it('passes render_quick_actions param to renderMarkdownPath if quick actions are enabled', async () => {
- buildWrapper({ propsData: { supportsQuickActions: true } });
-
- await enableContentEditor();
-
- expect(mock.history.post).toHaveLength(1);
- expect(mock.history.post[0].url).toBe(
- `${window.location.origin}/gitlab/api/markdown?render_quick_actions=true`,
- );
- });
- });
-
it('does not pass render_quick_actions param to renderMarkdownPath if quick actions are disabled', async () => {
buildWrapper({ propsData: { supportsQuickActions: false } });
diff --git a/spec/frontend/vue_shared/components/projects_list/projects_list_item_spec.js b/spec/frontend/vue_shared/components/projects_list/projects_list_item_spec.js
index 7cf560745b6..a5a5a43effe 100644
--- a/spec/frontend/vue_shared/components/projects_list/projects_list_item_spec.js
+++ b/spec/frontend/vue_shared/components/projects_list/projects_list_item_spec.js
@@ -12,7 +12,6 @@ import {
VISIBILITY_LEVEL_PRIVATE_STRING,
PROJECT_VISIBILITY_TYPE,
} from '~/visibility_level/constants';
-import UserAccessRoleBadge from '~/vue_shared/components/user_access_role_badge.vue';
import { ACCESS_LEVEL_LABELS } from '~/access_level/constants';
import { FEATURABLE_DISABLED, FEATURABLE_ENABLED } from '~/featurable/constants';
import TimeAgoTooltip from '~/vue_shared/components/time_ago_tooltip.vue';
@@ -92,7 +91,7 @@ describe('ProjectsListItem', () => {
it('renders access role badge', () => {
createComponent();
- expect(findAvatarLabeled().findComponent(UserAccessRoleBadge).text()).toBe(
+ expect(findAvatarLabeled().findComponent(GlBadge).text()).toBe(
ACCESS_LEVEL_LABELS[project.permissions.projectAccess.accessLevel],
);
});
diff --git a/spec/frontend/vue_shared/components/projects_list/projects_list_spec.js b/spec/frontend/vue_shared/components/projects_list/projects_list_spec.js
index fb195dfe08e..6530157811c 100644
--- a/spec/frontend/vue_shared/components/projects_list/projects_list_spec.js
+++ b/spec/frontend/vue_shared/components/projects_list/projects_list_spec.js
@@ -9,6 +9,7 @@ describe('ProjectsList', () => {
const defaultPropsData = {
projects: convertObjectPropsToCamelCase(projects, { deep: true }),
+ listItemClass: 'gl-px-5',
};
const createComponent = () => {
@@ -24,6 +25,9 @@ describe('ProjectsList', () => {
const expectedProps = projectsListItemWrappers.map((projectsListItemWrapper) =>
projectsListItemWrapper.props(),
);
+ const expectedClasses = projectsListItemWrappers.map((projectsListItemWrapper) =>
+ projectsListItemWrapper.classes(),
+ );
expect(expectedProps).toEqual(
defaultPropsData.projects.map((project) => ({
@@ -31,6 +35,9 @@ describe('ProjectsList', () => {
showProjectIcon: false,
})),
);
+ expect(expectedClasses).toEqual(
+ defaultPropsData.projects.map(() => [defaultPropsData.listItemClass]),
+ );
});
describe('when `ProjectListItem` emits `delete` event', () => {
diff --git a/spec/frontend/vue_shared/components/runner_instructions/runner_instructions_spec.js b/spec/frontend/vue_shared/components/runner_instructions/runner_instructions_spec.js
deleted file mode 100644
index 260eddbb37d..00000000000
--- a/spec/frontend/vue_shared/components/runner_instructions/runner_instructions_spec.js
+++ /dev/null
@@ -1,33 +0,0 @@
-import { createMockDirective, getBinding } from 'helpers/vue_mock_directive';
-import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
-import RunnerInstructions from '~/vue_shared/components/runner_instructions/runner_instructions.vue';
-import RunnerInstructionsModal from '~/vue_shared/components/runner_instructions/runner_instructions_modal.vue';
-
-describe('RunnerInstructions component', () => {
- let wrapper;
-
- const findModalButton = () => wrapper.findByTestId('show-modal-button');
- const findModal = () => wrapper.findComponent(RunnerInstructionsModal);
-
- const createComponent = () => {
- wrapper = shallowMountExtended(RunnerInstructions, {
- directives: {
- GlModal: createMockDirective('gl-tooltip'),
- },
- });
- };
-
- beforeEach(() => {
- createComponent();
- });
-
- it('should show the "Show runner installation instructions" button', () => {
- expect(findModalButton().text()).toBe('Show runner installation instructions');
- });
-
- it('should render the modal', () => {
- const modalId = getBinding(findModal().element, 'gl-modal');
-
- expect(findModalButton().attributes('modal-id')).toBe(modalId);
- });
-});
diff --git a/spec/frontend/vue_shared/components/segmented_control_button_group_spec.js b/spec/frontend/vue_shared/components/segmented_control_button_group_spec.js
index 623a8739907..a3bf3ca23e3 100644
--- a/spec/frontend/vue_shared/components/segmented_control_button_group_spec.js
+++ b/spec/frontend/vue_shared/components/segmented_control_button_group_spec.js
@@ -122,6 +122,7 @@ describe('~/vue_shared/components/segmented_control_button_group.vue', () => {
[[{ value: '1' }]],
[[{ value: 1, disabled: true }]],
[[{ value: true, disabled: false }]],
+ [[{ value: true, props: { 'data-testid': 'test' } }]],
])('with options=%j, passes validation', (options) => {
createComponent({ options });
diff --git a/spec/frontend/vue_shared/components/source_viewer/source_viewer_new_spec.js b/spec/frontend/vue_shared/components/source_viewer/source_viewer_new_spec.js
index 86dc9afaacc..745886161ce 100644
--- a/spec/frontend/vue_shared/components/source_viewer/source_viewer_new_spec.js
+++ b/spec/frontend/vue_shared/components/source_viewer/source_viewer_new_spec.js
@@ -42,6 +42,8 @@ describe('Source Viewer component', () => {
let wrapper;
let fakeApollo;
const CHUNKS_MOCK = [CHUNK_1, CHUNK_2];
+ const projectPath = 'test';
+ const currentRef = 'main';
const hash = '#L142';
const blameDataQueryHandlerSuccess = jest.fn().mockResolvedValue(BLAME_DATA_QUERY_RESPONSE_MOCK);
@@ -57,8 +59,8 @@ describe('Source Viewer component', () => {
propsData: {
blob: { ...blob, ...BLOB_DATA_MOCK },
chunks: CHUNKS_MOCK,
- projectPath: 'test',
- currentRef: 'main',
+ projectPath,
+ currentRef,
showBlame,
},
});
@@ -116,6 +118,18 @@ describe('Source Viewer component', () => {
expect(findBlameComponents().at(0).props()).toMatchObject({ blameInfo });
});
+ it('calls the blame data query', async () => {
+ await triggerChunkAppear();
+
+ expect(blameDataQueryHandlerSuccess).toHaveBeenCalledWith(
+ expect.objectContaining({
+ filePath: BLOB_DATA_MOCK.path,
+ fullPath: projectPath,
+ ref: currentRef,
+ }),
+ );
+ });
+
it('calls the query only once per chunk', async () => {
// We trigger the `appear` event multiple times here in order to simulate the user scrolling past the chunk more than once.
// In this scenario we only want to query the backend once.
diff --git a/spec/frontend/vue_shared/components/upload_dropzone/avatar_upload_dropzone_spec.js b/spec/frontend/vue_shared/components/upload_dropzone/avatar_upload_dropzone_spec.js
new file mode 100644
index 00000000000..6313bf588a0
--- /dev/null
+++ b/spec/frontend/vue_shared/components/upload_dropzone/avatar_upload_dropzone_spec.js
@@ -0,0 +1,116 @@
+import { GlAvatar, GlButton, GlTruncate } from '@gitlab/ui';
+import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
+import AvatarUploadDropzone from '~/vue_shared/components/upload_dropzone/avatar_upload_dropzone.vue';
+import UploadDropzone from '~/vue_shared/components/upload_dropzone/upload_dropzone.vue';
+import { AVATAR_SHAPE_OPTION_RECT } from '~/vue_shared/constants';
+
+describe('AvatarUploadDropzone', () => {
+ let wrapper;
+
+ const defaultPropsData = {
+ entity: { id: 1, name: 'Foo' },
+ value: null,
+ label: 'Avatar',
+ };
+
+ const file = new File(['foo'], 'foo.jpg', {
+ type: 'text/plain',
+ });
+ const file2 = new File(['bar'], 'bar.jpg', {
+ type: 'text/plain',
+ });
+ const blob = 'blob:http://127.0.0.1:3000/0046cf8c-ea21-4720-91ef-2e354d570c75';
+
+ const createComponent = ({ propsData = {} } = {}) => {
+ wrapper = shallowMountExtended(AvatarUploadDropzone, {
+ propsData: {
+ ...defaultPropsData,
+ ...propsData,
+ },
+ });
+ };
+
+ const findUploadDropzone = () => wrapper.findComponent(UploadDropzone);
+ const findButton = () => wrapper.findComponent(GlButton);
+
+ beforeEach(() => {
+ window.URL.createObjectURL = jest.fn().mockImplementation(() => blob);
+ window.URL.revokeObjectURL = jest.fn();
+ });
+
+ it('renders `GlAvatar` with correct props', () => {
+ createComponent();
+
+ expect(wrapper.findComponent(GlAvatar).props()).toMatchObject({
+ entityId: defaultPropsData.entity.id,
+ entityName: defaultPropsData.entity.name,
+ shape: AVATAR_SHAPE_OPTION_RECT,
+ size: 96,
+ src: null,
+ });
+ });
+
+ it('renders label', () => {
+ createComponent();
+
+ expect(wrapper.findByText(defaultPropsData.label).exists()).toBe(true);
+ });
+
+ describe('when `value` prop is updated', () => {
+ beforeEach(() => {
+ createComponent();
+
+ // setProps is justified here because we are testing the component's
+ // reactive behavior which constitutes an exception
+ // See https://docs.gitlab.com/ee/development/fe_guide/style/vue.html#setting-component-state
+ wrapper.setProps({ value: file });
+ });
+
+ it('updates `GlAvatar` `src` prop', () => {
+ expect(wrapper.findComponent(GlAvatar).props('src')).toBe(blob);
+ });
+
+ it('renders remove button', () => {
+ expect(findButton().exists()).toBe(true);
+ });
+
+ it('renders truncated file name', () => {
+ expect(wrapper.findComponent(GlTruncate).props('text')).toBe('foo.jpg');
+ });
+
+ it('does not render upload dropzone', () => {
+ expect(findUploadDropzone().exists()).toBe(false);
+ });
+
+ describe('when `value` prop is updated a second time', () => {
+ beforeEach(() => {
+ wrapper.setProps({ value: file2 });
+ });
+
+ it('revokes the object URL of the previous avatar', () => {
+ expect(window.URL.revokeObjectURL).toHaveBeenCalledWith(blob);
+ });
+ });
+
+ describe('when avatar is removed', () => {
+ beforeEach(() => {
+ findButton().vm.$emit('click');
+ });
+
+ it('emits `input` event with `null` payload', () => {
+ expect(wrapper.emitted('input')).toEqual([[null]]);
+ });
+ });
+ });
+
+ describe('when `UploadDropzone` emits `change` event', () => {
+ beforeEach(() => {
+ createComponent();
+ findUploadDropzone().vm.$emit('change', file);
+ });
+
+ it('emits `input` event', () => {
+ expect(wrapper.emitted('input')).toEqual([[file]]);
+ });
+ });
+});
diff --git a/spec/frontend/vue_shared/components/user_select_spec.js b/spec/frontend/vue_shared/components/user_select_spec.js
index 119b892392f..e1b79ad7b14 100644
--- a/spec/frontend/vue_shared/components/user_select_spec.js
+++ b/spec/frontend/vue_shared/components/user_select_spec.js
@@ -3,6 +3,7 @@ import { cloneDeep } from 'lodash';
import Vue, { nextTick } from 'vue';
import VueApollo from 'vue-apollo';
import createMockApollo from 'helpers/mock_apollo_helper';
+import { stubComponent } from 'helpers/stub_component';
import waitForPromises from 'helpers/wait_for_promises';
import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
import searchUsersQuery from '~/graphql_shared/queries/project_autocomplete_users.query.graphql';
@@ -39,6 +40,8 @@ const waitForSearch = async () => {
await waitForPromises();
};
+const focusInput = jest.fn();
+
Vue.use(VueApollo);
describe('User select dropdown', () => {
@@ -100,6 +103,11 @@ describe('User select dropdown', () => {
hide: hideDropdownMock,
},
},
+ GlSearchBoxByType: stubComponent(GlSearchBoxByType, {
+ methods: {
+ focusInput,
+ },
+ }),
},
});
};
@@ -409,6 +417,43 @@ describe('User select dropdown', () => {
expect(findUnselectedParticipants()).toHaveLength(0);
expect(findEmptySearchResults().exists()).toBe(true);
});
+
+ it('clears search term and focuses search field after selecting a user', async () => {
+ createComponent({
+ searchQueryHandler: jest.fn().mockResolvedValue(searchAutocompleteQueryResponse),
+ });
+ await waitForPromises();
+
+ findSearchField().vm.$emit('input', 'roo');
+ await waitForSearch();
+
+ findUnselectedParticipants().at(0).trigger('click');
+ await nextTick();
+
+ expect(findSearchField().props('value')).toBe('');
+ expect(focusInput).toHaveBeenCalled();
+ });
+
+ it('clears search term and focuses search field after unselecting a user', async () => {
+ createComponent({
+ props: {
+ value: [searchAutocompleteQueryResponse.data.workspace.users[0]],
+ },
+ searchQueryHandler: jest.fn().mockResolvedValue(searchAutocompleteQueryResponse),
+ });
+ await waitForPromises();
+
+ expect(findSelectedParticipants()).toHaveLength(1);
+
+ findSearchField().vm.$emit('input', 'roo');
+ await waitForSearch();
+
+ findSelectedParticipants().at(0).trigger('click');
+ await nextTick();
+
+ expect(findSearchField().props('value')).toBe('');
+ expect(focusInput).toHaveBeenCalled();
+ });
});
describe('when on merge request sidebar', () => {
diff --git a/spec/frontend/vue_shared/issuable/list/components/issuable_item_spec.js b/spec/frontend/vue_shared/issuable/list/components/issuable_item_spec.js
index 98a87ddbcce..e898b3977d8 100644
--- a/spec/frontend/vue_shared/issuable/list/components/issuable_item_spec.js
+++ b/spec/frontend/vue_shared/issuable/list/components/issuable_item_spec.js
@@ -1,4 +1,4 @@
-import { GlLink, GlLabel, GlIcon, GlFormCheckbox, GlSprintf } from '@gitlab/ui';
+import { GlBadge, GlLink, GlLabel, GlIcon, GlFormCheckbox, GlSprintf } from '@gitlab/ui';
import { nextTick } from 'vue';
import { useFakeDate } from 'helpers/fake_date';
import { shallowMountExtended as shallowMount } from 'helpers/vue_test_utils_helper';
@@ -46,10 +46,11 @@ describe('IssuableItem', () => {
const mockAuthor = mockIssuable.author;
let wrapper;
- const findTimestampWrapper = () => wrapper.find('[data-testid="issuable-timestamp"]');
+ const findTimestampWrapper = () => wrapper.findByTestId('issuable-timestamp');
const findWorkItemTypeIcon = () => wrapper.findComponent(WorkItemTypeIcon);
const findIssuableTitleLink = () => wrapper.findComponentByTestId('issuable-title-link');
const findIssuableItemWrapper = () => wrapper.findByTestId('issuable-item-wrapper');
+ const findStatusEl = () => wrapper.findByTestId('issuable-status');
beforeEach(() => {
gon.gitlab_url = MOCK_GITLAB_URL;
@@ -290,7 +291,7 @@ describe('IssuableItem', () => {
await nextTick();
- const titleEl = wrapper.find('[data-testid="issuable-title"]');
+ const titleEl = wrapper.findByTestId('issuable-title');
expect(titleEl.exists()).toBe(true);
expect(titleEl.findComponent(GlLink).attributes('href')).toBe(expectedHref);
@@ -329,7 +330,7 @@ describe('IssuableItem', () => {
await nextTick();
expect(
- wrapper.find('[data-testid="issuable-title"]').findComponent(GlLink).attributes('target'),
+ wrapper.findByTestId('issuable-title').findComponent(GlLink).attributes('target'),
).toBe('_blank');
});
@@ -343,7 +344,7 @@ describe('IssuableItem', () => {
await nextTick();
- const confidentialEl = wrapper.find('[data-testid="issuable-title"]').findComponent(GlIcon);
+ const confidentialEl = wrapper.findByTestId('issuable-title').findComponent(GlIcon);
expect(confidentialEl.exists()).toBe(true);
expect(confidentialEl.props('name')).toBe('eye-slash');
@@ -368,7 +369,7 @@ describe('IssuableItem', () => {
it('renders task status', () => {
wrapper = createComponent();
- const taskStatus = wrapper.find('[data-testid="task-status"]');
+ const taskStatus = wrapper.findByTestId('task-status');
const expected = `${mockIssuable.taskCompletionStatus.completedCount} of ${mockIssuable.taskCompletionStatus.count} checklist items completed`;
expect(taskStatus.text()).toBe(expected);
@@ -389,7 +390,7 @@ describe('IssuableItem', () => {
it('renders issuable reference', () => {
wrapper = createComponent();
- const referenceEl = wrapper.find('[data-testid="issuable-reference"]');
+ const referenceEl = wrapper.findByTestId('issuable-reference');
expect(referenceEl.exists()).toBe(true);
expect(referenceEl.text()).toBe(`#${mockIssuable.iid}`);
@@ -414,7 +415,7 @@ describe('IssuableItem', () => {
it('renders issuable createdAt info', () => {
wrapper = createComponent();
- const createdAtEl = wrapper.find('[data-testid="issuable-created-at"]');
+ const createdAtEl = wrapper.findByTestId('issuable-created-at');
expect(createdAtEl.exists()).toBe(true);
expect(createdAtEl.attributes('title')).toBe(
@@ -426,7 +427,7 @@ describe('IssuableItem', () => {
it('renders issuable author info', () => {
wrapper = createComponent();
- const authorEl = wrapper.find('[data-testid="issuable-author"]');
+ const authorEl = wrapper.findByTestId('issuable-author');
expect(authorEl.exists()).toBe(true);
expect(authorEl.attributes()).toMatchObject({
@@ -497,20 +498,52 @@ describe('IssuableItem', () => {
});
});
- it('renders issuable status via slot', () => {
- wrapper = createComponent({
- issuableSymbol: '#',
- issuable: mockIssuable,
- slots: {
- status: `
- <b class="js-status">${mockIssuable.state}</b>
- `,
- },
+ describe('status', () => {
+ it('renders issuable status via slot', () => {
+ wrapper = createComponent({
+ issuableSymbol: '#',
+ issuable: mockIssuable,
+ slots: {
+ status: `
+ <b data-testid="js-status">${mockIssuable.state}</b>
+ `,
+ },
+ });
+ const statusEl = wrapper.findByTestId('js-status');
+
+ expect(statusEl.exists()).toBe(true);
+ expect(statusEl.text()).toBe(`${mockIssuable.state}`);
+ });
+
+ it('renders issuable status as badge', () => {
+ const closedMockIssuable = { ...mockIssuable, state: 'closed' };
+ wrapper = createComponent({
+ issuableSymbol: '#',
+ issuable: closedMockIssuable,
+ slots: {
+ status: closedMockIssuable.state,
+ },
+ });
+ const statusEl = findStatusEl();
+
+ expect(statusEl.findComponent(GlBadge).exists()).toBe(true);
+ expect(statusEl.text()).toBe(`${closedMockIssuable.state}`);
});
- const statusEl = wrapper.find('.js-status');
- expect(statusEl.exists()).toBe(true);
- expect(statusEl.text()).toBe(`${mockIssuable.state}`);
+ it('renders issuable status without badge if open', () => {
+ wrapper = createComponent({
+ issuableSymbol: '#',
+ issuable: mockIssuable,
+ slots: {
+ status: mockIssuable.state,
+ },
+ });
+
+ const statusEl = findStatusEl();
+
+ expect(statusEl.findComponent(GlBadge).exists()).toBe(false);
+ expect(statusEl.text()).toBe(`${mockIssuable.state}`);
+ });
});
it('renders discussions count', () => {
@@ -543,7 +576,7 @@ describe('IssuableItem', () => {
it('renders issuable updatedAt info', () => {
wrapper = createComponent();
- const timestampEl = wrapper.find('[data-testid="issuable-timestamp"]');
+ const timestampEl = wrapper.findByTestId('issuable-timestamp');
expect(timestampEl.attributes('title')).toBe(
localeDateFormat.asDateTimeFull.format(mockIssuable.updatedAt),
@@ -566,7 +599,7 @@ describe('IssuableItem', () => {
issuable: { ...mockIssuable, closedAt, state: 'closed' },
});
- const timestampEl = wrapper.find('[data-testid="issuable-timestamp"]');
+ const timestampEl = wrapper.findByTestId('issuable-timestamp');
expect(timestampEl.attributes('title')).toBe(
localeDateFormat.asDateTimeFull.format(closedAt),
diff --git a/spec/frontend/work_items/components/notes/work_item_comment_form_spec.js b/spec/frontend/work_items/components/notes/work_item_comment_form_spec.js
index fe89c525fea..cbde3c4a065 100644
--- a/spec/frontend/work_items/components/notes/work_item_comment_form_spec.js
+++ b/spec/frontend/work_items/components/notes/work_item_comment_form_spec.js
@@ -208,6 +208,20 @@ describe('Work item comment form component', () => {
['Something went wrong while updating the task. Please try again.'],
]);
});
+
+ it('emits `submitForm` event on closing of work item', async () => {
+ createComponent({
+ isNewDiscussion: true,
+ });
+
+ findWorkItemToggleStateButton().vm.$emit('submit-comment');
+
+ await waitForPromises();
+
+ expect(wrapper.emitted('submitForm')).toEqual([
+ [{ commentText: draftComment, isNoteInternal: false }],
+ ]);
+ });
});
describe('internal note', () => {
@@ -239,6 +253,17 @@ describe('Work item comment form component', () => {
expect(findConfirmButton().text()).toBe(WorkItemCommentForm.i18n.addInternalNote);
});
+
+ it('emits `submitForm` event on closing of work item', async () => {
+ findInternalNoteCheckbox().vm.$emit('input', true);
+ findWorkItemToggleStateButton().vm.$emit('submit-comment');
+
+ await waitForPromises();
+
+ expect(wrapper.emitted('submitForm')).toEqual([
+ [{ commentText: draftComment, isNoteInternal: true }],
+ ]);
+ });
});
});
});
diff --git a/spec/frontend/work_items/components/shared/work_item_sidebar_dropdown_widget_with_edit_spec.js b/spec/frontend/work_items/components/shared/work_item_sidebar_dropdown_widget_with_edit_spec.js
new file mode 100644
index 00000000000..171493e87f8
--- /dev/null
+++ b/spec/frontend/work_items/components/shared/work_item_sidebar_dropdown_widget_with_edit_spec.js
@@ -0,0 +1,161 @@
+import { GlForm, GlCollapsibleListbox, GlLoadingIcon } from '@gitlab/ui';
+import { nextTick } from 'vue';
+import { mountExtended } from 'helpers/vue_test_utils_helper';
+import { __ } from '~/locale';
+import WorkItemSidebarDropdownWidgetWithEdit from '~/work_items/components/shared/work_item_sidebar_dropdown_widget_with_edit.vue';
+
+describe('WorkItemSidebarDropdownWidgetWithEdit component', () => {
+ let wrapper;
+
+ const findHeader = () => wrapper.find('h3');
+ const findEditButton = () => wrapper.findByTestId('edit-button');
+ const findApplyButton = () => wrapper.findByTestId('apply-button');
+
+ const findLoadingIcon = () => wrapper.findComponent(GlLoadingIcon);
+ const findLabel = () => wrapper.find('label');
+ const findForm = () => wrapper.findComponent(GlForm);
+ const findCollapsibleListbox = () => wrapper.findComponent(GlCollapsibleListbox);
+
+ const createComponent = ({
+ itemValue = null,
+ canUpdate = true,
+ isEditing = false,
+ updateInProgress = false,
+ } = {}) => {
+ wrapper = mountExtended(WorkItemSidebarDropdownWidgetWithEdit, {
+ propsData: {
+ dropdownLabel: __('Iteration'),
+ dropdownName: 'iteration',
+ listItems: [],
+ itemValue,
+ canUpdate,
+ updateInProgress,
+ headerText: __('Select iteration'),
+ },
+ });
+
+ if (isEditing) {
+ findEditButton().vm.$emit('click');
+ }
+ };
+
+ describe('label', () => {
+ it('shows header when not editing', () => {
+ createComponent();
+
+ expect(findHeader().exists()).toBe(true);
+ expect(findHeader().classes('gl-sr-only')).toBe(false);
+ expect(findLabel().exists()).toBe(false);
+ });
+
+ it('shows label and hides header while editing', async () => {
+ createComponent();
+
+ findEditButton().vm.$emit('click');
+
+ await nextTick();
+
+ expect(findLabel().exists()).toBe(true);
+ expect(findHeader().classes('gl-sr-only')).toBe(true);
+ });
+ });
+
+ describe('edit button', () => {
+ it('is not shown if user cannot edit', () => {
+ createComponent({ canUpdate: false });
+
+ expect(findEditButton().exists()).toBe(false);
+ });
+
+ it('is shown if user can edit', () => {
+ createComponent({ canUpdate: true });
+
+ expect(findEditButton().exists()).toBe(true);
+ });
+
+ it('triggers edit mode on click', async () => {
+ createComponent();
+
+ findEditButton().vm.$emit('click');
+
+ await nextTick();
+
+ expect(findLabel().exists()).toBe(true);
+ expect(findForm().exists()).toBe(true);
+ });
+
+ it('is replaced by Apply button while editing', async () => {
+ createComponent();
+
+ findEditButton().vm.$emit('click');
+
+ await nextTick();
+
+ expect(findEditButton().exists()).toBe(false);
+ expect(findApplyButton().exists()).toBe(true);
+ });
+ });
+
+ describe('loading icon', () => {
+ it('shows loading icon while update is in progress', async () => {
+ createComponent({ updateInProgress: true });
+
+ await nextTick();
+
+ expect(findLoadingIcon().exists()).toBe(true);
+ });
+ });
+
+ describe('value', () => {
+ it('shows None when no item value is set', () => {
+ createComponent({ itemValue: null });
+
+ expect(wrapper.text()).toContain('None');
+ });
+ });
+
+ describe('form', () => {
+ it('is not shown while not editing', () => {
+ createComponent();
+
+ expect(findForm().exists()).toBe(false);
+ });
+
+ it('is shown while editing', async () => {
+ createComponent({ isEditing: true });
+ await nextTick();
+
+ expect(findForm().exists()).toBe(true);
+ });
+ });
+
+ describe('Dropdown', () => {
+ it('is not shown while not editing', () => {
+ createComponent();
+
+ expect(findCollapsibleListbox().exists()).toBe(false);
+ });
+
+ it('renders the collapsible listbox with required props', async () => {
+ createComponent({ isEditing: true });
+
+ await nextTick();
+
+ expect(findCollapsibleListbox().exists()).toBe(true);
+ expect(findCollapsibleListbox().props()).toMatchObject({
+ items: [],
+ headerText: 'Select iteration',
+ category: 'primary',
+ loading: false,
+ isCheckCentered: true,
+ searchable: true,
+ searching: false,
+ infiniteScroll: false,
+ noResultsText: 'No matching results',
+ toggleText: 'None',
+ searchPlaceholder: 'Search',
+ resetButtonLabel: 'Clear',
+ });
+ });
+ });
+});
diff --git a/spec/frontend/work_items/components/shared/work_item_token_input_spec.js b/spec/frontend/work_items/components/shared/work_item_token_input_spec.js
index 5726aaaa2d0..f9ba34b3bb6 100644
--- a/spec/frontend/work_items/components/shared/work_item_token_input_spec.js
+++ b/spec/frontend/work_items/components/shared/work_item_token_input_spec.js
@@ -8,23 +8,78 @@ import WorkItemTokenInput from '~/work_items/components/shared/work_item_token_i
import { WORK_ITEM_TYPE_ENUM_TASK } from '~/work_items/constants';
import groupWorkItemsQuery from '~/work_items/graphql/group_work_items.query.graphql';
import projectWorkItemsQuery from '~/work_items/graphql/project_work_items.query.graphql';
-import {
- availableWorkItemsResponse,
- searchWorkItemsTextResponse,
- searchWorkItemsIidResponse,
- searchWorkItemsTextIidResponse,
-} from '../../mock_data';
+import workItemsByReferencesQuery from '~/work_items/graphql/work_items_by_references.query.graphql';
+import { searchWorkItemsResponse } from '../../mock_data';
Vue.use(VueApollo);
describe('WorkItemTokenInput', () => {
let wrapper;
- const availableWorkItemsResolver = jest.fn().mockResolvedValue(availableWorkItemsResponse);
- const groupSearchedWorkItemResolver = jest.fn().mockResolvedValue(searchWorkItemsTextResponse);
- const searchWorkItemTextResolver = jest.fn().mockResolvedValue(searchWorkItemsTextResponse);
- const searchWorkItemIidResolver = jest.fn().mockResolvedValue(searchWorkItemsIidResponse);
- const searchWorkItemTextIidResolver = jest.fn().mockResolvedValue(searchWorkItemsTextIidResponse);
+ const availableWorkItemsResolver = jest.fn().mockResolvedValue(
+ searchWorkItemsResponse({
+ workItems: [
+ {
+ id: 'gid://gitlab/WorkItem/458',
+ iid: '2',
+ title: 'Task 1',
+ confidential: false,
+ __typename: 'WorkItem',
+ },
+ {
+ id: 'gid://gitlab/WorkItem/459',
+ iid: '3',
+ title: 'Task 2',
+ confidential: false,
+ __typename: 'WorkItem',
+ },
+ {
+ id: 'gid://gitlab/WorkItem/460',
+ iid: '4',
+ title: 'Task 3',
+ confidential: false,
+ __typename: 'WorkItem',
+ },
+ ],
+ }),
+ );
+
+ const mockWorkItem = {
+ id: 'gid://gitlab/WorkItem/459',
+ iid: '3',
+ title: 'Task 2',
+ confidential: false,
+ __typename: 'WorkItem',
+ };
+ const groupSearchedWorkItemResolver = jest.fn().mockResolvedValue(
+ searchWorkItemsResponse({
+ workItems: [mockWorkItem],
+ }),
+ );
+ const searchWorkItemTextResolver = jest.fn().mockResolvedValue(
+ searchWorkItemsResponse({
+ workItems: [mockWorkItem],
+ }),
+ );
+ const mockworkItemReferenceQueryResponse = {
+ data: {
+ workItemsByReference: {
+ nodes: [
+ {
+ id: 'gid://gitlab/WorkItem/705',
+ iid: '111',
+ title: 'Objective linked items 104',
+ confidential: false,
+ __typename: 'WorkItem',
+ },
+ ],
+ __typename: 'WorkItemConnection',
+ },
+ },
+ };
+ const workItemReferencesQueryResolver = jest
+ .fn()
+ .mockResolvedValue(mockworkItemReferenceQueryResponse);
const createComponent = async ({
workItemsToAdd = [],
@@ -38,6 +93,7 @@ describe('WorkItemTokenInput', () => {
apolloProvider: createMockApollo([
[projectWorkItemsQuery, workItemsResolver],
[groupWorkItemsQuery, groupSearchedWorkItemResolver],
+ [workItemsByReferencesQuery, workItemReferencesQueryResolver],
]),
provide: {
isGroup,
@@ -58,6 +114,7 @@ describe('WorkItemTokenInput', () => {
const findTokenSelector = () => wrapper.findComponent(GlTokenSelector);
const findGlAlert = () => wrapper.findComponent(GlAlert);
+ const findNoMatchFoundMessage = () => wrapper.findByTestId('no-match-found-namespace-message');
it('searches for available work items on focus', async () => {
createComponent({ workItemsResolver: availableWorkItemsResolver });
@@ -68,42 +125,155 @@ describe('WorkItemTokenInput', () => {
fullPath: 'test-project-path',
searchTerm: '',
types: [WORK_ITEM_TYPE_ENUM_TASK],
- in: undefined,
iid: null,
- isNumber: false,
+ searchByIid: false,
+ searchByText: true,
});
expect(findTokenSelector().props('dropdownItems')).toHaveLength(3);
});
- it.each`
- inputType | input | resolver | searchTerm | iid | isNumber | length
- ${'iid'} | ${'101'} | ${searchWorkItemIidResolver} | ${'101'} | ${'101'} | ${true} | ${1}
- ${'text'} | ${'Task 2'} | ${searchWorkItemTextResolver} | ${'Task 2'} | ${null} | ${false} | ${1}
- ${'iid and text'} | ${'123'} | ${searchWorkItemTextIidResolver} | ${'123'} | ${'123'} | ${true} | ${2}
- `(
- 'searches by $inputType for available work items when typing in input',
- async ({ input, resolver, searchTerm, iid, isNumber, length }) => {
- createComponent({ workItemsResolver: resolver });
+ it('renders red border around token selector input when work item is not valid', () => {
+ createComponent({
+ areWorkItemsToAddValid: false,
+ });
+
+ expect(findTokenSelector().props('containerClass')).toBe('gl-inset-border-1-red-500!');
+ });
+
+ describe('when input data is provided', () => {
+ const fillWorkItemInput = (input) => {
findTokenSelector().vm.$emit('focus');
findTokenSelector().vm.$emit('text-input', input);
+ };
+
+ const mockWorkItemResponseItem1 = {
+ id: 'gid://gitlab/WorkItem/460',
+ iid: '101',
+ title: 'Task 3',
+ confidential: false,
+ __typename: 'WorkItem',
+ };
+ const mockWorkItemResponseItem2 = {
+ id: 'gid://gitlab/WorkItem/461',
+ iid: '3',
+ title: 'Task 123',
+ confidential: false,
+ __typename: 'WorkItem',
+ };
+ const mockWorkItemResponseItem3 = {
+ id: 'gid://gitlab/WorkItem/462',
+ iid: '123',
+ title: 'Task 2',
+ confidential: false,
+ __typename: 'WorkItem',
+ };
+
+ const searchWorkItemIidResolver = jest.fn().mockResolvedValue(
+ searchWorkItemsResponse({
+ workItemsByIid: [mockWorkItemResponseItem1],
+ }),
+ );
+ const searchWorkItemTextIidResolver = jest.fn().mockResolvedValue(
+ searchWorkItemsResponse({
+ workItems: [mockWorkItemResponseItem2],
+ workItemsByIid: [mockWorkItemResponseItem3],
+ }),
+ );
+
+ const emptyWorkItemResolver = jest.fn().mockResolvedValue(searchWorkItemsResponse());
+
+ const validIid = mockWorkItemResponseItem1.iid;
+ const validWildCardIid = `#${mockWorkItemResponseItem1.iid}`;
+ const searchedText = mockWorkItem.title;
+ const searchedIidText = mockWorkItemResponseItem3.iid;
+ const nonExistentIid = '111';
+ const nonExistentWorkItem = 'Key result';
+ const validWorkItemUrl = 'http://localhost/gitlab-org/test-project-path/-/work_items/111';
+ const validWorkItemReference = 'gitlab-org/test-project-path#111';
+ const invalidWorkItemUrl = 'invalid-url/gitlab-org/test-project-path/-/work_items/101';
+
+ it.each`
+ inputType | input | resolver | searchTerm | iid | searchByText | searchByIid | length
+ ${'iid'} | ${validIid} | ${searchWorkItemIidResolver} | ${validIid} | ${validIid} | ${true} | ${true} | ${1}
+ ${'text'} | ${searchedText} | ${searchWorkItemTextResolver} | ${searchedText} | ${null} | ${true} | ${false} | ${1}
+ ${'iid and text'} | ${searchedIidText} | ${searchWorkItemTextIidResolver} | ${searchedIidText} | ${searchedIidText} | ${true} | ${true} | ${2}
+ `(
+ 'lists work items when $inputType is valid',
+ async ({ input, resolver, searchTerm, iid, searchByIid, searchByText, length }) => {
+ createComponent({ workItemsResolver: resolver });
+
+ fillWorkItemInput(input);
+
+ await waitForPromises();
+
+ expect(resolver).toHaveBeenCalledWith({
+ fullPath: 'test-project-path',
+ types: [WORK_ITEM_TYPE_ENUM_TASK],
+ searchTerm,
+ in: 'TITLE',
+ iid,
+ searchByIid,
+ searchByText,
+ });
+ expect(findTokenSelector().props('dropdownItems')).toHaveLength(length);
+ },
+ );
+
+ it.each`
+ inputType | input | searchTerm | iid | searchByText | searchByIid
+ ${'iid'} | ${nonExistentIid} | ${nonExistentIid} | ${nonExistentIid} | ${true} | ${true}
+ ${'text'} | ${nonExistentWorkItem} | ${nonExistentWorkItem} | ${null} | ${true} | ${false}
+ ${'url'} | ${invalidWorkItemUrl} | ${invalidWorkItemUrl} | ${null} | ${true} | ${false}
+ `(
+ 'list is empty when $inputType is invalid',
+ async ({ input, searchTerm, iid, searchByIid, searchByText }) => {
+ createComponent({ workItemsResolver: emptyWorkItemResolver });
+
+ fillWorkItemInput(input);
+
+ await waitForPromises();
+
+ expect(emptyWorkItemResolver).toHaveBeenCalledWith({
+ fullPath: 'test-project-path',
+ types: [WORK_ITEM_TYPE_ENUM_TASK],
+ searchTerm,
+ in: 'TITLE',
+ iid,
+ searchByIid,
+ searchByText,
+ });
+ expect(findTokenSelector().props('dropdownItems')).toHaveLength(0);
+ },
+ );
+
+ it.each`
+ inputType | input | refs | length
+ ${'iid with wildcard'} | ${validWildCardIid} | ${[validWildCardIid]} | ${1}
+ ${'url'} | ${validWorkItemUrl} | ${[validWorkItemUrl]} | ${1}
+ ${'reference'} | ${validWorkItemReference} | ${[validWorkItemReference]} | ${1}
+ `('lists work items when valid $inputType is pasted', async ({ input, refs, length }) => {
+ createComponent({ workItemsResolver: workItemReferencesQueryResolver });
+
+ fillWorkItemInput(input);
+
await waitForPromises();
- expect(resolver).toHaveBeenCalledWith({
- searchTerm,
- in: 'TITLE',
- iid,
- isNumber,
+ expect(workItemReferencesQueryResolver).toHaveBeenCalledWith({
+ contextNamespacePath: 'test-project-path',
+ refs,
});
expect(findTokenSelector().props('dropdownItems')).toHaveLength(length);
- },
- );
-
- it('renders red border around token selector input when work item is not valid', () => {
- createComponent({
- areWorkItemsToAddValid: false,
});
- expect(findTokenSelector().props('containerClass')).toBe('gl-inset-border-1-red-500!');
+ it('shows proper message if provided with cross project URL', async () => {
+ createComponent({ workItemsResolver: emptyWorkItemResolver });
+
+ fillWorkItemInput('http://localhost/gitlab-org/cross-project-path/-/work_items/101');
+
+ await waitForPromises();
+
+ expect(findNoMatchFoundMessage().text()).toBe('No matches found');
+ });
});
describe('when project context', () => {
diff --git a/spec/frontend/work_items/components/work_item_assignees_spec.js b/spec/frontend/work_items/components/work_item_assignees_spec.js
index 196e19791df..6c0042bdad7 100644
--- a/spec/frontend/work_items/components/work_item_assignees_spec.js
+++ b/spec/frontend/work_items/components/work_item_assignees_spec.js
@@ -103,6 +103,9 @@ describe('WorkItemAssignees component', () => {
},
attachTo: document.body,
apolloProvider,
+ stubs: {
+ GlEmoji: { template: '<div/>' },
+ },
});
};
diff --git a/spec/frontend/work_items/components/work_item_attributes_wrapper_spec.js b/spec/frontend/work_items/components/work_item_attributes_wrapper_spec.js
index 48ec84ceb85..43f7027406f 100644
--- a/spec/frontend/work_items/components/work_item_attributes_wrapper_spec.js
+++ b/spec/frontend/work_items/components/work_item_attributes_wrapper_spec.js
@@ -3,7 +3,8 @@ import { shallowMount } from '@vue/test-utils';
import WorkItemAssignees from '~/work_items/components/work_item_assignees.vue';
import WorkItemDueDate from '~/work_items/components/work_item_due_date.vue';
import WorkItemLabels from '~/work_items/components/work_item_labels.vue';
-import WorkItemMilestone from '~/work_items/components/work_item_milestone.vue';
+import WorkItemMilestoneInline from '~/work_items/components/work_item_milestone_inline.vue';
+import WorkItemMilestoneWithEdit from '~/work_items/components/work_item_milestone_with_edit.vue';
import WorkItemParentInline from '~/work_items/components/work_item_parent_inline.vue';
import WorkItemParent from '~/work_items/components/work_item_parent_with_edit.vue';
import waitForPromises from 'helpers/wait_for_promises';
@@ -24,7 +25,8 @@ describe('WorkItemAttributesWrapper component', () => {
const findWorkItemDueDate = () => wrapper.findComponent(WorkItemDueDate);
const findWorkItemAssignees = () => wrapper.findComponent(WorkItemAssignees);
const findWorkItemLabels = () => wrapper.findComponent(WorkItemLabels);
- const findWorkItemMilestone = () => wrapper.findComponent(WorkItemMilestone);
+ const findWorkItemMilestone = () => wrapper.findComponent(WorkItemMilestoneWithEdit);
+ const findWorkItemMilestoneInline = () => wrapper.findComponent(WorkItemMilestoneInline);
const findWorkItemParentInline = () => wrapper.findComponent(WorkItemParentInline);
const findWorkItemParent = () => wrapper.findComponent(WorkItemParent);
@@ -110,6 +112,26 @@ describe('WorkItemAttributesWrapper component', () => {
expect(findWorkItemMilestone().exists()).toBe(exists);
});
+
+ it.each`
+ description | milestoneWidgetInlinePresent | milestoneWidgetWithEditPresent | workItemsMvc2FlagEnabled
+ ${'renders WorkItemMilestone when workItemsMvc2 enabled'} | ${false} | ${true} | ${true}
+ ${'renders WorkItemMilestoneInline when workItemsMvc2 disabled'} | ${true} | ${false} | ${false}
+ `(
+ '$description',
+ async ({
+ milestoneWidgetInlinePresent,
+ milestoneWidgetWithEditPresent,
+ workItemsMvc2FlagEnabled,
+ }) => {
+ createComponent({ workItemsMvc2: workItemsMvc2FlagEnabled });
+
+ await waitForPromises();
+
+ expect(findWorkItemMilestone().exists()).toBe(milestoneWidgetWithEditPresent);
+ expect(findWorkItemMilestoneInline().exists()).toBe(milestoneWidgetInlinePresent);
+ },
+ );
});
describe('parent widget', () => {
diff --git a/spec/frontend/work_items/components/work_item_description_rendered_spec.js b/spec/frontend/work_items/components/work_item_description_rendered_spec.js
index 4f1d49ee2e5..c4c88c7643f 100644
--- a/spec/frontend/work_items/components/work_item_description_rendered_spec.js
+++ b/spec/frontend/work_items/components/work_item_description_rendered_spec.js
@@ -20,11 +20,13 @@ describe('WorkItemDescription', () => {
const createComponent = ({
workItemDescription = defaultWorkItemDescription,
canEdit = false,
+ disableInlineEditing = false,
} = {}) => {
wrapper = shallowMount(WorkItemDescriptionRendered, {
propsData: {
workItemDescription,
canEdit,
+ disableInlineEditing,
},
});
};
@@ -81,8 +83,8 @@ describe('WorkItemDescription', () => {
});
describe('Edit button', () => {
- it('is not visible when canUpdate = false', async () => {
- await createComponent({
+ it('is not visible when canUpdate = false', () => {
+ createComponent({
canUpdate: false,
});
@@ -100,5 +102,14 @@ describe('WorkItemDescription', () => {
expect(wrapper.emitted('startEditing')).toEqual([[]]);
});
+
+ it('is not visible when `disableInlineEditing` is true and the user can edit', () => {
+ createComponent({
+ disableInlineEditing: true,
+ canEdit: true,
+ });
+
+ expect(findEditButton().exists()).toBe(false);
+ });
});
});
diff --git a/spec/frontend/work_items/components/work_item_description_spec.js b/spec/frontend/work_items/components/work_item_description_spec.js
index 1d25bb74986..3b137008b5b 100644
--- a/spec/frontend/work_items/components/work_item_description_spec.js
+++ b/spec/frontend/work_items/components/work_item_description_spec.js
@@ -56,6 +56,8 @@ describe('WorkItemDescription', () => {
isEditing = false,
isGroup = false,
workItemIid = '1',
+ disableInlineEditing = false,
+ editMode = false,
} = {}) => {
workItemResponseHandler = jest.fn().mockResolvedValue(workItemResponse);
groupWorkItemResponseHandler = jest
@@ -73,6 +75,8 @@ describe('WorkItemDescription', () => {
fullPath: 'test-project-path',
workItemId: id,
workItemIid,
+ disableInlineEditing,
+ editMode,
},
provide: {
isGroup,
@@ -283,4 +287,36 @@ describe('WorkItemDescription', () => {
expect(groupWorkItemResponseHandler).toHaveBeenCalled();
});
});
+
+ describe('when inline editing is disabled', () => {
+ describe('when edit mode is inactive', () => {
+ beforeEach(() => {
+ createComponent({ disableInlineEditing: true });
+ });
+
+ it('passes the correct props for work item rendered description', () => {
+ expect(findRenderedDescription().props('disableInlineEditing')).toBe(true);
+ });
+
+ it('does not show edit mode of markdown editor in default mode', () => {
+ expect(findMarkdownEditor().exists()).toBe(false);
+ });
+ });
+
+ describe('when edit mode is active', () => {
+ beforeEach(() => {
+ createComponent({ disableInlineEditing: true, editMode: true });
+ });
+
+ it('shows markdown editor in edit mode only when the correct props are passed', () => {
+ expect(findMarkdownEditor().exists()).toBe(true);
+ });
+
+ it('emits the `updateDraft` event when clicked on submit button in edit mode', () => {
+ const updatedDesc = 'updated desc with inline editing disabled';
+ findMarkdownEditor().vm.$emit('input', updatedDesc);
+ expect(wrapper.emitted('updateDraft')).toEqual([[updatedDesc]]);
+ });
+ });
+ });
});
diff --git a/spec/frontend/work_items/components/work_item_detail_spec.js b/spec/frontend/work_items/components/work_item_detail_spec.js
index d63bb94c3f0..45c8c66cebf 100644
--- a/spec/frontend/work_items/components/work_item_detail_spec.js
+++ b/spec/frontend/work_items/components/work_item_detail_spec.js
@@ -19,6 +19,7 @@ import WorkItemRelationships from '~/work_items/components/work_item_relationshi
import WorkItemNotes from '~/work_items/components/work_item_notes.vue';
import WorkItemDetailModal from '~/work_items/components/work_item_detail_modal.vue';
import WorkItemStickyHeader from '~/work_items/components/work_item_sticky_header.vue';
+import WorkItemTitleWithEdit from '~/work_items/components/work_item_title_with_edit.vue';
import AbuseCategorySelector from '~/abuse_reports/components/abuse_category_selector.vue';
import WorkItemTodos from '~/work_items/components/work_item_todos.vue';
import { i18n } from '~/work_items/constants';
@@ -32,6 +33,7 @@ import {
mockParent,
workItemByIidResponseFactory,
objectiveType,
+ epicType,
mockWorkItemCommentNote,
mockBlockingLinkedItem,
} from '../mock_data';
@@ -81,6 +83,8 @@ describe('WorkItemDetail component', () => {
const findStickyHeader = () => wrapper.findComponent(WorkItemStickyHeader);
const findWorkItemTwoColumnViewContainer = () => wrapper.findByTestId('work-item-overview');
const findRightSidebar = () => wrapper.findByTestId('work-item-overview-right-sidebar');
+ const findEditButton = () => wrapper.findByTestId('work-item-edit-form-button');
+ const findWorkItemTitleWithEdit = () => wrapper.findComponent(WorkItemTitleWithEdit);
const createComponent = ({
isGroup = false,
@@ -426,9 +430,18 @@ describe('WorkItemDetail component', () => {
workItemType: objectiveType,
confidential: true,
});
- const handler = jest.fn().mockResolvedValue(objectiveWorkItem);
+ const objectiveHandler = jest.fn().mockResolvedValue(objectiveWorkItem);
- it('renders children tree when work item is an Objective', async () => {
+ const epicWorkItem = workItemByIidResponseFactory({
+ workItemType: epicType,
+ });
+ const epicHandler = jest.fn().mockResolvedValue(epicWorkItem);
+
+ it.each`
+ type | handler
+ ${'Objective'} | ${objectiveHandler}
+ ${'Epic'} | ${epicHandler}
+ `('renders children tree when work item type is $type', async ({ handler }) => {
createComponent({ handler });
await waitForPromises();
@@ -436,14 +449,14 @@ describe('WorkItemDetail component', () => {
});
it('renders a modal', async () => {
- createComponent({ handler });
+ createComponent({ handler: objectiveHandler });
await waitForPromises();
expect(findModal().exists()).toBe(true);
});
it('opens the modal with the child when `show-modal` is emitted', async () => {
- createComponent({ handler, workItemsMvc2Enabled: true });
+ createComponent({ handler: objectiveHandler, workItemsMvc2Enabled: true });
await waitForPromises();
const event = {
@@ -466,7 +479,7 @@ describe('WorkItemDetail component', () => {
beforeEach(async () => {
createComponent({
isModal: true,
- handler,
+ handler: objectiveHandler,
workItemsMvc2Enabled: true,
});
@@ -686,4 +699,65 @@ describe('WorkItemDetail component', () => {
});
});
});
+
+ describe('edit button for work item title and description', () => {
+ describe('when `workItemsMvc2Enabled` is false', () => {
+ beforeEach(async () => {
+ createComponent({ workItemsMvc2Enabled: false });
+ await waitForPromises();
+ });
+
+ it('does not show the edit button', () => {
+ expect(findEditButton().exists()).toBe(false);
+ });
+
+ it('renders the work item title inline editable component', () => {
+ expect(findWorkItemTitle().exists()).toBe(true);
+ });
+
+ it('does not render the work item title with edit component', () => {
+ expect(findWorkItemTitleWithEdit().exists()).toBe(false);
+ });
+ });
+
+ describe('when `workItemsMvc2Enabled` is true', () => {
+ beforeEach(async () => {
+ createComponent({ workItemsMvc2Enabled: true });
+ await waitForPromises();
+ });
+
+ it('shows the edit button', () => {
+ expect(findEditButton().exists()).toBe(true);
+ });
+
+ it('does not render the work item title inline editable component', () => {
+ expect(findWorkItemTitle().exists()).toBe(false);
+ });
+
+ it('renders the work item title with edit component', () => {
+ expect(findWorkItemTitleWithEdit().exists()).toBe(true);
+ expect(findWorkItemTitleWithEdit().props('isEditing')).toBe(false);
+ });
+
+ it('work item description is not shown in edit mode by default', () => {
+ expect(findWorkItemDescription().props('editMode')).toBe(false);
+ });
+
+ describe('when edit is clicked', () => {
+ beforeEach(async () => {
+ findEditButton().vm.$emit('click');
+ await nextTick();
+ });
+
+ it('work item title component shows in edit mode', () => {
+ expect(findWorkItemTitleWithEdit().props('isEditing')).toBe(true);
+ });
+
+ it('work item description component shows in edit mode', () => {
+ expect(findWorkItemDescription().props('disableInlineEditing')).toBe(true);
+ expect(findWorkItemDescription().props('editMode')).toBe(true);
+ });
+ });
+ });
+ });
});
diff --git a/spec/frontend/work_items/components/work_item_links/work_item_links_form_spec.js b/spec/frontend/work_items/components/work_item_links/work_item_links_form_spec.js
index 0a9da17d284..ba09c7e9ce2 100644
--- a/spec/frontend/work_items/components/work_item_links/work_item_links_form_spec.js
+++ b/spec/frontend/work_items/components/work_item_links/work_item_links_form_spec.js
@@ -15,12 +15,14 @@ import {
I18N_WORK_ITEM_CONFIDENTIALITY_CHECKBOX_TOOLTIP,
} from '~/work_items/constants';
import projectWorkItemsQuery from '~/work_items/graphql/project_work_items.query.graphql';
+import groupWorkItemTypesQuery from '~/work_items/graphql/group_work_item_types.query.graphql';
import projectWorkItemTypesQuery from '~/work_items/graphql/project_work_item_types.query.graphql';
import createWorkItemMutation from '~/work_items/graphql/create_work_item.mutation.graphql';
import updateWorkItemMutation from '~/work_items/graphql/update_work_item.mutation.graphql';
import {
availableWorkItemsResponse,
projectWorkItemTypesQueryResponse,
+ groupWorkItemTypesQueryResponse,
createWorkItemMutationResponse,
updateWorkItemMutationResponse,
mockIterationWidgetResponse,
@@ -34,22 +36,27 @@ describe('WorkItemLinksForm', () => {
const updateMutationResolver = jest.fn().mockResolvedValue(updateWorkItemMutationResponse);
const createMutationResolver = jest.fn().mockResolvedValue(createWorkItemMutationResponse);
const availableWorkItemsResolver = jest.fn().mockResolvedValue(availableWorkItemsResponse);
+ const projectWorkItemTypesResolver = jest
+ .fn()
+ .mockResolvedValue(projectWorkItemTypesQueryResponse);
+ const groupWorkItemTypesResolver = jest.fn().mockResolvedValue(groupWorkItemTypesQueryResponse);
const mockParentIteration = mockIterationWidgetResponse;
const createComponent = async ({
- typesResponse = projectWorkItemTypesQueryResponse,
parentConfidential = false,
hasIterationsFeature = false,
parentIteration = null,
formType = FORM_TYPES.create,
parentWorkItemType = WORK_ITEM_TYPE_VALUE_ISSUE,
childrenType = WORK_ITEM_TYPE_ENUM_TASK,
+ isGroup = false,
} = {}) => {
wrapper = shallowMountExtended(WorkItemLinksForm, {
apolloProvider: createMockApollo([
[projectWorkItemsQuery, availableWorkItemsResolver],
- [projectWorkItemTypesQuery, jest.fn().mockResolvedValue(typesResponse)],
+ [projectWorkItemTypesQuery, projectWorkItemTypesResolver],
+ [groupWorkItemTypesQuery, groupWorkItemTypesResolver],
[updateWorkItemMutation, updateMutationResolver],
[createWorkItemMutation, createMutationResolver],
]),
@@ -64,7 +71,7 @@ describe('WorkItemLinksForm', () => {
},
provide: {
hasIterationsFeature,
- isGroup: false,
+ isGroup,
},
});
@@ -79,6 +86,19 @@ describe('WorkItemLinksForm', () => {
const findAddChildButton = () => wrapper.findByTestId('add-child-button');
const findValidationElement = () => wrapper.findByTestId('work-items-invalid');
+ it.each`
+ workspace | isGroup | queryResolver
+ ${'project'} | ${false} | ${projectWorkItemTypesResolver}
+ ${'group'} | ${true} | ${groupWorkItemTypesResolver}
+ `(
+ 'fetches $workspace work item types when isGroup is $isGroup',
+ async ({ isGroup, queryResolver }) => {
+ await createComponent({ isGroup });
+
+ expect(queryResolver).toHaveBeenCalled();
+ },
+ );
+
describe('creating a new work item', () => {
beforeEach(async () => {
await createComponent();
diff --git a/spec/frontend/work_items/components/work_item_milestone_spec.js b/spec/frontend/work_items/components/work_item_milestone_inline_spec.js
index fc2c5eb2af2..75c5763914a 100644
--- a/spec/frontend/work_items/components/work_item_milestone_spec.js
+++ b/spec/frontend/work_items/components/work_item_milestone_inline_spec.js
@@ -2,7 +2,9 @@ import { GlCollapsibleListbox, GlListboxItem, GlSkeletonLoader, GlFormGroup } fr
import Vue, { nextTick } from 'vue';
import VueApollo from 'vue-apollo';
-import WorkItemMilestone, { noMilestoneId } from '~/work_items/components/work_item_milestone.vue';
+import WorkItemMilestoneInline, {
+ noMilestoneId,
+} from '~/work_items/components/work_item_milestone_inline.vue';
import createMockApollo from 'helpers/mock_apollo_helper';
import { mockTracking } from 'helpers/tracking_helper';
import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
@@ -18,7 +20,7 @@ import {
updateWorkItemMutationResponse,
} from '../mock_data';
-describe('WorkItemMilestone component', () => {
+describe('WorkItemMilestoneInline component', () => {
Vue.use(VueApollo);
let wrapper;
@@ -51,7 +53,7 @@ describe('WorkItemMilestone component', () => {
searchQueryHandler = successSearchQueryHandler,
mutationHandler = successUpdateWorkItemMutationHandler,
} = {}) => {
- wrapper = shallowMountExtended(WorkItemMilestone, {
+ wrapper = shallowMountExtended(WorkItemMilestoneInline, {
apolloProvider: createMockApollo([
[projectMilestonesQuery, searchQueryHandler],
[updateWorkItemMutation, mutationHandler],
@@ -73,13 +75,13 @@ describe('WorkItemMilestone component', () => {
createComponent();
expect(findInputGroup().exists()).toBe(true);
- expect(findInputGroup().attributes('label')).toBe(WorkItemMilestone.i18n.MILESTONE);
+ expect(findInputGroup().attributes('label')).toBe(WorkItemMilestoneInline.i18n.MILESTONE);
});
describe('Default text with canUpdate false and milestone value', () => {
describe.each`
description | milestone | value
- ${'when no milestone'} | ${null} | ${WorkItemMilestone.i18n.NONE}
+ ${'when no milestone'} | ${null} | ${WorkItemMilestoneInline.i18n.NONE}
${'when milestone set'} | ${mockMilestoneWidgetResponse} | ${mockMilestoneWidgetResponse.title}
`('$description', ({ milestone, value }) => {
it(`has a value of "${value}"`, () => {
@@ -95,7 +97,9 @@ describe('WorkItemMilestone component', () => {
it(`has a value of "Add to milestone"`, () => {
createComponent({ canUpdate: true, milestone: null });
- expect(findDropdown().props('toggleText')).toBe(WorkItemMilestone.i18n.MILESTONE_PLACEHOLDER);
+ expect(findDropdown().props('toggleText')).toBe(
+ WorkItemMilestoneInline.i18n.MILESTONE_PLACEHOLDER,
+ );
});
});
@@ -111,7 +115,7 @@ describe('WorkItemMilestone component', () => {
searchQueryHandler: successSearchWithNoMatchingMilestones,
});
- expect(findNoResultsText().text()).toBe(WorkItemMilestone.i18n.NO_MATCHING_RESULTS);
+ expect(findNoResultsText().text()).toBe(WorkItemMilestoneInline.i18n.NO_MATCHING_RESULTS);
expect(findDropdownItems()).toHaveLength(1);
});
});
@@ -162,7 +166,7 @@ describe('WorkItemMilestone component', () => {
await waitForPromises();
expect(findDropdown().props()).toMatchObject({
loading: false,
- toggleText: WorkItemMilestone.i18n.MILESTONE_PLACEHOLDER,
+ toggleText: WorkItemMilestoneInline.i18n.MILESTONE_PLACEHOLDER,
toggleClass: expect.arrayContaining(['gl-text-gray-500!']),
});
});
diff --git a/spec/frontend/work_items/components/work_item_milestone_with_edit_spec.js b/spec/frontend/work_items/components/work_item_milestone_with_edit_spec.js
new file mode 100644
index 00000000000..58a57978126
--- /dev/null
+++ b/spec/frontend/work_items/components/work_item_milestone_with_edit_spec.js
@@ -0,0 +1,209 @@
+import Vue, { nextTick } from 'vue';
+import VueApollo from 'vue-apollo';
+import WorkItemMilestone from '~/work_items/components/work_item_milestone_with_edit.vue';
+import WorkItemSidebarDropdownWidgetWithEdit from '~/work_items/components/shared/work_item_sidebar_dropdown_widget_with_edit.vue';
+import createMockApollo from 'helpers/mock_apollo_helper';
+import { mockTracking } from 'helpers/tracking_helper';
+import { mountExtended, shallowMountExtended } from 'helpers/vue_test_utils_helper';
+import { DEFAULT_DEBOUNCE_AND_THROTTLE_MS } from '~/lib/utils/constants';
+import waitForPromises from 'helpers/wait_for_promises';
+import { TRACKING_CATEGORY_SHOW } from '~/work_items/constants';
+import updateWorkItemMutation from '~/work_items/graphql/update_work_item.mutation.graphql';
+import projectMilestonesQuery from '~/sidebar/queries/project_milestones.query.graphql';
+import {
+ projectMilestonesResponse,
+ projectMilestonesResponseWithNoMilestones,
+ mockMilestoneWidgetResponse,
+ updateWorkItemMutationErrorResponse,
+ updateWorkItemMutationResponse,
+} from '../mock_data';
+
+describe('WorkItemMilestoneWithEdit component', () => {
+ Vue.use(VueApollo);
+
+ let wrapper;
+
+ const workItemId = 'gid://gitlab/WorkItem/1';
+ const workItemType = 'Task';
+
+ const findSidebarDropdownWidget = () =>
+ wrapper.findComponent(WorkItemSidebarDropdownWidgetWithEdit);
+
+ const successSearchQueryHandler = jest.fn().mockResolvedValue(projectMilestonesResponse);
+ const successSearchWithNoMatchingMilestones = jest
+ .fn()
+ .mockResolvedValue(projectMilestonesResponseWithNoMilestones);
+ const successUpdateWorkItemMutationHandler = jest
+ .fn()
+ .mockResolvedValue(updateWorkItemMutationResponse);
+
+ const showDropdown = () => findSidebarDropdownWidget().vm.$emit('dropdownShown');
+
+ const createComponent = ({
+ mountFn = shallowMountExtended,
+ canUpdate = true,
+ milestone = mockMilestoneWidgetResponse,
+ searchQueryHandler = successSearchQueryHandler,
+ mutationHandler = successUpdateWorkItemMutationHandler,
+ } = {}) => {
+ wrapper = mountFn(WorkItemMilestone, {
+ apolloProvider: createMockApollo([
+ [projectMilestonesQuery, searchQueryHandler],
+ [updateWorkItemMutation, mutationHandler],
+ ]),
+ propsData: {
+ fullPath: 'full-path',
+ canUpdate,
+ workItemMilestone: milestone,
+ workItemId,
+ workItemType,
+ },
+ });
+ };
+
+ it('has "Milestone" label', () => {
+ createComponent();
+
+ expect(findSidebarDropdownWidget().props('dropdownLabel')).toBe('Milestone');
+ });
+
+ describe('Default text with canUpdate false and milestone value', () => {
+ describe.each`
+ description | milestone | value
+ ${'when no milestone'} | ${null} | ${'None'}
+ ${'when milestone set'} | ${mockMilestoneWidgetResponse} | ${mockMilestoneWidgetResponse.title}
+ `('$description', ({ milestone, value }) => {
+ it(`has a value of "${value}"`, () => {
+ createComponent({ mountFn: mountExtended, canUpdate: false, milestone });
+
+ expect(findSidebarDropdownWidget().props('canUpdate')).toBe(false);
+ expect(wrapper.text()).toContain(value);
+ });
+ });
+ });
+
+ describe('Dropdown search', () => {
+ it('shows no matching results when no items', () => {
+ createComponent({
+ searchQueryHandler: successSearchWithNoMatchingMilestones,
+ });
+
+ expect(findSidebarDropdownWidget().props('listItems')).toHaveLength(0);
+ });
+ });
+
+ describe('Dropdown options', () => {
+ beforeEach(() => {
+ createComponent({ canUpdate: true });
+ });
+
+ it('calls successSearchQueryHandler with variables when dropdown is opened', async () => {
+ showDropdown();
+ await nextTick();
+ jest.advanceTimersByTime(DEFAULT_DEBOUNCE_AND_THROTTLE_MS);
+
+ await waitForPromises();
+
+ expect(successSearchQueryHandler).toHaveBeenCalledWith({
+ first: 20,
+ fullPath: 'full-path',
+ state: 'active',
+ title: '',
+ });
+ });
+
+ it('shows the skeleton loader when the items are being fetched on click', async () => {
+ showDropdown();
+ await nextTick();
+ jest.advanceTimersByTime(DEFAULT_DEBOUNCE_AND_THROTTLE_MS);
+
+ await nextTick();
+
+ expect(findSidebarDropdownWidget().props('loading')).toBe(true);
+ });
+
+ it('shows the milestones in dropdown when the items have finished fetching', async () => {
+ showDropdown();
+ await nextTick();
+ jest.advanceTimersByTime(DEFAULT_DEBOUNCE_AND_THROTTLE_MS);
+
+ await waitForPromises();
+
+ expect(findSidebarDropdownWidget().props('loading')).toBe(false);
+ expect(findSidebarDropdownWidget().props('listItems')).toHaveLength(
+ projectMilestonesResponse.data.workspace.attributes.nodes.length,
+ );
+ });
+
+ it('changes the milestone to null when clicked on no milestone', async () => {
+ showDropdown();
+ await nextTick();
+ jest.advanceTimersByTime(DEFAULT_DEBOUNCE_AND_THROTTLE_MS);
+
+ findSidebarDropdownWidget().vm.$emit('updateValue', null);
+
+ await nextTick();
+ expect(findSidebarDropdownWidget().props('updateInProgress')).toBe(true);
+
+ await waitForPromises();
+ expect(findSidebarDropdownWidget().props('updateInProgress')).toBe(false);
+ expect(findSidebarDropdownWidget().props('itemValue')).toBe(null);
+ });
+
+ it('changes the milestone to the selected milestone', async () => {
+ const milestoneAtIndex = projectMilestonesResponse.data.workspace.attributes.nodes[0];
+
+ showDropdown();
+ await nextTick();
+ jest.advanceTimersByTime(DEFAULT_DEBOUNCE_AND_THROTTLE_MS);
+
+ await waitForPromises();
+ findSidebarDropdownWidget().vm.$emit('updateValue', milestoneAtIndex.id);
+
+ await nextTick();
+
+ expect(findSidebarDropdownWidget().props('itemValue').title).toBe(milestoneAtIndex.title);
+ });
+ });
+
+ describe('Error handlers', () => {
+ it.each`
+ errorType | expectedErrorMessage | mockValue | resolveFunction
+ ${'graphql error'} | ${'Something went wrong while updating the task. Please try again.'} | ${updateWorkItemMutationErrorResponse} | ${'mockResolvedValue'}
+ ${'network error'} | ${'Something went wrong while updating the task. Please try again.'} | ${new Error()} | ${'mockRejectedValue'}
+ `(
+ 'emits an error when there is a $errorType',
+ async ({ mockValue, expectedErrorMessage, resolveFunction }) => {
+ createComponent({
+ mutationHandler: jest.fn()[resolveFunction](mockValue),
+ canUpdate: true,
+ });
+
+ showDropdown();
+ findSidebarDropdownWidget().vm.$emit('updateValue', null);
+
+ await waitForPromises();
+
+ expect(wrapper.emitted('error')).toEqual([[expectedErrorMessage]]);
+ },
+ );
+ });
+
+ describe('Tracking event', () => {
+ it('tracks updating the milestone', async () => {
+ const trackingSpy = mockTracking(undefined, wrapper.element, jest.spyOn);
+ createComponent({ canUpdate: true });
+
+ showDropdown();
+ findSidebarDropdownWidget().vm.$emit('updateValue', null);
+
+ await waitForPromises();
+
+ expect(trackingSpy).toHaveBeenCalledWith(TRACKING_CATEGORY_SHOW, 'updated_milestone', {
+ category: TRACKING_CATEGORY_SHOW,
+ label: 'item_milestone',
+ property: 'type_Task',
+ });
+ });
+ });
+});
diff --git a/spec/frontend/work_items/components/work_item_parent_inline_spec.js b/spec/frontend/work_items/components/work_item_parent_inline_spec.js
index 3e4f99d5935..0cd314c377a 100644
--- a/spec/frontend/work_items/components/work_item_parent_inline_spec.js
+++ b/spec/frontend/work_items/components/work_item_parent_inline_spec.js
@@ -157,7 +157,8 @@ describe('WorkItemParentInline component', () => {
types: [WORK_ITEM_TYPE_ENUM_OBJECTIVE],
in: undefined,
iid: null,
- isNumber: false,
+ searchByIid: false,
+ searchByText: true,
});
await findCollapsibleListbox().vm.$emit('search', 'Objective 101');
@@ -168,7 +169,8 @@ describe('WorkItemParentInline component', () => {
types: [WORK_ITEM_TYPE_ENUM_OBJECTIVE],
in: 'TITLE',
iid: null,
- isNumber: false,
+ searchByIid: false,
+ searchByText: true,
});
await nextTick();
diff --git a/spec/frontend/work_items/components/work_item_parent_with_edit_spec.js b/spec/frontend/work_items/components/work_item_parent_with_edit_spec.js
index 61e43456479..d5fab9353ac 100644
--- a/spec/frontend/work_items/components/work_item_parent_with_edit_spec.js
+++ b/spec/frontend/work_items/components/work_item_parent_with_edit_spec.js
@@ -290,6 +290,8 @@ describe('WorkItemParent component', () => {
in: undefined,
iid: null,
isNumber: false,
+ searchByIid: false,
+ searchByText: true,
});
await findCollapsibleListbox().vm.$emit('search', 'Objective 101');
@@ -301,6 +303,8 @@ describe('WorkItemParent component', () => {
in: 'TITLE',
iid: null,
isNumber: false,
+ searchByIid: false,
+ searchByText: true,
});
await nextTick();
diff --git a/spec/frontend/work_items/components/work_item_state_toggle_spec.js b/spec/frontend/work_items/components/work_item_state_toggle_spec.js
index a210bd50422..988df402d60 100644
--- a/spec/frontend/work_items/components/work_item_state_toggle_spec.js
+++ b/spec/frontend/work_items/components/work_item_state_toggle_spec.js
@@ -32,6 +32,7 @@ describe('Work Item State toggle button component', () => {
canUpdate = true,
workItemState = STATE_OPEN,
workItemType = 'Task',
+ hasComment = false,
} = {}) => {
wrapper = shallowMount(WorkItemStateToggle, {
apolloProvider: createMockApollo([[updateWorkItemMutation, mutationHandler]]),
@@ -40,6 +41,7 @@ describe('Work Item State toggle button component', () => {
workItemState,
workItemType,
canUpdate,
+ hasComment,
},
});
};
@@ -61,6 +63,23 @@ describe('Work Item State toggle button component', () => {
expect(findStateToggleButton().text()).toBe(buttonText);
},
);
+
+ it.each`
+ workItemState | workItemType | buttonText
+ ${STATE_OPEN} | ${'Task'} | ${'Comment & close task'}
+ ${STATE_CLOSED} | ${'Task'} | ${'Comment & reopen task'}
+ ${STATE_OPEN} | ${'Objective'} | ${'Comment & close objective'}
+ ${STATE_CLOSED} | ${'Objective'} | ${'Comment & reopen objective'}
+ ${STATE_OPEN} | ${'Key result'} | ${'Comment & close key result'}
+ ${STATE_CLOSED} | ${'Key result'} | ${'Comment & reopen key result'}
+ `(
+ 'is "$buttonText" when "$workItemType" state is "$workItemState" and hasComment is true',
+ ({ workItemState, workItemType, buttonText }) => {
+ createComponent({ workItemState, workItemType, hasComment: true });
+
+ expect(findStateToggleButton().text()).toBe(buttonText);
+ },
+ );
});
describe('when updating the state', () => {
@@ -92,6 +111,15 @@ describe('Work Item State toggle button component', () => {
});
});
+ it('emits `submit-comment` when hasComment is true', async () => {
+ createComponent({ hasComment: true });
+
+ findStateToggleButton().vm.$emit('click');
+ await waitForPromises();
+
+ expect(wrapper.emitted('submit-comment')).toBeDefined();
+ });
+
it('emits an error message when the mutation was unsuccessful', async () => {
createComponent({ mutationHandler: jest.fn().mockRejectedValue('Error!') });
diff --git a/spec/frontend/work_items/components/work_item_title_with_edit_spec.js b/spec/frontend/work_items/components/work_item_title_with_edit_spec.js
new file mode 100644
index 00000000000..7868e241821
--- /dev/null
+++ b/spec/frontend/work_items/components/work_item_title_with_edit_spec.js
@@ -0,0 +1,59 @@
+import { GlFormGroup, GlFormInput } from '@gitlab/ui';
+import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
+import WorkItemTitleWithEdit from '~/work_items/components/work_item_title_with_edit.vue';
+
+describe('Work Item title with edit', () => {
+ let wrapper;
+ const mockTitle = 'Work Item title';
+
+ const createComponent = ({ isEditing = false } = {}) => {
+ wrapper = shallowMountExtended(WorkItemTitleWithEdit, {
+ propsData: {
+ title: mockTitle,
+ isEditing,
+ },
+ });
+ };
+
+ const findTitle = () => wrapper.findByTestId('work-item-title');
+ const findEditableTitleForm = () => wrapper.findComponent(GlFormGroup);
+ const findEditableTitleInput = () => wrapper.findComponent(GlFormInput);
+
+ describe('Default mode', () => {
+ beforeEach(() => {
+ createComponent();
+ });
+
+ it('renders title', () => {
+ expect(findTitle().exists()).toBe(true);
+ expect(findTitle().text()).toBe(mockTitle);
+ });
+
+ it('does not render edit mode', () => {
+ expect(findEditableTitleForm().exists()).toBe(false);
+ });
+ });
+
+ describe('Edit mode', () => {
+ beforeEach(() => {
+ createComponent({ isEditing: true });
+ });
+
+ it('does not render read only title', () => {
+ expect(findTitle().exists()).toBe(false);
+ });
+
+ it('renders the editable title with label', () => {
+ expect(findEditableTitleForm().exists()).toBe(true);
+ expect(findEditableTitleForm().attributes('label')).toBe(
+ WorkItemTitleWithEdit.i18n.titleLabel,
+ );
+ });
+
+ it('emits `updateDraft` event on change of the input', () => {
+ findEditableTitleInput().vm.$emit('input', 'updated title');
+
+ expect(wrapper.emitted('updateDraft')).toEqual([['updated title']]);
+ });
+ });
+});
diff --git a/spec/frontend/work_items/mock_data.js b/spec/frontend/work_items/mock_data.js
index 9d4606eb95a..aade1ed4735 100644
--- a/spec/frontend/work_items/mock_data.js
+++ b/spec/frontend/work_items/mock_data.js
@@ -229,6 +229,7 @@ export const updateWorkItemMutationResponse = {
state: 'OPEN',
description: 'description',
confidential: false,
+ webUrl: 'http://gdk.test/gitlab-org/gitlab/-/issues/1',
createdAt: '2022-08-03T12:41:54Z',
updatedAt: '2022-08-08T12:41:54Z',
closedAt: null,
@@ -339,6 +340,7 @@ export const convertWorkItemMutationResponse = {
title: 'Updated title',
state: 'OPEN',
description: 'description',
+ webUrl: 'http://gdk.test/gitlab-org/gitlab/-/issues/1',
confidential: false,
createdAt: '2022-08-03T12:41:54Z',
updatedAt: '2022-08-08T12:41:54Z',
@@ -473,6 +475,13 @@ export const issueType = {
iconName: 'issue-type-issue',
};
+export const epicType = {
+ __typename: 'WorkItemType',
+ id: 'gid://gitlab/WorkItems::Type/2411',
+ name: 'Epic',
+ iconName: 'issue-type-epic',
+};
+
export const mockEmptyLinkedItems = {
type: WIDGET_TYPE_LINKED_ITEMS,
blocked: false,
@@ -644,6 +653,7 @@ export const workItemResponseFactory = ({
title: 'Updated title',
state,
description: 'description',
+ webUrl: 'http://gdk.test/gitlab-org/gitlab/-/issues/1',
confidential,
createdAt,
updatedAt,
@@ -726,6 +736,12 @@ export const workItemResponseFactory = ({
title: 'Iteration default title',
startDate: '2022-09-22',
dueDate: '2022-09-30',
+ webUrl: 'http://127.0.0.1:3000/groups/flightjs/-/iterations/23205',
+ iterationCadence: {
+ id: 'gid://gitlab/Iterations::Cadence/5852',
+ title: 'A dolores assumenda harum non facilis similique delectus quod.',
+ __typename: 'IterationCadence',
+ },
},
}
: { type: 'MOCK TYPE' },
@@ -907,6 +923,18 @@ export const projectWorkItemTypesQueryResponse = {
},
};
+export const groupWorkItemTypesQueryResponse = {
+ data: {
+ workspace: {
+ __typename: 'Group',
+ id: 'gid://gitlab/Group/2',
+ workItemTypes: {
+ nodes: [{ id: 'gid://gitlab/WorkItems::Type/6', name: 'Epic' }],
+ },
+ },
+ },
+};
+
export const createWorkItemMutationResponse = {
data: {
workItemCreate: {
@@ -923,6 +951,7 @@ export const createWorkItemMutationResponse = {
createdAt: '2022-08-03T12:41:54Z',
updatedAt: null,
closedAt: null,
+ webUrl: 'http://gdk.test/gitlab-org/gitlab/-/issues/1',
author: {
...mockAssignees[0],
},
@@ -1010,6 +1039,7 @@ export const workItemHierarchyEmptyResponse = {
__typename: 'WorkItemType',
},
title: 'New title',
+ webUrl: 'http://gdk.test/gitlab-org/gitlab/-/issues/1',
description: '',
createdAt: '2022-08-03T12:41:54Z',
updatedAt: null,
@@ -1229,6 +1259,7 @@ export const workItemHierarchyResponse = {
__typename: 'WorkItemType',
},
title: 'New title',
+ webUrl: 'http://gdk.test/gitlab-org/gitlab/-/issues/1',
userPermissions: {
deleteWorkItem: true,
updateWorkItem: true,
@@ -1495,6 +1526,7 @@ export const changeIndirectWorkItemParentMutationResponse = {
__typename: 'WorkItemPermissions',
},
description: null,
+ webUrl: 'http://gdk.test/gitlab-org/gitlab/-/issues/1',
id: 'gid://gitlab/WorkItem/13',
iid: '13',
archived: false,
@@ -1564,6 +1596,7 @@ export const changeWorkItemParentMutationResponse = {
__typename: 'WorkItemPermissions',
},
description: null,
+ webUrl: 'http://gdk.test/gitlab-org/gitlab/-/issues/1',
id: 'gid://gitlab/WorkItem/2',
iid: '2',
archived: false,
@@ -1734,6 +1767,28 @@ export const searchWorkItemsIidResponse = {
},
};
+export const searchWorkItemsURLRefResponse = {
+ data: {
+ workspace: {
+ __typename: 'Project',
+ id: 'gid://gitlab/Project/2',
+ workItems: {
+ nodes: [],
+ },
+ workItemsByIid: {
+ nodes: [
+ {
+ id: 'gid://gitlab/WorkItem/460',
+ iid: '101',
+ title: 'Task 3',
+ __typename: 'WorkItem',
+ },
+ ],
+ },
+ },
+ },
+};
+
export const searchWorkItemsTextIidResponse = {
data: {
workspace: {
@@ -1765,6 +1820,23 @@ export const searchWorkItemsTextIidResponse = {
},
};
+export const searchWorkItemsResponse = ({ workItems = [], workItemsByIid = [] } = {}) => {
+ return {
+ data: {
+ workspace: {
+ __typename: 'Project',
+ id: 'gid://gitlab/Project/2',
+ workItems: {
+ nodes: workItems,
+ },
+ workItemsByIid: {
+ nodes: workItemsByIid,
+ },
+ },
+ },
+ };
+};
+
export const projectMembersResponseWithCurrentUser = {
data: {
workspace: {
diff --git a/spec/frontend/work_items/utils_spec.js b/spec/frontend/work_items/utils_spec.js
index aa24b80cf08..166712de20b 100644
--- a/spec/frontend/work_items/utils_spec.js
+++ b/spec/frontend/work_items/utils_spec.js
@@ -1,4 +1,4 @@
-import { autocompleteDataSources, markdownPreviewPath } from '~/work_items/utils';
+import { autocompleteDataSources, markdownPreviewPath, isReference } from '~/work_items/utils';
describe('autocompleteDataSources', () => {
beforeEach(() => {
@@ -25,3 +25,25 @@ describe('markdownPreviewPath', () => {
);
});
});
+
+describe('isReference', () => {
+ it.each`
+ referenceId | result
+ ${'#101'} | ${true}
+ ${'&101'} | ${true}
+ ${'101'} | ${false}
+ ${'#'} | ${false}
+ ${'&'} | ${false}
+ ${' &101'} | ${false}
+ ${'gitlab-org&101'} | ${true}
+ ${'gitlab-org/project-path#101'} | ${true}
+ ${'gitlab-org/sub-group/project-path#101'} | ${true}
+ ${'gitlab-org'} | ${false}
+ ${'gitlab-org101#'} | ${false}
+ ${'gitlab-org101&'} | ${false}
+ ${'#gitlab-org101'} | ${false}
+ ${'&gitlab-org101'} | ${false}
+ `('returns $result for $referenceId', ({ referenceId, result }) => {
+ expect(isReference(referenceId)).toEqual(result);
+ });
+});
diff --git a/spec/graphql/graphql_triggers_spec.rb b/spec/graphql/graphql_triggers_spec.rb
index 3f58f2678d8..97b1591db8e 100644
--- a/spec/graphql/graphql_triggers_spec.rb
+++ b/spec/graphql/graphql_triggers_spec.rb
@@ -132,6 +132,20 @@ RSpec.describe GraphqlTriggers, feature_category: :shared do
end
end
+ describe '.merge_request_diff_generated' do
+ it 'triggers the merge_request_diff_generated subscription' do
+ merge_request = build_stubbed(:merge_request)
+
+ expect(GitlabSchema.subscriptions).to receive(:trigger).with(
+ :merge_request_diff_generated,
+ { issuable_id: merge_request.to_gid },
+ merge_request
+ ).and_call_original
+
+ described_class.merge_request_diff_generated(merge_request)
+ end
+ end
+
describe '.work_item_updated' do
it 'triggers the work_item_updated subscription' do
expect(GitlabSchema.subscriptions).to receive(:trigger).with(
diff --git a/spec/graphql/mutations/namespace/package_settings/update_spec.rb b/spec/graphql/mutations/namespace/package_settings/update_spec.rb
index b184baaca3e..f5bd9ad93be 100644
--- a/spec/graphql/mutations/namespace/package_settings/update_spec.rb
+++ b/spec/graphql/mutations/namespace/package_settings/update_spec.rb
@@ -39,7 +39,9 @@ RSpec.describe Mutations::Namespace::PackageSettings::Update, feature_category:
lock_npm_package_requests_forwarding: false,
pypi_package_requests_forwarding: nil,
lock_pypi_package_requests_forwarding: false,
- nuget_symbol_server_enabled: false
+ nuget_symbol_server_enabled: false,
+ terraform_module_duplicates_allowed: false,
+ terraform_module_duplicate_exception_regex: 'foo'
}, to: {
maven_duplicates_allowed: false,
maven_duplicate_exception_regex: 'RELEASE',
@@ -53,7 +55,9 @@ RSpec.describe Mutations::Namespace::PackageSettings::Update, feature_category:
lock_npm_package_requests_forwarding: true,
pypi_package_requests_forwarding: true,
lock_pypi_package_requests_forwarding: true,
- nuget_symbol_server_enabled: true
+ nuget_symbol_server_enabled: true,
+ terraform_module_duplicates_allowed: true,
+ terraform_module_duplicate_exception_regex: 'bar'
}
it_behaves_like 'returning a success'
@@ -109,7 +113,9 @@ RSpec.describe Mutations::Namespace::PackageSettings::Update, feature_category:
lock_npm_package_requests_forwarding: true,
pypi_package_requests_forwarding: true,
lock_pypi_package_requests_forwarding: true,
- nuget_symbol_server_enabled: true
+ nuget_symbol_server_enabled: true,
+ terraform_module_duplicates_allowed: true,
+ terraform_module_duplicate_exception_regex: 'bar'
}
end
diff --git a/spec/graphql/resolvers/ci/catalog/resources/versions_resolver_spec.rb b/spec/graphql/resolvers/ci/catalog/resources/versions_resolver_spec.rb
index 1ce0e91765f..4205259e5b9 100644
--- a/spec/graphql/resolvers/ci/catalog/resources/versions_resolver_spec.rb
+++ b/spec/graphql/resolvers/ci/catalog/resources/versions_resolver_spec.rb
@@ -7,8 +7,9 @@ RSpec.describe Resolvers::Ci::Catalog::Resources::VersionsResolver, feature_cate
include_context 'when there are catalog resources with versions'
+ let(:name) { nil }
let(:sort) { nil }
- let(:args) { { sort: sort }.compact }
+ let(:args) { { name: name, sort: sort }.compact }
let(:ctx) { { current_user: current_user } }
subject(:result) { resolve(described_class, ctx: ctx, obj: resource1, args: args) }
@@ -19,6 +20,23 @@ RSpec.describe Resolvers::Ci::Catalog::Resources::VersionsResolver, feature_cate
resource1.project.add_guest(current_user)
end
+ context 'when name argument is provided' do
+ let(:name) { 'v1.0' }
+
+ it 'returns the version that matches the name' do
+ expect(result.items.size).to eq(1)
+ expect(result.items.first.name).to eq('v1.0')
+ end
+
+ context 'when no version matches the name' do
+ let(:name) { 'does_not_exist' }
+
+ it 'returns empty response' do
+ expect(result).to be_empty
+ end
+ end
+ end
+
context 'when sort argument is not provided' do
it 'returns versions ordered by released_at descending' do
expect(result.items).to eq([v1_1, v1_0])
diff --git a/spec/graphql/resolvers/ci/group_runners_resolver_spec.rb b/spec/graphql/resolvers/ci/group_runners_resolver_spec.rb
index d1eec0baeea..d1726c8da6c 100644
--- a/spec/graphql/resolvers/ci/group_runners_resolver_spec.rb
+++ b/spec/graphql/resolvers/ci/group_runners_resolver_spec.rb
@@ -87,7 +87,7 @@ RSpec.describe Resolvers::Ci::GroupRunnersResolver, feature_category: :fleet_vis
status_status: 'active',
type_type: :group_type,
tag_name: ['active_runner'],
- preload: false,
+ preload: {},
search: 'abc',
sort: 'contacted_asc',
membership: :descendants,
diff --git a/spec/graphql/resolvers/ci/project_runners_resolver_spec.rb b/spec/graphql/resolvers/ci/project_runners_resolver_spec.rb
index 85b55521174..59ba7d4200c 100644
--- a/spec/graphql/resolvers/ci/project_runners_resolver_spec.rb
+++ b/spec/graphql/resolvers/ci/project_runners_resolver_spec.rb
@@ -74,7 +74,7 @@ RSpec.describe Resolvers::Ci::ProjectRunnersResolver, feature_category: :fleet_v
status_status: 'active',
type_type: :group_type,
tag_name: ['active_runner'],
- preload: false,
+ preload: {},
search: 'abc',
sort: 'contacted_asc',
project: project
diff --git a/spec/graphql/resolvers/ci/runners_resolver_spec.rb b/spec/graphql/resolvers/ci/runners_resolver_spec.rb
index 85a90924384..a0239a6ff34 100644
--- a/spec/graphql/resolvers/ci/runners_resolver_spec.rb
+++ b/spec/graphql/resolvers/ci/runners_resolver_spec.rb
@@ -98,7 +98,7 @@ RSpec.describe Resolvers::Ci::RunnersResolver, feature_category: :fleet_visibili
upgrade_status: 'recommended',
type_type: :instance_type,
tag_name: ['active_runner'],
- preload: false,
+ preload: {},
search: 'abc',
sort: 'contacted_asc',
creator_id: '1',
@@ -125,7 +125,7 @@ RSpec.describe Resolvers::Ci::RunnersResolver, feature_category: :fleet_visibili
let(:expected_params) do
{
active: false,
- preload: false
+ preload: {}
}
end
@@ -145,7 +145,7 @@ RSpec.describe Resolvers::Ci::RunnersResolver, feature_category: :fleet_visibili
let(:expected_params) do
{
active: false,
- preload: false
+ preload: {}
}
end
@@ -163,7 +163,7 @@ RSpec.describe Resolvers::Ci::RunnersResolver, feature_category: :fleet_visibili
end
let(:expected_params) do
- { preload: false }
+ { preload: {} }
end
it 'calls RunnersFinder with expected arguments' do
@@ -181,7 +181,7 @@ RSpec.describe Resolvers::Ci::RunnersResolver, feature_category: :fleet_visibili
let(:expected_params) do
{
- preload: false,
+ preload: {},
version_prefix: 'a.b'
}
end
diff --git a/spec/graphql/resolvers/concerns/resolves_groups_spec.rb b/spec/graphql/resolvers/concerns/resolves_groups_spec.rb
index 79f3708da22..72e86d54dea 100644
--- a/spec/graphql/resolvers/concerns/resolves_groups_spec.rb
+++ b/spec/graphql/resolvers/concerns/resolves_groups_spec.rb
@@ -22,26 +22,27 @@ RSpec.describe ResolvesGroups do
end
end
- let_it_be(:lookahead_fields) do
+ let_it_be(:preloaded_fields) do
<<~FIELDS
containerRepositoriesCount
customEmoji { nodes { id } }
fullPath
+ groupMembersCount
path
dependencyProxyBlobCount
dependencyProxyBlobs { nodes { fileName } }
dependencyProxyImageCount
dependencyProxyImageTtlPolicy { enabled }
dependencyProxySetting { enabled }
+ descendantGroupsCount
+ projectsCount
FIELDS
end
- it 'avoids N+1 queries on the fields marked with lookahead' do
+ it 'avoids N+1 queries on the preloaded fields' do
group_ids = groups.map(&:id)
allow_next(resolver).to receive(:resolve_groups).and_return(Group.id_in(group_ids))
- # Prevent authorization queries from affecting the test.
- allow(Ability).to receive(:allowed?).and_return(true)
single_group_query = ActiveRecord::QueryRecorder.new do
data = query_groups(limit: 1)
@@ -57,7 +58,7 @@ RSpec.describe ResolvesGroups do
end
def query_groups(limit:)
- query_string = "{ groups(first: #{limit}) { nodes { id #{lookahead_fields} } } }"
+ query_string = "{ groups(first: #{limit}) { nodes { id #{preloaded_fields} } } }"
data = execute_query(query_type, graphql: query_string)
diff --git a/spec/graphql/resolvers/container_repository_tags_resolver_spec.rb b/spec/graphql/resolvers/container_repository_tags_resolver_spec.rb
index 5f12e8649b7..4d0f1676c7f 100644
--- a/spec/graphql/resolvers/container_repository_tags_resolver_spec.rb
+++ b/spec/graphql/resolvers/container_repository_tags_resolver_spec.rb
@@ -83,15 +83,16 @@ RSpec.describe Resolvers::ContainerRepositoryTagsResolver, feature_category: :co
context 'with parameters' do
using RSpec::Parameterized::TableSyntax
- where(:before, :after, :sort, :name, :first, :last, :sort_value) do
- nil | nil | 'NAME_DESC' | '' | 10 | nil | '-name'
- 'bb' | nil | 'NAME_ASC' | 'a' | nil | 5 | 'name'
- nil | 'aa' | 'NAME_DESC' | 'a' | 10 | nil | '-name'
+ where(:before, :after, :sort, :name, :first, :last, :sort_value, :referrers) do
+ nil | nil | 'NAME_DESC' | '' | 10 | nil | '-name' | nil
+ 'bb' | nil | 'NAME_ASC' | 'a' | nil | 5 | 'name' | false
+ nil | 'aa' | 'NAME_DESC' | 'a' | 10 | nil | '-name' | true
end
with_them do
let(:args) do
- { before: before, after: after, sort: sort, name: name, first: first, last: last }.compact
+ { before: before, after: after, sort: sort, name: name,
+ first: first, last: last, referrers: referrers }.compact
end
it 'calls ContainerRepository#tags_page with correct parameters' do
@@ -100,7 +101,8 @@ RSpec.describe Resolvers::ContainerRepositoryTagsResolver, feature_category: :co
last: after,
sort: sort_value,
name: name,
- page_size: [first, last].map(&:to_i).max
+ page_size: [first, last].map(&:to_i).max,
+ referrers: referrers
)
resolver(args)
diff --git a/spec/graphql/resolvers/design_management/versions_resolver_spec.rb b/spec/graphql/resolvers/design_management/versions_resolver_spec.rb
index eb39e5bafc5..744e7f35891 100644
--- a/spec/graphql/resolvers/design_management/versions_resolver_spec.rb
+++ b/spec/graphql/resolvers/design_management/versions_resolver_spec.rb
@@ -43,15 +43,15 @@ RSpec.describe Resolvers::DesignManagement::VersionsResolver do
context 'loading associations' do
it 'prevents N+1 queries when loading author' do
- control_count = ActiveRecord::QueryRecorder.new(skip_cached: false) do
+ control = ActiveRecord::QueryRecorder.new(skip_cached: false) do
resolve_versions(object).items.map(&:author)
- end.count
+ end
create_list(:design_version, 3, issue: issue)
expect do
resolve_versions(object).items.map(&:author)
- end.not_to exceed_all_query_limit(control_count)
+ end.not_to exceed_all_query_limit(control)
end
end
end
diff --git a/spec/graphql/resolvers/group_labels_resolver_spec.rb b/spec/graphql/resolvers/group_labels_resolver_spec.rb
index 08e17cedfcc..2e583a1703d 100644
--- a/spec/graphql/resolvers/group_labels_resolver_spec.rb
+++ b/spec/graphql/resolvers/group_labels_resolver_spec.rb
@@ -78,7 +78,7 @@ RSpec.describe Resolvers::GroupLabelsResolver do
Gitlab::SafeRequestStore.ensure_request_store do
resolve_labels(group, params).to_a
end
- end.not_to exceed_query_limit(control.count)
+ end.not_to exceed_query_limit(control)
end
end
diff --git a/spec/graphql/resolvers/labels_resolver_spec.rb b/spec/graphql/resolvers/labels_resolver_spec.rb
index 16cf2e73736..fd55c3131b4 100644
--- a/spec/graphql/resolvers/labels_resolver_spec.rb
+++ b/spec/graphql/resolvers/labels_resolver_spec.rb
@@ -78,7 +78,7 @@ RSpec.describe Resolvers::LabelsResolver do
Gitlab::SafeRequestStore.ensure_request_store do
resolve_labels(project, params).to_a
end
- end.not_to exceed_query_limit(control.count)
+ end.not_to exceed_query_limit(control)
end
end
diff --git a/spec/graphql/resolvers/ml/find_models_resolver_spec.rb b/spec/graphql/resolvers/ml/find_models_resolver_spec.rb
new file mode 100644
index 00000000000..ce85dd62515
--- /dev/null
+++ b/spec/graphql/resolvers/ml/find_models_resolver_spec.rb
@@ -0,0 +1,47 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Resolvers::Ml::FindModelsResolver, feature_category: :mlops do
+ include GraphqlHelpers
+
+ describe '#resolve' do
+ let_it_be(:project) { create(:project) }
+ let_it_be(:models) { create_list(:ml_models, 2, project: project) }
+ let_it_be(:model_in_another_project) { create(:ml_models) }
+ let_it_be(:user) { project.owner }
+
+ let(:args) { { name: 'model', orderBy: 'CREATED_AT', sort: 'desc', invalid: 'blah' } }
+ let(:read_model_registry) { true }
+
+ before do
+ allow(Ability).to receive(:allowed?).and_call_original
+ allow(Ability).to receive(:allowed?)
+ .with(user, :read_model_registry, project)
+ .and_return(read_model_registry)
+ end
+
+ subject(:resolve_models) do
+ force(resolve(described_class, obj: project, ctx: { current_user: user }, args: args))&.to_a
+ end
+
+ context 'when user is allowed and model exists' do
+ it { is_expected.to eq(models.reverse) }
+
+ it 'only passes name, sort_by and order to finder' do
+ expect(::Projects::Ml::ModelFinder).to receive(:new)
+ .with(project, { name: 'model', order_by: 'created_at',
+sort: 'desc' })
+ .and_call_original
+
+ resolve_models
+ end
+ end
+
+ context 'when user does not have permission' do
+ let(:read_model_registry) { false }
+
+ it { is_expected.to be_nil }
+ end
+ end
+end
diff --git a/spec/graphql/resolvers/users_resolver_spec.rb b/spec/graphql/resolvers/users_resolver_spec.rb
index 2ae1b53c40f..ecb1bbb5618 100644
--- a/spec/graphql/resolvers/users_resolver_spec.rb
+++ b/spec/graphql/resolvers/users_resolver_spec.rb
@@ -62,6 +62,42 @@ RSpec.describe Resolvers::UsersResolver do
end
end
+ context 'when a set of group_id is passed' do
+ let_it_be(:group) { create(:group, :private) }
+ let_it_be(:subgroup) { create(:group, :private, parent: group) }
+ let_it_be(:group_member) { create(:user) }
+
+ let_it_be(:indirect_group_member) do
+ create(:user).tap { |u| subgroup.add_developer(u) }
+ end
+
+ let_it_be(:direct_group_members) do
+ [current_user, user1, group_member].each { |u| group.add_developer(u) }
+ end
+
+ it 'returns direct and indirect members of the group' do
+ expect(
+ resolve_users(args: { group_id: group.to_global_id })
+ ).to contain_exactly(indirect_group_member, *direct_group_members)
+ end
+
+ it 'raise an no resource not available error if the group do not exist group' do
+ expect_graphql_error_to_be_created(Gitlab::Graphql::Errors::ResourceNotAvailable) do
+ resolve_users(args: { group_id: "gid://gitlab/Group/#{non_existing_record_id}" })
+ end
+ end
+
+ context 'when user cannot read group' do
+ let(:current_user) { create(:user) }
+
+ it 'raise an no resource not available error the user cannot read the group' do
+ expect_graphql_error_to_be_created(Gitlab::Graphql::Errors::ResourceNotAvailable) do
+ resolve_users(args: { group_id: group.to_global_id })
+ end
+ end
+ end
+ end
+
context 'with anonymous access' do
let_it_be(:current_user) { nil }
diff --git a/spec/graphql/types/ci/catalog/resources/component_type_spec.rb b/spec/graphql/types/ci/catalog/resources/component_type_spec.rb
index 93ab926d406..821d41ea7b4 100644
--- a/spec/graphql/types/ci/catalog/resources/component_type_spec.rb
+++ b/spec/graphql/types/ci/catalog/resources/component_type_spec.rb
@@ -10,7 +10,7 @@ RSpec.describe Types::Ci::Catalog::Resources::ComponentType, feature_category: :
id
inputs
name
- path
+ include_path
]
expect(described_class).to have_graphql_fields(*expected_fields)
diff --git a/spec/graphql/types/ci/catalog/resources/version_type_spec.rb b/spec/graphql/types/ci/catalog/resources/version_type_spec.rb
index 088973cf8f7..bc1b9a97e88 100644
--- a/spec/graphql/types/ci/catalog/resources/version_type_spec.rb
+++ b/spec/graphql/types/ci/catalog/resources/version_type_spec.rb
@@ -10,11 +10,12 @@ RSpec.describe Types::Ci::Catalog::Resources::VersionType, feature_category: :pi
id
created_at
released_at
- tag_name
- tag_path
+ name
+ path
author
commit
components
+ readme_html
]
expect(described_class).to have_graphql_fields(*expected_fields)
diff --git a/spec/graphql/types/ci/inherited_ci_variable_type_spec.rb b/spec/graphql/types/ci/inherited_ci_variable_type_spec.rb
index daf80ff9978..973d6d602c9 100644
--- a/spec/graphql/types/ci/inherited_ci_variable_type_spec.rb
+++ b/spec/graphql/types/ci/inherited_ci_variable_type_spec.rb
@@ -7,13 +7,14 @@ RSpec.describe GitlabSchema.types['InheritedCiVariable'], feature_category: :sec
expect(described_class).to have_graphql_fields(
:id,
:key,
- :raw,
- :variable_type,
+ :description,
:environment_scope,
+ :group_name,
+ :group_ci_cd_settings_path,
:masked,
:protected,
- :group_name,
- :group_ci_cd_settings_path
+ :raw,
+ :variable_type
).at_least
end
end
diff --git a/spec/graphql/types/ci/instance_variable_type_spec.rb b/spec/graphql/types/ci/instance_variable_type_spec.rb
index c77a4ac1dc4..bd3dd30f3c8 100644
--- a/spec/graphql/types/ci/instance_variable_type_spec.rb
+++ b/spec/graphql/types/ci/instance_variable_type_spec.rb
@@ -5,5 +5,7 @@ require 'spec_helper'
RSpec.describe GitlabSchema.types['CiInstanceVariable'] do
specify { expect(described_class.interfaces).to contain_exactly(Types::Ci::VariableInterface) }
- specify { expect(described_class).to have_graphql_fields(:environment_scope, :masked, :protected).at_least }
+ specify do
+ expect(described_class).to have_graphql_fields(:environment_scope, :masked, :protected, :description).at_least
+ end
end
diff --git a/spec/graphql/types/commit_signatures/verification_status_enum_spec.rb b/spec/graphql/types/commit_signatures/verification_status_enum_spec.rb
index 7fc600745df..a51f15f2b82 100644
--- a/spec/graphql/types/commit_signatures/verification_status_enum_spec.rb
+++ b/spec/graphql/types/commit_signatures/verification_status_enum_spec.rb
@@ -2,15 +2,11 @@
require 'spec_helper'
-RSpec.describe GitlabSchema.types['VerificationStatus'] do
+RSpec.describe GitlabSchema.types['VerificationStatus'], feature_category: :source_code_management do
specify { expect(described_class.graphql_name).to eq('VerificationStatus') }
it 'exposes all signature verification states' do
expect(described_class.values.keys)
- .to match_array(%w[
- UNVERIFIED UNVERIFIED_KEY VERIFIED
- SAME_USER_DIFFERENT_EMAIL OTHER_USER UNKNOWN_KEY
- MULTIPLE_SIGNATURES REVOKED_KEY VERIFIED_SYSTEM
- ])
+ .to match_array(Enums::CommitSignature.verification_statuses.map { |status| status.first.to_s.upcase })
end
end
diff --git a/spec/graphql/types/container_repository_referrer_type_spec.rb b/spec/graphql/types/container_repository_referrer_type_spec.rb
new file mode 100644
index 00000000000..1baab8a7f89
--- /dev/null
+++ b/spec/graphql/types/container_repository_referrer_type_spec.rb
@@ -0,0 +1,17 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe GitlabSchema.types['ContainerRepositoryReferrer'], feature_category: :container_registry do
+ fields = %i[artifact_type digest user_permissions]
+
+ it { expect(described_class.graphql_name).to eq('ContainerRepositoryReferrer') }
+
+ it { expect(described_class.description).to eq('A referrer for a container repository tag') }
+
+ it { expect(described_class).to require_graphql_authorizations(:read_container_image) }
+
+ it { expect(described_class).to have_graphql_fields(fields) }
+
+ it { expect(described_class).to expose_permissions_using(Types::PermissionTypes::ContainerRepositoryTag) }
+end
diff --git a/spec/graphql/types/container_repository_tag_type_spec.rb b/spec/graphql/types/container_repository_tag_type_spec.rb
index 596a221b5c0..4c91c39fcd6 100644
--- a/spec/graphql/types/container_repository_tag_type_spec.rb
+++ b/spec/graphql/types/container_repository_tag_type_spec.rb
@@ -3,7 +3,8 @@
require 'spec_helper'
RSpec.describe GitlabSchema.types['ContainerRepositoryTag'], feature_category: :container_registry do
- fields = %i[name path location digest revision short_revision total_size created_at can_delete user_permissions]
+ fields = %i[name path location digest revision short_revision
+ total_size created_at can_delete user_permissions referrers published_at]
it { expect(described_class.graphql_name).to eq('ContainerRepositoryTag') }
diff --git a/spec/graphql/types/current_user_todos_type_spec.rb b/spec/graphql/types/current_user_todos_type_spec.rb
index febbe868f33..2b33a705ae2 100644
--- a/spec/graphql/types/current_user_todos_type_spec.rb
+++ b/spec/graphql/types/current_user_todos_type_spec.rb
@@ -159,17 +159,17 @@ RSpec.describe GitlabSchema.types['CurrentUserTodos'] do
# AND ("todos"."state" IN ('done','pending'))
# AND "todos"."target_id" = merge_request
# AND "todos"."target_type" = 'MergeRequest' ORDER BY "todos"."id" DESC
- baseline = ActiveRecord::QueryRecorder.new do
+ control = ActiveRecord::QueryRecorder.new do
execute_query(query_type, graphql: base_query)
end
expect do
execute_query(query_type, graphql: query_without_state_arguments)
- end.not_to exceed_query_limit(baseline) # at present this is 3
+ end.not_to exceed_query_limit(control) # at present this is 3
expect do
execute_query(query_type, graphql: with_state_arguments)
- end.not_to exceed_query_limit(baseline.count + 1)
+ end.not_to exceed_query_limit(control).with_threshold(1)
end
it 'returns correct data' do
diff --git a/spec/graphql/types/group_type_spec.rb b/spec/graphql/types/group_type_spec.rb
index d3f9053faf3..51a9476f449 100644
--- a/spec/graphql/types/group_type_spec.rb
+++ b/spec/graphql/types/group_type_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe GitlabSchema.types['Group'] do
+RSpec.describe GitlabSchema.types['Group'], feature_category: :groups_and_projects do
include GraphqlHelpers
specify { expect(described_class).to expose_permissions_using(Types::PermissionTypes::Group) }
@@ -16,6 +16,7 @@ RSpec.describe GitlabSchema.types['Group'] do
id name path full_name full_path description description_html visibility
lfs_enabled request_access_enabled projects root_storage_statistics
web_url avatar_url share_with_group_lock project_creation_level
+ descendant_groups_count group_members_count projects_count
subgroup_creation_level require_two_factor_authentication
two_factor_grace_period auto_devops_enabled emails_disabled
mentions_disabled parent boards milestones group_members
diff --git a/spec/graphql/types/merge_request_type_spec.rb b/spec/graphql/types/merge_request_type_spec.rb
index be25c5177f6..4d1d651f0ca 100644
--- a/spec/graphql/types/merge_request_type_spec.rb
+++ b/spec/graphql/types/merge_request_type_spec.rb
@@ -38,6 +38,7 @@ RSpec.describe GitlabSchema.types['MergeRequest'], feature_category: :code_revie
has_ci mergeable commits committers commits_without_merge_commits squash security_auto_fix default_squash_commit_message
auto_merge_strategy merge_user award_emoji prepared_at codequality_reports_comparer supports_lock_on_merge
mergeability_checks
+ allows_multiple_assignees allows_multiple_reviewers
]
expect(described_class).to have_graphql_fields(*expected_fields).at_least
diff --git a/spec/graphql/types/ml/model_links_type_spec.rb b/spec/graphql/types/ml/model_links_type_spec.rb
new file mode 100644
index 00000000000..e33102c8f7f
--- /dev/null
+++ b/spec/graphql/types/ml/model_links_type_spec.rb
@@ -0,0 +1,11 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe GitlabSchema.types['MLModelLinks'], feature_category: :mlops do
+ it 'has the expected fields' do
+ expected_fields = %w[showPath]
+
+ expect(described_class).to include_graphql_fields(*expected_fields)
+ end
+end
diff --git a/spec/graphql/types/ml/model_type_spec.rb b/spec/graphql/types/ml/model_type_spec.rb
index ee0473ccafe..078391f135a 100644
--- a/spec/graphql/types/ml/model_type_spec.rb
+++ b/spec/graphql/types/ml/model_type_spec.rb
@@ -6,7 +6,7 @@ RSpec.describe GitlabSchema.types['MlModel'], feature_category: :mlops do
specify { expect(described_class.description).to eq('Machine learning model in the model registry') }
it 'includes all the package fields' do
- expected_fields = %w[id name versions candidates]
+ expected_fields = %w[id name versions candidates version_count _links created_at latest_version description]
expect(described_class).to include_graphql_fields(*expected_fields)
end
diff --git a/spec/graphql/types/ml/model_version_links_type_spec.rb b/spec/graphql/types/ml/model_version_links_type_spec.rb
index d2a11643c35..63083b441e3 100644
--- a/spec/graphql/types/ml/model_version_links_type_spec.rb
+++ b/spec/graphql/types/ml/model_version_links_type_spec.rb
@@ -4,7 +4,7 @@ require 'spec_helper'
RSpec.describe GitlabSchema.types['MLModelVersionLinks'], feature_category: :mlops do
it 'has the expected fields' do
- expected_fields = %w[showPath]
+ expected_fields = %w[showPath packagePath]
expect(described_class).to include_graphql_fields(*expected_fields)
end
diff --git a/spec/graphql/types/ml/models_order_by_enum_spec.rb b/spec/graphql/types/ml/models_order_by_enum_spec.rb
new file mode 100644
index 00000000000..211e073e8c1
--- /dev/null
+++ b/spec/graphql/types/ml/models_order_by_enum_spec.rb
@@ -0,0 +1,11 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe GitlabSchema.types['MlModelsOrderBy'], feature_category: :mlops do
+ specify { expect(described_class.graphql_name).to eq('MlModelsOrderBy') }
+
+ it 'exposes all the existing order by types' do
+ expect(described_class.values.keys).to match_array(%w[CREATED_AT ID UPDATED_AT NAME])
+ end
+end
diff --git a/spec/graphql/types/namespace/package_settings_type_spec.rb b/spec/graphql/types/namespace/package_settings_type_spec.rb
index 0e731c1e2bf..0e958aca586 100644
--- a/spec/graphql/types/namespace/package_settings_type_spec.rb
+++ b/spec/graphql/types/namespace/package_settings_type_spec.rb
@@ -33,6 +33,8 @@ RSpec.describe GitlabSchema.types['PackageSettings'], feature_category: :package
npm_package_requests_forwarding_locked
pypi_package_requests_forwarding_locked
nuget_symbol_server_enabled
+ terraform_module_duplicates_allowed
+ terraform_module_duplicate_exception_regex
]
expect(described_class).to include_graphql_fields(*expected_fields)
diff --git a/spec/graphql/types/namespace_type_spec.rb b/spec/graphql/types/namespace_type_spec.rb
index 9e1a2bfd466..d80235023ef 100644
--- a/spec/graphql/types/namespace_type_spec.rb
+++ b/spec/graphql/types/namespace_type_spec.rb
@@ -15,5 +15,5 @@ RSpec.describe GitlabSchema.types['Namespace'] do
expect(described_class).to include_graphql_fields(*expected_fields)
end
- specify { expect(described_class).to require_graphql_authorizations(:read_namespace_via_membership) }
+ specify { expect(described_class).to require_graphql_authorizations(:read_namespace) }
end
diff --git a/spec/graphql/types/organizations/organization_type_spec.rb b/spec/graphql/types/organizations/organization_type_spec.rb
index 6bc4bac6ba2..33d0376e418 100644
--- a/spec/graphql/types/organizations/organization_type_spec.rb
+++ b/spec/graphql/types/organizations/organization_type_spec.rb
@@ -3,7 +3,9 @@
require 'spec_helper'
RSpec.describe GitlabSchema.types['Organization'], feature_category: :cell do
- let(:expected_fields) { %w[avatar_url description description_html groups id name organization_users path web_url] }
+ let(:expected_fields) do
+ %w[avatar_url description description_html groups id name organization_users path projects web_url]
+ end
specify { expect(described_class.graphql_name).to eq('Organization') }
specify { expect(described_class).to require_graphql_authorizations(:read_organization) }
diff --git a/spec/graphql/types/permission_types/issue_spec.rb b/spec/graphql/types/permission_types/issue_spec.rb
index 8f43a4a44a0..bf63420aa78 100644
--- a/spec/graphql/types/permission_types/issue_spec.rb
+++ b/spec/graphql/types/permission_types/issue_spec.rb
@@ -7,7 +7,7 @@ RSpec.describe Types::PermissionTypes::Issue do
expected_permissions = [
:read_issue, :admin_issue, :update_issue, :reopen_issue,
:read_design, :create_design, :destroy_design,
- :create_note, :update_design
+ :create_note, :update_design, :admin_issue_relation
]
expected_permissions.each do |permission|
diff --git a/spec/graphql/types/project_type_spec.rb b/spec/graphql/types/project_type_spec.rb
index 3965312316b..36d72140006 100644
--- a/spec/graphql/types/project_type_spec.rb
+++ b/spec/graphql/types/project_type_spec.rb
@@ -41,7 +41,8 @@ RSpec.describe GitlabSchema.types['Project'], feature_category: :groups_and_proj
recent_issue_boards ci_config_path_or_default packages_cleanup_policy ci_variables
timelog_categories fork_targets branch_rules ci_config_variables pipeline_schedules languages
incident_management_timeline_event_tags visible_forks inherited_ci_variables autocomplete_users
- ci_cd_settings detailed_import_status value_streams
+ ci_cd_settings detailed_import_status value_streams ml_models
+ allows_multiple_merge_request_assignees allows_multiple_merge_request_reviewers
]
expect(described_class).to include_graphql_fields(*expected_fields)
@@ -532,6 +533,13 @@ RSpec.describe GitlabSchema.types['Project'], feature_category: :groups_and_proj
it { is_expected.to have_graphql_type(Types::IncidentManagement::TimelineEventTagType) }
end
+ describe 'mlModels field' do
+ subject { described_class.fields['mlModels'] }
+
+ it { is_expected.to have_graphql_type(Types::Ml::ModelType.connection_type) }
+ it { is_expected.to have_graphql_resolver(Resolvers::Ml::FindModelsResolver) }
+ end
+
describe 'agent_configurations' do
let_it_be(:project) { create(:project) }
let_it_be(:user) { create(:user) }
diff --git a/spec/graphql/types/subscription_type_spec.rb b/spec/graphql/types/subscription_type_spec.rb
index d3e5b6ffa3a..455685527c0 100644
--- a/spec/graphql/types/subscription_type_spec.rb
+++ b/spec/graphql/types/subscription_type_spec.rb
@@ -15,6 +15,7 @@ RSpec.describe GitlabSchema.types['Subscription'] do
merge_request_reviewers_updated
merge_request_merge_status_updated
merge_request_approval_state_updated
+ merge_request_diff_generated
work_item_updated
]
diff --git a/spec/graphql/types/work_items/widgets/notes_type_spec.rb b/spec/graphql/types/work_items/widgets/notes_type_spec.rb
index 3ac61a59a9c..4738197e59f 100644
--- a/spec/graphql/types/work_items/widgets/notes_type_spec.rb
+++ b/spec/graphql/types/work_items/widgets/notes_type_spec.rb
@@ -4,7 +4,7 @@ require 'spec_helper'
RSpec.describe Types::WorkItems::Widgets::NotesType, feature_category: :team_planning do
it 'exposes the expected fields' do
- expected_fields = %i[discussions type]
+ expected_fields = %i[discussions type discussion_locked]
expect(described_class).to have_graphql_fields(*expected_fields)
end
diff --git a/spec/helpers/application_settings_helper_spec.rb b/spec/helpers/application_settings_helper_spec.rb
index 5dc75a60a6e..b378437c407 100644
--- a/spec/helpers/application_settings_helper_spec.rb
+++ b/spec/helpers/application_settings_helper_spec.rb
@@ -65,6 +65,7 @@ RSpec.describe ApplicationSettingsHelper do
project_download_export_limit project_export_limit project_import_limit
raw_blob_request_limit group_export_limit group_download_export_limit
group_import_limit users_get_by_id_limit search_rate_limit search_rate_limit_unauthenticated
+ members_delete_limit
])
end
diff --git a/spec/helpers/avatars_helper_spec.rb b/spec/helpers/avatars_helper_spec.rb
index 18cbbaf2008..bf6b5ec5173 100644
--- a/spec/helpers/avatars_helper_spec.rb
+++ b/spec/helpers/avatars_helper_spec.rb
@@ -478,7 +478,20 @@ RSpec.describe AvatarsHelper, feature_category: :source_code_management do
let(:resource) { build_stubbed(:group, name: 'foo') }
it 'displays group avatar' do
- is_expected.to match(%r{<span class="avatar identicon bg\d+ s32">F</span>})
+ expected_pattern = %r{
+ <div\s+
+ alt="foo"\s+
+ class="gl-avatar\s+
+ gl-avatar-s32\s+
+ gl-avatar-circle\s+
+ gl-mr-3\s+
+ gl-avatar-identicon\s+
+ gl-avatar-identicon-bg\d+"\s*>
+ \s*F\s*
+ </div>
+ }x
+
+ is_expected.to match(expected_pattern)
end
end
end
diff --git a/spec/helpers/ci/builds_helper_spec.rb b/spec/helpers/ci/builds_helper_spec.rb
index dcb775fb16d..871e1c085d4 100644
--- a/spec/helpers/ci/builds_helper_spec.rb
+++ b/spec/helpers/ci/builds_helper_spec.rb
@@ -3,28 +3,6 @@
require 'spec_helper'
RSpec.describe Ci::BuildsHelper, feature_category: :continuous_integration do
- describe '#sidebar_build_class' do
- using RSpec::Parameterized::TableSyntax
-
- where(:build_id, :current_build_id, :retried, :expected_result) do
- 1 | 1 | true | 'active retried'
- 1 | 1 | false | 'active'
- 1 | 2 | false | ''
- 1 | 2 | true | 'retried'
- end
-
- let(:build) { instance_double(Ci::Build, retried?: retried, id: build_id) }
- let(:current_build) { instance_double(Ci::Build, retried?: true, id: current_build_id ) }
-
- subject { helper.sidebar_build_class(build, current_build) }
-
- with_them do
- it 'builds sidebar html class' do
- expect(subject).to eq(expected_result)
- end
- end
- end
-
describe '#build_failed_issue_options' do
subject { helper.build_failed_issue_options }
diff --git a/spec/helpers/ci/catalog/resources_helper_spec.rb b/spec/helpers/ci/catalog/resources_helper_spec.rb
index 5c5d02ce6d8..68d56437249 100644
--- a/spec/helpers/ci/catalog/resources_helper_spec.rb
+++ b/spec/helpers/ci/catalog/resources_helper_spec.rb
@@ -36,18 +36,6 @@ RSpec.describe Ci::Catalog::ResourcesHelper, feature_category: :pipeline_composi
end
end
- describe '#can_view_namespace_catalog?' do
- subject { helper.can_view_namespace_catalog?(project) }
-
- before do
- stub_licensed_features(ci_namespace_catalog: false)
- end
-
- it 'user cannot view the Catalog in CE regardless of permissions' do
- expect(subject).to be false
- end
- end
-
describe '#js_ci_catalog_data' do
let(:project) { build(:project, :repository) }
diff --git a/spec/helpers/ci/status_helper_spec.rb b/spec/helpers/ci/status_helper_spec.rb
index 502a535e102..a00a80ac06b 100644
--- a/spec/helpers/ci/status_helper_spec.rb
+++ b/spec/helpers/ci/status_helper_spec.rb
@@ -8,19 +8,6 @@ RSpec.describe Ci::StatusHelper do
let(:success_commit) { double("Ci::Pipeline", status: 'success') }
let(:failed_commit) { double("Ci::Pipeline", status: 'failed') }
- describe "#pipeline_status_cache_key" do
- it "builds a cache key for pipeline status" do
- pipeline_status = Gitlab::Cache::Ci::ProjectPipelineStatus.new(
- build_stubbed(:project),
- pipeline_info: {
- sha: "123abc",
- status: "success"
- }
- )
- expect(helper.pipeline_status_cache_key(pipeline_status)).to eq("pipeline-status/123abc-success")
- end
- end
-
describe "#render_ci_icon" do
subject { helper.render_ci_icon("success") }
diff --git a/spec/helpers/ci/variables_helper_spec.rb b/spec/helpers/ci/variables_helper_spec.rb
index 13970dd95b4..835a7b902e7 100644
--- a/spec/helpers/ci/variables_helper_spec.rb
+++ b/spec/helpers/ci/variables_helper_spec.rb
@@ -42,23 +42,6 @@ RSpec.describe Ci::VariablesHelper, feature_category: :secrets_management do
end
end
- describe '#ci_variable_masked?' do
- let(:variable) { build_stubbed(:ci_variable, key: 'test_key', value: 'test_value', masked: true) }
-
- context 'when variable is provided and only_key_value is false' do
- it 'expect ci_variable_masked? to return true' do
- expect(helper.ci_variable_masked?(variable, false)).to eq(true)
- end
- end
-
- context 'when variable is not provided / provided and only_key_value is true' do
- it 'expect ci_variable_masked? to return false' do
- expect(helper.ci_variable_masked?(nil, true)).to eq(false)
- expect(helper.ci_variable_masked?(variable, true)).to eq(false)
- end
- end
- end
-
describe '#ci_variable_maskable_raw_regex' do
it 'converts to a javascript regex' do
expect(helper.ci_variable_maskable_raw_regex).to eq("^\\S{8,}$")
diff --git a/spec/helpers/environments_helper_spec.rb b/spec/helpers/environments_helper_spec.rb
index 14f99f144b2..2aae7b61bd1 100644
--- a/spec/helpers/environments_helper_spec.rb
+++ b/spec/helpers/environments_helper_spec.rb
@@ -3,9 +3,12 @@
require 'spec_helper'
RSpec.describe EnvironmentsHelper, feature_category: :environment_management do
+ include ActionView::Helpers::AssetUrlHelper
+
+ folder_name = 'env_folder'
let_it_be(:user) { create(:user) }
let_it_be(:project, reload: true) { create(:project, :repository) }
- let_it_be(:environment) { create(:environment, project: project) }
+ let_it_be(:environment) { create(:environment, :with_folders, folder: folder_name, project: project) }
describe '#metrics_data', feature_category: :metrics do
before do
@@ -95,4 +98,23 @@ RSpec.describe EnvironmentsHelper, feature_category: :environment_management do
expect(subject).to eq(true)
end
end
+
+ describe '#environments_folder_list_view_data' do
+ subject { helper.environments_folder_list_view_data(project, folder_name) }
+
+ before do
+ allow(helper).to receive(:current_user).and_return(user)
+ allow(helper).to receive(:can?).and_return(true)
+ end
+
+ it 'returns folder related data' do
+ expect(subject).to include(
+ 'endpoint' => folder_project_environments_path(project, folder_name, format: :json),
+ 'can_read_environment' => 'true',
+ 'project_path' => project.full_path,
+ 'folder_name' => folder_name,
+ 'help_page_path' => '/help/ci/environments/index'
+ )
+ end
+ end
end
diff --git a/spec/helpers/groups_helper_spec.rb b/spec/helpers/groups_helper_spec.rb
index 8aee337f51c..807898884a1 100644
--- a/spec/helpers/groups_helper_spec.rb
+++ b/spec/helpers/groups_helper_spec.rb
@@ -114,13 +114,13 @@ RSpec.describe GroupsHelper, feature_category: :groups_and_projects do
end
it 'avoids N+1 queries' do
- control_count = ActiveRecord::QueryRecorder.new do
+ control = ActiveRecord::QueryRecorder.new do
helper.group_title(nested_group)
end
expect do
helper.group_title(very_deep_nested_group)
- end.not_to exceed_query_limit(control_count)
+ end.not_to exceed_query_limit(control)
end
end
@@ -490,6 +490,7 @@ RSpec.describe GroupsHelper, feature_category: :groups_and_projects do
new_project_illustration: including('illustrations/project-create-new-sm'),
empty_projects_illustration: including('illustrations/empty-state/empty-projects-md'),
empty_subgroup_illustration: including('illustrations/empty-state/empty-subgroup-md'),
+ empty_search_illustration: including('illustrations/empty-state/empty-search-md'),
render_empty_state: 'true',
can_create_subgroups: 'true',
can_create_projects: 'true'
diff --git a/spec/helpers/ide_helper_spec.rb b/spec/helpers/ide_helper_spec.rb
index d5d7f8f72b3..99ef0998fda 100644
--- a/spec/helpers/ide_helper_spec.rb
+++ b/spec/helpers/ide_helper_spec.rb
@@ -61,39 +61,6 @@ RSpec.describe IdeHelper, feature_category: :web_ide do
end
end
- context 'with environments guidance experiment', :experiment do
- before do
- stub_experiments(in_product_guidance_environments_webide: :candidate)
- end
-
- context 'when project has no enviornments' do
- it 'enables environment guidance' do
- expect(helper.ide_data(project: project, fork_info: fork_info, params: params))
- .to include('enable-environments-guidance' => 'true')
- end
-
- context 'and the callout has been dismissed' do
- it 'disables environment guidance' do
- callout = create(:callout, feature_name: :web_ide_ci_environments_guidance, user: user)
- callout.update!(dismissed_at: Time.now - 1.week)
- allow(helper).to receive(:current_user).and_return(User.find(user.id))
-
- expect(helper.ide_data(project: project, fork_info: fork_info, params: params))
- .to include('enable-environments-guidance' => 'false')
- end
- end
- end
-
- context 'when the project has environments' do
- it 'disables environment guidance' do
- create(:environment, project: project)
-
- expect(helper.ide_data(project: project, fork_info: fork_info, params: params))
- .to include('enable-environments-guidance' => 'false')
- end
- end
- end
-
context 'with vscode_web_ide=true' do
let(:base_data) do
{
diff --git a/spec/helpers/listbox_helper_spec.rb b/spec/helpers/listbox_helper_spec.rb
index bae9c40aa02..ff386ec1f3d 100644
--- a/spec/helpers/listbox_helper_spec.rb
+++ b/spec/helpers/listbox_helper_spec.rb
@@ -31,7 +31,6 @@ RSpec.describe ListboxHelper do
dropdown
b-dropdown
gl-dropdown
- btn-group
js-redirect-listbox
])
end
diff --git a/spec/helpers/markup_helper_spec.rb b/spec/helpers/markup_helper_spec.rb
index 22d1113ee8c..831f41cde0a 100644
--- a/spec/helpers/markup_helper_spec.rb
+++ b/spec/helpers/markup_helper_spec.rb
@@ -461,7 +461,7 @@ RSpec.describe MarkupHelper, feature_category: :team_planning do
it 'displays the first line of a code block' do
object = create_object("```\nCode block\nwith two lines\n```")
- expected = %r{<pre.+><code><span class="line">Code block\.\.\.</span></code></pre>}
+ expected = %r{<pre.+><code><span class="line" lang="plaintext">Code block\.\.\.</span></code></pre>}
expect(helper.first_line_in_markdown(object, attribute, 100, is_todo: true, project: project)).to match(expected)
end
@@ -476,8 +476,8 @@ RSpec.describe MarkupHelper, feature_category: :team_planning do
it 'preserves code color scheme' do
object = create_object("```ruby\ndef test\n 'hello world'\nend\n```")
- expected = "\n<pre class=\"code highlight js-syntax-highlight language-ruby\">" \
- "<code><span class=\"line\"><span class=\"k\">def</span> <span class=\"nf\">test</span>...</span>" \
+ expected = "\n<pre class=\"code highlight js-syntax-highlight language-ruby\" lang=\"ruby\">" \
+ "<code><span class=\"line\" lang=\"ruby\"><span class=\"k\">def</span> <span class=\"nf\">test</span>...</span>" \
"</code></pre>\n"
expect(helper.first_line_in_markdown(object, attribute, 150, is_todo: true, project: project)).to eq(expected)
diff --git a/spec/helpers/nav_helper_spec.rb b/spec/helpers/nav_helper_spec.rb
index 9a0f72838fb..e8b68d80650 100644
--- a/spec/helpers/nav_helper_spec.rb
+++ b/spec/helpers/nav_helper_spec.rb
@@ -134,68 +134,4 @@ RSpec.describe NavHelper, feature_category: :navigation do
it { is_expected.to eq(true) }
end
end
-
- describe '#show_super_sidebar?' do
- shared_examples 'show_super_sidebar is supposed to' do
- before do
- user.update!(use_new_navigation: user_preference)
- end
-
- context 'when user has not interacted with the new nav toggle yet' do
- let(:user_preference) { nil }
-
- specify { expect(subject).to eq true }
- end
-
- context 'when user has new nav disabled' do
- let(:user_preference) { false }
-
- specify { expect(subject).to eq true }
- end
-
- context 'when user has new nav enabled' do
- let(:user_preference) { true }
-
- specify { expect(subject).to eq true }
- end
- end
-
- context 'without a user' do
- context 'with current_user (nil) as a default' do
- before do
- allow(helper).to receive(:current_user).and_return(nil)
- end
-
- subject { helper.show_super_sidebar? }
-
- specify { expect(subject).to eq true }
- end
-
- context 'with nil provided as an argument' do
- subject { helper.show_super_sidebar?(nil) }
-
- specify { expect(subject).to eq true }
- end
- end
-
- context 'when user is signed-in' do
- let_it_be(:user) { create(:user) }
-
- context 'with current_user as a default' do
- before do
- allow(helper).to receive(:current_user).and_return(user)
- end
-
- subject { helper.show_super_sidebar? }
-
- it_behaves_like 'show_super_sidebar is supposed to'
- end
-
- context 'with user provided as an argument' do
- subject { helper.show_super_sidebar?(user) }
-
- it_behaves_like 'show_super_sidebar is supposed to'
- end
- end
- end
end
diff --git a/spec/helpers/organizations/organization_helper_spec.rb b/spec/helpers/organizations/organization_helper_spec.rb
index 594013d515b..0f2f4ed1b54 100644
--- a/spec/helpers/organizations/organization_helper_spec.rb
+++ b/spec/helpers/organizations/organization_helper_spec.rb
@@ -3,7 +3,8 @@
require 'spec_helper'
RSpec.describe Organizations::OrganizationHelper, feature_category: :cell do
- let_it_be(:organization) { build_stubbed(:organization) }
+ let_it_be(:organization_detail) { build_stubbed(:organization_detail, description_html: '<em>description</em>') }
+ let_it_be(:organization) { organization_detail.organization }
let_it_be(:new_group_path) { '/groups/new' }
let_it_be(:new_project_path) { '/projects/new' }
let_it_be(:organizations_empty_state_svg_path) { 'illustrations/empty-state/empty-organizations-md.svg' }
@@ -11,6 +12,7 @@ RSpec.describe Organizations::OrganizationHelper, feature_category: :cell do
let_it_be(:root_url) { 'http://127.0.0.1:3000/' }
let_it_be(:groups_empty_state_svg_path) { 'illustrations/empty-state/empty-groups-md.svg' }
let_it_be(:projects_empty_state_svg_path) { 'illustrations/empty-state/empty-projects-md.svg' }
+ let_it_be(:preview_markdown_organizations_path) { '/-/organizations/preview_markdown' }
before do
allow(helper).to receive(:new_group_path).and_return(new_group_path)
@@ -21,6 +23,7 @@ RSpec.describe Organizations::OrganizationHelper, feature_category: :cell do
allow(helper).to receive(:root_url).and_return(root_url)
allow(helper).to receive(:image_path).with(groups_empty_state_svg_path).and_return(groups_empty_state_svg_path)
allow(helper).to receive(:image_path).with(projects_empty_state_svg_path).and_return(projects_empty_state_svg_path)
+ allow(helper).to receive(:preview_markdown_organizations_path).and_return(preview_markdown_organizations_path)
end
describe '#organization_show_app_data' do
@@ -31,13 +34,19 @@ RSpec.describe Organizations::OrganizationHelper, feature_category: :cell do
end
it 'returns expected json' do
+ expect(organization).to receive(:avatar_url).with(size: 128).and_return('avatar.jpg')
expect(
Gitlab::Json.parse(
helper.organization_show_app_data(organization)
)
).to eq(
{
- 'organization' => { 'id' => organization.id, 'name' => organization.name },
+ 'organization' => {
+ 'id' => organization.id,
+ 'name' => organization.name,
+ 'description_html' => organization.description_html,
+ 'avatar_url' => 'avatar.jpg'
+ },
'groups_and_projects_organization_path' => '/-/organizations/default/groups_and_projects',
'new_group_path' => new_group_path,
'new_project_path' => new_project_path,
@@ -86,7 +95,8 @@ RSpec.describe Organizations::OrganizationHelper, feature_category: :cell do
expect(Gitlab::Json.parse(helper.organization_new_app_data)).to eq(
{
'organizations_path' => organizations_path,
- 'root_url' => root_url
+ 'root_url' => root_url,
+ 'preview_markdown_path' => preview_markdown_organizations_path
}
)
end
@@ -107,15 +117,19 @@ RSpec.describe Organizations::OrganizationHelper, feature_category: :cell do
describe '#organization_settings_general_app_data' do
it 'returns expected json' do
+ expect(organization).to receive(:avatar_url).with(size: 192).and_return('avatar.jpg')
expect(Gitlab::Json.parse(helper.organization_settings_general_app_data(organization))).to eq(
{
'organization' => {
'id' => organization.id,
'name' => organization.name,
- 'path' => organization.path
+ 'path' => organization.path,
+ 'description' => organization.description,
+ 'avatar' => 'avatar.jpg'
},
'organizations_path' => organizations_path,
- 'root_url' => root_url
+ 'root_url' => root_url,
+ 'preview_markdown_path' => preview_markdown_organizations_path
}
)
end
diff --git a/spec/helpers/search_helper_spec.rb b/spec/helpers/search_helper_spec.rb
index e1c0aafc3c3..bad30b5033d 100644
--- a/spec/helpers/search_helper_spec.rb
+++ b/spec/helpers/search_helper_spec.rb
@@ -656,12 +656,12 @@ RSpec.describe SearchHelper, feature_category: :global_search do
@project = create(:project)
description = FFaker::Lorem.characters(210)
- control_count = ActiveRecord::QueryRecorder.new(skip_cached: false) { search_md_sanitize(description) }.count
+ control = ActiveRecord::QueryRecorder.new(skip_cached: false) { search_md_sanitize(description) }
issues = create_list(:issue, 4, project: @project)
description_with_issues = description + ' ' + issues.map { |issue| "##{issue.iid}" }.join(' ')
- expect { search_md_sanitize(description_with_issues) }.not_to exceed_all_query_limit(control_count)
+ expect { search_md_sanitize(description_with_issues) }.not_to exceed_all_query_limit(control)
end
end
diff --git a/spec/helpers/sessions_helper_spec.rb b/spec/helpers/sessions_helper_spec.rb
index 366032100de..adf7b92127e 100644
--- a/spec/helpers/sessions_helper_spec.rb
+++ b/spec/helpers/sessions_helper_spec.rb
@@ -3,42 +3,6 @@
require 'spec_helper'
RSpec.describe SessionsHelper, feature_category: :system_access do
- describe '#recently_confirmed_com?' do
- subject { helper.recently_confirmed_com? }
-
- context 'when on .com' do
- before do
- allow(Gitlab).to receive(:com?).and_return(true)
- end
-
- it 'when flash notice is empty it is false' do
- flash[:notice] = nil
- expect(subject).to be false
- end
-
- it 'when flash notice is anything it is false' do
- flash[:notice] = 'hooray!'
- expect(subject).to be false
- end
-
- it 'when flash notice is devise confirmed message it is true' do
- flash[:notice] = t(:confirmed, scope: [:devise, :confirmations])
- expect(subject).to be true
- end
- end
-
- context 'when not on .com' do
- before do
- allow(Gitlab).to receive(:com?).and_return(false)
- end
-
- it 'when flash notice is devise confirmed message it is false' do
- flash[:notice] = t(:confirmed, scope: [:devise, :confirmations])
- expect(subject).to be false
- end
- end
- end
-
describe '#unconfirmed_email?' do
it 'returns true when the flash alert contains a devise failure unconfirmed message' do
flash[:alert] = t(:unconfirmed, scope: [:devise, :failure])
diff --git a/spec/helpers/sidebars_helper_spec.rb b/spec/helpers/sidebars_helper_spec.rb
index 421b1c178aa..0f1484f49db 100644
--- a/spec/helpers/sidebars_helper_spec.rb
+++ b/spec/helpers/sidebars_helper_spec.rb
@@ -135,6 +135,13 @@ RSpec.describe SidebarsHelper, feature_category: :navigation do
name: user.name,
username: user.username,
admin_url: admin_root_url,
+ admin_mode: {
+ admin_mode_feature_enabled: true,
+ admin_mode_active: false,
+ enter_admin_mode_url: new_admin_session_path,
+ leave_admin_mode_url: destroy_admin_session_path,
+ user_is_admin: false
+ },
avatar_url: user.avatar_url,
has_link_to_profile: helper.current_user_menu?(:profile),
link_to_profile: user_path(user),
@@ -432,15 +439,6 @@ RSpec.describe SidebarsHelper, feature_category: :navigation do
{ title: s_('Navigation|Admin Area'), link: '/admin', icon: 'admin' }
end
- let_it_be(:enter_admin_mode_link) do
- { title: s_('Navigation|Enter admin mode'), link: '/admin/session/new', icon: 'lock' }
- end
-
- let_it_be(:leave_admin_mode_link) do
- { title: s_('Navigation|Leave admin mode'), link: '/admin/session/destroy', icon: 'lock-open',
- data_method: 'post' }
- end
-
subject do
helper.super_sidebar_context(user, group: nil, project: nil, panel: panel, panel_type: panel_type)
end
@@ -478,8 +476,7 @@ RSpec.describe SidebarsHelper, feature_category: :navigation do
it 'returns public links, admin area and leave admin mode links' do
expect(subject[:context_switcher_links]).to eq([
*public_links_for_user,
- admin_area_link,
- leave_admin_mode_link
+ admin_area_link
])
end
end
@@ -487,8 +484,7 @@ RSpec.describe SidebarsHelper, feature_category: :navigation do
context 'when admin mode is off' do
it 'returns public links and enter admin mode link' do
expect(subject[:context_switcher_links]).to eq([
- *public_links_for_user,
- enter_admin_mode_link
+ *public_links_for_user
])
end
end
diff --git a/spec/helpers/time_zone_helper_spec.rb b/spec/helpers/time_zone_helper_spec.rb
index e8d96ee0700..95acb2ee9d9 100644
--- a/spec/helpers/time_zone_helper_spec.rb
+++ b/spec/helpers/time_zone_helper_spec.rb
@@ -93,6 +93,29 @@ RSpec.describe TimeZoneHelper, :aggregate_failures do
end
end
+ describe '#timezone_data_with_unique_identifiers' do
+ subject { helper.timezone_data_with_unique_identifiers }
+
+ before do
+ allow(helper).to receive(:timezone_data).and_return([
+ { identifier: 'Europe/London', name: 'London' },
+ { identifier: 'Europe/London', name: 'Edinburgh' },
+ { identifier: 'Europe/Berlin', name: 'Berlin' },
+ { identifier: 'Europe/London', name: 'Hogwarts' }
+
+ ])
+ end
+
+ let(:expected) do
+ [
+ { identifier: 'Europe/London', name: 'Edinburgh, Hogwarts, London' },
+ { identifier: 'Europe/Berlin', name: 'Berlin' }
+ ]
+ end
+
+ it { is_expected.to eq(expected) }
+ end
+
describe '#local_time' do
let_it_be(:timezone) { 'America/Los_Angeles' }
diff --git a/spec/helpers/webpack_helper_spec.rb b/spec/helpers/webpack_helper_spec.rb
index 8cbc4db9108..675fd0c0f04 100644
--- a/spec/helpers/webpack_helper_spec.rb
+++ b/spec/helpers/webpack_helper_spec.rb
@@ -6,6 +6,10 @@ RSpec.describe WebpackHelper do
let(:source) { 'foo.js' }
let(:asset_path) { "/assets/webpack/#{source}" }
+ before do
+ allow(helper).to receive(:vite_enabled?).and_return(false)
+ end
+
describe '#prefetch_link_tag' do
it 'returns prefetch link tag' do
expect(helper.prefetch_link_tag(source)).to eq("<link rel=\"prefetch\" href=\"/#{source}\">")
@@ -40,7 +44,6 @@ RSpec.describe WebpackHelper do
before do
stub_rails_env('development')
- stub_feature_flags(vite: true)
allow(helper).to receive(:vite_javascript_tag).and_return('vite')
allow(helper).to receive(:vite_enabled?).and_return(true)
diff --git a/spec/initializers/session_store_spec.rb b/spec/initializers/session_store_spec.rb
index a94ce327a92..c9333d022dd 100644
--- a/spec/initializers/session_store_spec.rb
+++ b/spec/initializers/session_store_spec.rb
@@ -5,6 +5,10 @@ require 'spec_helper'
RSpec.describe 'Session initializer for GitLab' do
subject { Gitlab::Application.config }
+ before do
+ allow(subject).to receive_message_chain(:middleware, :insert_after)
+ end
+
let(:load_session_store) do
load Rails.root.join('config/initializers/session_store.rb')
end
diff --git a/spec/lib/api/entities/bulk_imports/entity_failure_spec.rb b/spec/lib/api/entities/bulk_imports/entity_failure_spec.rb
index 217e6c11630..20563bfb685 100644
--- a/spec/lib/api/entities/bulk_imports/entity_failure_spec.rb
+++ b/spec/lib/api/entities/bulk_imports/entity_failure_spec.rb
@@ -19,10 +19,10 @@ RSpec.describe API::Entities::BulkImports::EntityFailure, feature_category: :imp
end
describe 'exception message' do
- it 'truncates exception message to 72 characters' do
- failure.update!(exception_message: 'a' * 100)
+ it 'truncates exception message to 255 characters' do
+ failure.update!(exception_message: 'a' * 500)
- expect(subject[:exception_message].length).to eq(72)
+ expect(subject[:exception_message].length).to eq(255)
end
it 'removes paths from the message' do
@@ -30,5 +30,13 @@ RSpec.describe API::Entities::BulkImports::EntityFailure, feature_category: :imp
expect(subject[:exception_message]).to eq('Test [FILTERED]')
end
+
+ it 'removes long paths without clipping the message' do
+ exception_message = "Test #{'/abc' * 300} #{'a' * 500}"
+ failure.update!(exception_message: exception_message)
+ filtered_message = "Test [FILTERED] #{'a' * 500}"
+
+ expect(subject[:exception_message]).to eq(filtered_message.truncate(255))
+ end
end
end
diff --git a/spec/lib/api/entities/diff_spec.rb b/spec/lib/api/entities/diff_spec.rb
index 27d9ed44c98..535567d4d8d 100644
--- a/spec/lib/api/entities/diff_spec.rb
+++ b/spec/lib/api/entities/diff_spec.rb
@@ -23,7 +23,8 @@ RSpec.describe ::API::Entities::Diff, feature_category: :source_code_management
b_mode: diff.b_mode,
new_file: diff.new_file?,
renamed_file: diff.renamed_file?,
- deleted_file: diff.deleted_file?
+ deleted_file: diff.deleted_file?,
+ generated_file: diff.generated?
}
)
end
diff --git a/spec/lib/api/entities/group_spec.rb b/spec/lib/api/entities/group_spec.rb
new file mode 100644
index 00000000000..270ac323c7d
--- /dev/null
+++ b/spec/lib/api/entities/group_spec.rb
@@ -0,0 +1,24 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe API::Entities::Group, feature_category: :groups_and_projects do
+ let_it_be(:group) do
+ base_group = create(:group) { |g| create(:project_statistics, namespace_id: g.id) }
+ Group.with_statistics.find(base_group.id)
+ end
+
+ subject(:json) { described_class.new(group, { with_custom_attributes: true, statistics: true }).as_json }
+
+ it 'returns expected data' do
+ expect(json.keys).to(
+ include(
+ :organization_id, :path, :description, :visibility, :share_with_group_lock, :require_two_factor_authentication,
+ :two_factor_grace_period, :project_creation_level, :auto_devops_enabled,
+ :subgroup_creation_level, :emails_disabled, :emails_enabled, :lfs_enabled, :default_branch_protection,
+ :default_branch_protection_defaults, :avatar_url, :request_access_enabled, :full_name, :full_path, :created_at,
+ :parent_id, :organization_id, :shared_runners_setting, :custom_attributes, :statistics
+ )
+ )
+ end
+end
diff --git a/spec/lib/api/entities/merge_request_basic_spec.rb b/spec/lib/api/entities/merge_request_basic_spec.rb
index 0cf0a57fa87..621d57b49cd 100644
--- a/spec/lib/api/entities/merge_request_basic_spec.rb
+++ b/spec/lib/api/entities/merge_request_basic_spec.rb
@@ -41,7 +41,7 @@ RSpec.describe ::API::Entities::MergeRequestBasic, feature_category: :code_revie
# stub the `head_commit_sha` as it will trigger a
# backward compatibility query that is out-of-scope
# for this test whenever it is `nil`
- allow_any_instance_of(MergeRequestDiff).to receive(:head_commit_sha).and_return(Gitlab::Git::BLANK_SHA)
+ allow_any_instance_of(MergeRequestDiff).to receive(:head_commit_sha).and_return(Gitlab::Git::SHA1_BLANK_SHA)
query = scope.all
batch = ActiveRecord::QueryRecorder.new do
diff --git a/spec/lib/api/helpers_spec.rb b/spec/lib/api/helpers_spec.rb
index 21b3b8e6927..d1dee70e34d 100644
--- a/spec/lib/api/helpers_spec.rb
+++ b/spec/lib/api/helpers_spec.rb
@@ -406,6 +406,37 @@ RSpec.describe API::Helpers, feature_category: :shared do
end
end
+ describe '#find_organization!' do
+ let_it_be(:organization) { create(:organization) }
+ let_it_be(:user) { create(:user) }
+
+ before do
+ allow(helper).to receive(:current_user).and_return(user)
+ allow(helper).to receive(:initial_current_user).and_return(user)
+ end
+
+ context 'when user is authenticated' do
+ it 'returns requested organization' do
+ expect(helper.find_organization!(organization.id)).to eq(organization)
+ end
+ end
+
+ context 'when user is not authenticated' do
+ let(:user) { nil }
+
+ it 'returns requested organization' do
+ expect(helper.find_organization!(organization.id)).to eq(organization)
+ end
+ end
+
+ context 'when organization does not exist' do
+ it 'returns nil' do
+ expect(helper).to receive(:render_api_error!).with('404 Organization Not Found', 404)
+ expect(helper.find_organization!(non_existing_record_id)).to be_nil
+ end
+ end
+ end
+
describe '#find_group!' do
let_it_be(:group) { create(:group, :public) }
let_it_be(:user) { create(:user) }
@@ -457,7 +488,7 @@ RSpec.describe API::Helpers, feature_category: :shared do
end
end
- context 'support for IDs and paths as arguments' do
+ context 'with support for IDs and paths as arguments' do
let_it_be(:group) { create(:group) }
let(:user) { group.first_owner }
@@ -505,6 +536,34 @@ RSpec.describe API::Helpers, feature_category: :shared do
end
end
+ context 'with support for organization as an argument' do
+ let_it_be(:group) { create(:group) }
+ let_it_be(:organization) { create(:organization) }
+
+ before do
+ allow(helper).to receive(:current_user).and_return(group.first_owner)
+ allow(helper).to receive(:job_token_authentication?).and_return(false)
+ allow(helper).to receive(:authenticate_non_public?).and_return(false)
+ end
+
+ subject { helper.find_group!(group.id, organization: organization) }
+
+ context 'when group exists in the organization' do
+ before do
+ group.update!(organization: organization)
+ end
+
+ it { is_expected.to eq(group) }
+ end
+
+ context 'when group does not exist in the organization' do
+ it 'returns nil' do
+ expect(helper).to receive(:render_api_error!).with('404 Group Not Found', 404)
+ is_expected.to be_nil
+ end
+ end
+ end
+
describe '#find_group_by_full_path!' do
let_it_be(:group) { create(:group, :public) }
let_it_be(:user) { create(:user) }
@@ -674,23 +733,15 @@ RSpec.describe API::Helpers, feature_category: :shared do
let(:send_authorized_project_scope) { helper.authorized_project_scope?(project) }
- where(:job_token_authentication, :route_setting, :feature_flag, :same_job_project, :expected_result) do
- false | false | false | false | true
- false | false | false | true | true
- false | false | true | false | true
- false | false | true | true | true
- false | true | false | false | true
- false | true | false | true | true
- false | true | true | false | true
- false | true | true | true | true
- true | false | false | false | true
- true | false | false | true | true
- true | false | true | false | true
- true | false | true | true | true
- true | true | false | false | false
- true | true | false | true | false
- true | true | true | false | false
- true | true | true | true | true
+ where(:job_token_authentication, :route_setting, :same_job_project, :expected_result) do
+ false | false | false | true
+ false | false | true | true
+ false | true | false | true
+ false | true | true | true
+ true | false | false | true
+ true | false | true | true
+ true | true | false | false
+ true | true | true | true
end
with_them do
@@ -699,9 +750,6 @@ RSpec.describe API::Helpers, feature_category: :shared do
allow(helper).to receive(:route_authentication_setting).and_return(job_token_scope: route_setting ? :project : nil)
allow(helper).to receive(:current_authenticated_job).and_return(job)
allow(job).to receive(:project).and_return(same_job_project ? project : other_project)
-
- stub_feature_flags(ci_job_token_scope: false)
- stub_feature_flags(ci_job_token_scope: project) if feature_flag
end
it 'returns the expected result' do
diff --git a/spec/lib/api/ml/mlflow/api_helpers_spec.rb b/spec/lib/api/ml/mlflow/api_helpers_spec.rb
index 3e7a0187d86..f45fccfba4c 100644
--- a/spec/lib/api/ml/mlflow/api_helpers_spec.rb
+++ b/spec/lib/api/ml/mlflow/api_helpers_spec.rb
@@ -71,4 +71,56 @@ RSpec.describe API::Ml::Mlflow::ApiHelpers, feature_category: :mlops do
end
end
end
+
+ describe '#gitlab_tags' do
+ describe 'when tags param is not supplied' do
+ let(:params) { {} }
+
+ it 'returns nil' do
+ expect(gitlab_tags).to be nil
+ end
+ end
+
+ describe 'when tags param is supplied' do
+ let(:params) { { tags: input } }
+
+ using RSpec::Parameterized::TableSyntax
+
+ subject { gitlab_tags }
+
+ where(:input, :output) do
+ [] | nil
+ [{}] | {}
+ [{ key: 'foo', value: 'bar' }] | {}
+ [{ key: "gitlab.version", value: "1.2.3" }] | { "version" => "1.2.3" }
+ [{ key: "foo", value: "bar" }, { key: "gitlab.foo", value: "baz" }] | { "foo" => "baz" }
+ end
+ with_them do
+ it 'is correct' do
+ is_expected.to eq(output)
+ end
+ end
+ end
+ end
+
+ describe '#custom_version' do
+ using RSpec::Parameterized::TableSyntax
+
+ subject { custom_version }
+
+ where(:input, :output) do
+ [] | nil
+ [{}] | nil
+ [{ key: 'foo', value: 'bar' }] | nil
+ [{ key: "gitlab.version", value: "1.2.3" }] | "1.2.3"
+ [{ key: "foo", value: "bar" }, { key: "gitlab.foo", value: "baz" }] | nil
+ end
+ with_them do
+ let(:params) { { tags: input } }
+
+ it 'is correct' do
+ is_expected.to eq(output)
+ end
+ end
+ end
end
diff --git a/spec/lib/atlassian/jira_connect/client_spec.rb b/spec/lib/atlassian/jira_connect/client_spec.rb
index a692d76da77..e1159b9fab2 100644
--- a/spec/lib/atlassian/jira_connect/client_spec.rb
+++ b/spec/lib/atlassian/jira_connect/client_spec.rb
@@ -433,16 +433,16 @@ RSpec.describe Atlassian::JiraConnect::Client, feature_category: :integrations d
end
it 'avoids N+1 database queries' do
- control_count = ActiveRecord::QueryRecorder.new do
+ control = ActiveRecord::QueryRecorder.new do
subject.send(:store_dev_info, project: project, merge_requests: merge_requests)
- end.count
+ end
merge_requests << create(:merge_request, :unique_branches, source_project: project)
expect do
subject.send(:store_dev_info, project: project,
merge_requests: merge_requests)
- end.not_to exceed_query_limit(control_count)
+ end.not_to exceed_query_limit(control)
end
end
diff --git a/spec/lib/atlassian/jira_connect/serializers/pull_request_entity_spec.rb b/spec/lib/atlassian/jira_connect/serializers/pull_request_entity_spec.rb
index 0ed320e863c..15cb4994d46 100644
--- a/spec/lib/atlassian/jira_connect/serializers/pull_request_entity_spec.rb
+++ b/spec/lib/atlassian/jira_connect/serializers/pull_request_entity_spec.rb
@@ -24,9 +24,9 @@ RSpec.describe Atlassian::JiraConnect::Serializers::PullRequestEntity, feature_c
subject { described_class.represent(merge_requests, user_notes_count: user_notes_count).as_json }
it 'avoids N+1 database queries' do
- control_count = ActiveRecord::QueryRecorder.new do
+ control = ActiveRecord::QueryRecorder.new do
described_class.represent(merge_requests, user_notes_count: user_notes_count)
- end.count
+ end
merge_requests << create(:merge_request, :unique_branches)
@@ -35,7 +35,7 @@ RSpec.describe Atlassian::JiraConnect::Serializers::PullRequestEntity, feature_c
records: merge_requests, associations: { merge_request_reviewers: :reviewer }
).call
- expect { subject }.not_to exceed_query_limit(control_count)
+ expect { subject }.not_to exceed_query_limit(control)
end
it 'uses counts from user_notes_count' do
diff --git a/spec/lib/backup/database_model_spec.rb b/spec/lib/backup/database_model_spec.rb
deleted file mode 100644
index 9fab5cbc1c0..00000000000
--- a/spec/lib/backup/database_model_spec.rb
+++ /dev/null
@@ -1,185 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Backup::DatabaseModel, :reestablished_active_record_base, feature_category: :backup_restore do
- using RSpec::Parameterized::TableSyntax
-
- let(:gitlab_database_name) { 'main' }
-
- describe '#connection' do
- subject(:connection) { described_class.new(gitlab_database_name).connection }
-
- it 'an instance of a ActiveRecord::Base.connection' do
- connection.is_a? ActiveRecord::Base.connection.class # rubocop:disable Database/MultipleDatabases -- We actually need an ActiveRecord::Base here
- end
- end
-
- describe '#config' do
- let(:application_config) do
- {
- adapter: 'postgresql',
- host: 'some_host',
- port: '5432'
- }
- end
-
- subject(:config) { described_class.new(gitlab_database_name).config }
-
- before do
- allow(
- Gitlab::Database.database_base_models_with_gitlab_shared[gitlab_database_name].connection_db_config
- ).to receive(:configuration_hash).and_return(application_config)
- end
-
- shared_examples 'no configuration is overridden' do
- it 'ActiveRecord backup configuration is expected to equal application configuration' do
- expect(config[:activerecord]).to eq(application_config)
- end
-
- it 'PostgreSQL ENV is expected to equal application configuration' do
- expect(config[:pg_env]).to eq(
- {
- 'PGHOST' => application_config[:host],
- 'PGPORT' => application_config[:port]
- }
- )
- end
- end
-
- shared_examples 'environment variables override application configuration' do
- let(:active_record_key) { described_class::SUPPORTED_OVERRIDES.invert[pg_env] }
-
- it 'ActiveRecord backup configuration overrides application configuration' do
- expect(config[:activerecord]).to eq(application_config.merge(active_record_key => overridden_value))
- end
-
- it 'PostgreSQL ENV overrides application configuration' do
- expect(config[:pg_env]).to include({ pg_env => overridden_value })
- end
- end
-
- context 'when no GITLAB_BACKUP_PG* variables are set' do
- it_behaves_like 'no configuration is overridden'
- end
-
- context 'when generic database configuration is overridden' do
- where(:env_variable, :overridden_value) do
- 'GITLAB_BACKUP_PGHOST' | 'test.invalid.'
- 'GITLAB_BACKUP_PGUSER' | 'some_user'
- 'GITLAB_BACKUP_PGPORT' | '1543'
- 'GITLAB_BACKUP_PGPASSWORD' | 'secret'
- 'GITLAB_BACKUP_PGSSLMODE' | 'allow'
- 'GITLAB_BACKUP_PGSSLKEY' | 'some_key'
- 'GITLAB_BACKUP_PGSSLCERT' | '/path/to/cert'
- 'GITLAB_BACKUP_PGSSLROOTCERT' | '/path/to/root/cert'
- 'GITLAB_BACKUP_PGSSLCRL' | '/path/to/crl'
- 'GITLAB_BACKUP_PGSSLCOMPRESSION' | '1'
- 'GITLAB_OVERRIDE_PGHOST' | 'test.invalid.'
- 'GITLAB_OVERRIDE_PGUSER' | 'some_user'
- 'GITLAB_OVERRIDE_PGPORT' | '1543'
- 'GITLAB_OVERRIDE_PGPASSWORD' | 'secret'
- 'GITLAB_OVERRIDE_PGSSLMODE' | 'allow'
- 'GITLAB_OVERRIDE_PGSSLKEY' | 'some_key'
- 'GITLAB_OVERRIDE_PGSSLCERT' | '/path/to/cert'
- 'GITLAB_OVERRIDE_PGSSLROOTCERT' | '/path/to/root/cert'
- 'GITLAB_OVERRIDE_PGSSLCRL' | '/path/to/crl'
- 'GITLAB_OVERRIDE_PGSSLCOMPRESSION' | '1'
- end
-
- with_them do
- let(:pg_env) { env_variable[/GITLAB_(BACKUP|OVERRIDE)_(\w+)/, 2] }
-
- before do
- stub_env(env_variable, overridden_value)
- end
-
- it_behaves_like 'environment variables override application configuration'
- end
- end
-
- context 'when specific database configuration is overridden' do
- context 'and environment variables are for the current database name' do
- where(:env_variable, :overridden_value) do
- 'GITLAB_BACKUP_MAIN_PGHOST' | 'test.invalid.'
- 'GITLAB_BACKUP_MAIN_PGUSER' | 'some_user'
- 'GITLAB_BACKUP_MAIN_PGPORT' | '1543'
- 'GITLAB_BACKUP_MAIN_PGPASSWORD' | 'secret'
- 'GITLAB_BACKUP_MAIN_PGSSLMODE' | 'allow'
- 'GITLAB_BACKUP_MAIN_PGSSLKEY' | 'some_key'
- 'GITLAB_BACKUP_MAIN_PGSSLCERT' | '/path/to/cert'
- 'GITLAB_BACKUP_MAIN_PGSSLROOTCERT' | '/path/to/root/cert'
- 'GITLAB_BACKUP_MAIN_PGSSLCRL' | '/path/to/crl'
- 'GITLAB_BACKUP_MAIN_PGSSLCOMPRESSION' | '1'
- 'GITLAB_OVERRIDE_MAIN_PGHOST' | 'test.invalid.'
- 'GITLAB_OVERRIDE_MAIN_PGUSER' | 'some_user'
- 'GITLAB_OVERRIDE_MAIN_PGPORT' | '1543'
- 'GITLAB_OVERRIDE_MAIN_PGPASSWORD' | 'secret'
- 'GITLAB_OVERRIDE_MAIN_PGSSLMODE' | 'allow'
- 'GITLAB_OVERRIDE_MAIN_PGSSLKEY' | 'some_key'
- 'GITLAB_OVERRIDE_MAIN_PGSSLCERT' | '/path/to/cert'
- 'GITLAB_OVERRIDE_MAIN_PGSSLROOTCERT' | '/path/to/root/cert'
- 'GITLAB_OVERRIDE_MAIN_PGSSLCRL' | '/path/to/crl'
- 'GITLAB_OVERRIDE_MAIN_PGSSLCOMPRESSION' | '1'
- end
-
- with_them do
- let(:pg_env) { env_variable[/GITLAB_(BACKUP|OVERRIDE)_MAIN_(\w+)/, 2] }
-
- before do
- stub_env(env_variable, overridden_value)
- end
-
- it_behaves_like 'environment variables override application configuration'
- end
- end
-
- context 'and environment variables are for another database' do
- where(:env_variable, :overridden_value) do
- 'GITLAB_BACKUP_CI_PGHOST' | 'test.invalid.'
- 'GITLAB_BACKUP_CI_PGUSER' | 'some_user'
- 'GITLAB_BACKUP_CI_PGPORT' | '1543'
- 'GITLAB_BACKUP_CI_PGPASSWORD' | 'secret'
- 'GITLAB_BACKUP_CI_PGSSLMODE' | 'allow'
- 'GITLAB_BACKUP_CI_PGSSLKEY' | 'some_key'
- 'GITLAB_BACKUP_CI_PGSSLCERT' | '/path/to/cert'
- 'GITLAB_BACKUP_CI_PGSSLROOTCERT' | '/path/to/root/cert'
- 'GITLAB_BACKUP_CI_PGSSLCRL' | '/path/to/crl'
- 'GITLAB_BACKUP_CI_PGSSLCOMPRESSION' | '1'
- 'GITLAB_OVERRIDE_CI_PGHOST' | 'test.invalid.'
- 'GITLAB_OVERRIDE_CI_PGUSER' | 'some_user'
- 'GITLAB_OVERRIDE_CI_PGPORT' | '1543'
- 'GITLAB_OVERRIDE_CI_PGPASSWORD' | 'secret'
- 'GITLAB_OVERRIDE_CI_PGSSLMODE' | 'allow'
- 'GITLAB_OVERRIDE_CI_PGSSLKEY' | 'some_key'
- 'GITLAB_OVERRIDE_CI_PGSSLCERT' | '/path/to/cert'
- 'GITLAB_OVERRIDE_CI_PGSSLROOTCERT' | '/path/to/root/cert'
- 'GITLAB_OVERRIDE_CI_PGSSLCRL' | '/path/to/crl'
- 'GITLAB_OVERRIDE_CI_PGSSLCOMPRESSION' | '1'
- end
-
- with_them do
- let(:pg_env) { env_variable[/GITLAB_(BACKUP|OVERRIDE)_CI_(\w+)/, 1] }
-
- before do
- stub_env(env_variable, overridden_value)
- end
-
- it_behaves_like 'no configuration is overridden'
- end
- end
-
- context 'when both GITLAB_BACKUP_PGUSER and GITLAB_BACKUP_MAIN_PGUSER variable are present' do
- before do
- stub_env('GITLAB_BACKUP_PGUSER', 'generic_user')
- stub_env('GITLAB_BACKUP_MAIN_PGUSER', 'specfic_user')
- end
-
- it 'prefers more specific GITLAB_BACKUP_MAIN_PGUSER' do
- expect(config.dig(:activerecord, :username)).to eq('specfic_user')
- expect(config.dig(:pg_env, 'PGUSER')).to eq('specfic_user')
- end
- end
- end
- end
-end
diff --git a/spec/lib/backup/repositories_spec.rb b/spec/lib/backup/repositories_spec.rb
index 024f6c5db96..679be62393e 100644
--- a/spec/lib/backup/repositories_spec.rb
+++ b/spec/lib/backup/repositories_spec.rb
@@ -68,20 +68,20 @@ RSpec.describe Backup::Repositories, feature_category: :backup_restore do
end
it 'avoids N+1 database queries' do
- control_count = ActiveRecord::QueryRecorder.new do
+ control = ActiveRecord::QueryRecorder.new do
subject.dump(destination, backup_id)
- end.count
+ end
create_list(:project, 2, :repository)
create_list(:snippet, 2, :repository)
- # Number of expected queries are 2 more than control_count
+ # Number of expected queries are 2 more than control.count
# to account for the queries for project.design_management_repository
# for each project.
# We are using 2 projects here.
expect do
subject.dump(destination, backup_id)
- end.not_to exceed_query_limit(control_count + 2)
+ end.not_to exceed_query_limit(control).with_threshold(2)
end
describe 'storages' do
diff --git a/spec/lib/banzai/filter/custom_emoji_filter_spec.rb b/spec/lib/banzai/filter/custom_emoji_filter_spec.rb
index 4fc9d9dd4f6..701a45aa54d 100644
--- a/spec/lib/banzai/filter/custom_emoji_filter_spec.rb
+++ b/spec/lib/banzai/filter/custom_emoji_filter_spec.rb
@@ -47,13 +47,13 @@ RSpec.describe Banzai::Filter::CustomEmojiFilter, feature_category: :team_planni
it 'does not do N+1 query' do
create(:custom_emoji, name: 'party-parrot', group: group)
- control_count = ActiveRecord::QueryRecorder.new(skip_cached: false) do
+ control = ActiveRecord::QueryRecorder.new(skip_cached: false) do
filter('<p>:tanuki:</p>')
end
expect do
filter('<p>:tanuki:</p> <p>:party-parrot:</p>')
- end.not_to exceed_all_query_limit(control_count.count)
+ end.not_to exceed_all_query_limit(control)
end
it 'uses custom emoji from ancestor group' do
diff --git a/spec/lib/banzai/filter/issuable_reference_expansion_filter_spec.rb b/spec/lib/banzai/filter/issuable_reference_expansion_filter_spec.rb
index 06bb0edc92c..d14f218763f 100644
--- a/spec/lib/banzai/filter/issuable_reference_expansion_filter_spec.rb
+++ b/spec/lib/banzai/filter/issuable_reference_expansion_filter_spec.rb
@@ -259,15 +259,15 @@ RSpec.describe Banzai::Filter::IssuableReferenceExpansionFilter, feature_categor
# warm up
filter(link, context)
- control_count = ActiveRecord::QueryRecorder.new(skip_cached: false) do
+ control = ActiveRecord::QueryRecorder.new(skip_cached: false) do
filter(link, context)
- end.count
+ end
- expect(control_count).to eq 12
+ expect(control.count).to eq 12
expect do
filter("#{link} #{link2}", context)
- end.not_to exceed_all_query_limit(control_count)
+ end.not_to exceed_all_query_limit(control)
end
end
end
@@ -419,15 +419,15 @@ RSpec.describe Banzai::Filter::IssuableReferenceExpansionFilter, feature_categor
# warm up
filter(link, context)
- control_count = ActiveRecord::QueryRecorder.new(skip_cached: false) do
+ control = ActiveRecord::QueryRecorder.new(skip_cached: false) do
filter(link, context)
- end.count
+ end
- expect(control_count).to eq 10
+ expect(control.count).to eq 10
expect do
filter("#{link} #{link2}", context)
- end.not_to exceed_all_query_limit(control_count)
+ end.not_to exceed_all_query_limit(control)
end
end
end
diff --git a/spec/lib/banzai/filter/markdown_engines/base_spec.rb b/spec/lib/banzai/filter/markdown_engines/base_spec.rb
index e7b32876610..3114f367fac 100644
--- a/spec/lib/banzai/filter/markdown_engines/base_spec.rb
+++ b/spec/lib/banzai/filter/markdown_engines/base_spec.rb
@@ -14,4 +14,10 @@ RSpec.describe Banzai::Filter::MarkdownEngines::Base, feature_category: :team_pl
expect(engine.send(:sourcepos_disabled?)).to be_truthy
end
+
+ it 'accepts a nil context' do
+ engine = described_class.new(nil)
+
+ expect(engine.context).to eq({})
+ end
end
diff --git a/spec/lib/banzai/filter/markdown_engines/glfm_markdown_spec.rb b/spec/lib/banzai/filter/markdown_engines/glfm_markdown_spec.rb
new file mode 100644
index 00000000000..da58b824a06
--- /dev/null
+++ b/spec/lib/banzai/filter/markdown_engines/glfm_markdown_spec.rb
@@ -0,0 +1,17 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Banzai::Filter::MarkdownEngines::GlfmMarkdown, feature_category: :team_planning do
+ it 'defaults to generating sourcepos' do
+ engine = described_class.new({})
+
+ expect(engine.render('# hi')).to eq %(<h1 data-sourcepos="1:1-1:4">hi</h1>\n)
+ end
+
+ it 'turns off sourcepos' do
+ engine = described_class.new({ no_sourcepos: true })
+
+ expect(engine.render('# hi')).to eq %(<h1>hi</h1>\n)
+ end
+end
diff --git a/spec/lib/banzai/filter/markdown_filter_spec.rb b/spec/lib/banzai/filter/markdown_filter_spec.rb
index b4fb715b8f0..30869d67661 100644
--- a/spec/lib/banzai/filter/markdown_filter_spec.rb
+++ b/spec/lib/banzai/filter/markdown_filter_spec.rb
@@ -8,18 +8,30 @@ RSpec.describe Banzai::Filter::MarkdownFilter, feature_category: :team_planning
describe 'markdown engine from context' do
it 'finds the correct engine' do
- expect(described_class.render_engine(:common_mark)).to eq Banzai::Filter::MarkdownEngines::CommonMark
+ expect(described_class.new('foo', { markdown_engine: :common_mark }).render_engine)
+ .to eq Banzai::Filter::MarkdownEngines::CommonMark
end
- it 'defaults to the DEFAULT_ENGINE' do
- default_engine = Banzai::Filter::MarkdownFilter::DEFAULT_ENGINE.to_s.classify
- default = "Banzai::Filter::MarkdownEngines::#{default_engine}".constantize
+ it 'defaults to the RUST_ENGINE' do
+ default_engine = Banzai::Filter::MarkdownFilter::RUST_ENGINE.to_s.classify
+ engine = "Banzai::Filter::MarkdownEngines::#{default_engine}".constantize
- expect(described_class.render_engine(nil)).to eq default
+ expect(described_class.new('foo', {}).render_engine).to eq engine
+ end
+
+ context 'when :markdown_rust feature flag is turned off' do
+ it 'defaults to the RUBY_ENGINE' do
+ stub_feature_flags(markdown_rust: false)
+
+ ruby_engine = Banzai::Filter::MarkdownFilter::RUBY_ENGINE.to_s.classify
+ engine = "Banzai::Filter::MarkdownEngines::#{ruby_engine}".constantize
+
+ expect(described_class.new('foo', {}).render_engine).to eq engine
+ end
end
it 'raise error for unrecognized engines' do
- expect { described_class.render_engine(:foo_bar) }.to raise_error(NameError)
+ expect { described_class.new('foo', { markdown_engine: :foo_bar }).render_engine }.to raise_error(NameError)
end
end
diff --git a/spec/lib/banzai/filter/references/alert_reference_filter_spec.rb b/spec/lib/banzai/filter/references/alert_reference_filter_spec.rb
index 9a2e68aaae0..0bdd64c360d 100644
--- a/spec/lib/banzai/filter/references/alert_reference_filter_spec.rb
+++ b/spec/lib/banzai/filter/references/alert_reference_filter_spec.rb
@@ -230,11 +230,11 @@ RSpec.describe Banzai::Filter::References::AlertReferenceFilter, feature_categor
it 'does not have N+1 per multiple references per project', :use_sql_query_cache do
markdown = alert_reference.to_s
- max_count = ActiveRecord::QueryRecorder.new(skip_cached: false) do
+ control = ActiveRecord::QueryRecorder.new(skip_cached: false) do
reference_filter(markdown)
- end.count
+ end
- expect(max_count).to eq 1
+ expect(control.count).to eq 1
markdown = "#{alert_reference} ^alert#2 ^alert#3 ^alert#4 #{alert2_reference}"
@@ -248,11 +248,9 @@ RSpec.describe Banzai::Filter::References::AlertReferenceFilter, feature_categor
# 1x2 for alerts in each project
# Total == 7
# TODO: https://gitlab.com/gitlab-org/gitlab/-/issues/330359
- max_count += 6
-
expect do
reference_filter(markdown)
- end.not_to exceed_all_query_limit(max_count)
+ end.not_to exceed_all_query_limit(control).with_threshold(6)
end
end
end
diff --git a/spec/lib/banzai/filter/references/commit_reference_filter_spec.rb b/spec/lib/banzai/filter/references/commit_reference_filter_spec.rb
index 35a3f20f7b7..730554857df 100644
--- a/spec/lib/banzai/filter/references/commit_reference_filter_spec.rb
+++ b/spec/lib/banzai/filter/references/commit_reference_filter_spec.rb
@@ -283,11 +283,11 @@ RSpec.describe Banzai::Filter::References::CommitReferenceFilter, feature_catego
it 'does not have N+1 per multiple references per project', :use_sql_query_cache do
markdown = commit_reference.to_s
- max_count = ActiveRecord::QueryRecorder.new(skip_cached: false) do
+ control = ActiveRecord::QueryRecorder.new(skip_cached: false) do
reference_filter(markdown)
- end.count
+ end
- expect(max_count).to eq 0
+ expect(control.count).to eq 0
markdown = "#{commit_reference} 8b95f2f1 8b95f2f2 8b95f2f3 #{commit2_reference} #{commit3_reference}"
@@ -298,11 +298,9 @@ RSpec.describe Banzai::Filter::References::CommitReferenceFilter, feature_catego
# 1 for loading the namespaces associated to the project
# 1 for loading the routes associated with the namespace
# Total = 5
- max_count += 5
-
expect do
reference_filter(markdown)
- end.not_to exceed_all_query_limit(max_count)
+ end.not_to exceed_all_query_limit(control).with_threshold(5)
end
end
end
diff --git a/spec/lib/banzai/filter/references/design_reference_filter_spec.rb b/spec/lib/banzai/filter/references/design_reference_filter_spec.rb
index fd03d7c0d27..678d6619101 100644
--- a/spec/lib/banzai/filter/references/design_reference_filter_spec.rb
+++ b/spec/lib/banzai/filter/references/design_reference_filter_spec.rb
@@ -240,7 +240,7 @@ RSpec.describe Banzai::Filter::References::DesignReferenceFilter, feature_catego
* #1[not a valid reference.gif]
MD
- baseline = ActiveRecord::QueryRecorder.new { process(one_ref_per_project) }
+ control = ActiveRecord::QueryRecorder.new { process(one_ref_per_project) }
# each project mentioned requires 2 queries:
#
@@ -253,7 +253,7 @@ RSpec.describe Banzai::Filter::References::DesignReferenceFilter, feature_catego
# In addition there is a 1 query overhead for all the projects at the
# start. Currently, the baseline for 2 projects is `2 * 2 + 1 = 5` queries
#
- expect { process(multiple_references) }.not_to exceed_query_limit(baseline.count)
+ expect { process(multiple_references) }.not_to exceed_query_limit(control)
end
end
diff --git a/spec/lib/banzai/filter/references/external_issue_reference_filter_spec.rb b/spec/lib/banzai/filter/references/external_issue_reference_filter_spec.rb
index 823f006c98a..acc59c85cbf 100644
--- a/spec/lib/banzai/filter/references/external_issue_reference_filter_spec.rb
+++ b/spec/lib/banzai/filter/references/external_issue_reference_filter_spec.rb
@@ -338,9 +338,9 @@ RSpec.describe Banzai::Filter::References::ExternalIssueReferenceFilter, feature
single_reference = "External Issue #{issue1.to_reference}"
multiple_references = "External Issues #{issue1.to_reference} and #{issue2.to_reference}"
- control_count = ActiveRecord::QueryRecorder.new { reference_filter(single_reference).to_html }.count
+ control = ActiveRecord::QueryRecorder.new { reference_filter(single_reference).to_html }
- expect { reference_filter(multiple_references).to_html }.not_to exceed_query_limit(control_count)
+ expect { reference_filter(multiple_references).to_html }.not_to exceed_query_limit(control)
end
end
end
diff --git a/spec/lib/banzai/filter/references/issue_reference_filter_spec.rb b/spec/lib/banzai/filter/references/issue_reference_filter_spec.rb
index d16188e99a3..fd947e3e9cb 100644
--- a/spec/lib/banzai/filter/references/issue_reference_filter_spec.rb
+++ b/spec/lib/banzai/filter/references/issue_reference_filter_spec.rb
@@ -41,9 +41,9 @@ RSpec.describe Banzai::Filter::References::IssueReferenceFilter, feature_categor
single_reference = "Issue #{issue.to_reference}"
multiple_references = "Issues #{issue.to_reference} and #{another_issue.to_reference}"
- control_count = ActiveRecord::QueryRecorder.new { reference_filter(single_reference).to_html }.count
+ control = ActiveRecord::QueryRecorder.new { reference_filter(single_reference).to_html }
- expect { reference_filter(multiple_references).to_html }.not_to exceed_query_limit(control_count)
+ expect { reference_filter(multiple_references).to_html }.not_to exceed_query_limit(control)
end
end
diff --git a/spec/lib/banzai/filter/references/label_reference_filter_spec.rb b/spec/lib/banzai/filter/references/label_reference_filter_spec.rb
index 81b08a4c516..bcc256813c9 100644
--- a/spec/lib/banzai/filter/references/label_reference_filter_spec.rb
+++ b/spec/lib/banzai/filter/references/label_reference_filter_spec.rb
@@ -35,13 +35,13 @@ RSpec.describe Banzai::Filter::References::LabelReferenceFilter, feature_categor
# Run this once to establish a baseline
reference_filter("Label #{reference}")
- control_count = ActiveRecord::QueryRecorder.new(skip_cached: false) do
+ control = ActiveRecord::QueryRecorder.new(skip_cached: false) do
reference_filter("Label #{reference}")
end
labels_markdown = Array.new(10, "Label #{reference}").join('\n')
- expect { reference_filter(labels_markdown) }.not_to exceed_all_query_limit(control_count.count)
+ expect { reference_filter(labels_markdown) }.not_to exceed_all_query_limit(control)
end
it 'includes a data-project attribute' do
diff --git a/spec/lib/banzai/filter/references/merge_request_reference_filter_spec.rb b/spec/lib/banzai/filter/references/merge_request_reference_filter_spec.rb
index ccc8478c7d8..e3036993f7b 100644
--- a/spec/lib/banzai/filter/references/merge_request_reference_filter_spec.rb
+++ b/spec/lib/banzai/filter/references/merge_request_reference_filter_spec.rb
@@ -26,9 +26,9 @@ RSpec.describe Banzai::Filter::References::MergeRequestReferenceFilter, feature_
single_reference = "Merge request #{merge.to_reference}"
multiple_references = "Merge requests #{merge.to_reference} and #{another_merge.to_reference}"
- control_count = ActiveRecord::QueryRecorder.new { reference_filter(single_reference).to_html }.count
+ control = ActiveRecord::QueryRecorder.new { reference_filter(single_reference).to_html }
- expect { reference_filter(multiple_references).to_html }.not_to exceed_query_limit(control_count)
+ expect { reference_filter(multiple_references).to_html }.not_to exceed_query_limit(control)
end
end
diff --git a/spec/lib/banzai/filter/references/project_reference_filter_spec.rb b/spec/lib/banzai/filter/references/project_reference_filter_spec.rb
index c55fff78756..12af94507b6 100644
--- a/spec/lib/banzai/filter/references/project_reference_filter_spec.rb
+++ b/spec/lib/banzai/filter/references/project_reference_filter_spec.rb
@@ -115,17 +115,17 @@ RSpec.describe Banzai::Filter::References::ProjectReferenceFilter, feature_categ
# warm up first
reference_filter(markdown)
- max_count = ActiveRecord::QueryRecorder.new(skip_cached: false) do
+ control = ActiveRecord::QueryRecorder.new(skip_cached: false) do
reference_filter(markdown)
- end.count
+ end
- expect(max_count).to eq 2
+ expect(control.count).to eq 2
markdown = "#{normal_project_reference} #{invalidate_reference(normal_project_reference)} #{group_project_reference} #{nested_project_reference}"
expect do
reference_filter(markdown)
- end.not_to exceed_all_query_limit(max_count)
+ end.not_to exceed_all_query_limit(control)
end
end
end
diff --git a/spec/lib/banzai/filter/references/reference_cache_spec.rb b/spec/lib/banzai/filter/references/reference_cache_spec.rb
index 04877931610..b4d9a08e4c6 100644
--- a/spec/lib/banzai/filter/references/reference_cache_spec.rb
+++ b/spec/lib/banzai/filter/references/reference_cache_spec.rb
@@ -70,13 +70,13 @@ RSpec.describe Banzai::Filter::References::ReferenceCache, feature_category: :te
filter_single = filter_class.new(doc_single, project: project)
cache_single = described_class.new(filter_single, { project: project }, {})
- control_count = ActiveRecord::QueryRecorder.new do
+ control = ActiveRecord::QueryRecorder.new do
cache_single.load_references_per_parent(filter_single.nodes)
cache_single.load_parent_per_reference
cache_single.load_records_per_parent
- end.count
+ end
- expect(control_count).to eq 3
+ expect(control.count).to eq 3
# Since this is an issue filter that is not batching issue queries
# across projects, we have to account for that.
# 1 for routes to find routes.source_id of projects matching paths
@@ -88,13 +88,11 @@ RSpec.describe Banzai::Filter::References::ReferenceCache, feature_category: :te
# 1x2 for groups
# 1x2 for work_item_types
# Total = 11
- max_count = control_count + 8
-
expect do
cache.load_references_per_parent(filter.nodes)
cache.load_parent_per_reference
cache.load_records_per_parent
- end.not_to exceed_query_limit(max_count)
+ end.not_to exceed_query_limit(control).with_threshold(8)
end
end
diff --git a/spec/lib/banzai/filter/references/snippet_reference_filter_spec.rb b/spec/lib/banzai/filter/references/snippet_reference_filter_spec.rb
index 00eac7262f4..51c5551dda8 100644
--- a/spec/lib/banzai/filter/references/snippet_reference_filter_spec.rb
+++ b/spec/lib/banzai/filter/references/snippet_reference_filter_spec.rb
@@ -229,11 +229,11 @@ RSpec.describe Banzai::Filter::References::SnippetReferenceFilter, feature_categ
it 'does not have N+1 per multiple references per project', :use_sql_query_cache do
markdown = "#{reference} $9999990"
- control_count = ActiveRecord::QueryRecorder.new(skip_cached: false) do
+ control = ActiveRecord::QueryRecorder.new(skip_cached: false) do
reference_filter(markdown)
- end.count
+ end
- expect(control_count).to eq 1
+ expect(control.count).to eq 1
markdown = "#{reference} $9999990 $9999991 $9999992 $9999993 #{reference2} something/cool$12"
@@ -247,11 +247,9 @@ RSpec.describe Banzai::Filter::References::SnippetReferenceFilter, feature_categ
# 1x2 for snippets in each project == 2
# Total = 7
# TODO: https://gitlab.com/gitlab-org/gitlab/-/issues/330359
- max_count = control_count + 6
-
expect do
reference_filter(markdown)
- end.not_to exceed_all_query_limit(max_count)
+ end.not_to exceed_all_query_limit(control).with_threshold(6)
end
end
end
diff --git a/spec/lib/banzai/filter/references/work_item_reference_filter_spec.rb b/spec/lib/banzai/filter/references/work_item_reference_filter_spec.rb
index e59e53891bf..cf245ccc72a 100644
--- a/spec/lib/banzai/filter/references/work_item_reference_filter_spec.rb
+++ b/spec/lib/banzai/filter/references/work_item_reference_filter_spec.rb
@@ -306,9 +306,9 @@ RSpec.describe Banzai::Filter::References::WorkItemReferenceFilter, feature_cate
single_reference = "Work item #{work_item.to_reference}"
multiple_references = "Work items #{work_item.to_reference} and #{another_work_item.to_reference}"
- control_count = ActiveRecord::QueryRecorder.new { reference_filter(single_reference).to_html }.count
+ control = ActiveRecord::QueryRecorder.new { reference_filter(single_reference).to_html }
- expect { reference_filter(multiple_references).to_html }.not_to exceed_query_limit(control_count)
+ expect { reference_filter(multiple_references).to_html }.not_to exceed_query_limit(control)
end
end
end
diff --git a/spec/lib/banzai/issuable_extractor_spec.rb b/spec/lib/banzai/issuable_extractor_spec.rb
index 5bbd98592e7..fe1a2bd9a2e 100644
--- a/spec/lib/banzai/issuable_extractor_spec.rb
+++ b/spec/lib/banzai/issuable_extractor_spec.rb
@@ -45,9 +45,9 @@ RSpec.describe Banzai::IssuableExtractor, feature_category: :team_planning do
second_call_queries = ActiveRecord::QueryRecorder.new do
extractor.extract([issue_link, work_item_link, merge_request_link])
- end.count
+ end
- expect(second_call_queries).to eq 0
+ expect(second_call_queries.count).to eq 0
end
end
end
diff --git a/spec/lib/banzai/pipeline/gfm_pipeline_spec.rb b/spec/lib/banzai/pipeline/gfm_pipeline_spec.rb
index a845e4fa7f4..bb6d4eeefbc 100644
--- a/spec/lib/banzai/pipeline/gfm_pipeline_spec.rb
+++ b/spec/lib/banzai/pipeline/gfm_pipeline_spec.rb
@@ -167,6 +167,21 @@ RSpec.describe Banzai::Pipeline::GfmPipeline, feature_category: :team_planning d
end
end
+ context 'when label reference is similar to a commit SHA' do
+ let(:numeric_commit_sha) { '8634272' }
+ let(:project) { create(:project, :repository) }
+ let(:label) { create(:label, project: project, id: numeric_commit_sha) }
+
+ it 'renders a label reference' do
+ expect(project.commit_by(oid: numeric_commit_sha)).to be_present
+
+ output = described_class.to_html(label.to_reference(format: :id), project: project)
+
+ expect(output).to include(label.name)
+ expect(output).to include(Gitlab::Routing.url_helpers.project_issues_path(project, label_name: label.name))
+ end
+ end
+
describe 'asset proxy' do
let(:project) { create(:project, :public) }
let(:image) { '![proxy](http://example.com/test.png)' }
diff --git a/spec/lib/banzai/reference_parser/snippet_parser_spec.rb b/spec/lib/banzai/reference_parser/snippet_parser_spec.rb
index 8f4148be2dc..0f3834c2dc8 100644
--- a/spec/lib/banzai/reference_parser/snippet_parser_spec.rb
+++ b/spec/lib/banzai/reference_parser/snippet_parser_spec.rb
@@ -37,11 +37,11 @@ RSpec.describe Banzai::ReferenceParser::SnippetParser, feature_category: :team_p
# Run this once to establish a baseline
visible_references(:public)
- control_count = ActiveRecord::QueryRecorder.new(skip_cached: false) do
+ control = ActiveRecord::QueryRecorder.new(skip_cached: false) do
subject.nodes_visible_to_user(user, [link])
end
- expect { subject.nodes_visible_to_user(user, Array.new(10, link)) }.not_to exceed_all_query_limit(control_count.count)
+ expect { subject.nodes_visible_to_user(user, Array.new(10, link)) }.not_to exceed_all_query_limit(control)
end
it 'creates a reference for guest for a public snippet' do
diff --git a/spec/lib/click_house/iterator_spec.rb b/spec/lib/click_house/iterator_spec.rb
index fd054c0afe5..962ccc6d884 100644
--- a/spec/lib/click_house/iterator_spec.rb
+++ b/spec/lib/click_house/iterator_spec.rb
@@ -29,6 +29,16 @@ RSpec.describe ClickHouse::Iterator, :click_house, feature_category: :database d
expect(collect_ids_with_batch_size(15)).to match_array(expected_values)
end
+ context 'when min value is given' do
+ let(:iterator) { described_class.new(query_builder: query_builder, connection: connection, min_value: 5) }
+
+ it 'iterates from the given min value' do
+ expected_values = (5..10).to_a
+
+ expect(collect_ids_with_batch_size(5)).to match_array(expected_values)
+ end
+ end
+
context 'when there are no records for the given query' do
let(:query_builder) do
ClickHouse::QueryBuilder
diff --git a/spec/lib/container_registry/gitlab_api_client_spec.rb b/spec/lib/container_registry/gitlab_api_client_spec.rb
index 3c87af3a1c8..e13f639f048 100644
--- a/spec/lib/container_registry/gitlab_api_client_spec.rb
+++ b/spec/lib/container_registry/gitlab_api_client_spec.rb
@@ -256,6 +256,23 @@ RSpec.describe ContainerRegistry::GitlabApiClient, feature_category: :container_
it { is_expected.to eq(expected) }
end
+ context 'with referrers included' do
+ subject { client.tags(path, page_size: page_size, referrers: true) }
+
+ let(:expected) do
+ {
+ pagination: {},
+ response_body: ::Gitlab::Json.parse(response.to_json)
+ }
+ end
+
+ before do
+ stub_tags(path, page_size: page_size, input: { referrers: 'true' }, respond_with: response)
+ end
+
+ it { is_expected.to eq(expected) }
+ end
+
context 'with a response with a link header containing next page' do
let(:expected) do
{
@@ -961,7 +978,8 @@ RSpec.describe ContainerRegistry::GitlabApiClient, feature_category: :container_
last: input[:last],
name: input[:name],
sort: input[:sort],
- before: input[:before]
+ before: input[:before],
+ referrers: input[:referrers]
}.compact
url = "#{registry_api_url}/gitlab/v1/repositories/#{path}/tags/list/"
diff --git a/spec/lib/container_registry/tag_spec.rb b/spec/lib/container_registry/tag_spec.rb
index 8f9308f2127..42191cb121c 100644
--- a/spec/lib/container_registry/tag_spec.rb
+++ b/spec/lib/container_registry/tag_spec.rb
@@ -336,6 +336,31 @@ RSpec.describe ContainerRegistry::Tag, feature_category: :container_registry do
it { is_expected.to eq(nil) }
end
end
+
+ describe 'published_at=' do
+ subject do
+ tag.published_at = input
+ tag.published_at
+ end
+
+ context 'with a valid input' do
+ let(:input) { 2.days.ago.iso8601 }
+
+ it { is_expected.to eq(DateTime.iso8601(input)) }
+ end
+
+ context 'with a nil input' do
+ let(:input) { nil }
+
+ it { is_expected.to eq(nil) }
+ end
+
+ context 'with an invalid input' do
+ let(:input) { 'not a timestamp' }
+
+ it { is_expected.to eq(nil) }
+ end
+ end
end
end
end
diff --git a/spec/lib/feature/definition_spec.rb b/spec/lib/feature/definition_spec.rb
index 595725d357c..b75c780a33e 100644
--- a/spec/lib/feature/definition_spec.rb
+++ b/spec/lib/feature/definition_spec.rb
@@ -30,11 +30,11 @@ RSpec.describe Feature::Definition do
:name | 'ALL_CAPS' | /Feature flag 'ALL_CAPS' is invalid/
:name | nil | /Feature flag is missing name/
:path | nil | /Feature flag 'feature_flag' is missing path/
- :type | nil | /Feature flag 'feature_flag' is missing type/
+ :type | nil | /Feature flag 'feature_flag' is missing `type`/
:type | 'invalid' | /Feature flag 'feature_flag' type 'invalid' is invalid/
:path | 'development/invalid.yml' | /Feature flag 'feature_flag' has an invalid path/
- :path | 'invalid/feature_flag.yml' | /Feature flag 'feature_flag' has an invalid type/
- :default_enabled | nil | /Feature flag 'feature_flag' is missing default_enabled/
+ :path | 'invalid/feature_flag.yml' | /Feature flag 'feature_flag' has an invalid path/
+ :default_enabled | nil | /Feature flag 'feature_flag' is missing `default_enabled`/
end
with_them do
diff --git a/spec/lib/gitlab/application_setting_fetcher_spec.rb b/spec/lib/gitlab/application_setting_fetcher_spec.rb
new file mode 100644
index 00000000000..0225a7608cb
--- /dev/null
+++ b/spec/lib/gitlab/application_setting_fetcher_spec.rb
@@ -0,0 +1,224 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::ApplicationSettingFetcher, feature_category: :cell do
+ before do
+ stub_env('IN_MEMORY_APPLICATION_SETTINGS', 'false')
+
+ described_class.clear_in_memory_application_settings!
+ end
+
+ describe '.clear_in_memory_application_settings!' do
+ subject(:clear_in_memory_application_settings!) { described_class.clear_in_memory_application_settings! }
+
+ before do
+ stub_env('IN_MEMORY_APPLICATION_SETTINGS', 'true')
+
+ described_class.current_application_settings
+ end
+
+ it 'will re-initialize settings' do
+ expect(ApplicationSetting).to receive(:build_from_defaults).and_call_original
+
+ clear_in_memory_application_settings!
+ described_class.current_application_settings
+ end
+ end
+
+ describe '.current_application_settings' do
+ subject(:current_application_settings) { described_class.current_application_settings }
+
+ context 'when ENV["IN_MEMORY_APPLICATION_SETTINGS"] is true' do
+ before do
+ stub_env('IN_MEMORY_APPLICATION_SETTINGS', 'true')
+ end
+
+ it 'returns an in-memory ApplicationSetting object' do
+ expect(ApplicationSetting).not_to receive(:current)
+ expect(ApplicationSetting).to receive(:build_from_defaults).and_call_original
+
+ expect(current_application_settings).to be_a(ApplicationSetting)
+ expect(current_application_settings).not_to be_persisted
+ end
+ end
+
+ context 'when ENV["IN_MEMORY_APPLICATION_SETTINGS"] is false' do
+ let_it_be(:settings) { create(:application_setting) }
+
+ context 'and an error is raised' do
+ before do
+ # The cached method is called twice:
+ # - ApplicationSettingFetcher
+ # - ApplicationSetting (CachedAttribute module)
+ # For this test, the first needs to raise an exception
+ # The second is swallowed on production so that should not raise an exception
+ # So we only let the first call raise an exception
+ # Alternatively, we could mock Rails.env.production? but I prefer not to
+ raise_exception = true
+ allow(ApplicationSetting).to receive(:cached).twice do
+ if raise_exception
+ raise_exception = false
+ raise(StandardError)
+ else
+ ApplicationSetting.last
+ end
+ end
+ end
+
+ it 'will retrieve uncached ApplicationSetting' do
+ expect(ApplicationSetting).to receive(:current).and_call_original
+
+ expect(current_application_settings).to eq(settings)
+ end
+ end
+
+ context 'and settings in cache' do
+ before do
+ # Warm the cache
+ ApplicationSetting.current
+ end
+
+ it 'fetches the settings from cache' do
+ expect(::ApplicationSetting).to receive(:cached).and_call_original
+
+ expect(ActiveRecord::QueryRecorder.new { current_application_settings }.count).to eq(0)
+ end
+ end
+
+ context 'and settings are not in cache' do
+ before do
+ allow(ApplicationSetting).to receive(:cached).and_return(nil)
+ end
+
+ context 'and we are running a Rake task' do
+ before do
+ allow(Gitlab::Runtime).to receive(:rake?).and_return(true)
+ end
+
+ context 'and database does not exist' do
+ before do
+ allow(::ApplicationSetting.database)
+ .to receive(:cached_table_exists?).and_raise(ActiveRecord::NoDatabaseError)
+ end
+
+ it 'uses Gitlab::FakeApplicationSettings' do
+ expect(current_application_settings).to be_a(Gitlab::FakeApplicationSettings)
+ end
+ end
+
+ context 'and database connection is not active' do
+ before do
+ allow(::ApplicationSetting.connection).to receive(:active?).and_return(false)
+ end
+
+ it 'uses Gitlab::FakeApplicationSettings' do
+ expect(current_application_settings).to be_a(Gitlab::FakeApplicationSettings)
+ end
+ end
+
+ context 'and table does not exist' do
+ before do
+ allow(::ApplicationSetting.database).to receive(:cached_table_exists?).and_return(false)
+ end
+
+ it 'uses Gitlab::FakeApplicationSettings' do
+ expect(current_application_settings).to be_a(Gitlab::FakeApplicationSettings)
+ end
+ end
+
+ context 'and database connection raises some error' do
+ before do
+ allow(::ApplicationSetting.connection).to receive(:active?).and_raise(StandardError)
+ end
+
+ it 'uses Gitlab::FakeApplicationSettings' do
+ expect(current_application_settings).to be_a(Gitlab::FakeApplicationSettings)
+ end
+ end
+
+ context 'and there are pending database migrations' do
+ before do
+ allow_next_instance_of(ActiveRecord::MigrationContext) do |migration_context|
+ allow(migration_context).to receive(:needs_migration?).and_return(true)
+ end
+ end
+
+ it 'uses Gitlab::FakeApplicationSettings' do
+ expect(current_application_settings).to be_a(Gitlab::FakeApplicationSettings)
+ end
+
+ context 'when a new setting is used but the migration did not run yet' do
+ let(:default_attributes) { { new_column: 'some_value' } }
+
+ before do
+ allow(ApplicationSetting).to receive(:defaults).and_return(default_attributes)
+ end
+
+ it 'uses the default value if present' do
+ expect(current_application_settings.new_column).to eq(
+ default_attributes[:new_column]
+ )
+ end
+ end
+ end
+ end
+
+ context 'and settings are in database' do
+ it 'returns settings from database' do
+ expect(current_application_settings).to eq(settings)
+ end
+ end
+
+ context 'and settings are not in the database' do
+ before do
+ allow(ApplicationSetting).to receive(:current).and_return(nil)
+ end
+
+ it 'returns default settings' do
+ expect(ApplicationSetting).to receive(:create_from_defaults).and_call_original
+
+ expect(current_application_settings).to eq(settings)
+ end
+ end
+
+ context 'when we hit a recursive loop' do
+ before do
+ allow(ApplicationSetting).to receive(:current).and_raise(ApplicationSetting::Recursion)
+ end
+
+ it 'recovers and returns in-memory settings' do
+ settings = described_class.current_application_settings
+
+ expect(settings).to be_a(ApplicationSetting)
+ expect(settings).not_to be_persisted
+ end
+ end
+ end
+ end
+ end
+
+ describe '.expire_current_application_settings' do
+ subject(:expire) { described_class.expire_current_application_settings }
+
+ it 'expires ApplicationSetting' do
+ expect(ApplicationSetting).to receive(:expire)
+
+ expire
+ end
+ end
+
+ describe '.current_application_settings?' do
+ subject(:settings?) { described_class.current_application_settings? }
+
+ context 'when settings exist' do
+ let_it_be(:settings) { create(:application_setting) }
+
+ it { is_expected.to be(true) }
+ end
+
+ context 'when settings do not exist' do
+ it { is_expected.to be(false) }
+ end
+ end
+end
diff --git a/spec/lib/gitlab/auth/two_factor_auth_verifier_spec.rb b/spec/lib/gitlab/auth/two_factor_auth_verifier_spec.rb
index e0ef45d5621..bccddaa50a0 100644
--- a/spec/lib/gitlab/auth/two_factor_auth_verifier_spec.rb
+++ b/spec/lib/gitlab/auth/two_factor_auth_verifier_spec.rb
@@ -169,4 +169,33 @@ RSpec.describe Gitlab::Auth::TwoFactorAuthVerifier do
end
end
end
+
+ describe '#two_factor_authentication_reason' do
+ it 'returns false if two factor authentication is not required' do
+ allow(user).to receive(:require_two_factor_authentication?).and_return(false)
+
+ expect(subject.two_factor_authentication_reason).to be_falsey
+ end
+
+ it 'returns :global if two factor authentication is enabled globally' do
+ stub_application_setting require_two_factor_authentication: true
+
+ expect(subject.two_factor_authentication_reason).to eq(:global)
+ end
+
+ it 'returns :admin_2fa if the current user is an admin and two factor is enabled' do
+ stub_application_setting require_admin_two_factor_authentication: true
+
+ allow(user).to receive(:admin?).and_return(true)
+
+ expect(subject.two_factor_authentication_reason).to eq(:admin_2fa)
+ end
+
+ it 'returns :group if two factor authentication is enforced through a group setting' do
+ stub_application_setting require_two_factor_authentication: false
+ allow(user).to receive(:require_two_factor_authentication_from_group?).and_return(true)
+
+ expect(subject.two_factor_authentication_reason).to eq(:group)
+ end
+ end
end
diff --git a/spec/lib/gitlab/auth_spec.rb b/spec/lib/gitlab/auth_spec.rb
index 9974e24ad50..fd51ebbc8fa 100644
--- a/spec/lib/gitlab/auth_spec.rb
+++ b/spec/lib/gitlab/auth_spec.rb
@@ -16,7 +16,7 @@ RSpec.describe Gitlab::Auth, :use_clean_rails_memory_store_caching, feature_cate
end
it 'ADMIN_SCOPES contains all scopes for ADMIN access' do
- expect(subject::ADMIN_SCOPES).to match_array %i[sudo admin_mode]
+ expect(subject::ADMIN_SCOPES).to match_array %i[sudo admin_mode read_service_ping]
end
it 'REPOSITORY_SCOPES contains all scopes for REPOSITORY access' do
@@ -42,7 +42,7 @@ RSpec.describe Gitlab::Auth, :use_clean_rails_memory_store_caching, feature_cate
end
it 'contains all non-default scopes' do
- expect(subject.all_available_scopes).to match_array %i[api read_user read_api read_repository write_repository read_registry write_registry sudo admin_mode read_observability write_observability create_runner k8s_proxy ai_features]
+ expect(subject.all_available_scopes).to match_array %i[api read_user read_api read_repository read_service_ping write_repository read_registry write_registry sudo admin_mode read_observability write_observability create_runner k8s_proxy ai_features]
end
it 'contains for non-admin user all non-default scopes without ADMIN access and without observability scopes' do
@@ -54,7 +54,7 @@ RSpec.describe Gitlab::Auth, :use_clean_rails_memory_store_caching, feature_cate
it 'contains for admin user all non-default scopes with ADMIN access and without observability scopes' do
user = build_stubbed(:user, admin: true)
- expect(subject.available_scopes_for(user)).to match_array %i[api read_user read_api read_repository write_repository read_registry write_registry sudo admin_mode create_runner k8s_proxy ai_features]
+ expect(subject.available_scopes_for(user)).to match_array %i[api read_user read_api read_repository read_service_ping write_repository read_registry write_registry sudo admin_mode create_runner k8s_proxy ai_features]
end
it 'contains for project all resource bot scopes' do
@@ -72,7 +72,7 @@ RSpec.describe Gitlab::Auth, :use_clean_rails_memory_store_caching, feature_cate
end
it 'optional_scopes contains all non-default scopes' do
- expect(subject.optional_scopes).to match_array %i[read_user read_api read_repository write_repository read_registry write_registry sudo admin_mode openid profile email read_observability write_observability create_runner k8s_proxy ai_features]
+ expect(subject.optional_scopes).to match_array %i[read_user read_api read_repository write_repository read_registry read_service_ping write_registry sudo admin_mode openid profile email read_observability write_observability create_runner k8s_proxy ai_features]
end
context 'with observability_tracing feature flag' do
@@ -118,7 +118,7 @@ RSpec.describe Gitlab::Auth, :use_clean_rails_memory_store_caching, feature_cate
it 'contains for admin user all non-default scopes with ADMIN access and without observability scopes' do
user = build_stubbed(:user, admin: true)
- expect(subject.available_scopes_for(user)).to match_array %i[api read_user read_api read_repository write_repository read_registry write_registry sudo admin_mode create_runner k8s_proxy ai_features]
+ expect(subject.available_scopes_for(user)).to match_array %i[api read_user read_api read_repository write_repository read_registry write_registry read_service_ping sudo admin_mode create_runner k8s_proxy ai_features]
end
it 'contains for project all resource bot scopes including observability scopes' do
diff --git a/spec/lib/gitlab/background_migration/backfill_issue_search_data_namespace_id_spec.rb b/spec/lib/gitlab/background_migration/backfill_issue_search_data_namespace_id_spec.rb
new file mode 100644
index 00000000000..ea5c7086ac2
--- /dev/null
+++ b/spec/lib/gitlab/background_migration/backfill_issue_search_data_namespace_id_spec.rb
@@ -0,0 +1,78 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::BackgroundMigration::BackfillIssueSearchDataNamespaceId,
+ schema: 20240105144908, feature_category: :team_planning do
+ let(:namespaces) { table(:namespaces) }
+ let(:projects) { table(:projects) }
+ let(:issues) { table(:issues) }
+ let(:issue_search_data) { table(:issue_search_data) }
+ let(:issue_type) { table(:work_item_types).find_by!(namespace_id: nil, base_type: 0) }
+
+ let(:namespace_1) { namespaces.create!(name: 'namespace1', type: 'Group', path: 'namespace1') }
+ let(:namespace_2) { namespaces.create!(name: 'namespace2', type: 'Group', path: 'namespace2') }
+
+ let(:proj_ns_1) { namespaces.create!(name: 'pn1', path: 'pn1', type: 'Project', parent_id: namespace_1.id) }
+ let(:proj_ns_2) { namespaces.create!(name: 'pn2', path: 'pn2', type: 'Project', parent_id: namespace_2.id) }
+
+ let(:proj_1) do
+ projects.create!(name: 'proj1', path: 'proj1', namespace_id: namespace_1.id, project_namespace_id: proj_ns_1.id)
+ end
+
+ let(:proj_2) do
+ projects.create!(name: 'proj2', path: 'proj2', namespace_id: namespace_2.id, project_namespace_id: proj_ns_2.id)
+ end
+
+ let(:proj_1_issue_1) do
+ issues.create!(title: 'issue1', project_id: proj_1.id, namespace_id: proj_ns_1.id, work_item_type_id: issue_type.id)
+ end
+
+ let(:proj_1_issue_2) do
+ issues.create!(title: 'issue2', project_id: proj_1.id, namespace_id: proj_ns_1.id, work_item_type_id: issue_type.id)
+ end
+
+ let(:proj_2_issue_1) do
+ issues.create!(title: 'issue1', project_id: proj_2.id, namespace_id: proj_ns_2.id, work_item_type_id: issue_type.id)
+ end
+
+ let(:proj_2_issue_2) do
+ issues.create!(title: 'issue2', project_id: proj_2.id, namespace_id: proj_ns_2.id, work_item_type_id: issue_type.id)
+ end
+
+ let!(:proj_1_issue_1_search_data) do
+ issue_search_data.create!(namespace_id: nil, project_id: proj_1.id, issue_id: proj_1_issue_1.id)
+ end
+
+ let!(:proj_1_issue_2_search_data) do
+ issue_search_data.create!(namespace_id: nil, project_id: proj_1.id, issue_id: proj_1_issue_2.id)
+ end
+
+ let!(:proj_2_issue_1_search_data) do
+ issue_search_data.create!(namespace_id: nil, project_id: proj_2.id, issue_id: proj_2_issue_1.id)
+ end
+
+ let!(:proj_2_issue_2_search_data) do
+ issue_search_data.create!(namespace_id: nil, project_id: proj_2.id, issue_id: proj_2_issue_2.id)
+ end
+
+ let(:migration) do
+ described_class.new(
+ start_id: proj_1_issue_1.id,
+ end_id: proj_2_issue_2.id,
+ batch_table: :issues,
+ batch_column: :id,
+ sub_batch_size: 2,
+ pause_ms: 2,
+ connection: ApplicationRecord.connection
+ )
+ end
+
+ it 'backfills namespace_id for the specified records' do
+ migration.perform
+
+ [proj_1_issue_1, proj_1_issue_2, proj_2_issue_1, proj_2_issue_2].each do |issue|
+ expect(issue_search_data.find_by_issue_id(issue.id).namespace_id).to eq(issue.namespace_id)
+ end
+ end
+end
diff --git a/spec/lib/gitlab/background_migration/backfill_owasp_top_ten_of_vulnerability_reads_spec.rb b/spec/lib/gitlab/background_migration/backfill_owasp_top_ten_of_vulnerability_reads_spec.rb
new file mode 100644
index 00000000000..1462848845e
--- /dev/null
+++ b/spec/lib/gitlab/background_migration/backfill_owasp_top_ten_of_vulnerability_reads_spec.rb
@@ -0,0 +1,225 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::BackgroundMigration::BackfillOwaspTopTenOfVulnerabilityReads,
+ feature_category: :vulnerability_management do
+ let(:namespaces) { table(:namespaces) }
+ let(:projects) { table(:projects) }
+ let(:users) { table(:users) }
+ let(:scanners) { table(:vulnerability_scanners) }
+ let(:vulnerabilities) { table(:vulnerabilities) }
+ let(:vulnerability_reads) { table(:vulnerability_reads) }
+ let(:vulnerability_findings) { table(:vulnerability_occurrences) }
+ let(:vulnerability_occurrence_identifiers) { table(:vulnerability_occurrence_identifiers) }
+ let(:vulnerability_identifiers) { table(:vulnerability_identifiers) }
+
+ let(:namespace) { namespaces.create!(name: 'user', path: 'user') }
+ let(:project) { projects.create!(namespace_id: namespace.id, project_namespace_id: namespace.id) }
+ let(:user) { users.create!(username: 'john_doe', email: 'johndoe@gitlab.com', projects_limit: 10) }
+ let(:scanner) { scanners.create!(project_id: project.id, external_id: 'external_id', name: 'Test Scanner') }
+
+ shared_context 'with vulnerability data' do
+ let(:external_id) { '' }
+ let(:external_type) { '' }
+ let(:identifier_name) { '' }
+
+ let(:vulnerability_1) { create_vulnerability(title: 'vulnerability 1') }
+ let(:vulnerability_2) { create_vulnerability(title: 'vulnerability 2') }
+ let(:vulnerability_3) { create_vulnerability(title: 'vulnerability 3') }
+
+ let(:vuln_identifier) do
+ create_identifier(external_id: external_id, external_type: external_type, name: identifier_name)
+ end
+
+ let(:vuln_finding) do
+ create_finding(vulnerability_id: vulnerability_1.id, primary_identifier_id: vuln_identifier.id)
+ end
+
+ let!(:vulnerability_read_1) { create_vulnerability_read(vulnerability_id: vulnerability_1.id) }
+ let!(:vulnerability_read_2) { create_vulnerability_read(vulnerability_id: vulnerability_2.id) }
+ let!(:vulnerability_read_3) { create_vulnerability_read(vulnerability_id: vulnerability_3.id) }
+
+ before do
+ create_vulnerability_occurrence_identifier(occurrence_id: vuln_finding.id, identifier_id: vuln_identifier.id)
+ end
+ end
+
+ describe '#perform' do
+ subject(:perform_migration) do
+ described_class.new(
+ start_id: vulnerability_reads.first.vulnerability_id,
+ end_id: vulnerability_reads.last.vulnerability_id,
+ batch_table: :vulnerability_reads,
+ batch_column: :vulnerability_id,
+ sub_batch_size: vulnerability_reads.count,
+ pause_ms: 0,
+ connection: ActiveRecord::Base.connection
+ ).perform
+ end
+
+ context 'with owasp top 10 data' do
+ include_context 'with vulnerability data' do
+ let(:external_id) { 'A1:2017-Injection' }
+ let(:external_type) { 'owasp' }
+ let(:identifier_name) { 'Injection' }
+ end
+
+ it 'updates vulnerability_reads' do
+ expect { perform_migration }.to change { vulnerability_read_1.reload.owasp_top_10 }
+ .from(nil).to(1)
+ .and not_change { vulnerability_read_2.reload.owasp_top_10 }.from(nil)
+ end
+
+ it 'updates vulnerability_reads with correct mapping' do
+ vuln_identifier_2 = create_identifier(external_id: 'A1:2021', external_type: 'owasp', name: 'A1 2021')
+ vuln_identifier_3 = create_identifier
+ vuln_finding_2 = create_finding(vulnerability_id: vulnerability_2.id,
+ primary_identifier_id: vuln_identifier_2.id)
+ vuln_finding_3 = create_finding(vulnerability_id: vulnerability_3.id,
+ primary_identifier_id: vuln_identifier_3.id)
+
+ create_vulnerability_occurrence_identifier(occurrence_id: vuln_finding_2.id,
+ identifier_id: vuln_identifier_2.id)
+ create_vulnerability_occurrence_identifier(occurrence_id: vuln_finding_3.id,
+ identifier_id: vuln_identifier_3.id)
+
+ perform_migration
+
+ expect(vulnerability_read_1.reload.owasp_top_10).to eq(1)
+ expect(vulnerability_read_2.reload.owasp_top_10).to eq(11)
+ expect(vulnerability_read_3.reload.owasp_top_10).to be_nil
+ end
+ end
+
+ context 'with incorrect owasp top 10 data' do
+ include_context 'with vulnerability data'
+
+ shared_examples 'does not update vulnerability_reads' do
+ it do
+ perform_migration
+
+ expect(vulnerability_read_1.reload.owasp_top_10).to be_nil
+ expect(vulnerability_read_2.reload.owasp_top_10).to be_nil
+ expect(vulnerability_read_3.reload.owasp_top_10).to be_nil
+ end
+ end
+
+ context 'with incorrect long format external_id' do
+ let(:external_id) { 'A1:2015-Injection' }
+ let(:external_type) { 'owasp' }
+ let(:identifier_name) { 'Injection' }
+
+ it_behaves_like 'does not update vulnerability_reads'
+ end
+
+ context 'with incorrect short format external_id' do
+ let(:external_id) { 'A1' }
+ let(:external_type) { 'owasp' }
+ let(:identifier_name) { 'Injection' }
+
+ it_behaves_like 'does not update vulnerability_reads'
+ end
+
+ context 'with incorrect external_type' do
+ let(:external_id) { 'A1:2017' }
+ let(:external_type) { 'owasp2017' }
+ let(:identifier_name) { 'Injection' }
+
+ it_behaves_like 'does not update vulnerability_reads'
+ end
+ end
+
+ context 'with no vulnerability identifiers match' do
+ include_context 'with vulnerability data' do
+ let(:external_id) { 'CVE-2018-1234' }
+ let(:external_type) { 'CVE' }
+ let(:identifier_name) { 'CVE-2018-1234' }
+ end
+
+ it 'does not update vulnerability_reads' do
+ perform_migration
+
+ expect(vulnerability_reads.where.not(owasp_top_10: nil).count).to eq(0)
+ end
+ end
+ end
+
+ private
+
+ def create_vulnerability(overrides = {})
+ attrs = {
+ project_id: project.id,
+ author_id: user.id,
+ title: 'test',
+ severity: 1,
+ confidence: 1,
+ report_type: 1
+ }.merge(overrides)
+
+ vulnerabilities.create!(attrs)
+ end
+
+ def create_vulnerability_read(overrides = {})
+ attrs = {
+ project_id: project.id,
+ vulnerability_id: 1,
+ scanner_id: scanner.id,
+ severity: 1,
+ report_type: 1,
+ state: 1,
+ uuid: SecureRandom.uuid
+ }.merge(overrides)
+
+ vulnerability_reads.create!(attrs)
+ end
+
+ def create_finding(overrides = {})
+ attrs = {
+ project_id: project.id,
+ scanner_id: scanner.id,
+ severity: 5, # medium
+ confidence: 2, # unknown
+ report_type: 99, # generic
+ primary_identifier_id: create_identifier.id,
+ project_fingerprint: SecureRandom.hex(20),
+ location_fingerprint: SecureRandom.hex(20),
+ uuid: SecureRandom.uuid,
+ name: "CVE-2018-1234",
+ raw_metadata: "{}",
+ metadata_version: "test:1.0"
+ }.merge(overrides)
+
+ vulnerability_findings.create!(attrs)
+ end
+
+ def create_identifier(overrides = {})
+ attrs = {
+ project_id: project.id,
+ external_id: "CVE-2018-1234",
+ external_type: "CVE",
+ name: "CVE-2018-1234",
+ fingerprint: SecureRandom.hex(20)
+ }.merge(overrides)
+
+ vulnerability_identifiers.create!(attrs)
+ end
+
+ def create_vulnerability_occurrence_identifier(overrides = {})
+ time = Time.now.utc
+
+ attrs = {
+ created_at: time,
+ updated_at: time,
+ occurrence_id: nil,
+ identifier_id: nil
+ }.merge(overrides)
+
+ vulnerability_occurrence_identifiers.create!(attrs)
+ end
+
+ def checksum(value)
+ sha = Digest::SHA256.hexdigest(value)
+ Gitlab::Database::ShaAttribute.new.serialize(sha)
+ end
+end
diff --git a/spec/lib/gitlab/background_migration/backfill_partition_id_ci_pipeline_artifact_spec.rb b/spec/lib/gitlab/background_migration/backfill_partition_id_ci_pipeline_artifact_spec.rb
new file mode 100644
index 00000000000..c466fdaa36a
--- /dev/null
+++ b/spec/lib/gitlab/background_migration/backfill_partition_id_ci_pipeline_artifact_spec.rb
@@ -0,0 +1,94 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::BackgroundMigration::BackfillPartitionIdCiPipelineArtifact,
+ feature_category: :continuous_integration do
+ let(:ci_pipelines_table) { table(:ci_pipelines, database: :ci) }
+ let(:ci_pipeline_artifacts_table) { table(:ci_pipeline_artifacts, database: :ci) }
+ let!(:pipeline_100) { ci_pipelines_table.create!(id: 1, partition_id: 100) }
+ let!(:pipeline_101) { ci_pipelines_table.create!(id: 2, partition_id: 101) }
+ let!(:pipeline_102) { ci_pipelines_table.create!(id: 3, partition_id: 101) }
+ let!(:ci_pipeline_artifact_100) do
+ ci_pipeline_artifacts_table.create!(
+ id: 1,
+ pipeline_id: pipeline_100.id,
+ project_id: 1,
+ size: 1.megabyte,
+ file_type: 1,
+ file_format: 1,
+ file: fixture_file_upload(
+ Rails.root.join('spec/fixtures/pipeline_artifacts/code_coverage.json'), 'application/json'
+ ),
+ partition_id: pipeline_100.partition_id
+ )
+ end
+
+ let!(:ci_pipeline_artifact_101) do
+ ci_pipeline_artifacts_table.create!(
+ id: 2,
+ pipeline_id: pipeline_101.id,
+ project_id: 1,
+ size: 1.megabyte,
+ file_type: 1,
+ file_format: 1,
+ file: fixture_file_upload(
+ Rails.root.join('spec/fixtures/pipeline_artifacts/code_coverage.json'), 'application/json'
+ ),
+ partition_id: pipeline_101.partition_id
+ )
+ end
+
+ let!(:invalid_ci_pipeline_artifact) do
+ ci_pipeline_artifacts_table.create!(
+ id: 3,
+ pipeline_id: pipeline_102.id,
+ project_id: 1,
+ size: 1.megabyte,
+ file_type: 1,
+ file_format: 1,
+ file: fixture_file_upload(
+ Rails.root.join('spec/fixtures/pipeline_artifacts/code_coverage.json'), 'application/json'
+ ),
+ partition_id: pipeline_100.partition_id
+ )
+ end
+
+ let(:migration_attrs) do
+ {
+ start_id: ci_pipeline_artifacts_table.minimum(:pipeline_id),
+ end_id: ci_pipeline_artifacts_table.maximum(:pipeline_id),
+ batch_table: :ci_pipeline_artifacts,
+ batch_column: :id,
+ sub_batch_size: 1,
+ pause_ms: 0,
+ connection: Ci::ApplicationRecord.connection
+ }
+ end
+
+ let!(:migration) { described_class.new(**migration_attrs) }
+
+ describe '#perform' do
+ context 'when second partition does not exist' do
+ it 'does not execute the migration' do
+ expect { migration.perform }
+ .not_to change { invalid_ci_pipeline_artifact.reload.partition_id }
+ end
+ end
+
+ context 'when second partition exists' do
+ before do
+ allow(migration).to receive(:uses_multiple_partitions?).and_return(true)
+ end
+
+ it 'fixes invalid records in the wrong partition' do
+ expect { migration.perform }
+ .to not_change { ci_pipeline_artifact_100.reload.partition_id }
+ .and not_change { ci_pipeline_artifact_101.reload.partition_id }
+ .and change { invalid_ci_pipeline_artifact.reload.partition_id }
+ .from(100)
+ .to(101)
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/background_migration/backfill_partition_id_ci_pipeline_chat_data_spec.rb b/spec/lib/gitlab/background_migration/backfill_partition_id_ci_pipeline_chat_data_spec.rb
new file mode 100644
index 00000000000..ad1900ab6a6
--- /dev/null
+++ b/spec/lib/gitlab/background_migration/backfill_partition_id_ci_pipeline_chat_data_spec.rb
@@ -0,0 +1,67 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::BackgroundMigration::BackfillPartitionIdCiPipelineChatData,
+ feature_category: :continuous_integration do
+ let(:ci_pipelines_table) { table(:ci_pipelines, database: :ci) }
+ let(:ci_pipeline_chat_data_table) { table(:ci_pipeline_chat_data, database: :ci) }
+ let!(:pipeline1) { ci_pipelines_table.create!(id: 1, partition_id: 100) }
+ let!(:pipeline2) { ci_pipelines_table.create!(id: 2, partition_id: 101) }
+ let!(:invalid_ci_pipeline_chat_data) do
+ ci_pipeline_chat_data_table.create!(
+ id: 1,
+ pipeline_id: pipeline1.id,
+ chat_name_id: 1,
+ response_url: '',
+ partition_id: pipeline1.partition_id
+ )
+ end
+
+ let!(:valid_ci_pipeline_chat_data) do
+ ci_pipeline_chat_data_table.create!(
+ id: 2,
+ pipeline_id: pipeline2.id,
+ chat_name_id: 2,
+ response_url: '',
+ partition_id: pipeline2.partition_id
+ )
+ end
+
+ let(:migration_attrs) do
+ {
+ start_id: ci_pipeline_chat_data_table.minimum(:id),
+ end_id: ci_pipeline_chat_data_table.maximum(:id),
+ batch_table: :ci_pipeline_chat_data,
+ batch_column: :id,
+ sub_batch_size: 1,
+ pause_ms: 0,
+ connection: Ci::ApplicationRecord.connection
+ }
+ end
+
+ let!(:migration) { described_class.new(**migration_attrs) }
+
+ describe '#perform' do
+ context 'when second partition does not exist' do
+ it 'does not execute the migration' do
+ expect { migration.perform }
+ .not_to change { invalid_ci_pipeline_chat_data.reload.partition_id }
+ end
+ end
+
+ context 'when second partition exists' do
+ before do
+ allow(migration).to receive(:uses_multiple_partitions?).and_return(true)
+ pipeline1.update!(partition_id: 101)
+ end
+
+ it 'fixes invalid records in the wrong partition' do
+ expect { migration.perform }
+ .to change { invalid_ci_pipeline_chat_data.reload.partition_id }
+ .from(100)
+ .to(101)
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/background_migration/backfill_partition_id_ci_pipeline_config_spec.rb b/spec/lib/gitlab/background_migration/backfill_partition_id_ci_pipeline_config_spec.rb
new file mode 100644
index 00000000000..fad3e277888
--- /dev/null
+++ b/spec/lib/gitlab/background_migration/backfill_partition_id_ci_pipeline_config_spec.rb
@@ -0,0 +1,73 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::BackgroundMigration::BackfillPartitionIdCiPipelineConfig,
+ feature_category: :continuous_integration do
+ let(:ci_pipelines_table) { table(:ci_pipelines, database: :ci) }
+ let(:ci_pipeline_config_table) { table(:ci_pipelines_config, database: :ci) }
+ let!(:pipeline_1) { ci_pipelines_table.create!(id: 1, partition_id: 100) }
+ let!(:pipeline_2) { ci_pipelines_table.create!(id: 2, partition_id: 101) }
+ let!(:pipeline_3) { ci_pipelines_table.create!(id: 3, partition_id: 101) }
+ let!(:ci_pipeline_config_100) do
+ ci_pipeline_config_table.create!(
+ pipeline_id: pipeline_1.id,
+ content: "content",
+ partition_id: pipeline_1.partition_id
+ )
+ end
+
+ let!(:ci_pipeline_config_101) do
+ ci_pipeline_config_table.create!(
+ pipeline_id: pipeline_2.id,
+ content: "content",
+ partition_id: pipeline_2.partition_id
+ )
+ end
+
+ let!(:invalid_ci_pipeline_config) do
+ ci_pipeline_config_table.create!(
+ pipeline_id: pipeline_3.id,
+ content: "content",
+ partition_id: pipeline_1.partition_id
+ )
+ end
+
+ let(:migration_attrs) do
+ {
+ start_id: ci_pipeline_config_table.minimum(:pipeline_id),
+ end_id: ci_pipeline_config_table.maximum(:pipeline_id),
+ batch_table: :ci_pipelines_config,
+ batch_column: :pipeline_id,
+ sub_batch_size: 1,
+ pause_ms: 0,
+ connection: Ci::ApplicationRecord.connection
+ }
+ end
+
+ let!(:migration) { described_class.new(**migration_attrs) }
+
+ describe '#perform' do
+ context 'when second partition does not exist' do
+ before do
+ pipeline_3.update!(partition_id: 100)
+ end
+
+ it 'does not execute the migration' do
+ expect { migration.perform }
+ .not_to change { invalid_ci_pipeline_config.reload.partition_id }
+ end
+ end
+
+ context 'when second partition exists' do
+ it 'fixes invalid records in the wrong partition' do
+ expect { migration.perform }
+ .to not_change { ci_pipeline_config_100.reload.partition_id }
+ .and not_change { ci_pipeline_config_101.reload.partition_id }
+ .and change { invalid_ci_pipeline_config.reload.partition_id }
+ .from(100)
+ .to(101)
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/background_migration/backfill_partition_id_ci_pipeline_metadata_spec.rb b/spec/lib/gitlab/background_migration/backfill_partition_id_ci_pipeline_metadata_spec.rb
new file mode 100644
index 00000000000..d09d5016dcc
--- /dev/null
+++ b/spec/lib/gitlab/background_migration/backfill_partition_id_ci_pipeline_metadata_spec.rb
@@ -0,0 +1,73 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::BackgroundMigration::BackfillPartitionIdCiPipelineMetadata,
+ feature_category: :continuous_integration do
+ let(:ci_pipelines_table) { table(:ci_pipelines, database: :ci) }
+ let(:ci_pipeline_metadata_table) { table(:ci_pipeline_metadata, database: :ci) }
+ let!(:pipeline_100) { ci_pipelines_table.create!(id: 1, partition_id: 100) }
+ let!(:pipeline_101) { ci_pipelines_table.create!(id: 2, partition_id: 101) }
+ let!(:pipeline_102) { ci_pipelines_table.create!(id: 3, partition_id: 101) }
+ let!(:ci_pipeline_metadata_100) do
+ ci_pipeline_metadata_table.create!(
+ pipeline_id: pipeline_100.id,
+ project_id: 1,
+ partition_id: pipeline_100.partition_id
+ )
+ end
+
+ let!(:ci_pipeline_metadata_101) do
+ ci_pipeline_metadata_table.create!(
+ pipeline_id: pipeline_101.id,
+ project_id: 1,
+ partition_id: pipeline_101.partition_id
+ )
+ end
+
+ let!(:invalid_ci_pipeline_metadata) do
+ ci_pipeline_metadata_table.create!(
+ pipeline_id: pipeline_102.id,
+ project_id: 1,
+ partition_id: pipeline_100.partition_id
+ )
+ end
+
+ let(:migration_attrs) do
+ {
+ start_id: ci_pipeline_metadata_table.minimum(:pipeline_id),
+ end_id: ci_pipeline_metadata_table.maximum(:pipeline_id),
+ batch_table: :ci_pipeline_metadata,
+ batch_column: :pipeline_id,
+ sub_batch_size: 1,
+ pause_ms: 0,
+ connection: Ci::ApplicationRecord.connection
+ }
+ end
+
+ let!(:migration) { described_class.new(**migration_attrs) }
+
+ describe '#perform' do
+ context 'when second partition does not exist' do
+ it 'does not execute the migration' do
+ expect { migration.perform }
+ .not_to change { invalid_ci_pipeline_metadata.reload.partition_id }
+ end
+ end
+
+ context 'when second partition exists' do
+ before do
+ allow(migration).to receive(:uses_multiple_partitions?).and_return(true)
+ end
+
+ it 'fixes invalid records in the wrong partition' do
+ expect { migration.perform }
+ .to not_change { ci_pipeline_metadata_100.reload.partition_id }
+ .and not_change { ci_pipeline_metadata_101.reload.partition_id }
+ .and change { invalid_ci_pipeline_metadata.reload.partition_id }
+ .from(100)
+ .to(101)
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/background_migration/backfill_vs_code_settings_version_spec.rb b/spec/lib/gitlab/background_migration/backfill_vs_code_settings_version_spec.rb
new file mode 100644
index 00000000000..725cd7f4bca
--- /dev/null
+++ b/spec/lib/gitlab/background_migration/backfill_vs_code_settings_version_spec.rb
@@ -0,0 +1,84 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::BackgroundMigration::BackfillVsCodeSettingsVersion, schema: 20231212135235, feature_category: :web_ide do
+ let(:vs_code_settings) { table(:vs_code_settings) }
+
+ let(:users) { table(:users) }
+
+ let(:user) do
+ users.create!(
+ email: "test1@example.com",
+ username: "test1",
+ notification_email: "test@example.com",
+ name: "test",
+ state: "active",
+ projects_limit: 10)
+ end
+
+ let(:persistent_settings) { VsCode::Settings::SETTINGS_TYPES.filter { |type| type != 'machines' } }
+
+ subject(:migration) do
+ described_class.new(
+ start_id: vs_code_settings.first.id,
+ end_id: vs_code_settings.last.id,
+ batch_table: :vs_code_settings,
+ batch_column: :id,
+ sub_batch_size: 100,
+ pause_ms: 0,
+ connection: ActiveRecord::Base.connection
+ )
+ end
+
+ describe "#perform" do
+ context 'when it finds vs_code_setting rows with version that is nil or zero' do
+ let(:settings) do
+ persistent_settings.each_with_index.map do |type, index|
+ vs_code_settings.create!(user_id: user.id,
+ setting_type: type,
+ content: '{}',
+ uuid: SecureRandom.uuid,
+ version: index.odd? ? nil : 0)
+ end
+ end
+
+ it 'sets version field with default value for setting type' do
+ settings.each do |setting|
+ expect(setting.version).to eq(nil).or eq(0)
+ end
+
+ migration.perform
+
+ settings.each do |setting|
+ expect(setting.reload.version)
+ .to eq(described_class::VsCodeSetting::DEFAULT_SETTING_VERSIONS[setting.setting_type])
+ end
+ end
+ end
+
+ context 'when it finds vs_code_setting rows with version that is not nil or zero' do
+ let(:settings) do
+ persistent_settings.map do |type|
+ vs_code_settings.create!(user_id: user.id,
+ setting_type: type,
+ content: '{}',
+ uuid: SecureRandom.uuid,
+ version: 1)
+ end
+ end
+
+ it 'does not set version field' do
+ settings.each do |setting|
+ expect(setting.version).to eq(1)
+ end
+
+ migration.perform
+
+ settings.each do |setting|
+ expect(setting.reload.version).to eq(1)
+ end
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/background_migration/drop_vulnerabilities_without_finding_id_spec.rb b/spec/lib/gitlab/background_migration/drop_vulnerabilities_without_finding_id_spec.rb
new file mode 100644
index 00000000000..05817001395
--- /dev/null
+++ b/spec/lib/gitlab/background_migration/drop_vulnerabilities_without_finding_id_spec.rb
@@ -0,0 +1,124 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::BackgroundMigration::DropVulnerabilitiesWithoutFindingId, feature_category: :vulnerability_management do # rubocop:disable Layout/LineLength -- autogenerated
+ let(:namespaces) { table(:namespaces) }
+ let(:projects) { table(:projects) }
+ let(:users) { table(:users) }
+ let(:members) { table(:members) }
+ let(:vulnerability_identifiers) { table(:vulnerability_identifiers) }
+ let(:vulnerability_scanners) { table(:vulnerability_scanners) }
+ let(:vulnerability_findings) { table(:vulnerability_occurrences) }
+ let(:vulnerabilities) { table(:vulnerabilities) }
+ let!(:user) { create_user(email: "test1@example.com", username: "test1") }
+ let!(:namespace) { namespaces.create!(name: "test-1", path: "test-1", owner_id: user.id) }
+ let!(:project) do
+ projects.create!(
+ id: 9999, namespace_id: namespace.id,
+ project_namespace_id: namespace.id,
+ creator_id: user.id
+ )
+ end
+
+ let!(:membership) do
+ members.create!(access_level: 50, source_id: project.id, source_type: "Project", user_id: user.id, state: 0,
+ notification_level: 3, type: "ProjectMember", member_namespace_id: namespace.id)
+ end
+
+ let(:migration_attrs) do
+ {
+ start_id: vulnerabilities.first.id,
+ end_id: vulnerabilities.last.id,
+ batch_table: :vulnerabilities,
+ batch_column: :id,
+ sub_batch_size: 100,
+ pause_ms: 0,
+ connection: ApplicationRecord.connection
+ }
+ end
+
+ describe "#perform" do
+ subject(:background_migration) { described_class.new(**migration_attrs).perform }
+
+ let!(:vulnerability_without_finding_id) { create_vulnerability }
+
+ let!(:vulnerabilities_finding) { create_finding(project) }
+ let!(:vulnerability_with_finding_id) { create_vulnerability(finding_id: vulnerabilities_finding.id) }
+
+ it 'removes all Vulnerabilities without a finding_id' do
+ expect { background_migration }.to change { vulnerabilities.count }.from(2).to(1)
+ end
+ end
+
+ private
+
+ def create_scanner(project, overrides = {})
+ attrs = {
+ project_id: project.id,
+ external_id: "test_vulnerability_scanner",
+ name: "Test Vulnerabilities::Scanner"
+ }.merge(overrides)
+
+ vulnerability_scanners.create!(attrs)
+ end
+
+ def create_identifier(project, overrides = {})
+ attrs = {
+ project_id: project.id,
+ external_id: "CVE-2018-1234",
+ external_type: "CVE",
+ name: "CVE-2018-1234",
+ fingerprint: SecureRandom.hex(20)
+ }.merge(overrides)
+
+ vulnerability_identifiers.create!(attrs)
+ end
+
+ def create_finding(project, overrides = {})
+ attrs = {
+ project_id: project.id,
+ scanner_id: create_scanner(project).id,
+ severity: 5, # medium
+ confidence: 2, # unknown
+ report_type: 99, # generic
+ primary_identifier_id: create_identifier(project).id,
+ project_fingerprint: SecureRandom.hex(20),
+ location_fingerprint: SecureRandom.hex(20),
+ uuid: SecureRandom.uuid,
+ name: "CVE-2018-1234",
+ raw_metadata: "{}",
+ metadata_version: "test:1.0"
+ }.merge(overrides)
+
+ vulnerability_findings.create!(attrs)
+ end
+
+ def create_vulnerability(overrides = {})
+ attrs = {
+ project_id: project.id,
+ author_id: user.id,
+ title: 'test',
+ severity: 1,
+ confidence: 1,
+ report_type: 1,
+ state: 1,
+ detected_at: Time.zone.now
+ }.merge(overrides)
+
+ vulnerabilities.create!(attrs)
+ end
+
+ def create_user(overrides = {})
+ attrs = {
+ email: "test@example.com",
+ notification_email: "test@example.com",
+ name: "test",
+ username: "test",
+ state: "active",
+ projects_limit: 10
+ }.merge(overrides)
+
+ users.create!(attrs)
+ end
+end
diff --git a/spec/lib/gitlab/bitbucket_server_import/importers/pull_request_importer_spec.rb b/spec/lib/gitlab/bitbucket_server_import/importers/pull_request_importer_spec.rb
index 1ae68f9efb8..eeb2f9c8000 100644
--- a/spec/lib/gitlab/bitbucket_server_import/importers/pull_request_importer_spec.rb
+++ b/spec/lib/gitlab/bitbucket_server_import/importers/pull_request_importer_spec.rb
@@ -18,6 +18,8 @@ RSpec.describe Gitlab::BitbucketServerImport::Importers::PullRequestImporter, fe
it 'imports the merge request correctly' do
expect_next(Gitlab::Import::MergeRequestCreator, project).to receive(:execute).and_call_original
expect_next(Gitlab::BitbucketServerImport::UserFinder, project).to receive(:author_id).and_call_original
+ expect_next(Gitlab::BitbucketServerImport::MentionsConverter, project.id).to receive(:convert).and_call_original
+
expect { importer.execute }.to change { MergeRequest.count }.by(1)
merge_request = project.merge_requests.find_by_iid(pull_request.iid)
@@ -34,6 +36,18 @@ RSpec.describe Gitlab::BitbucketServerImport::Importers::PullRequestImporter, fe
)
end
+ context 'when the `bitbucket_server_convert_mentions_to_users` flag is disabled' do
+ before do
+ stub_feature_flags(bitbucket_server_convert_mentions_to_users: false)
+ end
+
+ it 'does not convert mentions' do
+ expect_next(Gitlab::BitbucketServerImport::MentionsConverter, project.id).not_to receive(:convert)
+
+ importer.execute
+ end
+ end
+
context 'when the `bitbucket_server_user_mapping_by_username` flag is disabled' do
before do
stub_feature_flags(bitbucket_server_user_mapping_by_username: false)
diff --git a/spec/lib/gitlab/bitbucket_server_import/importers/pull_request_notes_importer_spec.rb b/spec/lib/gitlab/bitbucket_server_import/importers/pull_request_notes_importer_spec.rb
index 914ebefdb8f..7b662c1a2c7 100644
--- a/spec/lib/gitlab/bitbucket_server_import/importers/pull_request_notes_importer_spec.rb
+++ b/spec/lib/gitlab/bitbucket_server_import/importers/pull_request_notes_importer_spec.rb
@@ -17,6 +17,7 @@ RSpec.describe Gitlab::BitbucketServerImport::Importers::PullRequestNotesImporte
let_it_be(:pull_request_data) { Gitlab::Json.parse(fixture_file('importers/bitbucket_server/pull_request.json')) }
let_it_be(:pull_request) { BitbucketServer::Representation::PullRequest.new(pull_request_data) }
let_it_be(:note_author) { create(:user, username: 'note_author', email: 'note_author@example.org') }
+ let(:mentions_converter) { Gitlab::BitbucketServerImport::MentionsConverter.new(project) }
let!(:pull_request_author) do
create(:user, username: 'pull_request_author', email: 'pull_request_author@example.org')
@@ -79,6 +80,10 @@ RSpec.describe Gitlab::BitbucketServerImport::Importers::PullRequestNotesImporte
.to receive(:info).with(include(import_stage: stage, message: message))
end
+ before do
+ allow(Gitlab::BitbucketServerImport::MentionsConverter).to receive(:new).and_return(mentions_converter)
+ end
+
subject(:importer) { described_class.new(project.reload, pull_request.to_hash) }
describe '#execute' do
@@ -113,6 +118,8 @@ RSpec.describe Gitlab::BitbucketServerImport::Importers::PullRequestNotesImporte
end
it 'imports the stand alone comments' do
+ expect(mentions_converter).to receive(:convert).and_call_original
+
expect { subject.execute }.to change { Note.count }.by(1)
expect(merge_request.notes.count).to eq(1)
@@ -124,6 +131,66 @@ RSpec.describe Gitlab::BitbucketServerImport::Importers::PullRequestNotesImporte
)
end
+ context 'when the author is not found' do
+ before do
+ allow_next_instance_of(Gitlab::BitbucketServerImport::UserFinder) do |user_finder|
+ allow(user_finder).to receive(:uid).and_return(nil)
+ end
+ end
+
+ it 'adds a note with the author username and email' do
+ subject.execute
+
+ expect(Note.first.note).to include("*By #{note_author.username} (#{note_author.email})")
+ end
+ end
+
+ context 'when the note has a parent note' do
+ let(:pr_note) do
+ instance_double(
+ BitbucketServer::Representation::Comment,
+ note: 'Note',
+ author_email: note_author.email,
+ author_username: note_author.username,
+ comments: [],
+ created_at: now,
+ updated_at: now,
+ parent_comment: pr_parent_note
+ )
+ end
+
+ let(:pr_parent_note) do
+ instance_double(
+ BitbucketServer::Representation::Comment,
+ note: 'Parent note',
+ author_email: note_author.email,
+ author_username: note_author.username,
+ comments: [],
+ created_at: now,
+ updated_at: now,
+ parent_comment: nil
+ )
+ end
+
+ it 'adds the parent note before the actual note' do
+ subject.execute
+
+ expect(Note.first.note).to include("> #{pr_parent_note.note}\n\n")
+ end
+ end
+
+ context 'when the `bitbucket_server_convert_mentions_to_users` flag is disabled' do
+ before do
+ stub_feature_flags(bitbucket_server_convert_mentions_to_users: false)
+ end
+
+ it 'does not convert mentions' do
+ expect(mentions_converter).not_to receive(:convert)
+
+ subject.execute
+ end
+ end
+
it 'logs its progress' do
expect_log(stage: 'import_standalone_pr_comments', message: 'starting')
expect_log(stage: 'import_standalone_pr_comments', message: 'finished')
@@ -181,6 +248,8 @@ RSpec.describe Gitlab::BitbucketServerImport::Importers::PullRequestNotesImporte
end
it 'imports the threaded discussion' do
+ expect(mentions_converter).to receive(:convert).and_call_original.twice
+
expect { subject.execute }.to change { Note.count }.by(2)
expect(merge_request.discussions.count).to eq(1)
@@ -204,6 +273,18 @@ RSpec.describe Gitlab::BitbucketServerImport::Importers::PullRequestNotesImporte
expect(reply_note.position.new_line).to eq(pr_inline_note.new_pos)
end
+ context 'when the `bitbucket_server_convert_mentions_to_users` flag is disabled' do
+ before do
+ stub_feature_flags(bitbucket_server_convert_mentions_to_users: false)
+ end
+
+ it 'does not convert mentions' do
+ expect(mentions_converter).not_to receive(:convert)
+
+ subject.execute
+ end
+ end
+
it 'logs its progress' do
expect_log(stage: 'import_inline_comments', message: 'starting')
expect_log(stage: 'import_inline_comments', message: 'finished')
diff --git a/spec/lib/gitlab/bitbucket_server_import/importers/users_importer_spec.rb b/spec/lib/gitlab/bitbucket_server_import/importers/users_importer_spec.rb
index 33d6ab94513..79010390628 100644
--- a/spec/lib/gitlab/bitbucket_server_import/importers/users_importer_spec.rb
+++ b/spec/lib/gitlab/bitbucket_server_import/importers/users_importer_spec.rb
@@ -52,6 +52,13 @@ RSpec.describe Gitlab::BitbucketServerImport::Importers::UsersImporter, feature_
expect(logger).to receive(:info).with(hash_including(message: 'importing page 3 using batch size 2'))
expect(logger).to receive(:info).with(hash_including(message: 'finished'))
+ expect_next_instance_of(Gitlab::Import::PageCounter) do |page_counter|
+ expect(page_counter).to receive(:current).and_call_original.once
+ expect(page_counter).to receive(:set).with(2).and_call_original.once
+ expect(page_counter).to receive(:set).with(3).and_call_original.once
+ expect(page_counter).to receive(:expire!).and_call_original.once
+ end
+
expect(Gitlab::Cache::Import::Caching).to receive(:write_multiple).and_call_original.twice
importer.execute
diff --git a/spec/lib/gitlab/bitbucket_server_import/mentions_converter_spec.rb b/spec/lib/gitlab/bitbucket_server_import/mentions_converter_spec.rb
new file mode 100644
index 00000000000..46800c924c9
--- /dev/null
+++ b/spec/lib/gitlab/bitbucket_server_import/mentions_converter_spec.rb
@@ -0,0 +1,118 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::BitbucketServerImport::MentionsConverter, :clean_gitlab_redis_cache, feature_category: :importers do
+ let(:project_id) { 12 }
+ let(:text) { 'text without @ mentions' }
+ let(:source_user_cache_prefix) { "bitbucket_server/project/#{project_id}/source/username" }
+
+ subject(:converted_text) { described_class.new(project_id).convert(text) }
+
+ describe '#convert' do
+ context 'when the text has no mentions' do
+ it 'does not change the text' do
+ expect(converted_text).to eq(text)
+ end
+ end
+
+ context 'when the text has a mention' do
+ let(:text) { 'mentioning @john' }
+
+ context 'when the mention has matching cached email' do
+ before do
+ ::Gitlab::Cache::Import::Caching.write("#{source_user_cache_prefix}/john", 'john@example.com')
+ end
+
+ context 'when a user with the email does not exist on gitlab' do
+ it 'puts the mention in backticks' do
+ expect(converted_text).to eq('mentioning `@john`')
+ end
+ end
+
+ context 'when a user with the same email exists on gitlab' do
+ let_it_be(:user) { create(:user, username: 'johndoe', email: 'john@example.com') }
+
+ it "replaces the mention with the user's username" do
+ expect(converted_text).to eq('mentioning @johndoe')
+ end
+ end
+
+ context 'when a user with the same username but not email exists on gitlab' do
+ let_it_be(:user) { create(:user, username: 'john') }
+
+ it 'puts the mention in backticks' do
+ expect(converted_text).to eq('mentioning `@john`')
+ end
+ end
+ end
+
+ context 'when there is cached email but not for the mentioned username' do
+ before do
+ ::Gitlab::Cache::Import::Caching.write("#{source_user_cache_prefix}/jane", 'jane@example.com')
+ end
+
+ it 'puts the mention in backticks' do
+ expect(converted_text).to eq('mentioning `@john`')
+ end
+
+ context 'when a user with the same email exists on gitlab' do
+ let_it_be(:user) { create(:user, username: 'jane', email: 'jane@example.com') }
+
+ it 'puts the mention in backticks' do
+ expect(converted_text).to eq('mentioning `@john`')
+ end
+ end
+ end
+
+ context 'when the mention has digits, underscores, uppercase and hyphens' do
+ let(:text) { '@john_DOE-123' }
+ let_it_be(:user) { create(:user, username: 'johndoe', email: 'john@example.com') }
+
+ before do
+ ::Gitlab::Cache::Import::Caching.write("#{source_user_cache_prefix}/john_DOE-123", 'john@example.com')
+ end
+
+ it "replaces the mention with the user's username" do
+ expect(converted_text).to eq('@johndoe')
+ end
+ end
+
+ context 'when the mention has emails' do
+ let(:text) { "@john's email is john@gmail.com and @jane's email is info@jane." }
+
+ it 'does not alter the emails' do
+ expect(converted_text).to eq("`@john`'s email is john@gmail.com and `@jane`'s email is info@jane.")
+ end
+ end
+
+ context 'when no emails are cached' do
+ it 'puts the mention in backticks' do
+ expect(converted_text).to eq('mentioning `@john`')
+ end
+ end
+ end
+
+ context 'when the text has multiple mentions' do
+ let(:text) { "@john, @jane-doe and @johndoe123 with \n@john again on a newline" }
+
+ context 'if none of the mentions have matching cached emails and users' do
+ it 'puts every mention in backticks' do
+ expect(converted_text).to eq("`@john`, `@jane-doe` and `@johndoe123` with \n`@john` again on a newline")
+ end
+ end
+
+ context 'if one of the mentions have matching user' do
+ let_it_be(:user) { create(:user, username: 'johndoe', email: 'john@example.com') }
+
+ before do
+ ::Gitlab::Cache::Import::Caching.write("#{source_user_cache_prefix}/john", 'john@example.com')
+ end
+
+ it 'replaces all mentions with the username and puts rest of mentions in backticks' do
+ expect(converted_text).to eq("@johndoe, `@jane-doe` and `@johndoe123` with \n@johndoe again on a newline")
+ end
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/bitbucket_server_import/user_from_mention_spec.rb b/spec/lib/gitlab/bitbucket_server_import/user_from_mention_spec.rb
new file mode 100644
index 00000000000..73f9cde8322
--- /dev/null
+++ b/spec/lib/gitlab/bitbucket_server_import/user_from_mention_spec.rb
@@ -0,0 +1,67 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::BitbucketServerImport::UserFromMention, :clean_gitlab_redis_cache, feature_category: :importers do
+ let(:project_id) { 11 }
+ let(:username) { '@johndoe' }
+ let(:email) { 'john@gmail.com' }
+ let(:hash) { { key: 'value' } }
+ let(:cache_key) { "bitbucket_server/project/#{project_id}/source/username/#{username}" }
+
+ let(:example) do
+ Class.new do
+ include Gitlab::BitbucketServerImport::UserFromMention
+
+ def initialize(project_id)
+ @project_id = project_id
+ end
+
+ attr_reader :project_id
+
+ def foo(mention)
+ user_from_cache(mention)
+ end
+
+ def bar(hash)
+ cache_multiple(hash)
+ end
+ end
+ end
+
+ subject(:example_class) { example.new(project_id) }
+
+ describe '#user_from_cache' do
+ it 'returns nil if the cache is empty' do
+ expect(example_class.foo(username)).to be_nil
+ end
+
+ context 'when the username and email is cached' do
+ before do
+ ::Gitlab::Cache::Import::Caching.write(cache_key, email)
+ end
+
+ context 'if a user with the email does not exist' do
+ it 'returns nil' do
+ expect(example_class.foo(username)).to be_nil
+ end
+ end
+
+ context 'if a user with the email exists' do
+ let!(:user) { create(:user, email: email) }
+
+ it 'returns the user' do
+ expect(example_class.foo(username)).to eq(user)
+ end
+ end
+ end
+ end
+
+ describe '#cache_multiple' do
+ it 'calls write_multiple with the hash' do
+ expect(Gitlab::Cache::Import::Caching).to receive(:write_multiple).with(hash, timeout: 72.hours)
+
+ example_class.bar(hash)
+ end
+ end
+end
diff --git a/spec/lib/gitlab/cache/import/caching_spec.rb b/spec/lib/gitlab/cache/import/caching_spec.rb
index 8f1c552e0b7..6cde51b668a 100644
--- a/spec/lib/gitlab/cache/import/caching_spec.rb
+++ b/spec/lib/gitlab/cache/import/caching_spec.rb
@@ -224,4 +224,56 @@ RSpec.describe Gitlab::Cache::Import::Caching, :clean_gitlab_redis_cache, :clean
subject { described_class.write_if_greater('foo', value) }
end
end
+
+ describe '.list_add' do
+ it 'adds a value to a list' do
+ described_class.list_add('foo', 10)
+ described_class.list_add('foo', 20)
+
+ key = described_class.cache_key_for('foo')
+ values = Gitlab::Redis::Cache.with { |r| r.lrange(key, 0, -1) }
+
+ expect(values).to eq(%w[10 20])
+ end
+
+ context 'when a limit is provided' do
+ it 'limits the size of the list to the number of items defined by the limit' do
+ described_class.list_add('foo', 10, limit: 3)
+ described_class.list_add('foo', 20, limit: 3)
+ described_class.list_add('foo', 30, limit: 3)
+ described_class.list_add('foo', 40, limit: 3)
+
+ key = described_class.cache_key_for('foo')
+ values = Gitlab::Redis::Cache.with { |r| r.lrange(key, 0, -1) }
+
+ expect(values).to eq(%w[20 30 40])
+ end
+ end
+
+ it_behaves_like 'validated redis value' do
+ subject { described_class.list_add('foo', value) }
+ end
+ end
+
+ describe '.values_from_list' do
+ it 'returns empty hash when the list is empty' do
+ expect(described_class.values_from_list('foo')).to eq([])
+ end
+
+ it 'returns the items stored in the list in order' do
+ described_class.list_add('foo', 10)
+ described_class.list_add('foo', 20)
+ described_class.list_add('foo', 10)
+
+ expect(described_class.values_from_list('foo')).to eq(%w[10 20 10])
+ end
+ end
+
+ describe '.del' do
+ it 'deletes the key' do
+ described_class.write('foo', 'value')
+
+ expect { described_class.del('foo') }.to change { described_class.read('foo') }.from('value').to(nil)
+ end
+ end
end
diff --git a/spec/lib/gitlab/checks/changes_access_spec.rb b/spec/lib/gitlab/checks/changes_access_spec.rb
index 854c04dd581..d20399cf7cf 100644
--- a/spec/lib/gitlab/checks/changes_access_spec.rb
+++ b/spec/lib/gitlab/checks/changes_access_spec.rb
@@ -78,13 +78,13 @@ RSpec.describe Gitlab::Checks::ChangesAccess, feature_category: :source_code_man
end
context 'with oldrev' do
- let(:changes) { [{ oldrev: oldrev, newrev: newrev }, { newrev: '' }, { newrev: Gitlab::Git::BLANK_SHA }] }
+ let(:changes) { [{ oldrev: oldrev, newrev: newrev }, { newrev: '' }, { newrev: Gitlab::Git::SHA1_BLANK_SHA }] }
it_behaves_like 'returns only commits with non empty revisions'
end
context 'without oldrev' do
- let(:changes) { [{ newrev: newrev }, { newrev: '' }, { newrev: Gitlab::Git::BLANK_SHA }] }
+ let(:changes) { [{ newrev: newrev }, { newrev: '' }, { newrev: Gitlab::Git::SHA1_BLANK_SHA }] }
it_behaves_like 'returns only commits with non empty revisions'
end
@@ -94,7 +94,7 @@ RSpec.describe Gitlab::Checks::ChangesAccess, feature_category: :source_code_man
describe '#commits_for' do
let(:new_commits) { [] }
let(:expected_commits) { [] }
- let(:oldrev) { Gitlab::Git::BLANK_SHA }
+ let(:oldrev) { Gitlab::Git::SHA1_BLANK_SHA }
shared_examples 'a listing of new commits' do
it 'returns expected commits' do
diff --git a/spec/lib/gitlab/checks/diff_check_spec.rb b/spec/lib/gitlab/checks/diff_check_spec.rb
index 20c6ad8a6e8..8056611b5bd 100644
--- a/spec/lib/gitlab/checks/diff_check_spec.rb
+++ b/spec/lib/gitlab/checks/diff_check_spec.rb
@@ -63,7 +63,7 @@ RSpec.describe Gitlab::Checks::DiffCheck, feature_category: :source_code_managem
end
context 'when deletion is true' do
- let(:newrev) { Gitlab::Git::BLANK_SHA }
+ let(:newrev) { Gitlab::Git::SHA1_BLANK_SHA }
it 'does not call find_changed_paths' do
expect(project.repository).not_to receive(:find_changed_paths)
diff --git a/spec/lib/gitlab/checks/lfs_check_spec.rb b/spec/lib/gitlab/checks/lfs_check_spec.rb
index 9f001dd1941..f00915bc1ec 100644
--- a/spec/lib/gitlab/checks/lfs_check_spec.rb
+++ b/spec/lib/gitlab/checks/lfs_check_spec.rb
@@ -61,7 +61,7 @@ RSpec.describe Gitlab::Checks::LfsCheck, feature_category: :source_code_manageme
context 'with blank newrev' do
it_behaves_like 'a skipped integrity check' do
- let(:changes) { [{ oldrev: oldrev, newrev: Gitlab::Git::BLANK_SHA, ref: ref }] }
+ let(:changes) { [{ oldrev: oldrev, newrev: Gitlab::Git::SHA1_BLANK_SHA, ref: ref }] }
end
end
end
diff --git a/spec/lib/gitlab/checks/lfs_integrity_spec.rb b/spec/lib/gitlab/checks/lfs_integrity_spec.rb
index 0aecf26f42f..4f844f10f34 100644
--- a/spec/lib/gitlab/checks/lfs_integrity_spec.rb
+++ b/spec/lib/gitlab/checks/lfs_integrity_spec.rb
@@ -66,7 +66,7 @@ RSpec.describe Gitlab::Checks::LfsIntegrity, feature_category: :source_code_mana
end
context 'deletion' do
- let(:newrevs) { [Gitlab::Git::BLANK_SHA] }
+ let(:newrevs) { [Gitlab::Git::SHA1_BLANK_SHA] }
it 'skips integrity check' do
expect_any_instance_of(Gitlab::Git::LfsChanges).not_to receive(:new_pointers)
diff --git a/spec/lib/gitlab/ci/build/image_spec.rb b/spec/lib/gitlab/ci/build/image_spec.rb
index f8c0d69be2e..3854437483d 100644
--- a/spec/lib/gitlab/ci/build/image_spec.rb
+++ b/spec/lib/gitlab/ci/build/image_spec.rb
@@ -29,7 +29,7 @@ RSpec.describe Gitlab::Ci::Build::Image do
context 'when image is defined as hash' do
let(:entrypoint) { '/bin/sh' }
let(:pull_policy) { %w[always if-not-present] }
- let(:executor_opts) { { docker: { platform: 'arm64' } } }
+ let(:executor_opts) { { docker: { platform: 'arm64', user: 'dave' } } }
let(:job) do
create(:ci_build, options: { image: { name: image_name,
@@ -101,7 +101,7 @@ RSpec.describe Gitlab::Ci::Build::Image do
let(:service_entrypoint) { '/bin/sh' }
let(:service_alias) { 'db' }
let(:service_command) { 'sleep 30' }
- let(:executor_opts) { { docker: { platform: 'amd64' } } }
+ let(:executor_opts) { { docker: { platform: 'amd64', user: 'dave' } } }
let(:pull_policy) { %w[always if-not-present] }
let(:job) do
create(:ci_build, options: { services: [{ name: service_image_name, entrypoint: service_entrypoint,
diff --git a/spec/lib/gitlab/ci/build/policy/changes_spec.rb b/spec/lib/gitlab/ci/build/policy/changes_spec.rb
index 00e44650d44..4ee8903dcd3 100644
--- a/spec/lib/gitlab/ci/build/policy/changes_spec.rb
+++ b/spec/lib/gitlab/ci/build/policy/changes_spec.rb
@@ -134,7 +134,7 @@ RSpec.describe Gitlab::Ci::Build::Policy::Changes do
ref: 'feature',
source: source,
sha: '0b4bc9a4',
- before_sha: Gitlab::Git::BLANK_SHA,
+ before_sha: Gitlab::Git::SHA1_BLANK_SHA,
merge_request: merge_request
)
end
diff --git a/spec/lib/gitlab/ci/build/rules_spec.rb b/spec/lib/gitlab/ci/build/rules_spec.rb
index 99577539798..61bd9f41182 100644
--- a/spec/lib/gitlab/ci/build/rules_spec.rb
+++ b/spec/lib/gitlab/ci/build/rules_spec.rb
@@ -254,6 +254,18 @@ RSpec.describe Gitlab::Ci::Build::Rules, feature_category: :pipeline_composition
end
end
+ context 'with auto_cancel' do
+ context 'with matching rule' do
+ let(:rule_list) { [{ if: '$VAR == null', auto_cancel: { on_new_commit: 'interruptible' } }] }
+
+ it do
+ is_expected.to eq(
+ described_class::Result.new(when: 'on_success', auto_cancel: { on_new_commit: 'interruptible' })
+ )
+ end
+ end
+ end
+
context 'with a regexp variable matching rule' do
let(:rule_list) { [{ if: '"abcde" =~ $pattern' }] }
diff --git a/spec/lib/gitlab/ci/config/entry/auto_cancel_spec.rb b/spec/lib/gitlab/ci/config/entry/auto_cancel_spec.rb
index bdd66cc00a1..764908ee040 100644
--- a/spec/lib/gitlab/ci/config/entry/auto_cancel_spec.rb
+++ b/spec/lib/gitlab/ci/config/entry/auto_cancel_spec.rb
@@ -25,7 +25,7 @@ RSpec.describe Gitlab::Ci::Config::Entry::AutoCancel, feature_category: :pipelin
it 'returns errors' do
expect(config.errors)
- .to include('auto cancel on new commit must be one of: conservative, interruptible, disabled')
+ .to include('auto cancel on new commit must be one of: conservative, interruptible, none')
end
end
end
diff --git a/spec/lib/gitlab/ci/config/entry/bridge_spec.rb b/spec/lib/gitlab/ci/config/entry/bridge_spec.rb
index 35f2a99ee87..04154b72453 100644
--- a/spec/lib/gitlab/ci/config/entry/bridge_spec.rb
+++ b/spec/lib/gitlab/ci/config/entry/bridge_spec.rb
@@ -23,6 +23,12 @@ RSpec.describe Gitlab::Ci::Config::Entry::Bridge, feature_category: :continuous_
end
end
+ describe '.visible?' do
+ it 'always returns true' do
+ expect(described_class.visible?).to be_truthy
+ end
+ end
+
describe '.matching?' do
subject { described_class.matching?(name, config) }
diff --git a/spec/lib/gitlab/ci/config/entry/image_spec.rb b/spec/lib/gitlab/ci/config/entry/image_spec.rb
index 99a6e25b313..0a82010c20c 100644
--- a/spec/lib/gitlab/ci/config/entry/image_spec.rb
+++ b/spec/lib/gitlab/ci/config/entry/image_spec.rb
@@ -112,7 +112,7 @@ RSpec.describe Gitlab::Ci::Config::Entry::Image do
end
end
- context "when docker specifies an option" do
+ context "when docker specifies platform" do
let(:config) { { name: 'image:1.0', docker: { platform: 'amd64' } } }
it 'is valid' do
@@ -129,15 +129,73 @@ RSpec.describe Gitlab::Ci::Config::Entry::Image do
)
end
end
+
+ context "when invalid data type is specified for platform option" do
+ let(:config) { { name: 'image:1.0', docker: { platform: 1 } } }
+
+ it 'raises an error' do
+ expect(entry).not_to be_valid
+ expect(entry.errors.first)
+ .to match %r{image executor opts '/docker/platform' must be a valid 'string'}
+ end
+ end
+ end
+
+ context "when docker specifies user" do
+ let(:config) { { name: 'image:1.0', docker: { user: 'dave' } } }
+
+ it 'is valid' do
+ expect(entry).to be_valid
+ end
+
+ describe '#value' do
+ it "returns value" do
+ expect(entry.value).to eq(
+ name: 'image:1.0',
+ executor_opts: {
+ docker: { user: 'dave' }
+ }
+ )
+ end
+ end
+
+ context "when user is a UID" do
+ let(:config) { { name: 'image:1.0', docker: { user: '1001' } } }
+
+ it 'is valid' do
+ expect(entry).to be_valid
+ end
+
+ describe '#value' do
+ it "returns value" do
+ expect(entry.value).to eq(
+ name: 'image:1.0',
+ executor_opts: {
+ docker: { user: '1001' }
+ }
+ )
+ end
+ end
+ end
+
+ context "when invalid data type is specified for user option" do
+ let(:config) { { name: 'image:1.0', docker: { user: 1 } } }
+
+ it 'raises an error' do
+ expect(entry).not_to be_valid
+ expect(entry.errors.first)
+ .to match %r{image executor opts '/docker/user' must be a valid 'string'}
+ end
+ end
end
context "when docker specifies an invalid option" do
- let(:config) { { name: 'image:1.0', docker: { platform: 1 } } }
+ let(:config) { { name: 'image:1.0', docker: { unknown_key: 'foo' } } }
it 'is not valid' do
expect(entry).not_to be_valid
expect(entry.errors.first)
- .to match %r{image executor opts '/docker/platform' must be a valid 'string'}
+ .to match %r{image executor opts '/docker/unknown_key' must be a valid 'schema'}
end
end
end
diff --git a/spec/lib/gitlab/ci/config/entry/include/rules/rule_spec.rb b/spec/lib/gitlab/ci/config/entry/include/rules/rule_spec.rb
index cd8e35ede61..a9f891a7b50 100644
--- a/spec/lib/gitlab/ci/config/entry/include/rules/rule_spec.rb
+++ b/spec/lib/gitlab/ci/config/entry/include/rules/rule_spec.rb
@@ -1,6 +1,6 @@
# frozen_string_literal: true
-require 'fast_spec_helper'
+require 'spec_helper'
require_dependency 'active_model'
RSpec.describe Gitlab::Ci::Config::Entry::Include::Rules::Rule, feature_category: :pipeline_composition do
diff --git a/spec/lib/gitlab/ci/config/entry/needs_spec.rb b/spec/lib/gitlab/ci/config/entry/needs_spec.rb
index d1a8a74ac06..61bb3e912ba 100644
--- a/spec/lib/gitlab/ci/config/entry/needs_spec.rb
+++ b/spec/lib/gitlab/ci/config/entry/needs_spec.rb
@@ -52,6 +52,27 @@ RSpec.describe ::Gitlab::Ci::Config::Entry::Needs, feature_category: :pipeline_c
end
end
+ context 'when config has disallowed keys' do
+ let(:config) { ['some_value'] }
+
+ before do
+ needs.metadata[:allowed_needs] = %i[cross_dependency]
+ needs.compose!
+ end
+
+ describe '#valid?' do
+ it 'returns invalid' do
+ expect(needs.valid?).to be_falsey
+ end
+ end
+
+ describe '#errors' do
+ it 'returns invalid types error' do
+ expect(needs.errors).to include('needs config uses invalid types: job')
+ end
+ end
+ end
+
context 'when wrong needs type is used' do
let(:config) { [{ job: 'job_name', artifacts: true, some: :key }] }
diff --git a/spec/lib/gitlab/ci/config/entry/rules/rule_spec.rb b/spec/lib/gitlab/ci/config/entry/rules/rule_spec.rb
index 3531d6e9f1a..d5bf532c216 100644
--- a/spec/lib/gitlab/ci/config/entry/rules/rule_spec.rb
+++ b/spec/lib/gitlab/ci/config/entry/rules/rule_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
require 'gitlab_chronic_duration'
-RSpec.describe Gitlab::Ci::Config::Entry::Rules::Rule do
+RSpec.describe Gitlab::Ci::Config::Entry::Rules::Rule, feature_category: :pipeline_composition do
let(:factory) do
Gitlab::Config::Entry::Factory.new(described_class)
.metadata(metadata)
@@ -11,7 +11,10 @@ RSpec.describe Gitlab::Ci::Config::Entry::Rules::Rule do
end
let(:metadata) do
- { allowed_when: %w[on_success on_failure always never manual delayed] }
+ {
+ allowed_when: %w[on_success on_failure always never manual delayed],
+ allowed_keys: %i[if changes exists when start_in allow_failure variables needs auto_cancel]
+ }
end
let(:entry) { factory.create! }
@@ -296,18 +299,18 @@ RSpec.describe Gitlab::Ci::Config::Entry::Rules::Rule do
end
end
- context 'with a string passed in metadata but not allowed in the class' do
- let(:metadata) { { allowed_when: %w[explode] } }
+ context 'with an invalid when' do
+ let(:metadata) { { allowed_when: %w[always never], allowed_keys: %i[if when] } }
let(:config) do
- { if: '$THIS == "that"', when: 'explode' }
+ { if: '$THIS == "that"', when: 'on_success' }
end
it { is_expected.to be_a(described_class) }
it { is_expected.not_to be_valid }
it 'returns an error about invalid when:' do
- expect(subject.errors).to include(/when unknown value: explode/)
+ expect(subject.errors).to include(/when unknown value: on_success/)
end
context 'when composed' do
@@ -318,41 +321,30 @@ RSpec.describe Gitlab::Ci::Config::Entry::Rules::Rule do
it { is_expected.not_to be_valid }
it 'returns an error about invalid when:' do
- expect(subject.errors).to include(/when unknown value: explode/)
+ expect(subject.errors).to include(/when unknown value: on_success/)
end
end
end
- context 'with a string allowed in the class but not passed in metadata' do
- let(:metadata) { { allowed_when: %w[always never] } }
-
+ context 'with an invalid variables' do
let(:config) do
- { if: '$THIS == "that"', when: 'on_success' }
+ { if: '$THIS == "that"', variables: 'hello' }
end
- it { is_expected.to be_a(described_class) }
- it { is_expected.not_to be_valid }
-
- it 'returns an error about invalid when:' do
- expect(subject.errors).to include(/when unknown value: on_success/)
+ before do
+ subject.compose!
end
- context 'when composed' do
- before do
- subject.compose!
- end
-
- it { is_expected.not_to be_valid }
+ it { is_expected.not_to be_valid }
- it 'returns an error about invalid when:' do
- expect(subject.errors).to include(/when unknown value: on_success/)
- end
+ it 'returns an error about invalid variables:' do
+ expect(subject.errors).to include(/variables config should be a hash/)
end
end
- context 'with an invalid variables' do
+ context 'with an invalid auto_cancel' do
let(:config) do
- { if: '$THIS == "that"', variables: 'hello' }
+ { if: '$THIS == "that"', auto_cancel: { on_new_commit: 'xyz' } }
end
before do
@@ -361,8 +353,9 @@ RSpec.describe Gitlab::Ci::Config::Entry::Rules::Rule do
it { is_expected.not_to be_valid }
- it 'returns an error about invalid variables:' do
- expect(subject.errors).to include(/variables config should be a hash/)
+ it 'returns an error' do
+ expect(subject.errors).to include(
+ 'auto_cancel on new commit must be one of: conservative, interruptible, none')
end
end
end
@@ -445,6 +438,12 @@ RSpec.describe Gitlab::Ci::Config::Entry::Rules::Rule do
it { is_expected.to eq(config) }
end
+
+ context 'when it has auto_cancel' do
+ let(:config) { { if: '$THIS || $THAT', auto_cancel: { on_new_commit: 'interruptible' } } }
+
+ it { is_expected.to eq(config) }
+ end
end
describe '.default' do
diff --git a/spec/lib/gitlab/ci/config/entry/rules_spec.rb b/spec/lib/gitlab/ci/config/entry/rules_spec.rb
index b0871f2345e..0113b6c1f7f 100644
--- a/spec/lib/gitlab/ci/config/entry/rules_spec.rb
+++ b/spec/lib/gitlab/ci/config/entry/rules_spec.rb
@@ -1,16 +1,18 @@
# frozen_string_literal: true
-require 'fast_spec_helper'
+require 'spec_helper'
require_dependency 'active_model'
-RSpec.describe Gitlab::Ci::Config::Entry::Rules do
+RSpec.describe Gitlab::Ci::Config::Entry::Rules, feature_category: :pipeline_composition do
let(:factory) do
Gitlab::Config::Entry::Factory.new(described_class)
.metadata(metadata)
.value(config)
end
- let(:metadata) { { allowed_when: %w[always never] } }
+ let(:metadata) do
+ { allowed_when: %w[always never], allowed_keys: %i[if when] }
+ end
subject(:entry) { factory.create! }
diff --git a/spec/lib/gitlab/ci/config/entry/service_spec.rb b/spec/lib/gitlab/ci/config/entry/service_spec.rb
index 82747e7b521..8ce0f890b46 100644
--- a/spec/lib/gitlab/ci/config/entry/service_spec.rb
+++ b/spec/lib/gitlab/ci/config/entry/service_spec.rb
@@ -154,22 +154,45 @@ RSpec.describe Gitlab::Ci::Config::Entry::Service do
end
context 'when configuration has docker options' do
- let(:config) { { name: 'postgresql:9.5', docker: { platform: 'amd64' } } }
+ context "with platform option" do
+ let(:config) { { name: 'postgresql:9.5', docker: { platform: 'amd64' } } }
- describe '#valid?' do
- it 'is valid' do
- expect(entry).to be_valid
+ describe '#valid?' do
+ it 'is valid' do
+ expect(entry).to be_valid
+ end
+ end
+
+ describe '#value' do
+ it "returns value" do
+ expect(entry.value).to eq(
+ name: 'postgresql:9.5',
+ executor_opts: {
+ docker: { platform: 'amd64' }
+ }
+ )
+ end
end
end
- describe '#value' do
- it "returns value" do
- expect(entry.value).to eq(
- name: 'postgresql:9.5',
- executor_opts: {
- docker: { platform: 'amd64' }
- }
- )
+ context "with user option" do
+ let(:config) { { name: 'postgresql:9.5', docker: { user: 'dave' } } }
+
+ describe '#valid?' do
+ it 'is valid' do
+ expect(entry).to be_valid
+ end
+ end
+
+ describe '#value' do
+ it "returns value" do
+ expect(entry.value).to eq(
+ name: 'postgresql:9.5',
+ executor_opts: {
+ docker: { user: 'dave' }
+ }
+ )
+ end
end
end
end
diff --git a/spec/lib/gitlab/ci/config/entry/workflow_spec.rb b/spec/lib/gitlab/ci/config/entry/workflow_spec.rb
index d3ce3ffe641..dbd25010884 100644
--- a/spec/lib/gitlab/ci/config/entry/workflow_spec.rb
+++ b/spec/lib/gitlab/ci/config/entry/workflow_spec.rb
@@ -6,6 +6,10 @@ RSpec.describe Gitlab::Ci::Config::Entry::Workflow, feature_category: :pipeline_
subject(:config) { described_class.new(workflow_hash) }
describe 'validations' do
+ before do
+ config.compose!
+ end
+
context 'when work config value is a string' do
let(:workflow_hash) { 'build' }
@@ -27,6 +31,28 @@ RSpec.describe Gitlab::Ci::Config::Entry::Workflow, feature_category: :pipeline_
end
context 'when work config value is a hash' do
+ context 'with an invalid key' do
+ let(:workflow_hash) { { trash: [{ if: '$VAR' }] } }
+
+ describe '#valid?' do
+ it 'is invalid' do
+ expect(config).not_to be_valid
+ end
+
+ it 'attaches an error specifying the unknown key' do
+ expect(config.errors).to include('workflow config contains unknown keys: trash')
+ end
+ end
+
+ describe '#value' do
+ it 'returns the invalid configuration' do
+ expect(config.value).to eq(workflow_hash)
+ end
+ end
+ end
+ end
+
+ context 'when config has rules' do
let(:workflow_hash) { { rules: [{ if: '$VAR' }] } }
describe '#valid?' do
@@ -45,8 +71,8 @@ RSpec.describe Gitlab::Ci::Config::Entry::Workflow, feature_category: :pipeline_
end
end
- context 'with an invalid key' do
- let(:workflow_hash) { { trash: [{ if: '$VAR' }] } }
+ context 'when rules has an invalid key' do
+ let(:workflow_hash) { { rules: [{ if: '$VAR', trash: 'something' }] } }
describe '#valid?' do
it 'is invalid' do
@@ -54,7 +80,7 @@ RSpec.describe Gitlab::Ci::Config::Entry::Workflow, feature_category: :pipeline_
end
it 'attaches an error specifying the unknown key' do
- expect(config.errors).to include('workflow config contains unknown keys: trash')
+ expect(config.errors).to include('rules:rule config contains unknown keys: trash')
end
end
@@ -64,6 +90,41 @@ RSpec.describe Gitlab::Ci::Config::Entry::Workflow, feature_category: :pipeline_
end
end
end
+
+ context 'when rules has auto_cancel' do
+ let(:workflow_hash) { { rules: [{ if: '$VAR', auto_cancel: { on_new_commit: 'interruptible' } }] } }
+
+ describe '#valid?' do
+ it 'is valid' do
+ expect(config).to be_valid
+ end
+
+ it 'attaches no errors' do
+ expect(config.errors).to be_empty
+ end
+ end
+
+ describe '#value' do
+ it 'returns the config' do
+ expect(config.value).to eq(workflow_hash)
+ end
+ end
+
+ context 'when auto_cancel has an invalid value' do
+ let(:workflow_hash) { { rules: [{ if: '$VAR', auto_cancel: { on_new_commit: 'xyz' } }] } }
+
+ describe '#valid?' do
+ it 'is invalid' do
+ expect(config).not_to be_valid
+ end
+
+ it 'returns error' do
+ expect(config.errors).to include(
+ 'rules:rule:auto_cancel on new commit must be one of: conservative, interruptible, none')
+ end
+ end
+ end
+ end
end
end
diff --git a/spec/lib/gitlab/ci/config/external/context_spec.rb b/spec/lib/gitlab/ci/config/external/context_spec.rb
index 9ac72ebbac8..3409fc53d19 100644
--- a/spec/lib/gitlab/ci/config/external/context_spec.rb
+++ b/spec/lib/gitlab/ci/config/external/context_spec.rb
@@ -159,10 +159,14 @@ RSpec.describe Gitlab::Ci::Config::External::Context, feature_category: :pipelin
shared_examples 'a mutated context' do
let(:mutated) { subject.mutate(new_attributes) }
+ let(:lazy_response) { double('lazy_response') }
before do
+ allow(lazy_response).to receive(:execute).and_return(lazy_response)
+
subject.expandset << :a_file
subject.set_deadline(15.seconds)
+ subject.execute_remote_parallel_request(lazy_response)
end
it { expect(mutated).not_to eq(subject) }
@@ -170,8 +174,9 @@ RSpec.describe Gitlab::Ci::Config::External::Context, feature_category: :pipelin
it { expect(mutated).to have_attributes(new_attributes) }
it { expect(mutated.pipeline).to eq(subject.pipeline) }
it { expect(mutated.expandset).to eq(subject.expandset) }
- it { expect(mutated.execution_deadline).to eq(mutated.execution_deadline) }
- it { expect(mutated.logger).to eq(mutated.logger) }
+ it { expect(mutated.execution_deadline).to eq(subject.execution_deadline) }
+ it { expect(mutated.logger).to eq(subject.logger) }
+ it { expect(mutated.parallel_requests).to eq(subject.parallel_requests) }
end
context 'with attributes' do
@@ -212,4 +217,80 @@ RSpec.describe Gitlab::Ci::Config::External::Context, feature_category: :pipelin
end
end
end
+
+ describe '#execute_remote_parallel_request' do
+ let(:lazy_response1) { double('lazy_response', wait: true, complete?: complete1) }
+ let(:lazy_response2) { double('lazy_response') }
+
+ let(:complete1) { false }
+
+ before do
+ allow(lazy_response1).to receive(:execute).and_return(lazy_response1)
+ allow(lazy_response2).to receive(:execute).and_return(lazy_response2)
+ end
+
+ context 'when the queue is empty' do
+ before do
+ stub_const("Gitlab::Ci::Config::External::Context::MAX_PARALLEL_REMOTE_REQUESTS", 2)
+ end
+
+ it 'adds the new lazy response to the queue' do
+ expect { subject.execute_remote_parallel_request(lazy_response1) }
+ .to change { subject.parallel_requests }
+ .from([])
+ .to([lazy_response1])
+ end
+ end
+
+ context 'when there is a lazy response in the queue' do
+ before do
+ subject.execute_remote_parallel_request(lazy_response1)
+ end
+
+ context 'when there is a free slot in the queue' do
+ before do
+ stub_const("Gitlab::Ci::Config::External::Context::MAX_PARALLEL_REMOTE_REQUESTS", 2)
+ end
+
+ it 'adds the new lazy response to the queue' do
+ expect { subject.execute_remote_parallel_request(lazy_response2) }
+ .to change { subject.parallel_requests }
+ .from([lazy_response1])
+ .to([lazy_response1, lazy_response2])
+ end
+ end
+
+ context 'when the queue is full' do
+ before do
+ stub_const("Gitlab::Ci::Config::External::Context::MAX_PARALLEL_REMOTE_REQUESTS", 1)
+ end
+
+ context 'when the first lazy response in the queue is complete' do
+ let(:complete1) { true }
+
+ it 'removes the completed lazy response and adds the new one to the queue' do
+ expect(lazy_response1).not_to receive(:wait)
+
+ expect { subject.execute_remote_parallel_request(lazy_response2) }
+ .to change { subject.parallel_requests }
+ .from([lazy_response1])
+ .to([lazy_response2])
+ end
+ end
+
+ context 'when the first lazy response in the queue is not complete' do
+ let(:complete1) { false }
+
+ it 'waits for the first lazy response to complete and then adds the new one to the queue' do
+ expect(lazy_response1).to receive(:wait)
+
+ expect { subject.execute_remote_parallel_request(lazy_response2) }
+ .to change { subject.parallel_requests }
+ .from([lazy_response1])
+ .to([lazy_response1, lazy_response2])
+ end
+ end
+ end
+ end
+ end
end
diff --git a/spec/lib/gitlab/ci/config/external/file/component_spec.rb b/spec/lib/gitlab/ci/config/external/file/component_spec.rb
index 88e272ac3fd..7907837db6a 100644
--- a/spec/lib/gitlab/ci/config/external/file/component_spec.rb
+++ b/spec/lib/gitlab/ci/config/external/file/component_spec.rb
@@ -146,6 +146,16 @@ RSpec.describe Gitlab::Ci::Config::External::File::Component, feature_category:
external_resource.content
end
+
+ context 'when user is missing in a context' do
+ let_it_be(:user) { nil }
+
+ it 'does not track the event' do
+ expect(::Gitlab::UsageDataCounters::HLLRedisCounter).not_to receive(:track_event)
+
+ external_resource.content
+ end
+ end
end
context 'when component is invalid' do
diff --git a/spec/lib/gitlab/ci/config/external/file/remote_spec.rb b/spec/lib/gitlab/ci/config/external/file/remote_spec.rb
index 7293e640112..adca9e750d0 100644
--- a/spec/lib/gitlab/ci/config/external/file/remote_spec.rb
+++ b/spec/lib/gitlab/ci/config/external/file/remote_spec.rb
@@ -157,6 +157,40 @@ RSpec.describe Gitlab::Ci::Config::External::File::Remote, feature_category: :pi
it_behaves_like "#content"
end
+ describe '#preload_content' do
+ context 'when the parallel request queue is full' do
+ let(:location1) { 'https://gitlab.com/gitlab-org/gitlab-foss/blob/1234/.secret_file1.yml' }
+ let(:location2) { 'https://gitlab.com/gitlab-org/gitlab-foss/blob/1234/.secret_file2.yml' }
+
+ before do
+ # Makes the parallel queue full easily
+ stub_const("Gitlab::Ci::Config::External::Context::MAX_PARALLEL_REMOTE_REQUESTS", 1)
+
+ # Adding a failing promise to the queue
+ promise = Concurrent::Promise.new do
+ sleep 1.1
+ raise Timeout::Error
+ end
+
+ context.execute_remote_parallel_request(
+ Gitlab::HTTP_V2::LazyResponse.new(promise, location1, {}, nil)
+ )
+
+ stub_full_request(location2).to_return(body: remote_file_content)
+ end
+
+ it 'waits for the queue' do
+ file2 = described_class.new({ remote: location2 }, context)
+
+ start_at = Time.current
+ file2.preload_content
+ end_at = Time.current
+
+ expect(end_at - start_at).to be > 1
+ end
+ end
+ end
+
describe "#error_message" do
subject(:error_message) do
Gitlab::Ci::Config::External::Mapper::Verifier.new(context).process([remote_file])
diff --git a/spec/lib/gitlab/ci/parsers/sbom/cyclonedx_properties_spec.rb b/spec/lib/gitlab/ci/parsers/sbom/cyclonedx_properties_spec.rb
index 2c57106b07c..9718d16756c 100644
--- a/spec/lib/gitlab/ci/parsers/sbom/cyclonedx_properties_spec.rb
+++ b/spec/lib/gitlab/ci/parsers/sbom/cyclonedx_properties_spec.rb
@@ -3,118 +3,173 @@
require 'fast_spec_helper'
RSpec.describe Gitlab::Ci::Parsers::Sbom::CyclonedxProperties, feature_category: :dependency_management do
- subject(:parse_source_from_properties) { described_class.parse_source(properties) }
+ shared_examples 'handling invalid properties' do
+ context 'when properties are nil' do
+ let(:properties) { nil }
- context 'when properties are nil' do
- let(:properties) { nil }
+ it { is_expected.to be_nil }
+ end
+
+ context 'when report does not have valid properties' do
+ let(:properties) { ['name' => 'foo', 'value' => 'bar'] }
- it { is_expected.to be_nil }
+ it { is_expected.to be_nil }
+ end
end
- context 'when report does not have gitlab properties' do
- let(:properties) { ['name' => 'foo', 'value' => 'bar'] }
+ describe '#parse_source' do
+ subject(:parse_source_from_properties) { described_class.parse_source(properties) }
- it { is_expected.to be_nil }
- end
+ it_behaves_like 'handling invalid properties'
- context 'when schema_version is missing' do
- let(:properties) do
- [
- { 'name' => 'gitlab:dependency_scanning:dependency_file', 'value' => 'package-lock.json' },
- { 'name' => 'gitlab:dependency_scanning:package_manager_name', 'value' => 'npm' },
- { 'name' => 'gitlab:dependency_scanning:language', 'value' => 'JavaScript' }
- ]
- end
+ context 'when schema_version is missing' do
+ let(:properties) do
+ [
+ { 'name' => 'gitlab:dependency_scanning:dependency_file', 'value' => 'package-lock.json' },
+ { 'name' => 'gitlab:dependency_scanning:package_manager_name', 'value' => 'npm' },
+ { 'name' => 'gitlab:dependency_scanning:language', 'value' => 'JavaScript' }
+ ]
+ end
- it { is_expected.to be_nil }
- end
+ it { is_expected.to be_nil }
+ end
- context 'when schema version is unsupported' do
- let(:properties) do
- [
- { 'name' => 'gitlab:meta:schema_version', 'value' => '2' },
- { 'name' => 'gitlab:dependency_scanning:dependency_file', 'value' => 'package-lock.json' },
- { 'name' => 'gitlab:dependency_scanning:package_manager_name', 'value' => 'npm' },
- { 'name' => 'gitlab:dependency_scanning:language', 'value' => 'JavaScript' }
- ]
+ context 'when schema version is unsupported' do
+ let(:properties) do
+ [
+ { 'name' => 'gitlab:meta:schema_version', 'value' => '2' },
+ { 'name' => 'gitlab:dependency_scanning:dependency_file', 'value' => 'package-lock.json' },
+ { 'name' => 'gitlab:dependency_scanning:package_manager_name', 'value' => 'npm' },
+ { 'name' => 'gitlab:dependency_scanning:language', 'value' => 'JavaScript' }
+ ]
+ end
+
+ it { is_expected.to be_nil }
end
- it { is_expected.to be_nil }
- end
+ context 'when no dependency_scanning or container_scanning properties are present' do
+ let(:properties) do
+ [
+ { 'name' => 'gitlab:meta:schema_version', 'value' => '1' },
+ { 'name' => 'gitlab::aquasecurity:trivy:FilePath', 'value' => '1' }
+ ]
+ end
- context 'when no dependency_scanning or container_scanning properties are present' do
- let(:properties) do
- [
- { 'name' => 'gitlab:meta:schema_version', 'value' => '1' }
- ]
+ it 'does not call source parsers' do
+ expect(Gitlab::Ci::Parsers::Sbom::Source::DependencyScanning).not_to receive(:source)
+ expect(Gitlab::Ci::Parsers::Sbom::Source::ContainerScanning).not_to receive(:source)
+
+ parse_source_from_properties
+ end
end
- it 'does not call source parsers' do
- expect(Gitlab::Ci::Parsers::Sbom::Source::DependencyScanning).not_to receive(:source)
- expect(Gitlab::Ci::Parsers::Sbom::Source::ContainerScanning).not_to receive(:source)
+ context 'when dependency_scanning properties are present' do
+ let(:properties) do
+ [
+ { 'name' => 'gitlab:meta:schema_version', 'value' => '1' },
+ { 'name' => 'gitlab:dependency_scanning:category', 'value' => 'development' },
+ { 'name' => 'gitlab:dependency_scanning:input_file:path', 'value' => 'package-lock.json' },
+ { 'name' => 'gitlab:dependency_scanning:source_file:path', 'value' => 'package.json' },
+ { 'name' => 'gitlab:dependency_scanning:package_manager:name', 'value' => 'npm' },
+ { 'name' => 'gitlab:dependency_scanning:language:name', 'value' => 'JavaScript' },
+ { 'name' => 'gitlab:dependency_scanning:unsupported_property', 'value' => 'Should be ignored' }
+ ]
+ end
+
+ let(:expected_input) do
+ {
+ 'category' => 'development',
+ 'input_file' => { 'path' => 'package-lock.json' },
+ 'source_file' => { 'path' => 'package.json' },
+ 'package_manager' => { 'name' => 'npm' },
+ 'language' => { 'name' => 'JavaScript' }
+ }
+ end
- parse_source_from_properties
- end
- end
+ it 'passes only supported properties to the dependency scanning parser' do
+ expect(Gitlab::Ci::Parsers::Sbom::Source::DependencyScanning).to receive(:source).with(expected_input)
- context 'when dependency_scanning properties are present' do
- let(:properties) do
- [
- { 'name' => 'gitlab:meta:schema_version', 'value' => '1' },
- { 'name' => 'gitlab:dependency_scanning:category', 'value' => 'development' },
- { 'name' => 'gitlab:dependency_scanning:input_file:path', 'value' => 'package-lock.json' },
- { 'name' => 'gitlab:dependency_scanning:source_file:path', 'value' => 'package.json' },
- { 'name' => 'gitlab:dependency_scanning:package_manager:name', 'value' => 'npm' },
- { 'name' => 'gitlab:dependency_scanning:language:name', 'value' => 'JavaScript' },
- { 'name' => 'gitlab:dependency_scanning:unsupported_property', 'value' => 'Should be ignored' }
- ]
+ parse_source_from_properties
+ end
end
- let(:expected_input) do
- {
- 'category' => 'development',
- 'input_file' => { 'path' => 'package-lock.json' },
- 'source_file' => { 'path' => 'package.json' },
- 'package_manager' => { 'name' => 'npm' },
- 'language' => { 'name' => 'JavaScript' }
- }
- end
+ context 'when container_scanning properties are present' do
+ let(:properties) do
+ [
+ { 'name' => 'gitlab:meta:schema_version', 'value' => '1' },
+ { 'name' => 'gitlab:container_scanning:image:name', 'value' => 'photon' },
+ { 'name' => 'gitlab:container_scanning:image:tag', 'value' => '5.0-20231007' },
+ { 'name' => 'gitlab:container_scanning:operating_system:name', 'value' => 'Photon OS' },
+ { 'name' => 'gitlab:container_scanning:operating_system:version', 'value' => '5.0' }
+ ]
+ end
+
+ let(:expected_input) do
+ {
+ 'image' => {
+ 'name' => 'photon',
+ 'tag' => '5.0-20231007'
+ },
+ 'operating_system' => {
+ 'name' => 'Photon OS',
+ 'version' => '5.0'
+ }
+ }
+ end
- it 'passes only supported properties to the dependency scanning parser' do
- expect(Gitlab::Ci::Parsers::Sbom::Source::DependencyScanning).to receive(:source).with(expected_input)
+ it 'passes only supported properties to the container scanning parser' do
+ expect(Gitlab::Ci::Parsers::Sbom::Source::ContainerScanning).to receive(:source).with(expected_input)
- parse_source_from_properties
+ parse_source_from_properties
+ end
end
end
- context 'when container_scanning properties are present' do
- let(:properties) do
- [
- { 'name' => 'gitlab:meta:schema_version', 'value' => '1' },
- { 'name' => 'gitlab:container_scanning:image:name', 'value' => 'photon' },
- { 'name' => 'gitlab:container_scanning:image:tag', 'value' => '5.0-20231007' },
- { 'name' => 'gitlab:container_scanning:operating_system:name', 'value' => 'Photon OS' },
- { 'name' => 'gitlab:container_scanning:operating_system:version', 'value' => '5.0' }
- ]
+ describe '#parse_trivy_source' do
+ subject(:parse_trivy_source_from_properties) { described_class.parse_trivy_source(properties) }
+
+ it_behaves_like 'handling invalid properties'
+
+ context 'when no trivy properties are present' do
+ let(:properties) do
+ [
+ { 'name' => 'gitlab:meta:schema_version', 'value' => '1' },
+ { 'name' => 'gitlab::aquasecurity:trivy:FilePath', 'value' => '1' }
+ ]
+ end
+
+ it 'does not call source parsers' do
+ expect(Gitlab::Ci::Parsers::Sbom::Source::Trivy).not_to receive(:source)
+
+ parse_trivy_source_from_properties
+ end
end
- let(:expected_input) do
- {
- 'image' => {
- 'name' => 'photon',
- 'tag' => '5.0-20231007'
- },
- 'operating_system' => {
- 'name' => 'Photon OS',
- 'version' => '5.0'
+ context 'when trivy properties are present' do
+ let(:properties) do
+ [
+ { 'name' => 'aquasecurity:trivy:PkgID', 'value' => 'sha256:47ce8fad8..' },
+ { 'name' => 'aquasecurity:trivy:LayerDigest',
+ 'value' => 'registry.test.com/atiwari71/container-scanning-test/main@sha256:e14a4bcf..' },
+ { 'name' => 'aquasecurity:trivy:LayerDiffID', 'value' => 'sha256:94dd7d531fa..' },
+ { 'name' => 'aquasecurity:trivy:SrcEpoch', 'value' => 'sha256:5d20c808c..' }
+ ]
+ end
+
+ let(:expected_input) do
+ {
+ 'PkgID' => 'sha256:47ce8fad8..',
+ 'LayerDigest' => 'registry.test.com/atiwari71/container-scanning-test/main@sha256:e14a4bcf..',
+ 'LayerDiffID' => 'sha256:94dd7d531fa..',
+ 'SrcEpoch' => 'sha256:5d20c808c..'
}
- }
- end
+ end
- it 'passes only supported properties to the container scanning parser' do
- expect(Gitlab::Ci::Parsers::Sbom::Source::ContainerScanning).to receive(:source).with(expected_input)
+ it 'passes only supported properties to the container scanning parser' do
+ expect(Gitlab::Ci::Parsers::Sbom::Source::Trivy).to receive(:source).with(expected_input)
- parse_source_from_properties
+ parse_trivy_source_from_properties
+ end
end
end
end
diff --git a/spec/lib/gitlab/ci/parsers/sbom/cyclonedx_spec.rb b/spec/lib/gitlab/ci/parsers/sbom/cyclonedx_spec.rb
index 9c8402faf77..6a6fe59bce1 100644
--- a/spec/lib/gitlab/ci/parsers/sbom/cyclonedx_spec.rb
+++ b/spec/lib/gitlab/ci/parsers/sbom/cyclonedx_spec.rb
@@ -125,6 +125,56 @@ RSpec.describe Gitlab::Ci::Parsers::Sbom::Cyclonedx, feature_category: :dependen
parse!
end
+ context 'when component is trivy type' do
+ let(:parsed_properties) do
+ {
+ 'PkgID' => 'adduser@3.134',
+ 'PkgType' => 'debian'
+ }
+ end
+
+ let(:components) do
+ [
+ {
+ # Trivy component
+ "bom-ref" => "0eda252d-d8a4-4250-b816-b6314f029063",
+ "type" => "library",
+ "name" => "analyzer",
+ "purl" => "pkg:gem/activesupport@5.1.4",
+ "properties" => [
+ {
+ "name" => "aquasecurity:trivy:PkgID",
+ "value" => "apt@2.6.1"
+ },
+ {
+ "name" => "aquasecurity:trivy:PkgType",
+ "value" => "debian"
+ }
+ ]
+ }
+ ]
+ end
+
+ before do
+ allow(properties_parser).to receive(:parse_trivy_source).and_return(parsed_properties)
+ stub_const('Gitlab::Ci::Parsers::Sbom::CyclonedxProperties', properties_parser)
+ end
+
+ it 'adds each component, ignoring unused attributes' do
+ expect(report).to receive(:add_component)
+ .with(
+ an_object_having_attributes(
+ component_type: "library",
+ properties: parsed_properties,
+ purl: an_object_having_attributes(
+ type: "gem"
+ )
+ )
+ )
+ parse!
+ end
+ end
+
context 'when a component has an invalid purl' do
before do
components.push(
diff --git a/spec/lib/gitlab/ci/parsers/sbom/source/trivy_spec.rb b/spec/lib/gitlab/ci/parsers/sbom/source/trivy_spec.rb
new file mode 100644
index 00000000000..460ca4f28a1
--- /dev/null
+++ b/spec/lib/gitlab/ci/parsers/sbom/source/trivy_spec.rb
@@ -0,0 +1,25 @@
+# frozen_string_literal: true
+
+require 'fast_spec_helper'
+
+RSpec.describe Gitlab::Ci::Parsers::Sbom::Source::Trivy, feature_category: :dependency_management do
+ subject { described_class.source(property_data) }
+
+ context 'when all property data is present' do
+ let(:property_data) do
+ {
+ 'PkgID' => 'sha256:47ce8fad8..',
+ 'LayerDigest' => 'registry.test.com/atiwari71/container-scanning-test/main@sha256:e14a4bcf..',
+ 'LayerDiffID' => 'sha256:94dd7d531fa..',
+ 'SrcEpoch' => 'sha256:5d20c808c..'
+ }
+ end
+
+ it 'returns expected source data' do
+ is_expected.to have_attributes(
+ source_type: :trivy,
+ data: property_data
+ )
+ end
+ end
+end
diff --git a/spec/lib/gitlab/ci/parsers/security/common_spec.rb b/spec/lib/gitlab/ci/parsers/security/common_spec.rb
index 431a6d94c48..6aa526c1829 100644
--- a/spec/lib/gitlab/ci/parsers/security/common_spec.rb
+++ b/spec/lib/gitlab/ci/parsers/security/common_spec.rb
@@ -185,7 +185,7 @@ RSpec.describe Gitlab::Ci::Parsers::Security::Common, feature_category: :vulnera
context 'when name is provided' do
it 'sets name from the report as a name' do
- finding = report.findings.find { |x| x.compare_key == 'CVE-1030' }
+ finding = report.findings.second
expected_name = Gitlab::Json.parse(finding.raw_metadata)['name']
expect(finding.name).to eq(expected_name)
@@ -197,7 +197,8 @@ RSpec.describe Gitlab::Ci::Parsers::Security::Common, feature_category: :vulnera
let(:location) { nil }
it 'returns only identifier name' do
- finding = report.findings.find { |x| x.compare_key == 'CVE-2017-11429' }
+ finding = report.findings.third
+
expect(finding.name).to eq("CVE-2017-11429")
end
end
@@ -205,21 +206,24 @@ RSpec.describe Gitlab::Ci::Parsers::Security::Common, feature_category: :vulnera
context 'when location exists' do
context 'when CVE identifier exists' do
it 'combines identifier with location to create name' do
- finding = report.findings.find { |x| x.compare_key == 'CVE-2017-11429' }
+ finding = report.findings.third
+
expect(finding.name).to eq("CVE-2017-11429 in yarn.lock")
end
end
context 'when CWE identifier exists' do
it 'combines identifier with location to create name' do
- finding = report.findings.find { |x| x.compare_key == 'CWE-2017-11429' }
+ finding = report.findings.fourth
+
expect(finding.name).to eq("CWE-2017-11429 in yarn.lock")
end
end
context 'when neither CVE nor CWE identifier exist' do
it 'combines identifier with location to create name' do
- finding = report.findings.find { |x| x.compare_key == 'OTHER-2017-11429' }
+ finding = report.findings.fifth
+
expect(finding.name).to eq("other-2017-11429 in yarn.lock")
end
end
@@ -476,6 +480,20 @@ RSpec.describe Gitlab::Ci::Parsers::Security::Common, feature_category: :vulnera
end
end
end
+
+ describe 'handling the unicode null characters' do
+ let(:artifact) { build(:ci_job_artifact, :common_security_report_with_unicode_null_character) }
+
+ it 'escapes the unicode null characters while parsing the report' do
+ finding = report.findings.first
+
+ expect(finding.solution).to eq('Upgrade to latest version.\u0000')
+ end
+
+ it 'adds warning to report' do
+ expect(report.warnings).to include({ type: 'Parsing', message: 'Report artifact contained unicode null characters which are escaped during the ingestion.' })
+ end
+ end
end
end
end
diff --git a/spec/lib/gitlab/ci/pipeline/chain/command_spec.rb b/spec/lib/gitlab/ci/pipeline/chain/command_spec.rb
index 68158503628..37535b80cd4 100644
--- a/spec/lib/gitlab/ci/pipeline/chain/command_spec.rb
+++ b/spec/lib/gitlab/ci/pipeline/chain/command_spec.rb
@@ -200,7 +200,7 @@ RSpec.describe Gitlab::Ci::Pipeline::Chain::Command do
let(:command) { described_class.new(project: project) }
it 'uses BLANK_SHA' do
- is_expected.to eq(Gitlab::Git::BLANK_SHA)
+ is_expected.to eq(Gitlab::Git::SHA1_BLANK_SHA)
end
end
end
diff --git a/spec/lib/gitlab/ci/pipeline/chain/evaluate_workflow_rules_spec.rb b/spec/lib/gitlab/ci/pipeline/chain/evaluate_workflow_rules_spec.rb
index 44ccb1eeae1..bf146791659 100644
--- a/spec/lib/gitlab/ci/pipeline/chain/evaluate_workflow_rules_spec.rb
+++ b/spec/lib/gitlab/ci/pipeline/chain/evaluate_workflow_rules_spec.rb
@@ -12,13 +12,10 @@ RSpec.describe Gitlab::Ci::Pipeline::Chain::EvaluateWorkflowRules do
end
let(:step) { described_class.new(pipeline, command) }
- let(:ff_always_set_pipeline_failure_reason) { true }
describe '#perform!' do
context 'when pipeline has been skipped by workflow configuration' do
before do
- stub_feature_flags(always_set_pipeline_failure_reason: ff_always_set_pipeline_failure_reason)
-
allow(step).to receive(:workflow_rules_result)
.and_return(
double(pass?: false, variables: {})
@@ -47,15 +44,6 @@ RSpec.describe Gitlab::Ci::Pipeline::Chain::EvaluateWorkflowRules do
expect(pipeline).to be_failed
expect(pipeline).to be_filtered_by_workflow_rules
end
-
- context 'when always_set_pipeline_failure_reason is disabled' do
- let(:ff_always_set_pipeline_failure_reason) { false }
-
- it 'does not set the failure reason', :aggregate_failures do
- expect(pipeline).not_to be_failed
- expect(pipeline.failure_reason).to be_blank
- end
- end
end
context 'when pipeline has not been skipped by workflow configuration' do
diff --git a/spec/lib/gitlab/ci/pipeline/chain/helpers_spec.rb b/spec/lib/gitlab/ci/pipeline/chain/helpers_spec.rb
index 84c2fb6525e..5956137a725 100644
--- a/spec/lib/gitlab/ci/pipeline/chain/helpers_spec.rb
+++ b/spec/lib/gitlab/ci/pipeline/chain/helpers_spec.rb
@@ -52,22 +52,6 @@ RSpec.describe Gitlab::Ci::Pipeline::Chain::Helpers, feature_category: :continuo
expect(pipeline.status).to eq 'failed'
expect(pipeline.failure_reason).to eq drop_reason.to_s
end
-
- context 'when feature flag always_set_pipeline_failure_reason is false' do
- before do
- stub_feature_flags(always_set_pipeline_failure_reason: false)
- end
-
- specify do
- subject.error(message, config_error: config_error, drop_reason: drop_reason)
-
- if command.save_incompleted
- expect(pipeline.failure_reason).to eq drop_reason.to_s
- else
- expect(pipeline.failure_reason).not_to be_present
- end
- end
- end
end
context 'when the error includes malicious HTML' do
@@ -93,6 +77,37 @@ RSpec.describe Gitlab::Ci::Pipeline::Chain::Helpers, feature_category: :continuo
end
end
+ context 'when drop_reason is nil' do
+ let(:command) { double(project: nil) }
+
+ shared_examples "error function with no drop reason" do
+ it 'drops with out failure reason' do
+ expect(command).to receive(:increment_pipeline_failure_reason_counter)
+
+ call_error
+
+ expect(pipeline.failure_reason).to be_nil
+ expect(pipeline.yaml_errors).to be_nil
+ expect(pipeline.errors[:base]).to include(message)
+ expect(pipeline).to be_failed
+ expect(pipeline).not_to be_persisted
+ end
+ end
+
+ context 'when no drop_reason argument is passed' do
+ let(:call_error) { subject.error(message) }
+
+ it_behaves_like "error function with no drop reason"
+ end
+
+ context 'when drop_reason argument is passed as nil' do
+ let(:drop_reason) { nil }
+ let(:call_error) { subject.error(message, drop_reason: drop_reason) }
+
+ it_behaves_like "error function with no drop reason"
+ end
+ end
+
context 'when config error is false' do
context 'does not set the yaml error or override the drop reason' do
let(:drop_reason) { :size_limit_exceeded }
@@ -107,7 +122,7 @@ RSpec.describe Gitlab::Ci::Pipeline::Chain::Helpers, feature_category: :continuo
expect(pipeline).to be_persisted
end
- context ' when the drop reason is not persistable' do
+ context 'when the drop reason is not persistable' do
let(:drop_reason) { :filtered_by_rules }
let(:command) { double(project: nil) }
diff --git a/spec/lib/gitlab/ci/pipeline/chain/populate_metadata_spec.rb b/spec/lib/gitlab/ci/pipeline/chain/populate_metadata_spec.rb
index 732748d8c8b..787a458f0ff 100644
--- a/spec/lib/gitlab/ci/pipeline/chain/populate_metadata_spec.rb
+++ b/spec/lib/gitlab/ci/pipeline/chain/populate_metadata_spec.rb
@@ -240,6 +240,78 @@ RSpec.describe Gitlab::Ci::Pipeline::Chain::PopulateMetadata, feature_category:
expect(pipeline.pipeline_metadata).not_to be_persisted
end
end
+
+ context 'with workflow:rules:auto_cancel' do
+ context 'with auto_cancel:on_new_commit not set and rules:workflow:auto_cancel:on_new_commit set' do
+ let(:config) do
+ {
+ variables: { MY_VAR: my_var_value },
+ workflow: {
+ auto_cancel: { on_job_failure: 'all' },
+ rules: [{ if: '$MY_VAR == "something"', auto_cancel: { on_new_commit: 'interruptible' } }]
+ },
+ rspec: { script: 'rspec' }
+ }
+ end
+
+ context 'when the rule is matched' do
+ let(:my_var_value) { 'something' }
+
+ it 'builds pipeline_metadata' do
+ run_chain
+
+ expect(pipeline.pipeline_metadata.auto_cancel_on_new_commit).to eq('interruptible')
+ expect(pipeline.pipeline_metadata.auto_cancel_on_job_failure).to eq('all')
+ end
+ end
+
+ context 'when the rule is not matched' do
+ let(:my_var_value) { 'something else' }
+
+ it 'builds pipeline_metadata' do
+ run_chain
+
+ expect(pipeline.pipeline_metadata.auto_cancel_on_new_commit).to eq('conservative')
+ expect(pipeline.pipeline_metadata.auto_cancel_on_job_failure).to eq('all')
+ end
+ end
+ end
+
+ context 'with auto_cancel:on_new_commit set and rules:workflow:auto_cancel:on_new_commit set' do
+ let(:config) do
+ {
+ variables: { MY_VAR: my_var_value },
+ workflow: {
+ auto_cancel: { on_new_commit: 'interruptible' },
+ rules: [{ if: '$MY_VAR == "something"', auto_cancel: { on_new_commit: 'none' } }]
+ },
+ rspec: { script: 'rspec' }
+ }
+ end
+
+ context 'when the rule is matched' do
+ let(:my_var_value) { 'something' }
+
+ it 'builds pipeline_metadata' do
+ run_chain
+
+ expect(pipeline.pipeline_metadata.auto_cancel_on_new_commit).to eq('none')
+ expect(pipeline.pipeline_metadata.auto_cancel_on_job_failure).to eq('none')
+ end
+ end
+
+ context 'when the rule is not matched' do
+ let(:my_var_value) { 'something else' }
+
+ it 'builds pipeline_metadata' do
+ run_chain
+
+ expect(pipeline.pipeline_metadata.auto_cancel_on_new_commit).to eq('interruptible')
+ expect(pipeline.pipeline_metadata.auto_cancel_on_job_failure).to eq('none')
+ end
+ end
+ end
+ end
end
context 'with both pipeline name and auto_cancel' do
diff --git a/spec/lib/gitlab/ci/pipeline/chain/populate_spec.rb b/spec/lib/gitlab/ci/pipeline/chain/populate_spec.rb
index 476b1be35a9..22ff367c746 100644
--- a/spec/lib/gitlab/ci/pipeline/chain/populate_spec.rb
+++ b/spec/lib/gitlab/ci/pipeline/chain/populate_spec.rb
@@ -34,15 +34,12 @@ RSpec.describe Gitlab::Ci::Pipeline::Chain::Populate, feature_category: :continu
{ rspec: { script: 'rspec' } }
end
- let(:ff_always_set_pipeline_failure_reason) { true }
-
def run_chain
dependencies.map(&:perform!)
step.perform!
end
before do
- stub_feature_flags(always_set_pipeline_failure_reason: ff_always_set_pipeline_failure_reason)
stub_ci_pipeline_yaml_file(YAML.dump(config))
end
@@ -113,18 +110,6 @@ RSpec.describe Gitlab::Ci::Pipeline::Chain::Populate, feature_category: :continu
expect(pipeline).to be_failed
expect(pipeline).to be_filtered_by_rules
end
-
- context 'when ff always_set_pipeline_failure_reason is disabled' do
- let(:ff_always_set_pipeline_failure_reason) { false }
-
- it 'sets the failure reason without persisting the pipeline', :aggregate_failures do
- run_chain
-
- expect(pipeline).not_to be_persisted
- expect(pipeline).not_to be_failed
- expect(pipeline).not_to be_filtered_by_rules
- end
- end
end
describe 'pipeline protect' do
diff --git a/spec/lib/gitlab/ci/reports/security/report_spec.rb b/spec/lib/gitlab/ci/reports/security/report_spec.rb
index d7f967f1c55..dabee0f32de 100644
--- a/spec/lib/gitlab/ci/reports/security/report_spec.rb
+++ b/spec/lib/gitlab/ci/reports/security/report_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Gitlab::Ci::Reports::Security::Report do
+RSpec.describe Gitlab::Ci::Reports::Security::Report, feature_category: :vulnerability_management do
let_it_be(:pipeline) { create(:ci_pipeline) }
let(:created_at) { 2.weeks.ago }
@@ -89,7 +89,7 @@ RSpec.describe Gitlab::Ci::Reports::Security::Report do
let(:other_report) do
create(
:ci_reports_security_report,
- findings: [create(:ci_reports_security_finding, compare_key: 'other_finding')],
+ findings: [create(:ci_reports_security_finding)],
scanners: [create(:ci_reports_security_scanner, external_id: 'other_scanner', name: 'Other Scanner')],
identifiers: [create(:ci_reports_security_identifier, external_id: 'other_id', name: 'other_scanner')]
)
diff --git a/spec/lib/gitlab/ci/variables/builder/pipeline_spec.rb b/spec/lib/gitlab/ci/variables/builder/pipeline_spec.rb
index f8d67a6f0b4..18ad723b75c 100644
--- a/spec/lib/gitlab/ci/variables/builder/pipeline_spec.rb
+++ b/spec/lib/gitlab/ci/variables/builder/pipeline_spec.rb
@@ -152,51 +152,6 @@ RSpec.describe Gitlab::Ci::Variables::Builder::Pipeline, feature_category: :secr
end
end
- context 'when truncate_ci_merge_request_description feature flag is disabled' do
- before do
- stub_feature_flags(truncate_ci_merge_request_description: false)
- end
-
- context 'when merge request description hits the limit' do
- let(:merge_request_description) { 'a' * (MergeRequest::CI_MERGE_REQUEST_DESCRIPTION_MAX_LENGTH + 1) }
-
- it 'does not truncate the exposed description' do
- expect(subject.to_hash)
- .to include(
- 'CI_MERGE_REQUEST_DESCRIPTION' => merge_request.description
- )
- expect(subject.to_hash)
- .not_to have_key('CI_MERGE_REQUEST_DESCRIPTION_IS_TRUNCATED')
- end
- end
-
- context 'when merge request description fits the length limit' do
- let(:merge_request_description) { 'a' * (MergeRequest::CI_MERGE_REQUEST_DESCRIPTION_MAX_LENGTH - 1) }
-
- it 'does not truncate the exposed description' do
- expect(subject.to_hash)
- .to include(
- 'CI_MERGE_REQUEST_DESCRIPTION' => merge_request.description
- )
- expect(subject.to_hash)
- .not_to have_key('CI_MERGE_REQUEST_DESCRIPTION_IS_TRUNCATED')
- end
- end
-
- context 'when merge request description does not exist' do
- let(:merge_request_description) { nil }
-
- it 'does not truncate the exposed description' do
- expect(subject.to_hash)
- .to include(
- 'CI_MERGE_REQUEST_DESCRIPTION' => merge_request.description
- )
- expect(subject.to_hash)
- .not_to have_key('CI_MERGE_REQUEST_DESCRIPTION_IS_TRUNCATED')
- end
- end
- end
-
it 'exposes diff variables' do
expect(subject.to_hash)
.to include(
diff --git a/spec/lib/gitlab/ci/yaml_processor/test_cases/include_spec.rb b/spec/lib/gitlab/ci/yaml_processor/test_cases/include_spec.rb
new file mode 100644
index 00000000000..d8f8a58edf3
--- /dev/null
+++ b/spec/lib/gitlab/ci/yaml_processor/test_cases/include_spec.rb
@@ -0,0 +1,80 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+module Gitlab
+ module Ci
+ RSpec.describe YamlProcessor, feature_category: :pipeline_composition do
+ include StubRequests
+
+ subject(:processor) do
+ described_class.new(config, project: project, user: project.first_owner, logger: logger)
+ end
+
+ let_it_be(:project) { create(:project, :repository) }
+
+ let(:logger) { Gitlab::Ci::Pipeline::Logger.new(project: project) }
+ let(:result) { processor.execute }
+ let(:builds) { result.builds }
+
+ context 'with include:remote' do
+ let(:config) do
+ <<~YAML
+ include:
+ - remote: http://my.domain.com/config1.yml
+ - remote: http://my.domain.com/config2.yml
+ YAML
+ end
+
+ before do
+ stub_full_request('http://my.domain.com/config1.yml')
+ .to_return(body: 'build1: { script: echo Hello World }')
+
+ stub_full_request('http://my.domain.com/config2.yml')
+ .to_return(body: 'build2: { script: echo Hello World }')
+ end
+
+ it 'returns builds from included files' do
+ expect(builds.pluck(:name)).to eq %w[build1 build2]
+ end
+
+ it 'stores instrumentation logs' do
+ result
+
+ expect(logger.observations_hash['config_mapper_process_duration_s']['count']).to eq(1)
+ end
+
+ # Remove with the FF ci_parallel_remote_includes
+ it 'does not store log with config_file_fetch_remote_content' do
+ result
+
+ expect(logger.observations_hash).not_to have_key('config_file_fetch_remote_content_duration_s')
+ end
+
+ context 'when the FF ci_parallel_remote_includes is disabled' do
+ before do
+ stub_feature_flags(ci_parallel_remote_includes: false)
+ end
+
+ it 'stores log with config_file_fetch_remote_content' do
+ result
+
+ expect(logger.observations_hash['config_file_fetch_remote_content_duration_s']['count']).to eq(2)
+ end
+
+ context 'when the FF is specifically enabled for the project' do
+ before do
+ stub_feature_flags(ci_parallel_remote_includes: [project])
+ end
+
+ it 'does not store log with config_file_fetch_remote_content' do
+ result
+
+ expect(logger.observations_hash).not_to have_key('config_file_fetch_remote_content_duration_s')
+ end
+ end
+ end
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/ci/yaml_processor/test_cases/interruptible_spec.rb b/spec/lib/gitlab/ci/yaml_processor/test_cases/interruptible_spec.rb
index 03ff7077969..297872f4cf3 100644
--- a/spec/lib/gitlab/ci/yaml_processor/test_cases/interruptible_spec.rb
+++ b/spec/lib/gitlab/ci/yaml_processor/test_cases/interruptible_spec.rb
@@ -5,9 +5,10 @@ require 'spec_helper'
module Gitlab
module Ci
RSpec.describe YamlProcessor, feature_category: :pipeline_composition do
- subject(:processor) { described_class.new(config, user: nil).execute }
+ subject(:processor) { described_class.new(config, user: nil) }
- let(:builds) { processor.builds }
+ let(:result) { processor.execute }
+ let(:builds) { result.builds }
context 'with interruptible' do
let(:default_config) { nil }
diff --git a/spec/lib/gitlab/ci/yaml_processor_spec.rb b/spec/lib/gitlab/ci/yaml_processor_spec.rb
index 844a6849c8f..4f759109b26 100644
--- a/spec/lib/gitlab/ci/yaml_processor_spec.rb
+++ b/spec/lib/gitlab/ci/yaml_processor_spec.rb
@@ -516,6 +516,32 @@ module Gitlab
})
end
end
+
+ context 'with rules and auto_cancel' do
+ let(:config) do
+ <<-YML
+ workflow:
+ rules:
+ - if: $VAR == "value"
+ auto_cancel:
+ on_new_commit: none
+ on_job_failure: none
+
+ hello:
+ script: echo world
+ YML
+ end
+
+ it 'parses workflow_rules' do
+ expect(subject.workflow_rules).to contain_exactly({
+ if: '$VAR == "value"',
+ auto_cancel: {
+ on_new_commit: 'none',
+ on_job_failure: 'none'
+ }
+ })
+ end
+ end
end
describe '#warnings' do
@@ -1295,10 +1321,12 @@ module Gitlab
name: ruby:2.7
docker:
platform: linux/amd64
+ user: dave
services:
- name: postgres:11.9
docker:
platform: linux/amd64
+ user: john
YAML
end
@@ -1313,9 +1341,9 @@ module Gitlab
options: {
script: ["exit 0"],
image: { name: "ruby:2.7",
- executor_opts: { docker: { platform: 'linux/amd64' } } },
+ executor_opts: { docker: { platform: 'linux/amd64', user: 'dave' } } },
services: [{ name: "postgres:11.9",
- executor_opts: { docker: { platform: 'linux/amd64' } } }]
+ executor_opts: { docker: { platform: 'linux/amd64', user: 'john' } } }]
},
allow_failure: false,
when: "on_success",
diff --git a/spec/lib/gitlab/cleanup/orphan_job_artifact_final_objects/job_artifact_object_spec.rb b/spec/lib/gitlab/cleanup/orphan_job_artifact_final_objects/job_artifact_object_spec.rb
new file mode 100644
index 00000000000..103df128dac
--- /dev/null
+++ b/spec/lib/gitlab/cleanup/orphan_job_artifact_final_objects/job_artifact_object_spec.rb
@@ -0,0 +1,90 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Cleanup::OrphanJobArtifactFinalObjects::JobArtifactObject, :clean_gitlab_redis_shared_state, feature_category: :build_artifacts do
+ let(:job_artifact_object) do
+ described_class.new(
+ fog_file,
+ bucket_prefix: bucket_prefix
+ )
+ end
+
+ # rubocop:disable RSpec/VerifiedDoubles -- For some reason it can't see Fog::AWS::Storage::File
+ let(:fog_file) { double(key: fog_file_key, content_length: 145) }
+ # rubocop:enable RSpec/VerifiedDoubles
+
+ let(:fog_file_key) { 'aaa/bbb/123' }
+ let(:bucket_prefix) { nil }
+
+ describe '#path' do
+ subject { job_artifact_object.path }
+
+ it { is_expected.to eq(fog_file.key) }
+ end
+
+ describe '#size' do
+ subject { job_artifact_object.size }
+
+ it { is_expected.to eq(fog_file.content_length) }
+ end
+
+ describe '#in_final_location?' do
+ subject { job_artifact_object.in_final_location? }
+
+ context 'when path has @final in it' do
+ let(:fog_file_key) { 'aaa/bbb/@final/123/ccc' }
+
+ it { is_expected.to eq(true) }
+ end
+
+ context 'when path has no @final in it' do
+ let(:fog_file_key) { 'aaa/bbb/ccc' }
+
+ it { is_expected.to eq(false) }
+ end
+ end
+
+ describe '#orphan?' do
+ shared_examples_for 'identifying orphan object' do
+ let(:artifact_final_path) { 'aaa/@final/bbb' }
+ let(:fog_file_key) { File.join([bucket_prefix, artifact_final_path].compact) }
+
+ subject { job_artifact_object.orphan? }
+
+ context 'when there is job artifact record with a file_final_path that matches the object path' do
+ before do
+ # We don't store the bucket_prefix if ever in the file_final_path
+ create(:ci_job_artifact, file_final_path: artifact_final_path)
+ end
+
+ it { is_expected.to eq(false) }
+ end
+
+ context 'when there are no job artifact records with a file_final_path that matches the object path' do
+ context 'and there is a pending direct upload entry that matches the object path' do
+ before do
+ # We don't store the bucket_prefix if ever in the pending direct upload entry
+ ObjectStorage::PendingDirectUpload.prepare(:artifacts, artifact_final_path)
+ end
+
+ it { is_expected.to eq(false) }
+ end
+
+ context 'and there are no pending direct upload entries that match the object path' do
+ it { is_expected.to eq(true) }
+ end
+ end
+ end
+
+ context 'when bucket prefix is not present' do
+ it_behaves_like 'identifying orphan object'
+ end
+
+ context 'when bucket prefix is present' do
+ let(:bucket_prefix) { 'my/prefix' }
+
+ it_behaves_like 'identifying orphan object'
+ end
+ end
+end
diff --git a/spec/lib/gitlab/cleanup/orphan_job_artifact_final_objects_cleaner_spec.rb b/spec/lib/gitlab/cleanup/orphan_job_artifact_final_objects_cleaner_spec.rb
new file mode 100644
index 00000000000..aeb87bc0d9e
--- /dev/null
+++ b/spec/lib/gitlab/cleanup/orphan_job_artifact_final_objects_cleaner_spec.rb
@@ -0,0 +1,263 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Cleanup::OrphanJobArtifactFinalObjectsCleaner, :orphan_final_artifacts_cleanup, :clean_gitlab_redis_shared_state, feature_category: :build_artifacts do
+ describe '#run!' do
+ let(:cleaner) do
+ described_class.new(
+ provider: specified_provider,
+ force_restart: force_restart,
+ dry_run: dry_run
+ )
+ end
+
+ let(:dry_run) { true }
+ let(:force_restart) { false }
+ let(:remote_directory) { 'artifacts' }
+ let(:bucket_prefix) { nil }
+
+ subject(:run) { cleaner.run! }
+
+ before do
+ stub_const('Gitlab::Cleanup::OrphanJobArtifactFinalObjects::Paginators::BasePaginator::BATCH_SIZE', 2)
+
+ Rake.application.rake_require 'tasks/gitlab/cleanup'
+
+ Gitlab.config.artifacts.object_store.tap do |config|
+ config[:remote_directory] = remote_directory
+ config[:bucket_prefix] = bucket_prefix
+ end
+
+ allow(Gitlab::AppLogger).to receive(:info)
+ end
+
+ shared_examples_for 'cleaning up orphan final job artifact objects' do
+ let(:fog_connection) do
+ stub_object_storage_uploader(
+ config: Gitlab.config.artifacts.object_store,
+ uploader: JobArtifactUploader,
+ direct_upload: true
+ )
+ end
+
+ let!(:orphan_final_object_1) { create_fog_file }
+ let!(:orphan_final_object_2) { create_fog_file }
+ let!(:orphan_non_final_object) { create_fog_file(final: false) }
+
+ let!(:non_orphan_final_object_1) do
+ create_fog_file.tap do |file|
+ create(:ci_job_artifact, file_final_path: path_without_bucket_prefix(file.key))
+ end
+ end
+
+ let!(:non_orphan_final_object_2) do
+ create_fog_file.tap do |file|
+ create(:ci_job_artifact, file_final_path: path_without_bucket_prefix(file.key))
+ end
+ end
+
+ shared_context 'when resuming from marker' do
+ let(:dummy_error) { Class.new(StandardError) }
+
+ before do
+ fetch_counter = 0
+
+ allow(cleaner).to receive(:fetch_batch).and_wrap_original do |m, *args|
+ raise dummy_error if fetch_counter == 1
+
+ fetch_counter += 1
+ m.call(*args)
+ end
+ end
+ end
+
+ shared_examples_for 'handling dry run mode' do
+ context 'when on dry run (which is default)' do
+ it 'logs orphan objects to delete but does not delete them' do
+ run
+
+ expect_start_log_message
+ expect_first_page_loading_log_message
+ expect_page_loading_via_marker_log_message(times: 3)
+ expect_delete_log_message(orphan_final_object_1)
+ expect_delete_log_message(orphan_final_object_2)
+ expect_no_delete_log_message(orphan_non_final_object)
+ expect_no_delete_log_message(non_orphan_final_object_1)
+ expect_no_delete_log_message(non_orphan_final_object_2)
+ expect_done_log_message
+
+ expect_object_to_exist(orphan_final_object_1)
+ expect_object_to_exist(orphan_final_object_2)
+ expect_object_to_exist(orphan_non_final_object)
+ expect_object_to_exist(non_orphan_final_object_1)
+ expect_object_to_exist(non_orphan_final_object_2)
+ end
+
+ context 'when interrupted in the middle of processing pages' do
+ include_context 'when resuming from marker'
+
+ it 'resumes from last known page marker on the next run' do
+ expect { cleaner.run! }.to raise_error(dummy_error)
+ saved_marker = fetch_saved_marker
+
+ new_cleaner = described_class.new(
+ provider: specified_provider,
+ force_restart: false,
+ dry_run: true
+ )
+
+ new_cleaner.run!
+
+ expect_resuming_from_marker_log_message(saved_marker)
+
+ # Given we can't guarantee the order of the objects because
+ # of random path generation, we can't tell which page they will
+ # fall in, so we will just ensure that they
+ # were all logged in the end.
+ expect_delete_log_message(orphan_final_object_1)
+ expect_delete_log_message(orphan_final_object_2)
+
+ # Ensure that they were not deleted because this is just dry run.
+ expect_object_to_exist(orphan_final_object_1)
+ expect_object_to_exist(orphan_final_object_2)
+ end
+
+ context 'and force_restart is true' do
+ it 'starts from the first page on the next run' do
+ expect { cleaner.run! }.to raise_error(dummy_error)
+
+ new_cleaner = described_class.new(
+ provider: specified_provider,
+ force_restart: true,
+ dry_run: true
+ )
+
+ new_cleaner.run!
+
+ expect_no_resuming_from_marker_log_message
+
+ # Ensure that they were not deleted because this is just dry run.
+ expect_object_to_exist(orphan_final_object_1)
+ expect_object_to_exist(orphan_final_object_2)
+ end
+ end
+ end
+ end
+
+ context 'when dry run is set to false' do
+ let(:dry_run) { false }
+
+ it 'logs orphan objects to delete and deletes them' do
+ expect_object_to_exist(orphan_final_object_1)
+ expect_object_to_exist(orphan_final_object_2)
+
+ run
+
+ expect_start_log_message
+ expect_first_page_loading_log_message
+ expect_page_loading_via_marker_log_message(times: 3)
+ expect_delete_log_message(orphan_final_object_1)
+ expect_delete_log_message(orphan_final_object_2)
+ expect_no_delete_log_message(orphan_non_final_object)
+ expect_no_delete_log_message(non_orphan_final_object_1)
+ expect_no_delete_log_message(non_orphan_final_object_2)
+ expect_done_log_message
+
+ expect_object_to_be_deleted(orphan_final_object_1)
+ expect_object_to_be_deleted(orphan_final_object_2)
+ expect_object_to_exist(orphan_non_final_object)
+ expect_object_to_exist(non_orphan_final_object_1)
+ expect_object_to_exist(non_orphan_final_object_2)
+ end
+
+ context 'when interrupted in the middle of processing pages' do
+ include_context 'when resuming from marker'
+
+ it 'resumes from last known page marker on the next run' do
+ expect { cleaner.run! }.to raise_error(dummy_error)
+ saved_marker = fetch_saved_marker
+
+ new_cleaner = described_class.new(
+ provider: specified_provider,
+ force_restart: false,
+ dry_run: false
+ )
+
+ new_cleaner.run!
+
+ expect_resuming_from_marker_log_message(saved_marker)
+
+ # Given we can't guarantee the order of the objects because
+ # of random path generation, we can't tell which page they will
+ # fall in, so we will just ensure that they
+ # were all logged in the end.
+ expect_delete_log_message(orphan_final_object_1)
+ expect_delete_log_message(orphan_final_object_2)
+
+ # Ensure that they were deleted because this is not dry run.
+ expect_object_to_be_deleted(orphan_final_object_1)
+ expect_object_to_be_deleted(orphan_final_object_2)
+ end
+
+ context 'and force_restart is true' do
+ it 'starts from the first page on the next run' do
+ expect { cleaner.run! }.to raise_error(dummy_error)
+
+ new_cleaner = described_class.new(
+ provider: specified_provider,
+ force_restart: true,
+ dry_run: false
+ )
+
+ new_cleaner.run!
+
+ expect_no_resuming_from_marker_log_message
+
+ # Ensure that they were deleted because this is not a dry run.
+ expect_object_to_be_deleted(orphan_final_object_1)
+ expect_object_to_be_deleted(orphan_final_object_2)
+ end
+ end
+ end
+ end
+ end
+
+ context 'when not configured to use bucket_prefix' do
+ let(:remote_directory) { 'artifacts' }
+ let(:bucket_prefix) { nil }
+
+ it_behaves_like 'handling dry run mode'
+ end
+
+ context 'when configured to use bucket_prefix' do
+ let(:remote_directory) { 'main-bucket' }
+ let(:bucket_prefix) { 'my/artifacts' }
+
+ it_behaves_like 'handling dry run mode'
+ end
+ end
+
+ context 'when defaulting to provider in the object store configuration' do
+ let(:specified_provider) { nil }
+
+ it_behaves_like 'cleaning up orphan final job artifact objects'
+ end
+
+ context 'when provider is specified' do
+ context 'and provider is supported' do
+ let(:specified_provider) { 'aws' }
+
+ it_behaves_like 'cleaning up orphan final job artifact objects'
+ end
+
+ context 'and provider is not supported' do
+ let(:specified_provider) { 'somethingelse' }
+
+ it 'raises an error' do
+ expect { run }.to raise_error(described_class::UnsupportedProviderError)
+ end
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/current_settings_spec.rb b/spec/lib/gitlab/current_settings_spec.rb
index df1b12e479f..7fc50438c95 100644
--- a/spec/lib/gitlab/current_settings_spec.rb
+++ b/spec/lib/gitlab/current_settings_spec.rb
@@ -97,207 +97,44 @@ RSpec.describe Gitlab::CurrentSettings, feature_category: :shared do
expect(described_class.metrics_sample_interval).to be(15)
end
- context 'when ENV["IN_MEMORY_APPLICATION_SETTINGS"] is true' do
- before do
- stub_env('IN_MEMORY_APPLICATION_SETTINGS', 'true')
- end
+ it 'retrieves settings using ApplicationSettingFetcher' do
+ expect(Gitlab::ApplicationSettingFetcher).to receive(:current_application_settings).and_call_original
- it 'returns an in-memory ApplicationSetting object' do
- expect(ApplicationSetting).not_to receive(:current)
+ described_class.home_page_url
+ end
+ end
- expect(described_class.current_application_settings).to be_a(ApplicationSetting)
- expect(described_class.current_application_settings).not_to be_persisted
- end
+ describe '#current_application_settings?' do
+ subject(:settings_set) { described_class.current_application_settings? }
+
+ before do
+ # unstub, it is stubbed in spec/spec_helper.rb
+ allow(described_class).to receive(:current_application_settings?).and_call_original
end
- context 'in a Rake task with DB unavailable' do
+ context 'when settings are cached in RequestStore' do
before do
- allow(Gitlab::Runtime).to receive(:rake?).and_return(true)
- # For some reason, `allow(described_class).to receive(:connect_to_db?).and_return(false)` causes issues
- # during the initialization phase of the test suite, so instead let's mock the internals of it
- allow(ApplicationSetting.connection).to receive(:active?).and_return(false)
+ allow(Gitlab::SafeRequestStore).to receive(:exist?).with(:current_application_settings).and_return(true)
end
- context 'and no settings in cache' do
- before do
- expect(ApplicationSetting).not_to receive(:current)
- end
-
- it 'returns a FakeApplicationSettings object' do
- expect(described_class.current_application_settings).to be_a(Gitlab::FakeApplicationSettings)
- end
-
- it 'does not issue any query' do
- expect(ActiveRecord::QueryRecorder.new { described_class.current_application_settings }.count).to eq(0)
- end
+ it 'returns true' do
+ expect(settings_set).to be(true)
end
end
- context 'with DB available' do
- # This method returns the ::ApplicationSetting.defaults hash
- # but with respect of custom attribute accessors of ApplicationSetting model
- def settings_from_defaults
- ar_wrapped_defaults = ::ApplicationSetting.build_from_defaults.attributes
- ar_wrapped_defaults.slice(*::ApplicationSetting.defaults.keys)
- end
-
- context 'and settings in cache' do
- include_context 'with settings in cache'
-
- it 'fetches the settings from cache' do
- # For some reason, `allow(described_class).to receive(:connect_to_db?).and_return(true)` causes issues
- # during the initialization phase of the test suite, so instead let's mock the internals of it
- expect(ApplicationSetting.connection).not_to receive(:active?)
- expect(ApplicationSetting.connection).not_to receive(:cached_table_exists?)
- expect_any_instance_of(ActiveRecord::MigrationContext).not_to receive(:needs_migration?)
- expect(ActiveRecord::QueryRecorder.new { described_class.current_application_settings }.count).to eq(0)
- end
+ context 'when ApplicationSettingFetcher.current_application_settings? returns true' do
+ before do
+ allow(Gitlab::ApplicationSettingFetcher).to receive(:current_application_settings?).and_return(true)
end
- context 'and no settings in cache' do
- before do
- allow(ApplicationSetting.connection).to receive(:active?).and_return(true)
- allow(ApplicationSetting.connection).to receive(:cached_table_exists?).with('application_settings').and_return(true)
- end
-
- context 'with RequestStore enabled', :request_store do
- it 'fetches the settings from DB only once' do
- described_class.current_application_settings # warm the cache
-
- expect(ActiveRecord::QueryRecorder.new { described_class.current_application_settings }.count).to eq(0)
- end
- end
-
- it 'creates default ApplicationSettings if none are present' do
- settings = described_class.current_application_settings
-
- expect(settings).to be_a(ApplicationSetting)
- expect(settings).to be_persisted
- expect(settings).to have_attributes(settings_from_defaults)
- end
-
- context 'when we hit a recursive loop' do
- before do
- expect(ApplicationSetting).to receive(:create_from_defaults) do
- raise ApplicationSetting::Recursion
- end
- end
-
- it 'recovers and returns in-memory settings' do
- settings = described_class.current_application_settings
-
- expect(settings).to be_a(ApplicationSetting)
- expect(settings).not_to be_persisted
- end
- end
-
- context 'when ApplicationSettings does not have a primary key' do
- before do
- allow(ApplicationSetting.connection).to receive(:primary_key).with('application_settings').and_return(nil)
- end
-
- it 'raises an exception if ApplicationSettings does not have a primary key' do
- expect { described_class.current_application_settings }.to raise_error(/table is missing a primary key constraint/)
- end
- end
-
- context 'with pending migrations' do
- let(:current_settings) { described_class.current_application_settings }
-
- before do
- allow(Gitlab::Runtime).to receive(:rake?).and_return(false)
- end
-
- shared_examples 'a non-persisted ApplicationSetting object' do
- it 'uses the default value from ApplicationSetting.defaults' do
- expect(current_settings.signup_enabled).to eq(ApplicationSetting.defaults[:signup_enabled])
- end
-
- it 'uses the default value from custom ApplicationSetting accessors' do
- expect(current_settings.commit_email_hostname).to eq(ApplicationSetting.default_commit_email_hostname)
- end
-
- it 'responds to predicate methods' do
- expect(current_settings.signup_enabled?).to eq(current_settings.signup_enabled)
- end
- end
-
- context 'in a Rake task' do
- before do
- allow(Gitlab::Runtime).to receive(:rake?).and_return(true)
- expect_any_instance_of(ActiveRecord::MigrationContext).to receive(:needs_migration?).and_return(true)
- end
-
- it_behaves_like 'a non-persisted ApplicationSetting object'
-
- it 'returns a FakeApplicationSettings object' do
- expect(current_settings).to be_a(Gitlab::FakeApplicationSettings)
- end
-
- context 'when a new column is used before being migrated' do
- before do
- allow(ApplicationSetting).to receive(:defaults).and_return({ foo: 'bar' })
- end
-
- it 'uses the default value if present' do
- expect(current_settings.foo).to eq('bar')
- end
- end
- end
-
- context 'with no ApplicationSetting DB record' do
- it_behaves_like 'a non-persisted ApplicationSetting object'
- end
-
- context 'with an existing ApplicationSetting DB record' do
- before do
- described_class.update!(home_page_url: 'http://mydomain.com')
- end
-
- it_behaves_like 'a non-persisted ApplicationSetting object'
-
- it 'uses the value from the DB attribute if present and not overridden by an accessor' do
- expect(current_settings.home_page_url).to eq('http://mydomain.com')
- end
- end
- end
-
- context 'when ApplicationSettings.current is present' do
- it 'returns the existing application settings' do
- expect(ApplicationSetting).to receive(:current).and_return(:current_settings)
-
- expect(described_class.current_application_settings).to eq(:current_settings)
- end
- end
+ it 'returns true' do
+ expect(settings_set).to be(true)
end
end
- end
-
- describe '#current_application_settings?', :use_clean_rails_memory_store_caching do
- before do
- allow(described_class).to receive(:current_application_settings?).and_call_original
- ApplicationSetting.delete_all # ensure no settings exist
- end
-
- it 'returns true when settings exist' do
- described_class.update!(
- home_page_url: 'http://mydomain.com',
- signup_enabled: false)
-
- expect(described_class.current_application_settings?).to eq(true)
- end
-
- it 'returns false when settings do not exist' do
- expect(described_class.current_application_settings?).to eq(false)
- end
-
- context 'with cache', :request_store do
- include_context 'with settings in cache'
-
- it 'returns an in-memory ApplicationSetting object' do
- expect(ApplicationSetting).not_to receive(:current)
- expect(described_class.current_application_settings?).to eq(true)
+ context 'when not cached and not in ApplicationSettingFetcher' do
+ it 'returns false' do
+ expect(settings_set).to be(false)
end
end
end
diff --git a/spec/lib/gitlab/data_builder/pipeline_spec.rb b/spec/lib/gitlab/data_builder/pipeline_spec.rb
index ad7cd2dc736..5fa61b1680d 100644
--- a/spec/lib/gitlab/data_builder/pipeline_spec.rb
+++ b/spec/lib/gitlab/data_builder/pipeline_spec.rb
@@ -184,14 +184,14 @@ RSpec.describe Gitlab::DataBuilder::Pipeline, feature_category: :continuous_inte
create(:ci_build, :deploy_to_production, :with_deployment, user: user, project: project, pipeline: pipeline)
# We need `.to_json` as the build hook data is wrapped within `Gitlab::Lazy`
- control_count = ActiveRecord::QueryRecorder.new { described_class.build(pipeline.reload).to_json }.count
+ control = ActiveRecord::QueryRecorder.new { described_class.build(pipeline.reload).to_json }
# Adding more builds to the pipeline and serializing the data again
create_list(:ci_build, 3, user: user, project: project, pipeline: pipeline)
create(:ci_build, :start_review_app, :with_deployment, user: user, project: project, pipeline: pipeline)
create(:ci_build, :stop_review_app, :with_deployment, user: user, project: project, pipeline: pipeline)
- expect { described_class.build(pipeline.reload).to_json }.not_to exceed_query_limit(control_count)
+ expect { described_class.build(pipeline.reload).to_json }.not_to exceed_query_limit(control)
end
it "with multiple retried builds" do
@@ -201,14 +201,14 @@ RSpec.describe Gitlab::DataBuilder::Pipeline, feature_category: :continuous_inte
create(:ci_build, :deploy_to_production, :retried, :with_deployment, user: user, project: project, pipeline: pipeline)
# We need `.to_json` as the build hook data is wrapped within `Gitlab::Lazy`
- control_count = ActiveRecord::QueryRecorder.new { described_class.build(pipeline.reload).with_retried_builds.to_json }.count
+ control = ActiveRecord::QueryRecorder.new { described_class.build(pipeline.reload).with_retried_builds.to_json }
# Adding more builds to the pipeline and serializing the data again
create_list(:ci_build, 3, :retried, user: user, project: project, pipeline: pipeline)
create(:ci_build, :start_review_app, :retried, :with_deployment, user: user, project: project, pipeline: pipeline)
create(:ci_build, :stop_review_app, :retried, :with_deployment, user: user, project: project, pipeline: pipeline)
- expect { described_class.build(pipeline.reload).with_retried_builds.to_json }.not_to exceed_query_limit(control_count)
+ expect { described_class.build(pipeline.reload).with_retried_builds.to_json }.not_to exceed_query_limit(control)
end
end
end
diff --git a/spec/lib/gitlab/data_builder/push_spec.rb b/spec/lib/gitlab/data_builder/push_spec.rb
index 02dc596c5eb..2d2beaff339 100644
--- a/spec/lib/gitlab/data_builder/push_spec.rb
+++ b/spec/lib/gitlab/data_builder/push_spec.rb
@@ -89,13 +89,13 @@ RSpec.describe Gitlab::DataBuilder::Push do
described_class.build(
project: project,
user: user,
- oldrev: Gitlab::Git::BLANK_SHA,
+ oldrev: Gitlab::Git::SHA1_BLANK_SHA,
newrev: '8a2a6eb295bb170b34c24c76c49ed0e9b2eaf34b',
ref: 'refs/tags/v1.1.0')
end
it { expect(data).to be_a(Hash) }
- it { expect(data[:before]).to eq(Gitlab::Git::BLANK_SHA) }
+ it { expect(data[:before]).to eq(Gitlab::Git::SHA1_BLANK_SHA) }
it { expect(data[:checkout_sha]).to eq('5937ac0a7beb003549fc5fd26fc247adbce4a52e') }
it { expect(data[:after]).to eq('8a2a6eb295bb170b34c24c76c49ed0e9b2eaf34b') }
it { expect(data[:ref]).to eq('refs/tags/v1.1.0') }
diff --git a/spec/lib/gitlab/database/click_house_client_spec.rb b/spec/lib/gitlab/database/click_house_client_spec.rb
index 271500ed3f6..e501a17b9b0 100644
--- a/spec/lib/gitlab/database/click_house_client_spec.rb
+++ b/spec/lib/gitlab/database/click_house_client_spec.rb
@@ -52,7 +52,7 @@ RSpec.describe 'ClickHouse::Client', :click_house, feature_category: :database d
describe 'RSpec hooks' do
it 'ensures that tables are empty' do
- results = ClickHouse::Client.select('SELECT * FROM events', :main)
+ results = ClickHouse::Client.select('SELECT * FROM events FINAL', :main)
expect(results).to be_empty
end
@@ -66,7 +66,7 @@ RSpec.describe 'ClickHouse::Client', :click_house, feature_category: :database d
:main)
end
- results = ClickHouse::Client.select('SELECT id, path, created_at FROM events ORDER BY id', :main)
+ results = ClickHouse::Client.select('SELECT id, path, created_at FROM events FINAL ORDER BY id', :main)
expect(results).to match([
{ 'id' => 10, 'path' => '1/2/', 'created_at' => be_within(0.1.seconds).of(time) },
@@ -87,7 +87,7 @@ RSpec.describe 'ClickHouse::Client', :click_house, feature_category: :database d
ClickHouse::Client.execute(insert_query, :main)
- results = ClickHouse::Client.select('SELECT * FROM events ORDER BY id', :main)
+ results = ClickHouse::Client.select('SELECT * FROM events FINAL ORDER BY id', :main)
expect(results.size).to eq(3)
last = results.last
@@ -106,7 +106,7 @@ RSpec.describe 'ClickHouse::Client', :click_house, feature_category: :database d
ClickHouse::Client.execute(delete_query, :main)
select_query = ClickHouse::Client::Query.new(
- raw_query: 'SELECT * FROM events WHERE id = {id:UInt64}',
+ raw_query: 'SELECT * FROM events FINAL WHERE id = {id:UInt64}',
placeholders: { id: event3.id }
)
diff --git a/spec/lib/gitlab/database/dictionary_spec.rb b/spec/lib/gitlab/database/dictionary_spec.rb
index 261cf27ed69..59145842b24 100644
--- a/spec/lib/gitlab/database/dictionary_spec.rb
+++ b/spec/lib/gitlab/database/dictionary_spec.rb
@@ -24,6 +24,25 @@ RSpec.describe Gitlab::Database::Dictionary, feature_category: :database do
end
end
+ describe '.any_entry' do
+ it 'loads an entry from any scope' do
+ expect(described_class.any_entry('ci_pipelines')).to be_present # Regular table
+ expect(described_class.any_entry('audit_events_archived')).to be_present # Deleted table
+ expect(described_class.any_entry('postgres_constraints')).to be_present # View
+ expect(described_class.any_entry('not_a_table_ever')).to be_nil
+ end
+ end
+
+ describe '.entry' do
+ it 'loads an Entry from the given scope' do
+ expect(described_class.entry('ci_pipelines')).to be_present # Regular table
+ expect(described_class.entry('audit_events_archived')).not_to be_present # Deleted table
+ expect(described_class.entry('postgres_constraints')).not_to be_present # View
+ expect(described_class.entry('audit_events_archived', 'deleted_tables')).to be_present # Deleted table
+ expect(described_class.entry('postgres_constraints', 'views')).to be_present # View
+ end
+ end
+
describe '::Entry' do
subject(:database_dictionary) { described_class::Entry.new(file_path) }
@@ -80,6 +99,39 @@ RSpec.describe Gitlab::Database::Dictionary, feature_category: :database do
expect { database_dictionary.validate! }.to raise_error(Gitlab::Database::GitlabSchema::UnknownSchemaError)
end
end
+
+ context 'with allow_cross_joins' do
+ let(:file_path) { 'db/docs/achievements.yml' }
+
+ describe '#allow_cross_to_schemas' do
+ it 'returns the list of allowed schemas' do
+ expect(database_dictionary.allow_cross_to_schemas(:joins))
+ .to contain_exactly(:gitlab_main_clusterwide)
+ end
+ end
+ end
+
+ context 'with allow_cross_transactions' do
+ let(:file_path) { 'db/docs/activity_pub_releases_subscriptions.yml' }
+
+ describe '#allow_cross_to_schemas' do
+ it 'returns the list of allowed schemas' do
+ expect(database_dictionary.allow_cross_to_schemas(:transactions))
+ .to contain_exactly(:gitlab_main_clusterwide)
+ end
+ end
+ end
+
+ context 'with allow_cross_foreign_keys' do
+ let(:file_path) { 'db/docs/agent_group_authorizations.yml' }
+
+ describe '#allow_cross_to_schemas' do
+ it 'returns the list of allowed schemas' do
+ expect(database_dictionary.allow_cross_to_schemas(:foreign_keys))
+ .to contain_exactly(:gitlab_main_clusterwide)
+ end
+ end
+ end
end
context 'for a view' do
diff --git a/spec/lib/gitlab/database/gitlab_schema_spec.rb b/spec/lib/gitlab/database/gitlab_schema_spec.rb
index 7fca47c707c..f716bcfcf49 100644
--- a/spec/lib/gitlab/database/gitlab_schema_spec.rb
+++ b/spec/lib/gitlab/database/gitlab_schema_spec.rb
@@ -217,17 +217,19 @@ RSpec.describe Gitlab::Database::GitlabSchema, feature_category: :database do
describe '.cross_joins_allowed?' do
where(:schemas, :tables, :result) do
- %i[] | %i[] | true
- %i[gitlab_main] | %i[] | true
- %i[gitlab_main_clusterwide gitlab_main] | %i[] | true
- %i[gitlab_main_clusterwide gitlab_ci] | %i[] | false
- %i[gitlab_main_clusterwide gitlab_main gitlab_ci] | %i[] | false
- %i[gitlab_main_clusterwide gitlab_internal] | %i[] | false
- %i[gitlab_main gitlab_ci] | %i[] | false
- %i[gitlab_main_clusterwide gitlab_main gitlab_shared] | %i[] | true
- %i[gitlab_main_clusterwide gitlab_shared] | %i[] | true
+ %i[] | %w[] | true
+ %i[gitlab_main] | %w[evidences] | true
+ %i[gitlab_main_clusterwide gitlab_main] | %w[users evidences] | true
+ %i[gitlab_main_clusterwide gitlab_ci] | %w[users ci_pipelines] | false
+ %i[gitlab_main_clusterwide gitlab_main gitlab_ci] | %w[users evidences ci_pipelines] | false
+ %i[gitlab_main_clusterwide gitlab_internal] | %w[users schema_migrations] | false
+ %i[gitlab_main gitlab_ci] | %w[evidences schema_migrations] | false
+ %i[gitlab_main_clusterwide gitlab_main gitlab_shared] | %w[users evidences detached_partitions] | true
+ %i[gitlab_main_clusterwide gitlab_shared] | %w[users detached_partitions] | true
%i[gitlab_main_clusterwide gitlab_main_cell] | %w[users namespaces] | false
%i[gitlab_main_clusterwide gitlab_main_cell] | %w[plans namespaces] | true
+ %i[gitlab_main_clusterwide gitlab_main_cell] | %w[users achievements] | true
+ %i[gitlab_main_clusterwide gitlab_main_cell] | %w[users activity_pub_releases_subscriptions] | false
end
with_them do
@@ -237,17 +239,19 @@ RSpec.describe Gitlab::Database::GitlabSchema, feature_category: :database do
describe '.cross_transactions_allowed?' do
where(:schemas, :tables, :result) do
- %i[] | %i[] | true
- %i[gitlab_main] | %i[] | true
- %i[gitlab_main_clusterwide gitlab_main] | %i[] | true
- %i[gitlab_main_clusterwide gitlab_ci] | %i[] | false
- %i[gitlab_main_clusterwide gitlab_main gitlab_ci] | %i[] | false
- %i[gitlab_main_clusterwide gitlab_internal] | %i[] | true
- %i[gitlab_main gitlab_ci] | %i[] | false
- %i[gitlab_main_clusterwide gitlab_main gitlab_shared] | %i[] | true
- %i[gitlab_main_clusterwide gitlab_shared] | %i[] | true
+ %i[] | %w[] | true
+ %i[gitlab_main] | %w[evidences] | true
+ %i[gitlab_main_clusterwide gitlab_main] | %w[users evidences] | true
+ %i[gitlab_main_clusterwide gitlab_ci] | %w[users ci_pipelines] | false
+ %i[gitlab_main_clusterwide gitlab_main gitlab_ci] | %w[users evidences ci_pipelines] | false
+ %i[gitlab_main_clusterwide gitlab_internal] | %w[users schema_migrations] | true
+ %i[gitlab_main gitlab_ci] | %w[evidences ci_pipelines] | false
+ %i[gitlab_main_clusterwide gitlab_main gitlab_shared] | %w[users evidences detached_partitions] | true
+ %i[gitlab_main_clusterwide gitlab_shared] | %w[users detached_partitions] | true
%i[gitlab_main_clusterwide gitlab_main_cell] | %w[users namespaces] | false
%i[gitlab_main_clusterwide gitlab_main_cell] | %w[plans namespaces] | true
+ %i[gitlab_main_clusterwide gitlab_main_cell] | %w[users achievements] | false
+ %i[gitlab_main_clusterwide gitlab_main_cell] | %w[users activity_pub_releases_subscriptions] | true
end
with_them do
@@ -257,15 +261,17 @@ RSpec.describe Gitlab::Database::GitlabSchema, feature_category: :database do
describe '.cross_foreign_key_allowed?' do
where(:schemas, :tables, :result) do
- %i[] | %i[] | false
- %i[gitlab_main] | %i[] | true
- %i[gitlab_main_clusterwide gitlab_main] | %i[] | true
- %i[gitlab_main_clusterwide gitlab_ci] | %i[] | false
- %i[gitlab_main_clusterwide gitlab_internal] | %i[] | false
- %i[gitlab_main gitlab_ci] | %i[] | false
- %i[gitlab_main_clusterwide gitlab_shared] | %i[] | false
+ %i[] | %w[] | false
+ %i[gitlab_main] | %w[evidences] | true
+ %i[gitlab_main_clusterwide gitlab_main] | %w[users evidences] | true
+ %i[gitlab_main_clusterwide gitlab_ci] | %w[users ci_pipelines] | false
+ %i[gitlab_main_clusterwide gitlab_internal] | %w[users schema_migrations] | false
+ %i[gitlab_main gitlab_ci] | %w[evidences ci_pipelines] | false
+ %i[gitlab_main_clusterwide gitlab_shared] | %w[users detached_partitions] | false
%i[gitlab_main_clusterwide gitlab_main_cell] | %w[users namespaces] | false
%i[gitlab_main_clusterwide gitlab_main_cell] | %w[plans namespaces] | true
+ %i[gitlab_main_clusterwide gitlab_main_cell] | %w[users achievements] | false
+ %i[gitlab_main_clusterwide gitlab_main_cell] | %w[users agent_group_authorizations] | true
end
with_them do
diff --git a/spec/lib/gitlab/database/migration_helpers/v2_spec.rb b/spec/lib/gitlab/database/migration_helpers/v2_spec.rb
index 8b653e2d89d..afcec5ea214 100644
--- a/spec/lib/gitlab/database/migration_helpers/v2_spec.rb
+++ b/spec/lib/gitlab/database/migration_helpers/v2_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Gitlab::Database::MigrationHelpers::V2 do
+RSpec.describe Gitlab::Database::MigrationHelpers::V2, feature_category: :database do
include Database::TriggerHelpers
include Database::TableSchemaHelpers
@@ -59,7 +59,7 @@ RSpec.describe Gitlab::Database::MigrationHelpers::V2 do
context 'when the batch column does exist' do
it 'passes it when creating the column' do
expect(migration).to receive(:create_column_from)
- .with(:_test_table, existing_column, added_column, type: nil, batch_column_name: :status)
+ .with(:_test_table, existing_column, added_column, type: nil, batch_column_name: :status, type_cast_function: nil)
.and_call_original
migration.public_send(operation, :_test_table, :original, :renamed, batch_column_name: :status)
@@ -495,4 +495,83 @@ RSpec.describe Gitlab::Database::MigrationHelpers::V2 do
end
end
end
+
+ describe '#change_column_type_concurrently' do
+ let(:table_name) { :_test_change_column_type_concurrently }
+
+ before do
+ migration.connection.execute(<<~SQL)
+ DROP TABLE IF EXISTS #{table_name};
+ CREATE TABLE #{table_name} (
+ id serial NOT NULL PRIMARY KEY,
+ user_id bigint,
+ name character varying
+ );
+ /* at least one record for batching update */
+ INSERT INTO #{table_name} (id, user_id, name)
+ VALUES (1, 9, '{ \"lucky_number\": 8 }')
+ SQL
+ end
+
+ it 'adds a column of the new type and triggers to keep these two columns in sync' do
+ allow(migration).to receive(:transaction_open?).and_return(false)
+ recorder = ActiveRecord::QueryRecorder.new do
+ migration.change_column_type_concurrently(table_name, :name, :text)
+ end
+ expect(recorder.log).to include(/ALTER TABLE "_test_change_column_type_concurrently" ADD "name_for_type_change" text/)
+ expect(recorder.log).to include(/BEGIN\n IF NEW."name" IS NOT DISTINCT FROM NULL AND NEW."name_for_type_change" IS DISTINCT FROM NULL THEN\n NEW."name" = NEW."name_for_type_change";\n END IF;\n\n IF NEW."name_for_type_change" IS NOT DISTINCT FROM NULL AND NEW."name" IS DISTINCT FROM NULL THEN\n NEW."name_for_type_change" = NEW."name";\n END IF;\n\n RETURN NEW;\nEND/m)
+ expect(recorder.log).to include(/BEGIN\n NEW."name" := NEW."name_for_type_change";\n RETURN NEW;\nEND/m)
+ expect(recorder.log).to include(/BEGIN\n NEW."name_for_type_change" := NEW."name";\n RETURN NEW;\nEND/m)
+ expect(recorder.log).to include(/ON "_test_change_column_type_concurrently"\nFOR EACH ROW\sEXECUTE FUNCTION/m)
+ expect(recorder.log).to include(/UPDATE .* WHERE "_test_change_column_type_concurrently"."id" >= \d+/)
+ end
+
+ context 'with batch column name' do
+ it 'updates the new column using the batch column' do
+ allow(migration).to receive(:transaction_open?).and_return(false)
+ recorder = ActiveRecord::QueryRecorder.new do
+ migration.change_column_type_concurrently(table_name, :name, :text, batch_column_name: :user_id)
+ end
+ expect(recorder.log).to include(/UPDATE .* WHERE "_test_change_column_type_concurrently"."user_id" >= \d+/)
+ end
+ end
+
+ context 'with type cast function' do
+ it 'updates the new column with casting the value to the given type' do
+ allow(migration).to receive(:transaction_open?).and_return(false)
+ recorder = ActiveRecord::QueryRecorder.new do
+ migration.change_column_type_concurrently(table_name, :name, :text, type_cast_function: 'JSON')
+ end
+ expect(recorder.log).to include(/SET "name_for_type_change" = JSON\("_test_change_column_type_concurrently"\."name"\)/m)
+ end
+ end
+ end
+
+ describe '#undo_change_column_type_concurrently' do
+ let(:table_name) { :_test_undo_change_column_type_concurrently }
+
+ before do
+ migration.connection.execute(<<~SQL)
+ DROP TABLE IF EXISTS #{table_name};
+ CREATE TABLE #{table_name} (
+ id serial NOT NULL PRIMARY KEY,
+ user_id bigint,
+ name character varying
+ );
+ /* at least one record for batching update */
+ INSERT INTO #{table_name} (id, user_id, name)
+ VALUES (1, 9, 'For every young')
+ SQL
+ end
+
+ it 'undoes the column type change' do
+ allow(migration).to receive(:transaction_open?).and_return(false)
+ migration.change_column_type_concurrently(table_name, :name, :text)
+ recorder = ActiveRecord::QueryRecorder.new do
+ migration.undo_change_column_type_concurrently(table_name, :name)
+ end
+ expect(recorder.log).to include(/DROP TRIGGER IF EXISTS .+ON "_test_undo_change_column_type_concurrently"/m)
+ expect(recorder.log).to include(/ALTER TABLE "_test_undo_change_column_type_concurrently" DROP COLUMN "name_for_type_change"/)
+ end
+ end
end
diff --git a/spec/lib/gitlab/database/namespace_each_batch_spec.rb b/spec/lib/gitlab/database/namespace_each_batch_spec.rb
new file mode 100644
index 00000000000..23de19a6683
--- /dev/null
+++ b/spec/lib/gitlab/database/namespace_each_batch_spec.rb
@@ -0,0 +1,174 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Database::NamespaceEachBatch, feature_category: :database do
+ let_it_be(:group) { create(:group) }
+ let_it_be(:other_group) { create(:group) }
+ let_it_be(:user) { create(:user, :admin) }
+
+ let(:namespace_id) { group.id }
+
+ let_it_be(:subgroup1) { create(:group, parent: group) }
+ let_it_be(:subgroup2) { create(:group, parent: group) }
+
+ let_it_be(:subsubgroup1) { create(:group, parent: subgroup1) }
+ let_it_be(:subsubgroup2) { create(:group, parent: subgroup1) }
+ let_it_be(:subsubgroup3) { create(:group, parent: subgroup1) }
+
+ let_it_be(:project1) { create(:project, namespace: group) }
+ let_it_be(:project2) { create(:project, namespace: group) }
+ let_it_be(:project3) { create(:project, namespace: subsubgroup2) }
+ let_it_be(:project4) { create(:project, namespace: subsubgroup3) }
+ let_it_be(:project5) { create(:project, namespace: subsubgroup3) }
+
+ let(:namespace_class) { Namespace }
+ let(:batch_size) { 3 }
+
+ def collected_ids(cursor = { current_id: namespace_id, depth: [namespace_id] })
+ [].tap do |ids|
+ described_class.new(namespace_class: namespace_class, cursor: cursor).each_batch(of: batch_size) do |batch_ids|
+ ids.concat(batch_ids)
+ end
+ end
+ end
+
+ shared_examples 'iteration over the hierarchy' do
+ it 'returns the correct namespace ids' do
+ expect(collected_ids).to eq([
+ group.id,
+ subgroup1.id,
+ subsubgroup1.id,
+ subsubgroup2.id,
+ project3.project_namespace_id,
+ subsubgroup3.id,
+ project4.project_namespace_id,
+ project5.project_namespace_id,
+ subgroup2.id,
+ project1.project_namespace_id,
+ project2.project_namespace_id
+ ])
+ end
+ end
+
+ it_behaves_like 'iteration over the hierarchy'
+
+ context 'when batch size is larger than the hierarchy' do
+ let(:batch_size) { 100 }
+
+ it_behaves_like 'iteration over the hierarchy'
+ end
+
+ context 'when batch size is 1' do
+ let(:batch_size) { 1 }
+
+ it_behaves_like 'iteration over the hierarchy'
+ end
+
+ context 'when stopping the iteration in the middle and resuming' do
+ it 'returns the correct ids' do
+ ids = []
+ cursor = { current_id: namespace_id, depth: [namespace_id] }
+
+ iterator = described_class.new(namespace_class: namespace_class, cursor: cursor)
+ iterator.each_batch(of: 5) do |batch_ids, new_cursor|
+ ids.concat(batch_ids)
+ cursor = new_cursor
+ end
+
+ iterator = described_class.new(namespace_class: namespace_class, cursor: cursor)
+ iterator.each_batch(of: 500) do |batch_ids|
+ ids.concat(batch_ids)
+ end
+
+ expect(collected_ids).to eq([
+ group.id,
+ subgroup1.id,
+ subsubgroup1.id,
+ subsubgroup2.id,
+ project3.project_namespace_id,
+ subsubgroup3.id,
+ project4.project_namespace_id,
+ project5.project_namespace_id,
+ subgroup2.id,
+ project1.project_namespace_id,
+ project2.project_namespace_id
+ ])
+ end
+ end
+
+ context 'when querying a subgroup' do
+ let(:namespace_id) { subgroup1.id }
+
+ it 'returns the correct ids' do
+ expect(collected_ids).to eq([
+ subgroup1.id,
+ subsubgroup1.id,
+ subsubgroup2.id,
+ project3.project_namespace_id,
+ subsubgroup3.id,
+ project4.project_namespace_id,
+ project5.project_namespace_id
+ ])
+ end
+ end
+
+ context 'when querying a subgroup without descendants' do
+ let(:namespace_id) { subgroup2.id }
+
+ it 'finds only the given namespace id' do
+ expect(collected_ids).to eq([subgroup2.id])
+ end
+ end
+
+ context 'when batching over groups only' do
+ let(:namespace_class) { Group }
+
+ it 'returns the correct namespace ids' do
+ expect(collected_ids).to eq([
+ group.id,
+ subgroup1.id,
+ subsubgroup1.id,
+ subsubgroup2.id,
+ subsubgroup3.id,
+ subgroup2.id
+ ])
+ end
+ end
+
+ context 'when the cursor is invalid' do
+ context 'when non-integer current id is given' do
+ it 'raises error' do
+ cursor = { current_id: 'not int', depth: [group.id] }
+
+ expect { collected_ids(cursor) }.to raise_error(ArgumentError)
+ end
+ end
+
+ context 'when depth is not an array' do
+ it 'raises error' do
+ cursor = { current_id: group.id, depth: group.id }
+
+ expect { collected_ids(cursor) }.to raise_error(ArgumentError)
+ end
+ end
+
+ context 'when non-integer depth values are given' do
+ it 'raises error' do
+ cursor = { current_id: group.id, depth: ['not int'] }
+
+ expect { collected_ids(cursor) }.to raise_error(ArgumentError)
+ end
+ end
+
+ context 'when giving non-existing namespace id' do
+ it 'returns nothing', :enable_admin_mode do
+ cursor = { current_id: subgroup1.id, depth: [group.id, subgroup1.id] }
+
+ Groups::DestroyService.new(group, user).execute
+
+ expect(collected_ids(cursor)).to eq([])
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/database/no_new_tables_with_gitlab_main_schema_spec.rb b/spec/lib/gitlab/database/no_new_tables_with_gitlab_main_schema_spec.rb
index d1d7aa12c46..4fc62c6cc74 100644
--- a/spec/lib/gitlab/database/no_new_tables_with_gitlab_main_schema_spec.rb
+++ b/spec/lib/gitlab/database/no_new_tables_with_gitlab_main_schema_spec.rb
@@ -12,7 +12,8 @@ RSpec.describe 'new tables with gitlab_main schema', feature_category: :cell do
# Specific tables can be exempted from this requirement, and such tables must be added to the `exempted_tables` list.
let!(:exempted_tables) do
[
- "audit_events_instance_amazon_s3_configurations" # https://gitlab.com/gitlab-org/gitlab/-/issues/431327
+ "audit_events_instance_amazon_s3_configurations", # https://gitlab.com/gitlab-org/gitlab/-/issues/431327
+ "sbom_source_packages" # https://gitlab.com/gitlab-org/gitlab/-/issues/437718
]
end
diff --git a/spec/lib/gitlab/database/partitioning/int_range_partition_spec.rb b/spec/lib/gitlab/database/partitioning/int_range_partition_spec.rb
new file mode 100644
index 00000000000..eadae77bdc6
--- /dev/null
+++ b/spec/lib/gitlab/database/partitioning/int_range_partition_spec.rb
@@ -0,0 +1,173 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Database::Partitioning::IntRangePartition, feature_category: :database do
+ describe 'validate attributes' do
+ subject(:int_range_partition) { described_class.from_sql(table, partition_name, definition) }
+
+ let(:table) { 'foo' }
+ let(:partition_name) { 'foo_bar' }
+ let(:definition) { "FOR VALUES FROM ('1') TO ('10')" }
+
+ context 'when `from` is greater than `to`' do
+ let(:definition) { "FOR VALUES FROM ('10') TO ('1')" }
+
+ it 'raises an exception' do
+ expect { int_range_partition }.to raise_error(RuntimeError, '`to` must be greater than `from`')
+ end
+ end
+
+ context 'when `to` is 0' do
+ let(:definition) { "FOR VALUES FROM ('10') TO ('0')" }
+
+ it 'raises an exception' do
+ expect { int_range_partition }.to raise_error(RuntimeError, '`to` statement must be greater than 0')
+ end
+ end
+
+ context 'when `from` is 0' do
+ let(:definition) { "FOR VALUES FROM ('0') TO ('1')" }
+
+ it 'raises an exception' do
+ expect { int_range_partition }.to raise_error(RuntimeError, '`from` statement must be greater than 0')
+ end
+ end
+ end
+
+ describe '.from_sql' do
+ subject(:int_range_partition) { described_class.from_sql(table, partition_name, definition) }
+
+ let(:table) { 'foo' }
+ let(:partition_name) { 'foo_bar' }
+ let(:definition) { "FOR VALUES FROM ('1') TO ('10')" }
+
+ it 'uses specified table name' do
+ expect(int_range_partition.table).to eq(table)
+ end
+
+ it 'uses specified partition name' do
+ expect(int_range_partition.partition_name).to eq(partition_name)
+ end
+
+ it 'parses start date' do
+ expect(int_range_partition.from).to eq(1)
+ end
+
+ it 'parses end date' do
+ expect(int_range_partition.to).to eq(10)
+ end
+ end
+
+ describe '#partition_name' do
+ subject(:int_range_partition_name) do
+ described_class.new(table, from, to, partition_name: partition_name).partition_name
+ end
+
+ let(:table) { 'foo' }
+ let(:from) { '1' }
+ let(:to) { '10' }
+ let(:partition_name) { nil }
+
+ it 'uses table as prefix' do
+ expect(int_range_partition_name).to start_with(table)
+ end
+
+ it 'uses start id (from) as suffix' do
+ expect(int_range_partition_name).to end_with("_1")
+ end
+
+ context 'with partition name explicitly given' do
+ let(:partition_name) { "foo_bar" }
+
+ it 'uses given partition name' do
+ expect(int_range_partition_name).to eq(partition_name)
+ end
+ end
+ end
+
+ describe '#to_sql' do
+ subject(:to_sql) { described_class.new(table, from, to).to_sql }
+
+ let(:table) { 'foo' }
+ let(:from) { '1' }
+ let(:to) { '10' }
+
+ it 'transforms to a CREATE TABLE statement' do
+ expect(to_sql).to eq(<<~SQL)
+ CREATE TABLE IF NOT EXISTS "#{Gitlab::Database::DYNAMIC_PARTITIONS_SCHEMA}"."foo_1"
+ PARTITION OF "foo"
+ FOR VALUES FROM ('1') TO ('10')
+ SQL
+ end
+ end
+
+ describe 'object equality - #eql' do
+ def expect_inequality(actual, other)
+ expect(actual.eql?(other)).to be_falsey
+ expect(actual).not_to eq(other)
+ end
+
+ def expect_equality(actual, other)
+ expect(actual).to eq(other)
+ expect(actual.eql?(other)).to be_truthy
+ expect(actual.hash).to eq(other.hash)
+ end
+
+ def make_new(table: 'foo', from: '1', to: '10', partition_name: 'foo_1')
+ described_class.new(table, from, to, partition_name: partition_name)
+ end
+
+ it 'treats objects identical with identical attributes' do
+ expect_equality(make_new, make_new)
+ end
+
+ it 'different table leads to in-equality' do
+ expect_inequality(make_new, make_new(table: 'bar'))
+ end
+
+ it 'different from leads to in-equality' do
+ expect_inequality(make_new, make_new(from: '2'))
+ end
+
+ it 'different to leads to in-equality' do
+ expect_inequality(make_new, make_new(to: '11'))
+ end
+
+ it 'different partition_name leads to in-equality' do
+ expect_inequality(make_new, make_new(partition_name: 'different'))
+ end
+
+ it 'nil partition_name is ignored if auto-generated matches' do
+ expect_equality(make_new, make_new(partition_name: nil))
+ end
+ end
+
+ describe 'Comparable, #<=>' do
+ let(:table) { 'foo' }
+
+ it 'sorts by partition bounds' do
+ partitions = [
+ described_class.new(table, '100', '110', partition_name: 'p_100'),
+ described_class.new(table, '5', '10', partition_name: 'p_5'),
+ described_class.new(table, '10', '100', partition_name: 'p_10'),
+ described_class.new(table, '1', '5', partition_name: 'p_1')
+ ]
+
+ expect(partitions.sort).to eq(
+ [
+ described_class.new(table, '1', '5', partition_name: 'p_1'),
+ described_class.new(table, '5', '10', partition_name: 'p_5'),
+ described_class.new(table, '10', '100', partition_name: 'p_10'),
+ described_class.new(table, '100', '110', partition_name: 'p_100')
+ ])
+ end
+
+ it 'returns nil for partitions of different tables' do
+ one = described_class.new('foo', '1', '10')
+ two = described_class.new('bar', '1', '10')
+
+ expect(one.<=>(two)).to be_nil
+ end
+ end
+end
diff --git a/spec/lib/gitlab/database/partitioning/int_range_strategy_spec.rb b/spec/lib/gitlab/database/partitioning/int_range_strategy_spec.rb
new file mode 100644
index 00000000000..19937544393
--- /dev/null
+++ b/spec/lib/gitlab/database/partitioning/int_range_strategy_spec.rb
@@ -0,0 +1,317 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Database::Partitioning::IntRangeStrategy, feature_category: :database do
+ include Database::PartitioningHelpers
+
+ let(:connection) { ActiveRecord::Base.connection }
+ let(:model) do
+ Class.new(ActiveRecord::Base) do
+ self.table_name = '_test_partitioned_test'
+ end
+ end
+
+ after do
+ model.reset_column_information
+ end
+
+ describe '#current_partitions' do
+ subject(:current_partitions) { described_class.new(model, partitioning_key, partition_size: 10).current_partitions }
+
+ let(:partitioning_key) { double }
+ let(:table_name) { :_test_partitioned_test }
+
+ before do
+ connection.execute(<<~SQL)
+ CREATE TABLE #{table_name}
+ (id serial not null, external_id integer not null, PRIMARY KEY (id, external_id))
+ PARTITION BY RANGE (external_id);
+
+ CREATE TABLE #{Gitlab::Database::DYNAMIC_PARTITIONS_SCHEMA}._test_partitioned_test_1
+ PARTITION OF #{table_name}
+ FOR VALUES FROM ('1') TO ('5');
+
+ CREATE TABLE #{Gitlab::Database::DYNAMIC_PARTITIONS_SCHEMA}._test_partitioned_test_5
+ PARTITION OF #{table_name}
+ FOR VALUES FROM ('5') TO ('10');
+
+ CREATE TABLE #{Gitlab::Database::DYNAMIC_PARTITIONS_SCHEMA}._test_partitioned_test_10
+ PARTITION OF #{table_name}
+ FOR VALUES FROM ('10') TO ('100');
+
+ CREATE TABLE #{Gitlab::Database::DYNAMIC_PARTITIONS_SCHEMA}._test_partitioned_test_100
+ PARTITION OF #{table_name}
+ FOR VALUES FROM ('100') TO ('110');
+ SQL
+ end
+
+ it 'returns partitions order by range bound' do
+ expect(current_partitions).to eq(
+ [
+ Gitlab::Database::Partitioning::IntRangePartition.new(table_name, 1, 5,
+ partition_name: '_test_partitioned_test_1'),
+ Gitlab::Database::Partitioning::IntRangePartition.new(table_name, 5, 10,
+ partition_name: '_test_partitioned_test_5'),
+ Gitlab::Database::Partitioning::IntRangePartition.new(table_name, 10, 100,
+ partition_name: '_test_partitioned_test_10'),
+ Gitlab::Database::Partitioning::IntRangePartition.new(table_name, 100, 110,
+ partition_name: '_test_partitioned_test_100')
+ ])
+ end
+ end
+
+ describe '#extra_partitions' do
+ let(:partitioning_key) { double }
+ let(:table_name) { :_test_partitioned_test }
+
+ subject(:extra_partitions) { described_class.new(model, partitioning_key, partition_size: 10).extra_partitions }
+
+ it 'returns an empty array' do
+ expect(extra_partitions).to eq([])
+ end
+ end
+
+ describe '#missing_partitions' do
+ subject(:missing_partitions) { described_class.new(model, partitioning_key, partition_size: 10).missing_partitions }
+
+ let(:model) do
+ Class.new(ActiveRecord::Base) do
+ self.table_name = '_test_partitioned_test'
+ self.primary_key = :id
+ end
+ end
+
+ let(:partitioning_key) { :external_id }
+
+ before do
+ connection.execute(<<~SQL)
+ CREATE TABLE #{model.table_name}
+ (id serial not null, external_id integer not null, PRIMARY KEY (id, external_id))
+ PARTITION BY RANGE (external_id);
+ SQL
+ end
+
+ context 'when the current partitions are not completed' do
+ before do
+ connection.execute(<<~SQL)
+ CREATE TABLE #{Gitlab::Database::DYNAMIC_PARTITIONS_SCHEMA}._test_partitioned_test_11
+ PARTITION OF #{model.table_name}
+ FOR VALUES FROM ('11') TO ('21');
+ SQL
+ end
+
+ context 'when partitions have data' do
+ before do
+ model.create!(external_id: 15)
+ end
+
+ it 'returns missing partitions' do
+ expect(missing_partitions.size).to eq(7)
+
+ expect(missing_partitions).to include(
+ Gitlab::Database::Partitioning::IntRangePartition.new(model.table_name, 1, 11),
+ Gitlab::Database::Partitioning::IntRangePartition.new(model.table_name, 21, 31),
+ Gitlab::Database::Partitioning::IntRangePartition.new(model.table_name, 31, 41),
+ Gitlab::Database::Partitioning::IntRangePartition.new(model.table_name, 41, 51),
+ Gitlab::Database::Partitioning::IntRangePartition.new(model.table_name, 51, 61),
+ Gitlab::Database::Partitioning::IntRangePartition.new(model.table_name, 61, 71),
+ Gitlab::Database::Partitioning::IntRangePartition.new(model.table_name, 71, 81)
+ )
+
+ expect(missing_partitions).not_to include(
+ Gitlab::Database::Partitioning::IntRangePartition.new(model.table_name, 11, 21)
+ )
+ end
+ end
+
+ context 'when partitions are empty' do
+ before do
+ model.delete_all
+ end
+
+ it 'returns missing partitions' do
+ expect(missing_partitions.size).to eq(7)
+
+ expect(missing_partitions).to include(
+ Gitlab::Database::Partitioning::IntRangePartition.new(model.table_name, 1, 11),
+ Gitlab::Database::Partitioning::IntRangePartition.new(model.table_name, 21, 31),
+ Gitlab::Database::Partitioning::IntRangePartition.new(model.table_name, 31, 41),
+ Gitlab::Database::Partitioning::IntRangePartition.new(model.table_name, 41, 51),
+ Gitlab::Database::Partitioning::IntRangePartition.new(model.table_name, 51, 61),
+ Gitlab::Database::Partitioning::IntRangePartition.new(model.table_name, 61, 71),
+ Gitlab::Database::Partitioning::IntRangePartition.new(model.table_name, 71, 81)
+ )
+
+ expect(missing_partitions).not_to include(
+ Gitlab::Database::Partitioning::IntRangePartition.new(model.table_name, 11, 21)
+ )
+ end
+ end
+ end
+
+ context 'with existing partitions' do
+ before do
+ connection.execute(<<~SQL)
+ CREATE TABLE #{Gitlab::Database::DYNAMIC_PARTITIONS_SCHEMA}._test_partitioned_test_1
+ PARTITION OF #{model.table_name}
+ FOR VALUES FROM ('1') TO ('11');
+
+ CREATE TABLE #{Gitlab::Database::DYNAMIC_PARTITIONS_SCHEMA}._test_partitioned_test_11
+ PARTITION OF #{model.table_name}
+ FOR VALUES FROM ('11') TO ('21');
+ SQL
+ end
+
+ context 'when partitions have data' do
+ before do
+ model.create!(external_id: 1)
+ model.create!(external_id: 15)
+ end
+
+ it 'returns missing partitions' do
+ expect(missing_partitions.size).to eq(6)
+
+ expect(missing_partitions).to include(
+ Gitlab::Database::Partitioning::IntRangePartition.new(model.table_name, 21, 31),
+ Gitlab::Database::Partitioning::IntRangePartition.new(model.table_name, 31, 41),
+ Gitlab::Database::Partitioning::IntRangePartition.new(model.table_name, 41, 51),
+ Gitlab::Database::Partitioning::IntRangePartition.new(model.table_name, 51, 61),
+ Gitlab::Database::Partitioning::IntRangePartition.new(model.table_name, 61, 71),
+ Gitlab::Database::Partitioning::IntRangePartition.new(model.table_name, 71, 81)
+ )
+
+ expect(missing_partitions).not_to include(
+ Gitlab::Database::Partitioning::IntRangePartition.new(model.table_name, 1, 11),
+ Gitlab::Database::Partitioning::IntRangePartition.new(model.table_name, 11, 21)
+ )
+ end
+ end
+
+ context 'when partitions are empty' do
+ before do
+ model.delete_all
+ end
+
+ it 'returns missing partitions' do
+ expect(missing_partitions.size).to eq(6)
+
+ expect(missing_partitions).to include(
+ Gitlab::Database::Partitioning::IntRangePartition.new(model.table_name, 21, 31),
+ Gitlab::Database::Partitioning::IntRangePartition.new(model.table_name, 31, 41),
+ Gitlab::Database::Partitioning::IntRangePartition.new(model.table_name, 41, 51),
+ Gitlab::Database::Partitioning::IntRangePartition.new(model.table_name, 51, 61),
+ Gitlab::Database::Partitioning::IntRangePartition.new(model.table_name, 61, 71),
+ Gitlab::Database::Partitioning::IntRangePartition.new(model.table_name, 71, 81)
+ )
+
+ expect(missing_partitions).not_to include(
+ Gitlab::Database::Partitioning::IntRangePartition.new(model.table_name, 1, 11),
+ Gitlab::Database::Partitioning::IntRangePartition.new(model.table_name, 11, 21)
+ )
+ end
+ end
+ end
+
+ context 'without partitions' do
+ it 'returns missing partitions' do
+ expect(missing_partitions.size).to eq(6)
+
+ expect(missing_partitions).to include(
+ Gitlab::Database::Partitioning::IntRangePartition.new(model.table_name, 1, 11),
+ Gitlab::Database::Partitioning::IntRangePartition.new(model.table_name, 11, 21),
+ Gitlab::Database::Partitioning::IntRangePartition.new(model.table_name, 21, 31),
+ Gitlab::Database::Partitioning::IntRangePartition.new(model.table_name, 31, 41),
+ Gitlab::Database::Partitioning::IntRangePartition.new(model.table_name, 41, 51),
+ Gitlab::Database::Partitioning::IntRangePartition.new(model.table_name, 51, 61)
+ )
+ end
+ end
+ end
+
+ describe 'attributes' do
+ let(:partitioning_key) { :partition }
+ let(:table_name) { :_test_partitioned_test }
+ let(:partition_size) { 5 }
+
+ subject(:strategy) do
+ described_class.new(
+ model, partitioning_key,
+ partition_size: partition_size
+ )
+ end
+
+ specify do
+ expect(strategy).to have_attributes({
+ model: model,
+ partitioning_key: partitioning_key,
+ partition_size: partition_size
+ })
+ end
+ end
+
+ describe 'simulates the merge_request_diff_commits partition creation' do
+ let(:table_name) { '_test_partitioned_test' }
+ let(:model) do
+ Class.new(ApplicationRecord) do
+ include PartitionedTable
+
+ self.table_name = '_test_partitioned_test'
+ self.primary_key = :merge_request_diff_id
+
+ partitioned_by :merge_request_diff_id, strategy: :int_range, partition_size: 2
+ end
+ end
+
+ before do
+ connection.execute(<<~SQL)
+ create table #{table_name}
+ (
+ merge_request_diff_id int not null,
+ relative_order int not null,
+ created_at timestamptz,
+ primary key (merge_request_diff_id, relative_order)
+ )
+ PARTITION BY RANGE (merge_request_diff_id);
+
+ create table gitlab_partitions_dynamic.#{table_name}_1
+ PARTITION of #{table_name} FOR VALUES FROM (1) TO (3);
+
+ create table gitlab_partitions_dynamic.#{table_name}_3
+ PARTITION of #{table_name} FOR VALUES FROM (3) TO (5);
+ SQL
+ end
+
+ it 'redirects to the new partition', :aggregate_failures do
+ expect_range_partitions_for(table_name, {
+ '1' => %w[1 3],
+ '3' => %w[3 5]
+ })
+
+ expect do
+ model.create!(merge_request_diff_id: 1, relative_order: 1, created_at: Time.zone.now) # Goes in partition 1
+ end.to change { model.count }.by(1)
+
+ expect do
+ model.create!(merge_request_diff_id: 5, relative_order: 1, created_at: Time.zone.now)
+ end.to raise_error(ActiveRecord::StatementInvalid, /no partition of relation/)
+
+ Gitlab::Database::Partitioning::PartitionManager.new(model).sync_partitions # Generates more 6 partitions
+
+ expect_range_partitions_for(table_name, {
+ '1' => %w[1 3],
+ '3' => %w[3 5],
+ '5' => %w[5 7],
+ '7' => %w[7 9],
+ '9' => %w[9 11],
+ '11' => %w[11 13],
+ '13' => %w[13 15],
+ '15' => %w[15 17]
+ })
+
+ expect do
+ model.create!(merge_request_diff_id: 5, relative_order: 1, created_at: Time.zone.now) # Goes in partition 5
+ end.to change { model.count }.by(1)
+ end
+ end
+end
diff --git a/spec/lib/gitlab/database/partitioning/list/convert_table_spec.rb b/spec/lib/gitlab/database/partitioning/list/convert_table_spec.rb
index b30501cce21..e0b090f7ff9 100644
--- a/spec/lib/gitlab/database/partitioning/list/convert_table_spec.rb
+++ b/spec/lib/gitlab/database/partitioning/list/convert_table_spec.rb
@@ -15,7 +15,7 @@ RSpec.describe Gitlab::Database::Partitioning::List::ConvertTable, feature_categ
table_name: table_name,
partitioning_column: partitioning_column,
parent_table_name: parent_table_name,
- zero_partition_value: partitioning_default
+ zero_partition_value: zero_partition_value
)
end
@@ -24,107 +24,121 @@ RSpec.describe Gitlab::Database::Partitioning::List::ConvertTable, feature_categ
let(:async) { false }
- it 'adds a check constraint' do
- expect { prepare }.to change {
- Gitlab::Database::PostgresConstraint
- .check_constraints
- .by_table_identifier(table_identifier)
- .count
- }.from(0).to(1)
- end
-
- context 'when it fails to add constraint' do
- before do
- allow(migration_context).to receive(:add_check_constraint)
- end
-
- it 'raises UnableToPartition error' do
- expect { prepare }
- .to raise_error(described_class::UnableToPartition)
- .and change {
- Gitlab::Database::PostgresConstraint
- .check_constraints
- .by_table_identifier(table_identifier)
- .count
- }.by(0)
- end
- end
-
- context 'when async' do
- let(:async) { true }
-
- it 'adds a NOT VALID check constraint' do
+ shared_examples 'runs #prepare_for_partitioning' do
+ it 'adds a check constraint' do
expect { prepare }.to change {
Gitlab::Database::PostgresConstraint
.check_constraints
.by_table_identifier(table_identifier)
.count
}.from(0).to(1)
+ end
- constraint =
- Gitlab::Database::PostgresConstraint
- .check_constraints
- .by_table_identifier(table_identifier)
- .last
+ context 'when it fails to add constraint' do
+ before do
+ allow(migration_context).to receive(:add_check_constraint)
+ end
- expect(constraint.definition).to end_with('NOT VALID')
+ it 'raises UnableToPartition error' do
+ expect { prepare }
+ .to raise_error(described_class::UnableToPartition)
+ .and change {
+ Gitlab::Database::PostgresConstraint
+ .check_constraints
+ .by_table_identifier(table_identifier)
+ .count
+ }.by(0)
+ end
end
- it 'adds a PostgresAsyncConstraintValidation record' do
- expect { prepare }.to change {
- Gitlab::Database::AsyncConstraints::PostgresAsyncConstraintValidation.count
- }.by(1)
+ context 'when async' do
+ let(:async) { true }
- record = Gitlab::Database::AsyncConstraints::PostgresAsyncConstraintValidation
- .where(table_name: table_name).last
+ it 'adds a NOT VALID check constraint' do
+ expect { prepare }.to change {
+ Gitlab::Database::PostgresConstraint
+ .check_constraints
+ .by_table_identifier(table_identifier)
+ .count
+ }.from(0).to(1)
- expect(record.name).to eq described_class::PARTITIONING_CONSTRAINT_NAME
- expect(record).to be_check_constraint
- end
+ constraint =
+ Gitlab::Database::PostgresConstraint
+ .check_constraints
+ .by_table_identifier(table_identifier)
+ .last
- context 'when constraint exists but is not valid' do
- before do
- converter.prepare_for_partitioning(async: true)
+ expect(constraint.definition).to end_with('NOT VALID')
end
- it 'validates the check constraint' do
+ it 'adds a PostgresAsyncConstraintValidation record' do
expect { prepare }.to change {
- Gitlab::Database::PostgresConstraint
- .check_constraints
- .by_table_identifier(table_identifier).first.constraint_valid?
- }.from(false).to(true)
+ Gitlab::Database::AsyncConstraints::PostgresAsyncConstraintValidation.count
+ }.by(1)
+
+ record = Gitlab::Database::AsyncConstraints::PostgresAsyncConstraintValidation
+ .where(table_name: table_name).last
+
+ expect(record.name).to eq described_class::PARTITIONING_CONSTRAINT_NAME
+ expect(record).to be_check_constraint
end
- context 'when it fails to validate constraint' do
+ context 'when constraint exists but is not valid' do
before do
- allow(migration_context).to receive(:validate_check_constraint)
+ converter.prepare_for_partitioning(async: true)
end
- it 'raises UnableToPartition error' do
- expect { prepare }
- .to raise_error(described_class::UnableToPartition,
- starting_with('Error validating partitioning constraint'))
- .and change {
- Gitlab::Database::PostgresConstraint
- .check_constraints
- .by_table_identifier(table_identifier)
- .count
- }.by(0)
+ it 'validates the check constraint' do
+ expect { prepare }.to change {
+ Gitlab::Database::PostgresConstraint
+ .check_constraints
+ .by_table_identifier(table_identifier).first.constraint_valid?
+ }.from(false).to(true)
end
- end
- end
- context 'when constraint exists and is valid' do
- before do
- converter.prepare_for_partitioning(async: false)
+ context 'when it fails to validate constraint' do
+ before do
+ allow(migration_context).to receive(:validate_check_constraint)
+ end
+
+ it 'raises UnableToPartition error' do
+ expect { prepare }
+ .to raise_error(described_class::UnableToPartition,
+ starting_with('Error validating partitioning constraint'))
+ .and change {
+ Gitlab::Database::PostgresConstraint
+ .check_constraints
+ .by_table_identifier(table_identifier)
+ .count
+ }.by(0)
+ end
+ end
end
- it 'raises UnableToPartition error' do
- expect(Gitlab::AppLogger).to receive(:info).with(starting_with('Nothing to do'))
- prepare
+ context 'when constraint exists and is valid' do
+ before do
+ converter.prepare_for_partitioning(async: false)
+ end
+
+ it 'raises UnableToPartition error' do
+ expect(Gitlab::AppLogger).to receive(:info).with(starting_with('Nothing to do'))
+ prepare
+ end
end
end
end
+
+ context 'when a single partitioning value is given' do
+ let(:zero_partition_value) { single_partitioning_value }
+
+ include_examples 'runs #prepare_for_partitioning'
+ end
+
+ context 'when multiple partitioning values are given' do
+ let(:zero_partition_value) { multiple_partitioning_values }
+
+ include_examples 'runs #prepare_for_partitioning'
+ end
end
describe '#revert_preparation_for_partitioning' do
@@ -132,15 +146,29 @@ RSpec.describe Gitlab::Database::Partitioning::List::ConvertTable, feature_categ
converter.prepare_for_partitioning
end
+ shared_examples 'runs #revert_preparation_for_partitioning' do
+ it 'removes a check constraint' do
+ expect { revert_prepare }.to change {
+ Gitlab::Database::PostgresConstraint
+ .check_constraints
+ .by_table_identifier("#{connection.current_schema}.#{table_name}")
+ .count
+ }.from(1).to(0)
+ end
+ end
+
subject(:revert_prepare) { converter.revert_preparation_for_partitioning }
- it 'removes a check constraint' do
- expect { revert_prepare }.to change {
- Gitlab::Database::PostgresConstraint
- .check_constraints
- .by_table_identifier("#{connection.current_schema}.#{table_name}")
- .count
- }.from(1).to(0)
+ context 'when a single partitioning value is given' do
+ let(:zero_partition_value) { single_partitioning_value }
+
+ include_examples 'runs #revert_preparation_for_partitioning'
+ end
+
+ context 'when multiple partitioning values are given' do
+ let(:zero_partition_value) { multiple_partitioning_values }
+
+ include_examples 'runs #revert_preparation_for_partitioning'
end
end
@@ -153,128 +181,146 @@ RSpec.describe Gitlab::Database::Partitioning::List::ConvertTable, feature_categ
converter.prepare_for_partitioning(async: async)
end
- context 'when the primary key is incorrect' do
- before do
- connection.execute(<<~SQL)
- alter table #{referencing_table_name} drop constraint fk_referencing; -- this depends on the primary key
- alter table #{other_referencing_table_name} drop constraint fk_referencing_other; -- this does too
- alter table #{table_name} drop constraint #{table_name}_pkey;
- alter table #{table_name} add constraint #{table_name}_pkey PRIMARY KEY (id);
- SQL
- end
+ shared_examples 'runs partition method' do
+ context 'when the primary key is incorrect' do
+ before do
+ connection.execute(<<~SQL)
+ alter table #{referencing_table_name} drop constraint fk_referencing; -- this depends on the primary key
+ alter table #{other_referencing_table_name} drop constraint fk_referencing_other; -- this does too
+ alter table #{table_name} drop constraint #{table_name}_pkey;
+ alter table #{table_name} add constraint #{table_name}_pkey PRIMARY KEY (id);
+ SQL
+ end
- it 'throws a reasonable error message' do
- expect { partition }.to raise_error(described_class::UnableToPartition, /#{partitioning_column}/)
+ it 'throws a reasonable error message' do
+ expect { partition }.to raise_error(described_class::UnableToPartition, /#{partitioning_column}/)
+ end
end
- end
- context 'when there is not a supporting check constraint' do
- before do
- connection.execute(<<~SQL)
- alter table #{table_name} drop constraint partitioning_constraint;
- SQL
- end
+ context 'when there is not a supporting check constraint' do
+ before do
+ connection.execute(<<~SQL)
+ alter table #{table_name} drop constraint partitioning_constraint;
+ SQL
+ end
- it 'throws a reasonable error message' do
- expect { partition }.to raise_error(described_class::UnableToPartition, /is not ready for partitioning./)
+ it 'throws a reasonable error message' do
+ expect { partition }.to raise_error(described_class::UnableToPartition, /is not ready for partitioning./)
+ end
end
- end
- context 'when supporting check constraint is not valid' do
- let(:async) { true }
+ context 'when supporting check constraint is not valid' do
+ let(:async) { true }
- it 'throws a reasonable error message' do
- expect { partition }.to raise_error(described_class::UnableToPartition, /is not ready for partitioning./)
+ it 'throws a reasonable error message' do
+ expect { partition }.to raise_error(described_class::UnableToPartition, /is not ready for partitioning./)
+ end
end
- end
-
- it 'migrates the table to a partitioned table' do
- fks_before = migration_context.foreign_keys(table_name)
-
- partition
- expect(Gitlab::Database::PostgresPartition.for_parent_table(parent_table_name).count).to eq(1)
- expect(migration_context.foreign_keys(parent_table_name).map(&:options)).to match_array(fks_before.map(&:options))
+ it 'migrates the table to a partitioned table' do
+ fks_before = migration_context.foreign_keys(table_name)
- connection.execute(<<~SQL)
- insert into #{table_name} (referenced_id, other_referenced_id) select #{referenced_table_name}.id, #{other_referenced_table_name}.id from #{referenced_table_name}, #{other_referenced_table_name};
- SQL
+ partition
- # Create a second partition
- connection.execute(<<~SQL)
- create table #{table_name}2 partition of #{parent_table_name} FOR VALUES IN (2)
- SQL
+ expect(Gitlab::Database::PostgresPartition.for_parent_table(parent_table_name).count).to eq(1)
+ expect(migration_context.foreign_keys(parent_table_name)
+ .map(&:options)).to match_array(fks_before.map(&:options))
- parent_model.create!(partitioning_column => 2, :referenced_id => 1, :other_referenced_id => 1)
- expect(parent_model.pluck(:id)).to match_array([1, 2, 3])
-
- expect { referencing_model.create!(partitioning_column => 1, :ref_id => 1) }.not_to raise_error
- end
+ connection.execute(<<~SQL)
+ insert into #{table_name} (referenced_id, other_referenced_id) select #{referenced_table_name}.id, #{other_referenced_table_name}.id from #{referenced_table_name}, #{other_referenced_table_name};
+ SQL
- context 'when the existing table is owned by a different user' do
- before do
+ # Create a second partition
connection.execute(<<~SQL)
- CREATE USER other_user SUPERUSER;
- ALTER TABLE #{table_name} OWNER TO other_user;
+ create table #{table_name}2 partition of #{parent_table_name} FOR VALUES IN (2)
SQL
- end
- let(:current_user) { model.connection.select_value('select current_user') }
+ parent_model.create!(partitioning_column => 2, :referenced_id => 1, :other_referenced_id => 1)
+ expect(parent_model.pluck(:id)).to match_array([1, 2, 3])
- it 'partitions without error' do
- expect { partition }.not_to raise_error
+ expect { referencing_model.create!(partitioning_column => 1, :ref_id => 1) }.not_to raise_error
end
- end
- context 'when an error occurs during the conversion' do
- before do
- # Set up the fault that we'd like to inject
- fault.call
- end
+ context 'when the existing table is owned by a different user' do
+ before do
+ connection.execute(<<~SQL)
+ CREATE USER other_user SUPERUSER;
+ ALTER TABLE #{table_name} OWNER TO other_user;
+ SQL
+ end
- let(:old_fks) do
- Gitlab::Database::PostgresForeignKey.by_referenced_table_identifier(table_identifier).not_inherited
- end
+ let(:current_user) { model.connection.select_value('select current_user') }
- let(:new_fks) do
- Gitlab::Database::PostgresForeignKey.by_referenced_table_identifier(parent_table_identifier).not_inherited
+ it 'partitions without error' do
+ expect { partition }.not_to raise_error
+ end
end
- context 'when partitioning fails the first time' do
- where(:case_name, :fault) do
- [
- ["creating parent table", lazy { fail_sql_matching(/CREATE/i) }],
- ["adding the first foreign key", lazy { fail_adding_fk(parent_table_name, referenced_table_name) }],
- ["adding the second foreign key", lazy { fail_adding_fk(parent_table_name, other_referenced_table_name) }],
- ["attaching table", lazy { fail_sql_matching(/ATTACH/i) }]
- ]
+ context 'when an error occurs during the conversion' do
+ before do
+ # Set up the fault that we'd like to inject
+ fault.call
end
- with_them do
- it 'recovers from a fault', :aggregate_failures do
- expect { converter.partition }.to raise_error(/fault/)
+ let(:old_fks) do
+ Gitlab::Database::PostgresForeignKey.by_referenced_table_identifier(table_identifier).not_inherited
+ end
- expect { converter.partition }.not_to raise_error
- expect(Gitlab::Database::PostgresPartition.for_parent_table(parent_table_name).count).to eq(1)
+ let(:new_fks) do
+ Gitlab::Database::PostgresForeignKey.by_referenced_table_identifier(parent_table_identifier).not_inherited
+ end
+
+ context 'when partitioning fails the first time' do
+ where(:case_name, :fault) do
+ [
+ ["creating parent table", lazy { fail_sql_matching(/CREATE/i) }],
+ ["adding the first foreign key", lazy { fail_adding_fk(parent_table_name, referenced_table_name) }],
+ ["adding the second foreign key", lazy do
+ fail_adding_fk(parent_table_name, other_referenced_table_name)
+ end],
+ ["attaching table", lazy { fail_sql_matching(/ATTACH/i) }]
+ ]
+ end
+
+ with_them do
+ it 'recovers from a fault', :aggregate_failures do
+ expect { converter.partition }.to raise_error(/fault/)
+
+ expect { converter.partition }.not_to raise_error
+ expect(Gitlab::Database::PostgresPartition.for_parent_table(parent_table_name).count).to eq(1)
+ end
end
end
end
- end
- context 'when table has LFK triggers' do
- before do
- migration_context.track_record_deletions(table_name)
- end
+ context 'when table has LFK triggers' do
+ before do
+ migration_context.track_record_deletions(table_name)
+ end
- it 'moves the trigger on the parent table', :aggregate_failures do
- expect(migration_context.has_loose_foreign_key?(table_name)).to be_truthy
+ it 'moves the trigger on the parent table', :aggregate_failures do
+ expect(migration_context.has_loose_foreign_key?(table_name)).to be_truthy
- expect { partition }.not_to raise_error
+ expect { partition }.not_to raise_error
- expect(migration_context.has_loose_foreign_key?(table_name)).to be_truthy
- expect(migration_context.has_loose_foreign_key?(parent_table_name)).to be_truthy
+ expect(migration_context.has_loose_foreign_key?(table_name)).to be_truthy
+ expect(migration_context.has_loose_foreign_key?(parent_table_name)).to be_truthy
+ end
end
end
+
+ context 'when a single partitioning value is given' do
+ let(:zero_partition_value) { single_partitioning_value }
+
+ include_examples 'runs partition method'
+ end
+
+ context 'when multiple partitioning values are given' do
+ # Because of the common spec on line 220
+ let(:zero_partition_value) { [1, 3, 4] }
+
+ include_examples 'runs partition method'
+ end
end
describe '#revert_partitioning' do
@@ -285,49 +331,67 @@ RSpec.describe Gitlab::Database::Partitioning::List::ConvertTable, feature_categ
subject(:revert_conversion) { converter.revert_partitioning }
- it 'detaches the partition' do
- expect { revert_conversion }.to change {
- Gitlab::Database::PostgresPartition
- .for_parent_table(parent_table_name).count
- }.from(1).to(0)
- end
-
- it 'does not drop the child partition' do
- expect { revert_conversion }.not_to change { table_oid(table_name) }
- end
+ shared_examples 'runs #revert_partitioning' do
+ it 'detaches the partition' do
+ expect { revert_conversion }.to change {
+ Gitlab::Database::PostgresPartition
+ .for_parent_table(parent_table_name).count
+ }.from(1).to(0)
+ end
- it 'removes the parent table' do
- expect { revert_conversion }.to change { table_oid(parent_table_name).present? }.from(true).to(false)
- end
+ it 'does not drop the child partition' do
+ expect { revert_conversion }.not_to change { table_oid(table_name) }
+ end
- it 're-adds the check constraint' do
- expect { revert_conversion }.to change {
- Gitlab::Database::PostgresConstraint
- .check_constraints
- .by_table_identifier(table_identifier)
- .count
- }.by(1)
- end
+ it 'removes the parent table' do
+ expect { revert_conversion }.to change { table_oid(parent_table_name).present? }.from(true).to(false)
+ end
- it 'moves sequences back to the original table' do
- expect { revert_conversion }.to change { converter.send(:sequences_owned_by, table_name).count }.from(0)
- .and change { converter.send(:sequences_owned_by, parent_table_name).count }.to(0)
- end
+ it 're-adds the check constraint' do
+ expect { revert_conversion }.to change {
+ Gitlab::Database::PostgresConstraint
+ .check_constraints
+ .by_table_identifier(table_identifier)
+ .count
+ }.by(1)
+ end
- context 'when table has LFK triggers' do
- before do
- migration_context.track_record_deletions(parent_table_name)
- migration_context.track_record_deletions(table_name)
+ it 'moves sequences back to the original table' do
+ expect { revert_conversion }.to change { converter.send(:sequences_owned_by, table_name).count }
+ .from(0)
+ .and change {
+ converter.send(
+ :sequences_owned_by, parent_table_name).count
+ }.to(0)
end
- it 'restores the trigger on the partition', :aggregate_failures do
- expect(migration_context.has_loose_foreign_key?(table_name)).to be_truthy
- expect(migration_context.has_loose_foreign_key?(parent_table_name)).to be_truthy
+ context 'when table has LFK triggers' do
+ before do
+ migration_context.track_record_deletions(parent_table_name)
+ migration_context.track_record_deletions(table_name)
+ end
+
+ it 'restores the trigger on the partition', :aggregate_failures do
+ expect(migration_context.has_loose_foreign_key?(table_name)).to be_truthy
+ expect(migration_context.has_loose_foreign_key?(parent_table_name)).to be_truthy
- expect { revert_conversion }.not_to raise_error
+ expect { revert_conversion }.not_to raise_error
- expect(migration_context.has_loose_foreign_key?(table_name)).to be_truthy
+ expect(migration_context.has_loose_foreign_key?(table_name)).to be_truthy
+ end
end
end
+
+ context 'when a single partitioning value is given' do
+ let(:zero_partition_value) { single_partitioning_value }
+
+ include_examples 'runs #revert_partitioning'
+ end
+
+ context 'when multiple partitioning values are given' do
+ let(:zero_partition_value) { multiple_partitioning_values }
+
+ include_examples 'runs #revert_partitioning'
+ end
end
end
diff --git a/spec/lib/gitlab/database/partitioning/sliding_list_strategy_spec.rb b/spec/lib/gitlab/database/partitioning/sliding_list_strategy_spec.rb
index 9ca0a1b6e57..f4a4b0f002c 100644
--- a/spec/lib/gitlab/database/partitioning/sliding_list_strategy_spec.rb
+++ b/spec/lib/gitlab/database/partitioning/sliding_list_strategy_spec.rb
@@ -14,6 +14,10 @@ RSpec.describe Gitlab::Database::Partitioning::SlidingListStrategy, feature_cate
let(:next_partition_if) { double('next_partition_if') }
let(:detach_partition_if) { double('detach_partition_if') }
+ after do
+ model.reset_column_information
+ end
+
subject(:strategy) do
described_class.new(
model,
diff --git a/spec/lib/gitlab/database/partitioning_migration_helpers/uniqueness_helpers_spec.rb b/spec/lib/gitlab/database/partitioning_migration_helpers/uniqueness_helpers_spec.rb
new file mode 100644
index 00000000000..230847f6902
--- /dev/null
+++ b/spec/lib/gitlab/database/partitioning_migration_helpers/uniqueness_helpers_spec.rb
@@ -0,0 +1,71 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Database::PartitioningMigrationHelpers::UniquenessHelpers, feature_category: :database do
+ let(:migration) do
+ ActiveRecord::Migration.new.extend(described_class)
+ end
+
+ let(:connection) { ActiveRecord::Base.connection }
+ let(:table_not_partitioned) { '_test_not_partitioned_table' }
+ let(:table_partitioned) { '_test_partitioned_table' }
+
+ before do
+ connection.execute(<<~SQL)
+ CREATE TABLE _test_partitioned_table
+ (
+ id serial NOT NULL,
+ PARTITION bigint NULL DEFAULT 1,
+ PRIMARY KEY (id, partition)
+ ) PARTITION BY list(partition);
+
+ CREATE TABLE _test_partitioned_table_1
+ PARTITION OF _test_partitioned_table FOR VALUES IN (1);
+ SQL
+ end
+
+ describe '#ensure_unique_id' do
+ subject(:ensure_unique_id) { migration.ensure_unique_id(table_name) }
+
+ context 'when table is partitioned' do
+ let(:table_name) { table_partitioned }
+ let(:trigger_name) { "assign_#{table_name}_id_trigger" }
+ let(:function_name) { "assign_#{table_name}_id_value" }
+
+ context 'when trigger already exists' do
+ before do
+ allow(migration).to receive(:trigger_exists?)
+ .with(table_name, trigger_name)
+ .and_return(true)
+ end
+
+ it 'does not modify existing trigger' do
+ expect(migration).not_to receive(:change_column_default)
+ expect(migration).not_to receive(:create_trigger_function)
+ expect(migration).not_to receive(:create_trigger)
+
+ expect do
+ ensure_unique_id
+ end.not_to raise_error
+ end
+ end
+
+ context 'when trigger is not defined' do
+ it 'creates trigger', :aggregate_failures do
+ expect(migration).to receive(:change_column_default).with(table_name, :id, nil).and_call_original
+ expect(migration).to receive(:create_trigger_function).with(function_name).and_call_original
+ expect(migration).to receive(:create_trigger)
+ .with(table_name, trigger_name, function_name, fires: 'BEFORE INSERT')
+ .and_call_original
+
+ expect do
+ ensure_unique_id
+ end.not_to raise_error
+
+ expect(migration.trigger_exists?(table_name, trigger_name)).to eq(true)
+ end
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/database/query_analyzers/prevent_cross_database_modification_spec.rb b/spec/lib/gitlab/database/query_analyzers/prevent_cross_database_modification_spec.rb
index 3ccdb907cba..d5584342dd5 100644
--- a/spec/lib/gitlab/database/query_analyzers/prevent_cross_database_modification_spec.rb
+++ b/spec/lib/gitlab/database/query_analyzers/prevent_cross_database_modification_spec.rb
@@ -239,7 +239,7 @@ RSpec.describe Gitlab::Database::QueryAnalyzers::PreventCrossDatabaseModificatio
end
end
- context 'when uniquiness validation is tested', type: :model do
+ context 'when uniqueness validation is tested', type: :model do
subject { build(:ci_variable) }
it 'does not raise exceptions' do
diff --git a/spec/lib/gitlab/database/sharding_key_spec.rb b/spec/lib/gitlab/database/sharding_key_spec.rb
index b47f5ea5df0..67c1422af3c 100644
--- a/spec/lib/gitlab/database/sharding_key_spec.rb
+++ b/spec/lib/gitlab/database/sharding_key_spec.rb
@@ -21,6 +21,17 @@ RSpec.describe 'new tables missing sharding_key', feature_category: :cell do
]
end
+ # Some reasons to exempt a table:
+ # 1. It has no foreign key for performance reasons
+ # 2. It does not yet have a foreign key as the index is still being backfilled
+ let(:allowed_to_be_missing_foreign_key) do
+ [
+ 'p_catalog_resource_sync_events.project_id',
+ 'zoekt_indices.namespace_id',
+ 'namespace_descendants.namespace_id'
+ ]
+ end
+
let(:starting_from_milestone) { 16.6 }
let(:allowed_sharding_key_referenced_tables) { %w[projects namespaces organizations] }
@@ -38,6 +49,19 @@ RSpec.describe 'new tables missing sharding_key', feature_category: :cell do
expect(column_exists?(table_name, column_name)).to eq(true),
"Could not find sharding key column #{table_name}.#{column_name}"
expect(referenced_table_name).to be_in(allowed_sharding_key_referenced_tables)
+
+ if allowed_to_be_missing_foreign_key.include?("#{table_name}.#{column_name}")
+ expect(has_foreign_key?(table_name, column_name)).to eq(false),
+ "The column `#{table_name}.#{column_name}` has a foreign key so cannot be " \
+ "allowed_to_be_missing_foreign_key. " \
+ "If this is a foreign key referencing the specified table #{referenced_table_name} " \
+ "then you must remove it from allowed_to_be_missing_foreign_key"
+ else
+ expect(has_foreign_key?(table_name, column_name, to_table_name: referenced_table_name)).to eq(true),
+ "Missing a foreign key constraint for `#{table_name}.#{column_name}` " \
+ "referencing #{referenced_table_name}. " \
+ "All sharding keys must have a foreign key constraint"
+ end
end
end
end
@@ -137,6 +161,25 @@ RSpec.describe 'new tables missing sharding_key', feature_category: :cell do
result.count > 0
end
+ def has_foreign_key?(from_table_name, column_name, to_table_name: nil)
+ where_clause = {
+ constrained_table_name: from_table_name,
+ constrained_columns: [column_name]
+ }
+
+ where_clause[:referenced_table_name] = to_table_name if to_table_name
+
+ fk = ::Gitlab::Database::PostgresForeignKey.where(where_clause).first
+
+ lfk = ::Gitlab::Database::LooseForeignKeys.definitions.find do |d|
+ d.from_table == from_table_name &&
+ (to_table_name.nil? || d.to_table == to_table_name) &&
+ d.options[:column] == column_name
+ end
+
+ fk.present? || lfk.present?
+ end
+
def column_exists?(table_name, column_name)
sql = <<~SQL
SELECT 1
diff --git a/spec/lib/gitlab/dependency_linker/package_json_linker_spec.rb b/spec/lib/gitlab/dependency_linker/package_json_linker_spec.rb
index 127f437dd54..e3cddceb7a9 100644
--- a/spec/lib/gitlab/dependency_linker/package_json_linker_spec.rb
+++ b/spec/lib/gitlab/dependency_linker/package_json_linker_spec.rb
@@ -100,5 +100,21 @@ RSpec.describe Gitlab::DependencyLinker::PackageJsonLinker do
it 'does not link scripts with the same key as a package' do
expect(subject).not_to include(link('karma start config/karma.config.js --single-run', 'https://github.com/karma start config/karma.config.js --single-run'))
end
+
+ context 'when dependency is not a string' do
+ let(:file_content) do
+ <<-CONTENT.strip_heredoc
+ {
+ "dependencies": {
+ "wrong": {}
+ }
+ }
+ CONTENT
+ end
+
+ it 'does not link it' do
+ expect(subject).not_to include(%(<a href))
+ end
+ end
end
end
diff --git a/spec/lib/gitlab/dependency_linker_spec.rb b/spec/lib/gitlab/dependency_linker_spec.rb
index 8feab0f8017..4da0b9d8c0d 100644
--- a/spec/lib/gitlab/dependency_linker_spec.rb
+++ b/spec/lib/gitlab/dependency_linker_spec.rb
@@ -1,6 +1,6 @@
# frozen_string_literal: true
-require 'fast_spec_helper'
+require 'spec_helper'
RSpec.describe Gitlab::DependencyLinker do
describe '.link' do
@@ -107,5 +107,16 @@ RSpec.describe Gitlab::DependencyLinker do
described_class.link(blob_name, nil, nil)
end
+
+ it 'increments usage counter based on specified used_on', :prometheus do
+ allow(described_class::GemfileLinker).to receive(:link)
+
+ described_class.link('Gemfile', nil, nil, used_on: :diff)
+
+ dependency_linker_usage_counter = Gitlab::Metrics.registry.get(:dependency_linker_usage)
+
+ expect(dependency_linker_usage_counter.get(used_on: :diff)).to eq(1)
+ expect(dependency_linker_usage_counter.get(used_on: :blob)).to eq(0)
+ end
end
end
diff --git a/spec/lib/gitlab/diff/file_collection/paginated_merge_request_diff_spec.rb b/spec/lib/gitlab/diff/file_collection/paginated_merge_request_diff_spec.rb
index 891336658ce..6c9a5341695 100644
--- a/spec/lib/gitlab/diff/file_collection/paginated_merge_request_diff_spec.rb
+++ b/spec/lib/gitlab/diff/file_collection/paginated_merge_request_diff_spec.rb
@@ -9,9 +9,10 @@ RSpec.describe Gitlab::Diff::FileCollection::PaginatedMergeRequestDiff, feature_
let(:diffable) { merge_request.merge_request_diff }
let(:diff_files_relation) { diffable.merge_request_diff_files }
let(:diff_files) { subject.diff_files }
+ let(:diff_options) { {} }
subject do
- described_class.new(diffable, page, per_page)
+ described_class.new(diffable, page, per_page, diff_options)
end
describe '#diff_files' do
@@ -77,18 +78,32 @@ RSpec.describe Gitlab::Diff::FileCollection::PaginatedMergeRequestDiff, feature_
context 'when last page' do
it 'returns correct diff files' do
last_page = diff_files_relation.count - per_page
- collection = described_class.new(diffable, last_page, per_page)
+ collection = described_class.new(diffable, last_page, per_page, diff_options)
expected_batch_files = diff_files_relation.page(last_page).per(per_page).map(&:new_path)
expect(collection.diff_files.map(&:new_path)).to eq(expected_batch_files)
end
end
+
+ context 'when collapse_generated is given' do
+ let(:diff_options) { { collapse_generated: true } }
+
+ it 'returns generated value' do
+ expect(diff_files.first.generated?).not_to be_nil
+ end
+ end
+
+ context 'when collapse_generated is not given' do
+ it 'returns nil' do
+ expect(diff_files.first.generated?).to be_nil
+ end
+ end
end
it_behaves_like 'unfoldable diff' do
subject do
- described_class.new(merge_request.merge_request_diff, page, per_page)
+ described_class.new(merge_request.merge_request_diff, page, per_page, diff_options)
end
end
@@ -100,7 +115,7 @@ RSpec.describe Gitlab::Diff::FileCollection::PaginatedMergeRequestDiff, feature_
let(:diffable) { merge_request.merge_request_diff }
subject do
- described_class.new(merge_request.merge_request_diff, page, per_page)
+ described_class.new(merge_request.merge_request_diff, page, per_page, diff_options)
end
end
end
diff --git a/spec/lib/gitlab/diff/file_spec.rb b/spec/lib/gitlab/diff/file_spec.rb
index 2cd27472440..9e5d3ab0a20 100644
--- a/spec/lib/gitlab/diff/file_spec.rb
+++ b/spec/lib/gitlab/diff/file_spec.rb
@@ -820,7 +820,7 @@ RSpec.describe Gitlab::Diff::File do
end
context 'when neither blob exists' do
- let(:blank_diff_refs) { Gitlab::Diff::DiffRefs.new(base_sha: Gitlab::Git::BLANK_SHA, head_sha: Gitlab::Git::BLANK_SHA) }
+ let(:blank_diff_refs) { Gitlab::Diff::DiffRefs.new(base_sha: Gitlab::Git::SHA1_BLANK_SHA, head_sha: Gitlab::Git::SHA1_BLANK_SHA) }
let(:diff_file) { described_class.new(diff, diff_refs: blank_diff_refs, repository: project.repository) }
describe '#blob' do
diff --git a/spec/lib/gitlab/diff/highlight_spec.rb b/spec/lib/gitlab/diff/highlight_spec.rb
index e65f5a618a5..e9e65f64887 100644
--- a/spec/lib/gitlab/diff/highlight_spec.rb
+++ b/spec/lib/gitlab/diff/highlight_spec.rb
@@ -25,119 +25,131 @@ RSpec.describe Gitlab::Diff::Highlight, feature_category: :source_code_managemen
end
describe '#highlight' do
- context "with a diff file" do
- let(:subject) { described_class.new(diff_file, repository: project.repository).highlight }
+ shared_examples_for 'diff highlighter' do
+ context "with a diff file" do
+ let(:subject) { described_class.new(diff_file, repository: project.repository).highlight }
- it 'returns Gitlab::Diff::Line elements' do
- expect(subject.first).to be_an_instance_of(Gitlab::Diff::Line)
- end
+ it 'returns Gitlab::Diff::Line elements' do
+ expect(subject.first).to be_an_instance_of(Gitlab::Diff::Line)
+ end
- it 'does not modify "match" lines' do
- expect(subject[0].text).to eq('@@ -6,12 +6,18 @@ module Popen')
- expect(subject[22].text).to eq('@@ -19,6 +25,7 @@ module Popen')
- end
+ it 'does not modify "match" lines' do
+ expect(subject[0].text).to eq('@@ -6,12 +6,18 @@ module Popen')
+ expect(subject[22].text).to eq('@@ -19,6 +25,7 @@ module Popen')
+ end
- it 'highlights and marks unchanged lines' do
- code = %{ <span id="LC7" class="line" lang="ruby"> <span class="k">def</span> <span class="nf">popen</span><span class="p">(</span><span class="n">cmd</span><span class="p">,</span> <span class="n">path</span><span class="o">=</span><span class="kp">nil</span><span class="p">)</span></span>\n}
+ it 'highlights and marks unchanged lines' do
+ code = %{ <span id="LC7" class="line" lang="ruby"> <span class="k">def</span> <span class="nf">popen</span><span class="p">(</span><span class="n">cmd</span><span class="p">,</span> <span class="n">path</span><span class="o">=</span><span class="kp">nil</span><span class="p">)</span></span>\n}
- expect(subject[2].rich_text).to eq(code)
- end
+ expect(subject[2].rich_text).to eq(code)
+ end
- it 'highlights and marks removed lines' do
- code = %(-<span id="LC9" class="line" lang="ruby"> <span class="k">raise</span> <span class="s2">"System commands must be given as an array of strings"</span></span>\n)
+ it 'highlights and marks removed lines' do
+ code = %(-<span id="LC9" class="line" lang="ruby"> <span class="k">raise</span> <span class="s2">"System commands must be given as an array of strings"</span></span>\n)
- expect(subject[4].rich_text).to eq(code)
- end
+ expect(subject[4].rich_text).to eq(code)
+ end
- it 'highlights and marks added lines' do
- code = %(+<span id="LC9" class="line" lang="ruby"> <span class="k">raise</span> <span class="no"><span class="idiff left addition">RuntimeError</span></span><span class="p"><span class="idiff addition">,</span></span><span class="idiff right addition"> </span><span class="s2">"System commands must be given as an array of strings"</span></span>\n)
+ it 'highlights and marks added lines' do
+ code = %(+<span id="LC9" class="line" lang="ruby"> <span class="k">raise</span> <span class="no"><span class="idiff left addition">RuntimeError</span></span><span class="p"><span class="idiff addition">,</span></span><span class="idiff right addition"> </span><span class="s2">"System commands must be given as an array of strings"</span></span>\n)
+
+ expect(subject[5].rich_text).to eq(code)
+ end
- expect(subject[5].rich_text).to eq(code)
+ context 'when no diff_refs' do
+ before do
+ allow(diff_file).to receive(:diff_refs).and_return(nil)
+ end
+
+ context 'when no inline diffs' do
+ it_behaves_like 'without inline diffs'
+ end
+ end
end
- context 'when no diff_refs' do
- before do
- allow(diff_file).to receive(:diff_refs).and_return(nil)
+ context "with diff lines" do
+ let(:subject) { described_class.new(diff_file.diff_lines, repository: project.repository).highlight }
+
+ it 'returns Gitlab::Diff::Line elements' do
+ expect(subject.first).to be_an_instance_of(Gitlab::Diff::Line)
end
- context 'when no inline diffs' do
- it_behaves_like 'without inline diffs'
+ it 'does not modify "match" lines' do
+ expect(subject[0].text).to eq('@@ -6,12 +6,18 @@ module Popen')
+ expect(subject[22].text).to eq('@@ -19,6 +25,7 @@ module Popen')
end
- end
- end
- context "with diff lines" do
- let(:subject) { described_class.new(diff_file.diff_lines, repository: project.repository).highlight }
+ it 'marks unchanged lines' do
+ code = %q{ def popen(cmd, path=nil)}
- it 'returns Gitlab::Diff::Line elements' do
- expect(subject.first).to be_an_instance_of(Gitlab::Diff::Line)
- end
+ expect(subject[2].text).to eq(code)
+ expect(subject[2].text).not_to be_html_safe
+ end
- it 'does not modify "match" lines' do
- expect(subject[0].text).to eq('@@ -6,12 +6,18 @@ module Popen')
- expect(subject[22].text).to eq('@@ -19,6 +25,7 @@ module Popen')
- end
+ it 'marks removed lines' do
+ code = %q(- raise "System commands must be given as an array of strings")
- it 'marks unchanged lines' do
- code = %q{ def popen(cmd, path=nil)}
+ expect(subject[4].text).to eq(code)
+ expect(subject[4].text).not_to be_html_safe
+ end
- expect(subject[2].text).to eq(code)
- expect(subject[2].text).not_to be_html_safe
- end
+ it 'marks added lines' do
+ code = %q(+ raise <span class="idiff left right addition">RuntimeError, </span>&quot;System commands must be given as an array of strings&quot;)
- it 'marks removed lines' do
- code = %q(- raise "System commands must be given as an array of strings")
+ expect(subject[5].rich_text).to eq(code)
+ expect(subject[5].rich_text).to be_html_safe
+ end
- expect(subject[4].text).to eq(code)
- expect(subject[4].text).not_to be_html_safe
- end
+ context 'when the inline diff marker has an invalid range' do
+ before do
+ allow_any_instance_of(Gitlab::Diff::InlineDiffMarker).to receive(:mark).and_raise(RangeError)
+ end
- it 'marks added lines' do
- code = %q(+ raise <span class="idiff left right addition">RuntimeError, </span>&quot;System commands must be given as an array of strings&quot;)
+ it 'keeps the original rich line' do
+ allow(Gitlab::ErrorTracking).to receive(:track_and_raise_for_dev_exception)
- expect(subject[5].rich_text).to eq(code)
- expect(subject[5].rich_text).to be_html_safe
- end
+ code = %q(+ raise RuntimeError, "System commands must be given as an array of strings")
- context 'when the inline diff marker has an invalid range' do
- before do
- allow_any_instance_of(Gitlab::Diff::InlineDiffMarker).to receive(:mark).and_raise(RangeError)
- end
+ expect(subject[5].text).to eq(code)
+ expect(subject[5].text).not_to be_html_safe
+ end
- it 'keeps the original rich line' do
- allow(Gitlab::ErrorTracking).to receive(:track_and_raise_for_dev_exception)
+ it 'reports to Sentry if configured' do
+ expect(Gitlab::ErrorTracking).to receive(:track_and_raise_for_dev_exception).and_call_original
- code = %q(+ raise RuntimeError, "System commands must be given as an array of strings")
+ expect { subject }.to raise_exception(RangeError)
+ end
+ end
- expect(subject[5].text).to eq(code)
- expect(subject[5].text).not_to be_html_safe
+ context 'when no inline diffs' do
+ it_behaves_like 'without inline diffs'
end
+ end
- it 'reports to Sentry if configured' do
- expect(Gitlab::ErrorTracking).to receive(:track_and_raise_for_dev_exception).and_call_original
+ context 'when blob is too large' do
+ let(:subject) { described_class.new(diff_file, repository: project.repository).highlight }
- expect { subject }.to raise_exception(RangeError)
+ before do
+ allow(Gitlab::Highlight).to receive(:too_large?).and_return(true)
end
- end
- context 'when no inline diffs' do
- it_behaves_like 'without inline diffs'
+ it 'blobs are highlighted as plain text without loading all data' do
+ expect(diff_file.blob).not_to receive(:load_all_data!)
+
+ expect(subject[2].rich_text).to eq(%{ <span id="LC7" class="line" lang=""> def popen(cmd, path=nil)</span>\n})
+ expect(subject[2].rich_text).to be_html_safe
+ end
end
end
- context 'when blob is too large' do
- let(:subject) { described_class.new(diff_file, repository: project.repository).highlight }
+ it_behaves_like 'diff highlighter'
+ context 'when diff_line_syntax_highlighting feature flag is disabled' do
before do
- allow(Gitlab::Highlight).to receive(:too_large?).and_return(true)
+ stub_feature_flags(diff_line_syntax_highlighting: false)
end
- it 'blobs are highlighted as plain text without loading all data' do
- expect(diff_file.blob).not_to receive(:load_all_data!)
-
- expect(subject[2].rich_text).to eq(%{ <span id="LC7" class="line" lang=""> def popen(cmd, path=nil)</span>\n})
- expect(subject[2].rich_text).to be_html_safe
- end
+ it_behaves_like 'diff highlighter'
end
end
end
diff --git a/spec/lib/gitlab/diff/rendered/notebook/diff_file_helper_spec.rb b/spec/lib/gitlab/diff/rendered/notebook/diff_file_helper_spec.rb
index ad92d90e253..4dd29e1fb15 100644
--- a/spec/lib/gitlab/diff/rendered/notebook/diff_file_helper_spec.rb
+++ b/spec/lib/gitlab/diff/rendered/notebook/diff_file_helper_spec.rb
@@ -2,7 +2,7 @@
require 'fast_spec_helper'
require 'rspec-parameterized'
-require 'set'
+require 'set' # rubocop:disable Lint/RedundantRequireStatement -- Ruby 3.1 and earlier needs this. Drop this line after Ruby 3.2+ is only supported.
MOCK_LINE = Struct.new(:text, :type, :index, :old_pos, :new_pos)
diff --git a/spec/lib/gitlab/error_tracking_spec.rb b/spec/lib/gitlab/error_tracking_spec.rb
index c9b2e21d934..7b2c5ca27cb 100644
--- a/spec/lib/gitlab/error_tracking_spec.rb
+++ b/spec/lib/gitlab/error_tracking_spec.rb
@@ -173,6 +173,20 @@ RSpec.describe Gitlab::ErrorTracking, feature_category: :shared do
)
end.to raise_error(RuntimeError)
end
+
+ it 'processes the exception even when it is called within a `restrict_within_concurrent_ruby` block' do
+ expect(Gitlab::ErrorTracking::Logger).to receive(:error).with(logger_payload)
+
+ expect do
+ Gitlab::Utils.restrict_within_concurrent_ruby do
+ described_class.track_and_raise_exception(
+ exception,
+ issue_url: issue_url,
+ some_other_info: 'info'
+ )
+ end
+ end.to raise_error(RuntimeError, /boom/)
+ end
end
describe '.log_and_raise_exception' do
@@ -188,6 +202,16 @@ RSpec.describe Gitlab::ErrorTracking, feature_category: :shared do
expect { log_and_raise_exception }.to raise_error(RuntimeError)
end
+ it 'processes the exception even when it is called within a `restrict_within_concurrent_ruby` block' do
+ expect(Gitlab::ErrorTracking::Logger).to receive(:error).with(logger_payload)
+
+ expect do
+ Gitlab::Utils.restrict_within_concurrent_ruby do
+ log_and_raise_exception
+ end
+ end.to raise_error(RuntimeError)
+ end
+
context 'when extra details are provided' do
let(:extra) { { test: 1, my_token: 'test' } }
@@ -230,6 +254,14 @@ RSpec.describe Gitlab::ErrorTracking, feature_category: :shared do
expect(Gitlab::ErrorTracking::Logger).to have_received(:error).with(logger_payload)
end
+ it 'processes the exception even when it is called within a `restrict_within_concurrent_ruby` block' do
+ Gitlab::Utils.restrict_within_concurrent_ruby do
+ track_exception
+ end
+
+ expect(Gitlab::ErrorTracking::Logger).to have_received(:error).with(logger_payload)
+ end
+
context 'with tags' do
let(:tags) { { 'mytag' => 2 } }
diff --git a/spec/lib/gitlab/event_store/event_spec.rb b/spec/lib/gitlab/event_store/event_spec.rb
index edcb0e5dd1a..367b3334020 100644
--- a/spec/lib/gitlab/event_store/event_spec.rb
+++ b/spec/lib/gitlab/event_store/event_spec.rb
@@ -93,6 +93,13 @@ RSpec.describe Gitlab::EventStore::Event, feature_category: :shared do
expect(event_class.json_schema_valid).to eq(false)
end
+
+ it 'does not store JSON schema on subclass' do
+ expect { event }.to raise_error(Gitlab::EventStore::InvalidEvent)
+
+ expect(event_class.instance_variables).not_to include(:@json_schema)
+ expect(described_class.instance_variables).to include(:@json_schema)
+ end
end
end
end
diff --git a/spec/lib/gitlab/git/changed_path_spec.rb b/spec/lib/gitlab/git/changed_path_spec.rb
index ef51021ba4c..50cdddc9e8a 100644
--- a/spec/lib/gitlab/git/changed_path_spec.rb
+++ b/spec/lib/gitlab/git/changed_path_spec.rb
@@ -3,9 +3,12 @@
require 'fast_spec_helper'
RSpec.describe Gitlab::Git::ChangedPath do
- subject(:changed_path) { described_class.new(path: path, status: status) }
+ subject(:changed_path) { described_class.new(path: path, status: status, old_mode: old_mode, new_mode: new_mode) }
let(:path) { 'test_path' }
+ let(:status) { :MODIFIED }
+ let(:old_mode) { '100644' }
+ let(:new_mode) { '100644' }
describe '#new_file?' do
subject(:new_file?) { changed_path.new_file? }
@@ -19,11 +22,33 @@ RSpec.describe Gitlab::Git::ChangedPath do
end
context 'when it is not a new file' do
- let(:status) { :MODIFIED }
-
it 'returns false' do
expect(new_file?).to eq(false)
end
end
end
+
+ describe '#submodule_change?' do
+ subject(:submodule_change?) { changed_path.submodule_change? }
+
+ context 'with a regular file change' do
+ it { is_expected.to eq false }
+ end
+
+ context 'with a submodule addition' do
+ let(:status) { :ADDED }
+ let(:old_mode) { '0' }
+ let(:new_mode) { '160000' }
+
+ it { is_expected.to eq true }
+ end
+
+ context 'with a submodule deletion' do
+ let(:status) { :MODIFIED }
+ let(:old_mode) { '160000' }
+ let(:new_mode) { '0' }
+
+ it { is_expected.to eq true }
+ end
+ end
end
diff --git a/spec/lib/gitlab/git/commit_spec.rb b/spec/lib/gitlab/git/commit_spec.rb
index 6c8634281ae..a924137b8ec 100644
--- a/spec/lib/gitlab/git/commit_spec.rb
+++ b/spec/lib/gitlab/git/commit_spec.rb
@@ -522,7 +522,7 @@ RSpec.describe Gitlab::Git::Commit, feature_category: :source_code_management do
end
context 'when the commit cannot be found' do
- let(:commit_id) { Gitlab::Git::BLANK_SHA }
+ let(:commit_id) { Gitlab::Git::SHA1_BLANK_SHA }
it 'returns nil' do
expect(subject).to be_nil
diff --git a/spec/lib/gitlab/git/compare_spec.rb b/spec/lib/gitlab/git/compare_spec.rb
index 5ee5e18d5af..521d4588d84 100644
--- a/spec/lib/gitlab/git/compare_spec.rb
+++ b/spec/lib/gitlab/git/compare_spec.rb
@@ -116,22 +116,20 @@ RSpec.describe Gitlab::Git::Compare, feature_category: :source_code_management d
describe '#generated_files' do
subject(:generated_files) { compare.generated_files }
- context 'with a detected generated file' do
- let_it_be(:project) { create(:project, :repository) }
- let_it_be(:repository) { project.repository.raw }
- let_it_be(:branch) { 'generated-file-test' }
- let_it_be(:base) do
- project
- .repository
- .create_file(
- project.creator,
- '.gitattributes',
- "*.txt gitlab-generated\n",
- branch_name: branch,
- message: 'Add .gitattributes file')
- end
+ let(:project) do
+ create(:project, :custom_repo, files: {
+ '.gitattributes' => '*.txt gitlab-generated'
+ })
+ end
+
+ let(:repository) { project.repository.raw }
+ let(:branch) { 'generated-file-test' }
+ let(:base) { project.default_branch }
+ let(:head) { branch }
- let_it_be(:head) do
+ context 'with a detected generated file' do
+ before do
+ project.repository.create_branch(branch, project.default_branch)
project
.repository
.create_file(
@@ -150,7 +148,7 @@ RSpec.describe Gitlab::Git::Compare, feature_category: :source_code_management d
message: 'Add file2')
end
- it 'sets the diff as generated' do
+ it 'returns a set that includes the generated file' do
expect(generated_files).to eq Set.new(['file1.txt'])
end
@@ -175,19 +173,16 @@ RSpec.describe Gitlab::Git::Compare, feature_category: :source_code_management d
end
end
- context 'with updated .gitattributes in the HEAD' do
- let_it_be(:project) { create(:project, :repository) }
- let_it_be(:repository) { project.repository.raw }
- let_it_be(:branch) { 'generated-file-test' }
- let_it_be(:head) do
+ context 'with deleted .gitattributes in the HEAD' do
+ before do
+ project.repository.create_branch(branch, project.default_branch)
project
.repository
- .create_file(
+ .delete_file(
project.creator,
'.gitattributes',
- "*.txt gitlab-generated\n",
branch_name: branch,
- message: 'Add .gitattributes file')
+ message: 'Delete .gitattributes file')
project
.repository
.create_file(
@@ -206,8 +201,8 @@ RSpec.describe Gitlab::Git::Compare, feature_category: :source_code_management d
message: 'Add file2')
end
- it 'does not set any files as generated' do
- expect(generated_files).to eq Set.new
+ it 'ignores the .gitattributes changes in the HEAD' do
+ expect(generated_files).to eq Set.new(['file1.txt'])
end
end
end
diff --git a/spec/lib/gitlab/git/push_spec.rb b/spec/lib/gitlab/git/push_spec.rb
index a0a4a22699b..f249e8316da 100644
--- a/spec/lib/gitlab/git/push_spec.rb
+++ b/spec/lib/gitlab/git/push_spec.rb
@@ -55,13 +55,13 @@ RSpec.describe Gitlab::Git::Push do
end
context 'when old revision is blank' do
- let(:oldrev) { Gitlab::Git::BLANK_SHA }
+ let(:oldrev) { Gitlab::Git::SHA1_BLANK_SHA }
it { is_expected.not_to be_branch_updated }
end
context 'when it is not a branch push' do
- let(:newrev) { Gitlab::Git::BLANK_SHA }
+ let(:newrev) { Gitlab::Git::SHA1_BLANK_SHA }
it { is_expected.not_to be_branch_updated }
end
@@ -105,7 +105,7 @@ RSpec.describe Gitlab::Git::Push do
end
context 'when old revision is not defined' do
- let(:oldrev) { Gitlab::Git::BLANK_SHA }
+ let(:oldrev) { Gitlab::Git::SHA1_BLANK_SHA }
it { is_expected.to be_branch_added }
end
@@ -117,7 +117,7 @@ RSpec.describe Gitlab::Git::Push do
end
context 'when new revision is not defined' do
- let(:newrev) { Gitlab::Git::BLANK_SHA }
+ let(:newrev) { Gitlab::Git::SHA1_BLANK_SHA }
it { is_expected.to be_branch_removed }
end
@@ -136,7 +136,7 @@ RSpec.describe Gitlab::Git::Push do
end
context 'when a push is not a branch update' do
- let(:oldrev) { Gitlab::Git::BLANK_SHA }
+ let(:oldrev) { Gitlab::Git::SHA1_BLANK_SHA }
it 'raises an error' do
expect { subject.modified_paths }.to raise_error(ArgumentError)
@@ -155,7 +155,7 @@ RSpec.describe Gitlab::Git::Push do
let(:oldrev) { nil }
it 'returns blank SHA' do
- expect(subject.oldrev).to eq Gitlab::Git::BLANK_SHA
+ expect(subject.oldrev).to eq Gitlab::Git::SHA1_BLANK_SHA
end
end
end
@@ -171,7 +171,7 @@ RSpec.describe Gitlab::Git::Push do
let(:newrev) { nil }
it 'returns blank SHA' do
- expect(subject.newrev).to eq Gitlab::Git::BLANK_SHA
+ expect(subject.newrev).to eq Gitlab::Git::SHA1_BLANK_SHA
end
end
end
diff --git a/spec/lib/gitlab/git/repository_cleaner_spec.rb b/spec/lib/gitlab/git/repository_cleaner_spec.rb
index 9f1bf9e48ee..a9ba87ad5b5 100644
--- a/spec/lib/gitlab/git/repository_cleaner_spec.rb
+++ b/spec/lib/gitlab/git/repository_cleaner_spec.rb
@@ -8,7 +8,7 @@ RSpec.describe Gitlab::Git::RepositoryCleaner do
let(:project) { create(:project, :repository) }
let(:repository) { project.repository }
let(:head_sha) { repository.head_commit.id }
- let(:object_map_data) { "#{head_sha} #{Gitlab::Git::BLANK_SHA}" }
+ let(:object_map_data) { "#{head_sha} #{Gitlab::Git::SHA1_BLANK_SHA}" }
let(:clean_refs) { %W[refs/environments/1 refs/merge-requests/1 refs/keep-around/#{head_sha}] }
let(:keep_refs) { %w[refs/heads/_keep refs/tags/_keep] }
@@ -35,7 +35,7 @@ RSpec.describe Gitlab::Git::RepositoryCleaner do
Gitaly::ApplyBfgObjectMapStreamResponse::Entry.new(
type: :COMMIT,
old_oid: head_sha,
- new_oid: Gitlab::Git::BLANK_SHA
+ new_oid: Gitlab::Git::SHA1_BLANK_SHA
)
)
end
diff --git a/spec/lib/gitlab/git/repository_spec.rb b/spec/lib/gitlab/git/repository_spec.rb
index cc07a16d362..7c6a54161ae 100644
--- a/spec/lib/gitlab/git/repository_spec.rb
+++ b/spec/lib/gitlab/git/repository_spec.rb
@@ -566,7 +566,7 @@ RSpec.describe Gitlab::Git::Repository, feature_category: :source_code_managemen
[
{
old_sha: sha,
- new_sha: Gitlab::Git::BLANK_SHA,
+ new_sha: Gitlab::Git::SHA1_BLANK_SHA,
reference: tmp_ref
}
]
@@ -1155,7 +1155,7 @@ RSpec.describe Gitlab::Git::Repository, feature_category: :source_code_managemen
end
context 'blank revisions' do
- let(:revisions) { [::Gitlab::Git::BLANK_SHA] }
+ let(:revisions) { [::Gitlab::Git::SHA1_BLANK_SHA] }
let(:expected_blobs) { 0 }
before do
@@ -1278,7 +1278,7 @@ RSpec.describe Gitlab::Git::Repository, feature_category: :source_code_managemen
end
context 'with partially blank revisions' do
- let(:newrevs) { [nil, commit, Gitlab::Git::BLANK_SHA] }
+ let(:newrevs) { [nil, commit, Gitlab::Git::SHA1_BLANK_SHA] }
let(:expected_newrevs) { ['--not', '--all', '--not', commit] }
let(:expected_blobs) do
[
@@ -1326,7 +1326,7 @@ RSpec.describe Gitlab::Git::Repository, feature_category: :source_code_managemen
end
context 'with a single zero newrev' do
- let(:newrevs) { Gitlab::Git::BLANK_SHA }
+ let(:newrevs) { Gitlab::Git::SHA1_BLANK_SHA }
it_behaves_like '#new_blobs without revisions'
end
@@ -1338,7 +1338,7 @@ RSpec.describe Gitlab::Git::Repository, feature_category: :source_code_managemen
end
context 'with array containing only empty refs' do
- let(:newrevs) { [nil, Gitlab::Git::BLANK_SHA] }
+ let(:newrevs) { [nil, Gitlab::Git::SHA1_BLANK_SHA] }
it_behaves_like '#new_blobs without revisions'
end
@@ -1400,7 +1400,7 @@ RSpec.describe Gitlab::Git::Repository, feature_category: :source_code_managemen
let(:changes) { repository.raw_changes_between(old_rev, new_rev) }
context 'initial commit' do
- let(:old_rev) { Gitlab::Git::BLANK_SHA }
+ let(:old_rev) { Gitlab::Git::SHA1_BLANK_SHA }
let(:new_rev) { '1a0b36b3cdad1d2ee32457c102a8c0b7056fa863' }
it 'returns the changes' do
@@ -1681,11 +1681,11 @@ RSpec.describe Gitlab::Git::Repository, feature_category: :source_code_managemen
expect(collection.to_a).to be_empty
end
- it 'returns no Gitaly::DiffStats when there is a BLANK_SHA' do
+ it 'returns no Gitaly::DiffStats when there is a SHA1_BLANK_SHA' do
expect_any_instance_of(Gitlab::GitalyClient::CommitService)
.not_to receive(:diff_stats)
- collection = repository.diff_stats(Gitlab::Git::BLANK_SHA, 'master')
+ collection = repository.diff_stats(Gitlab::Git::SHA1_BLANK_SHA, 'master')
expect(collection).to be_a(Gitlab::Git::DiffStatsCollection)
expect(collection).to be_a(Enumerable)
@@ -1702,25 +1702,25 @@ RSpec.describe Gitlab::Git::Repository, feature_category: :source_code_managemen
let_it_be(:diff_tree) { Gitlab::Git::DiffTree.from_commit(initial_commit) }
let(:commit_1_files) do
- [Gitlab::Git::ChangedPath.new(status: :ADDED, path: "files/executables/ls")]
+ [Gitlab::Git::ChangedPath.new(status: :ADDED, path: "files/executables/ls", old_mode: "0", new_mode: "100755")]
end
let(:commit_2_files) do
- [Gitlab::Git::ChangedPath.new(status: :ADDED, path: "bar/branch-test.txt")]
+ [Gitlab::Git::ChangedPath.new(status: :ADDED, path: "bar/branch-test.txt", old_mode: "0", new_mode: "100644")]
end
let(:commit_3_files) do
[
- Gitlab::Git::ChangedPath.new(status: :MODIFIED, path: ".gitmodules"),
- Gitlab::Git::ChangedPath.new(status: :ADDED, path: "gitlab-shell")
+ Gitlab::Git::ChangedPath.new(status: :MODIFIED, path: ".gitmodules", old_mode: "100644", new_mode: "100644"),
+ Gitlab::Git::ChangedPath.new(status: :ADDED, path: "gitlab-shell", old_mode: "0", new_mode: "160000")
]
end
let(:diff_tree_files) do
[
- Gitlab::Git::ChangedPath.new(status: :ADDED, path: ".gitignore"),
- Gitlab::Git::ChangedPath.new(status: :ADDED, path: "LICENSE"),
- Gitlab::Git::ChangedPath.new(status: :ADDED, path: "README.md")
+ Gitlab::Git::ChangedPath.new(status: :ADDED, path: ".gitignore", old_mode: "0", new_mode: "100644"),
+ Gitlab::Git::ChangedPath.new(status: :ADDED, path: "LICENSE", old_mode: "0", new_mode: "100644"),
+ Gitlab::Git::ChangedPath.new(status: :ADDED, path: "README.md", old_mode: "0", new_mode: "100644")
]
end
@@ -2222,7 +2222,7 @@ RSpec.describe Gitlab::Git::Repository, feature_category: :source_code_managemen
end
it 'returns empty for unknown ID' do
- expect(repository.refs_by_oid(oid: Gitlab::Git::BLANK_SHA, limit: 0)).to eq([])
+ expect(repository.refs_by_oid(oid: Gitlab::Git::SHA1_BLANK_SHA, limit: 0)).to eq([])
end
it 'returns empty for an empty repo' do
@@ -2822,11 +2822,37 @@ RSpec.describe Gitlab::Git::Repository, feature_category: :source_code_managemen
})
end
+ let(:gitattr_content) { "" }
+
let(:repository) { project.repository.raw }
- let(:rev) { 'master' }
- let(:paths) { ['file1.txt', 'file2.txt'] }
+ let(:base) { project.default_branch }
+ let(:branch) { 'detect-generated-files-test' }
+ let(:head) { branch }
+ let(:paths) do
+ [
+ Gitlab::Git::ChangedPath.new(status: :MODIFIED, path: 'file1.txt', old_mode: '100644', new_mode: '100644'),
+ Gitlab::Git::ChangedPath.new(status: :DELETED, path: 'file2.txt', old_mode: '100644', new_mode: '8')
+ ]
+ end
+
+ before do
+ project.repository.create_branch(branch, project.default_branch)
- subject(:generated_files) { repository.detect_generated_files(rev, paths) }
+ project.repository.update_file(
+ project.creator,
+ 'file1.txt',
+ 'updated first file',
+ message: 'Update file',
+ branch_name: branch)
+
+ project.repository.delete_file(
+ project.creator,
+ 'file2.txt',
+ message: 'Delete file',
+ branch_name: branch)
+ end
+
+ subject(:generated_files) { repository.detect_generated_files(base, head, paths) }
context 'when the linguist-generated attribute is used' do
let(:gitattr_content) { "*.txt text\nfile1.txt linguist-generated\n" }
@@ -2852,11 +2878,99 @@ RSpec.describe Gitlab::Git::Repository, feature_category: :source_code_managemen
end
end
- context 'when the all files are generated' do
+ context 'when the gitlab-generated attribute is used to unset' do
+ let(:gitattr_content) { "file1.txt -gitlab-generated\n" }
+
+ it 'returns an empty set' do
+ expect(generated_files).to eq Set.new
+ end
+ end
+
+ context 'with an automatically detected file' do
+ before do
+ project.repository.create_file(
+ project.creator,
+ 'package-lock.json',
+ 'generated file content',
+ message: 'Add generated file',
+ branch_name: branch)
+ end
+
+ let(:paths) do
+ [
+ Gitlab::Git::ChangedPath.new(status: :MODIFIED, path: 'file1.txt', old_mode: '100644', new_mode: '100644'),
+ Gitlab::Git::ChangedPath.new(status: :DELETED, path: 'file2.txt', old_mode: '100644', new_mode: '100644'),
+ Gitlab::Git::ChangedPath.new(status: :ADDED, path: 'package-lock.json', old_mode: '0', new_mode: '100644')
+ ]
+ end
+
+ context 'when the manual override is used on non-detectable file' do
+ let(:gitattr_content) { "file1.txt gitlab-generated\n" }
+
+ it 'returns both manually overridden file and the detected file' do
+ expect(generated_files).to contain_exactly('file1.txt', 'package-lock.json')
+ end
+ end
+
+ context 'when the manual override is used on the detectable file' do
+ let(:gitattr_content) { "package-lock.json gitlab-generated\n" }
+
+ it 'returns the overridden file' do
+ expect(generated_files).to contain_exactly('package-lock.json')
+ end
+ end
+
+ context 'when the manual override is used on the detectable file to unset' do
+ let(:gitattr_content) { "package-lock.json -gitlab-generated\n" }
+
+ it 'returns an empty set' do
+ expect(generated_files).to eq Set.new
+ end
+ end
+
+ shared_examples 'an invalid request' do
+ it 'returns an empty set' do
+ expect(generated_files).to eq Set.new
+ end
+
+ it 'reports the exception' do
+ expect(Gitlab::ErrorTracking)
+ .to receive(:track_exception)
+ .with(
+ instance_of(Gitlab::Git::CommandError),
+ gl_project_path: repository.gl_project_path,
+ base: base,
+ head: head,
+ paths: paths.map(&:path)
+ )
+
+ generated_files
+ end
+ end
+
+ context 'when an unknown revision is given' do
+ let(:head) { 'unknownrevision' }
+
+ it_behaves_like 'an invalid request'
+ end
+
+ context 'when an unknown path is given' do
+ let(:paths) do
+ [
+ Gitlab::Git::ChangedPath.new(status: :MODIFIED, path: 'file1.txt', old_mode: '100644', new_mode: '100644'),
+ Gitlab::Git::ChangedPath.new(status: :MODIFIED, path: 'unknownpath', old_mode: '100644', new_mode: '100644')
+ ]
+ end
+
+ it_behaves_like 'an invalid request'
+ end
+ end
+
+ context 'when all files are marked as generated' do
let(:gitattr_content) { "*.txt gitlab-generated\n" }
it 'returns all generated files' do
- expect(generated_files).to eq paths.to_set
+ expect(generated_files).to eq paths.map(&:path).to_set
end
end
diff --git a/spec/lib/gitlab/git/tag_spec.rb b/spec/lib/gitlab/git/tag_spec.rb
index a15c74a058d..7f1887f519b 100644
--- a/spec/lib/gitlab/git/tag_spec.rb
+++ b/spec/lib/gitlab/git/tag_spec.rb
@@ -91,7 +91,7 @@ RSpec.describe Gitlab::Git::Tag, feature_category: :source_code_management do
end
context 'when the tag cannot be found' do
- let(:tag_id) { Gitlab::Git::BLANK_SHA }
+ let(:tag_id) { Gitlab::Git::SHA1_BLANK_SHA }
it 'raises GRPC::Internal' do
expect { subject }.to raise_error(GRPC::Internal)
diff --git a/spec/lib/gitlab/git/tree_spec.rb b/spec/lib/gitlab/git/tree_spec.rb
index 090f9af2620..468df96b356 100644
--- a/spec/lib/gitlab/git/tree_spec.rb
+++ b/spec/lib/gitlab/git/tree_spec.rb
@@ -9,7 +9,17 @@ RSpec.describe Gitlab::Git::Tree, feature_category: :source_code_management do
let_it_be(:repository) { project.repository.raw }
shared_examples 'repo' do
- subject(:tree) { Gitlab::Git::Tree.where(repository, sha, path, recursive, skip_flat_paths, rescue_not_found, pagination_params) }
+ subject(:tree) do
+ Gitlab::Git::Tree.tree_entries(
+ repository: repository,
+ sha: sha,
+ path: path,
+ recursive: recursive,
+ skip_flat_paths: skip_flat_paths,
+ rescue_not_found: rescue_not_found,
+ pagination_params: pagination_params
+ )
+ end
let(:sha) { SeedRepo::Commit::ID }
let(:path) { nil }
diff --git a/spec/lib/gitlab/git_access_project_spec.rb b/spec/lib/gitlab/git_access_project_spec.rb
index ad593cbf005..abcc5c5863e 100644
--- a/spec/lib/gitlab/git_access_project_spec.rb
+++ b/spec/lib/gitlab/git_access_project_spec.rb
@@ -141,7 +141,9 @@ RSpec.describe Gitlab::GitAccessProject do
end
context 'when check contains actual changes' do
- let(:changes) { "#{Gitlab::Git::BLANK_SHA} 570e7b2abdd848b95f2f578043fc23bd6f6fd24d refs/heads/new_branch" }
+ let(:changes) do
+ "#{Gitlab::Git::SHA1_BLANK_SHA} 570e7b2abdd848b95f2f578043fc23bd6f6fd24d refs/heads/new_branch"
+ end
it_behaves_like 'no project is created'
end
diff --git a/spec/lib/gitlab/git_access_spec.rb b/spec/lib/gitlab/git_access_spec.rb
index 975e8bdd3ac..82daaba6448 100644
--- a/spec/lib/gitlab/git_access_spec.rb
+++ b/spec/lib/gitlab/git_access_spec.rb
@@ -802,13 +802,13 @@ RSpec.describe Gitlab::GitAccess, :aggregate_failures, feature_category: :system
let(:changes) do
{ any: Gitlab::GitAccess::ANY,
- push_new_branch: "#{Gitlab::Git::BLANK_SHA} 570e7b2ab refs/heads/wow",
+ push_new_branch: "#{Gitlab::Git::SHA1_BLANK_SHA} 570e7b2ab refs/heads/wow",
push_master: '6f6d7e7ed 570e7b2ab refs/heads/master',
push_protected_branch: '6f6d7e7ed 570e7b2ab refs/heads/feature',
- push_remove_protected_branch: "570e7b2ab #{Gitlab::Git::BLANK_SHA} "\
+ push_remove_protected_branch: "570e7b2ab #{Gitlab::Git::SHA1_BLANK_SHA} "\
'refs/heads/feature',
push_tag: '6f6d7e7ed 570e7b2ab refs/tags/v1.0.0',
- push_new_tag: "#{Gitlab::Git::BLANK_SHA} 570e7b2ab refs/tags/v7.8.9",
+ push_new_tag: "#{Gitlab::Git::SHA1_BLANK_SHA} 570e7b2ab refs/tags/v7.8.9",
push_all: ['6f6d7e7ed 570e7b2ab refs/heads/master', '6f6d7e7ed 570e7b2ab refs/heads/feature'],
merge_into_protected_branch: "0b4bc9a #{merge_into_protected_branch} refs/heads/feature" }
end
@@ -1018,7 +1018,7 @@ RSpec.describe Gitlab::GitAccess, :aggregate_failures, feature_category: :system
context 'when pushing to a project' do
let(:project) { create(:project, :public, :repository) }
- let(:changes) { "#{Gitlab::Git::BLANK_SHA} 570e7b2ab refs/heads/wow" }
+ let(:changes) { "#{Gitlab::Git::SHA1_BLANK_SHA} 570e7b2ab refs/heads/wow" }
before do
project.add_developer(user)
@@ -1062,14 +1062,14 @@ RSpec.describe Gitlab::GitAccess, :aggregate_failures, feature_category: :system
# additional queries.
access.check('git-receive-pack', changes)
- control_count = ActiveRecord::QueryRecorder.new do
+ control = ActiveRecord::QueryRecorder.new do
access.check('git-receive-pack', changes)
end
changes = ['6f6d7e7ed 570e7b2ab refs/heads/master', '6f6d7e7ed 570e7b2ab refs/heads/feature']
# There is still an N+1 query with protected branches
- expect { access.check('git-receive-pack', changes) }.not_to exceed_query_limit(control_count).with_threshold(2)
+ expect { access.check('git-receive-pack', changes) }.not_to exceed_query_limit(control).with_threshold(2)
end
it 'raises TimeoutError when #check_access! raises a timeout error' do
diff --git a/spec/lib/gitlab/git_spec.rb b/spec/lib/gitlab/git_spec.rb
index 0f6ef55b4b1..61f66c9cd0c 100644
--- a/spec/lib/gitlab/git_spec.rb
+++ b/spec/lib/gitlab/git_spec.rb
@@ -32,7 +32,7 @@ RSpec.describe Gitlab::Git do
'zzz25dc642cb6eb9a060e54bf8d69288fbee4904' | false
'4b825dc642cb6eb9a060e54bf8d69288fbee4904' | true
- Gitlab::Git::BLANK_SHA | true
+ Gitlab::Git::SHA1_BLANK_SHA | true
end
with_them do
diff --git a/spec/lib/gitlab/gitaly_client/analysis_service_spec.rb b/spec/lib/gitlab/gitaly_client/analysis_service_spec.rb
new file mode 100644
index 00000000000..c57d9b9592c
--- /dev/null
+++ b/spec/lib/gitlab/gitaly_client/analysis_service_spec.rb
@@ -0,0 +1,137 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::GitalyClient::AnalysisService, feature_category: :gitaly do
+ let_it_be(:project) do
+ create(:project, :repository)
+ end
+
+ let(:repository) { project.repository.raw }
+ let(:base) { project.default_branch }
+ let(:head) { branch }
+ let(:branch) { 'test-check-blobs-generated' }
+
+ let(:client) { described_class.new(repository) }
+
+ describe '#check_blobs_generated' do
+ subject(:check_blobs_generated) { client.check_blobs_generated(base, head, changed_paths) }
+
+ before do
+ project.repository.create_branch(branch, project.default_branch)
+
+ project.repository.create_file(
+ project.creator,
+ 'file1.txt',
+ 'new file content',
+ message: 'Add new file',
+ branch_name: branch)
+
+ project.repository.create_file(
+ project.creator,
+ 'package-lock.json',
+ 'new file content',
+ message: 'Add new file',
+ branch_name: branch)
+
+ project.repository.delete_file(
+ project.creator,
+ 'README',
+ message: 'Delete README',
+ branch_name: branch)
+ end
+
+ context 'when valid changed_paths are given' do
+ let(:changed_paths) do
+ [
+ Gitlab::Git::ChangedPath.new(status: :DELETED, path: 'README', old_mode: '100644', new_mode: '0'),
+ Gitlab::Git::ChangedPath.new(status: :ADDED, path: 'file1.txt', old_mode: '0', new_mode: '100644'),
+ Gitlab::Git::ChangedPath.new(status: :ADDED, path: 'package-lock.json', old_mode: '0', new_mode: '100644')
+ ]
+ end
+
+ it 'returns an expected array' do
+ expect(check_blobs_generated).to contain_exactly(
+ { generated: false, path: 'README' },
+ { generated: false, path: 'file1.txt' },
+ { generated: true, path: 'package-lock.json' }
+ )
+ end
+
+ context 'when changed_paths includes a submodule' do
+ let(:changed_paths) do
+ [
+ Gitlab::Git::ChangedPath.new(status: :ADDED, path: 'package-lock.json', old_mode: '0', new_mode: '100644'),
+ Gitlab::Git::ChangedPath.new(status: :DELETED, path: 'gitlab-shell', old_mode: '160000', new_mode: '0')
+ ]
+ end
+
+ it 'returns an array without the submodule change' do
+ expect(check_blobs_generated).to contain_exactly(
+ { generated: true, path: 'package-lock.json' }
+ )
+ end
+ end
+
+ context 'when changed_paths only has a submodule' do
+ let(:changed_paths) do
+ [
+ Gitlab::Git::ChangedPath.new(status: :ADDED, path: 'gitlab-shell', old_mode: '0', new_mode: '160000')
+ ]
+ end
+
+ it 'returns an empty array' do
+ expect(check_blobs_generated).to eq([])
+ end
+ end
+ end
+
+ context 'when changed_paths includes a path with :' do
+ before do
+ project.repository.create_file(
+ project.creator,
+ 'abc:def',
+ 'new file content',
+ message: 'Add new file',
+ branch_name: branch)
+ end
+
+ let(:changed_paths) do
+ [
+ Gitlab::Git::ChangedPath.new(status: :ADDED, path: 'abc:def', old_mode: '0', new_mode: '100644')
+ ]
+ end
+
+ it 'returns an expected array' do
+ expect(check_blobs_generated).to contain_exactly(
+ { generated: false, path: 'abc:def' }
+ )
+ end
+ end
+
+ context 'when an unknown revision is given' do
+ let(:head) { 'unknownrevision' }
+ let(:changed_paths) do
+ [
+ Gitlab::Git::ChangedPath.new(status: :ADDED, path: 'file1.txt', old_mode: '0', new_mode: '100644')
+ ]
+ end
+
+ it 'raises an error' do
+ expect { check_blobs_generated }.to raise_error(GRPC::Internal)
+ end
+ end
+
+ context 'when an unknown path is given' do
+ let(:changed_paths) do
+ [
+ Gitlab::Git::ChangedPath.new(status: :ADDED, path: 'unknownpath', old_mode: '0', new_mode: '100644')
+ ]
+ end
+
+ it 'raises an error' do
+ expect { check_blobs_generated }.to raise_error(GRPC::Internal)
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/gitaly_client/commit_service_spec.rb b/spec/lib/gitlab/gitaly_client/commit_service_spec.rb
index 02c7abadd99..9b924440989 100644
--- a/spec/lib/gitlab/gitaly_client/commit_service_spec.rb
+++ b/spec/lib/gitlab/gitaly_client/commit_service_spec.rb
@@ -203,15 +203,15 @@ RSpec.describe Gitlab::GitalyClient::CommitService, feature_category: :gitaly do
shared_examples 'includes paths different in any parent' do
let(:changed_paths) do
[
- { path: 'files/locked/foo.lfs', status: 'ADDED' },
- { path: 'files/locked/foo.lfs', status: 'MODIFIED' },
- { path: 'files/locked/bar.lfs', status: 'ADDED' },
- { path: 'files/locked/foo.lfs', status: 'MODIFIED' },
- { path: 'files/locked/bar.lfs', status: 'ADDED' },
- { path: 'files/locked/bar.lfs', status: 'MODIFIED' },
- { path: 'files/locked/bar.lfs', status: 'MODIFIED' },
- { path: 'files/locked/baz.lfs', status: 'ADDED' },
- { path: 'files/locked/baz.lfs', status: 'ADDED' }
+ { path: 'files/locked/foo.lfs', status: 'ADDED', old_mode: '0', new_mode: '100644' },
+ { path: 'files/locked/foo.lfs', status: 'MODIFIED', old_mode: '100644', new_mode: '100644' },
+ { path: 'files/locked/bar.lfs', status: 'ADDED', old_mode: '0', new_mode: '100644' },
+ { path: 'files/locked/foo.lfs', status: 'MODIFIED', old_mode: '100644', new_mode: '100644' },
+ { path: 'files/locked/bar.lfs', status: 'ADDED', old_mode: '0', new_mode: '100644' },
+ { path: 'files/locked/bar.lfs', status: 'MODIFIED', old_mode: '100644', new_mode: '100644' },
+ { path: 'files/locked/bar.lfs', status: 'MODIFIED', old_mode: '100644', new_mode: '100644' },
+ { path: 'files/locked/baz.lfs', status: 'ADDED', old_mode: '0', new_mode: '100644' },
+ { path: 'files/locked/baz.lfs', status: 'ADDED', old_mode: '0', new_mode: '100644' }
].as_json
end
@@ -223,12 +223,12 @@ RSpec.describe Gitlab::GitalyClient::CommitService, feature_category: :gitaly do
shared_examples 'includes paths different in all parents' do
let(:changed_paths) do
[
- { path: 'files/locked/foo.lfs', status: 'ADDED' },
- { path: 'files/locked/foo.lfs', status: 'MODIFIED' },
- { path: 'files/locked/bar.lfs', status: 'ADDED' },
- { path: 'files/locked/bar.lfs', status: 'MODIFIED' },
- { path: 'files/locked/baz.lfs', status: 'ADDED' },
- { path: 'files/locked/baz.lfs', status: 'ADDED' }
+ { path: 'files/locked/foo.lfs', status: 'ADDED', old_mode: '0', new_mode: '100644' },
+ { path: 'files/locked/foo.lfs', status: 'MODIFIED', old_mode: '100644', new_mode: '100644' },
+ { path: 'files/locked/bar.lfs', status: 'ADDED', old_mode: '0', new_mode: '100644' },
+ { path: 'files/locked/bar.lfs', status: 'MODIFIED', old_mode: '100644', new_mode: '100644' },
+ { path: 'files/locked/baz.lfs', status: 'ADDED', old_mode: '0', new_mode: '100644' },
+ { path: 'files/locked/baz.lfs', status: 'ADDED', old_mode: '0', new_mode: '100644' }
].as_json
end
diff --git a/spec/lib/gitlab/github_gists_import/importer/gists_importer_spec.rb b/spec/lib/gitlab/github_gists_import/importer/gists_importer_spec.rb
index d555a847ea5..b6a57ef8b57 100644
--- a/spec/lib/gitlab/github_gists_import/importer/gists_importer_spec.rb
+++ b/spec/lib/gitlab/github_gists_import/importer/gists_importer_spec.rb
@@ -8,7 +8,7 @@ RSpec.describe Gitlab::GithubGistsImport::Importer::GistsImporter, feature_categ
let_it_be(:user) { create(:user) }
let(:client) { instance_double('Gitlab::GithubImport::Client', rate_limit_resets_in: 5) }
let(:token) { 'token' }
- let(:page_counter) { instance_double('Gitlab::GithubImport::PageCounter', current: 1, set: true, expire!: true) }
+ let(:page_counter) { instance_double('Gitlab::Import::PageCounter', current: 1, set: true, expire!: true) }
let(:page) { instance_double('Gitlab::GithubImport::Client::Page', objects: [gist], number: 1) }
let(:url) { 'https://gist.github.com/foo/bar.git' }
let(:waiter) { Gitlab::JobWaiter.new(0, 'some-job-key') }
@@ -62,7 +62,7 @@ RSpec.describe Gitlab::GithubGistsImport::Importer::GistsImporter, feature_categ
.with(token, parallel: true)
.and_return(client)
- allow(Gitlab::GithubImport::PageCounter)
+ allow(Gitlab::Import::PageCounter)
.to receive(:new)
.with(user, :gists, 'github-gists-importer')
.and_return(page_counter)
diff --git a/spec/lib/gitlab/github_import/attachments_downloader_spec.rb b/spec/lib/gitlab/github_import/attachments_downloader_spec.rb
index 65c5a7daeb2..c7dd2a9538c 100644
--- a/spec/lib/gitlab/github_import/attachments_downloader_spec.rb
+++ b/spec/lib/gitlab/github_import/attachments_downloader_spec.rb
@@ -8,27 +8,12 @@ RSpec.describe Gitlab::GithubImport::AttachmentsDownloader, feature_category: :i
let_it_be(:file_url) { 'https://example.com/avatar.png' }
let_it_be(:content_type) { 'application/octet-stream' }
- let(:content_length) { 1000 }
let(:chunk_double) { instance_double(HTTParty::ResponseFragment, code: 200) }
- let(:headers_double) do
- instance_double(
- HTTParty::Response,
- code: 200,
- success?: true,
- parsed_response: {},
- headers: {
- 'content-length' => content_length,
- 'content-type' => content_type
- }
- )
- end
describe '#perform' do
before do
allow(Gitlab::HTTP).to receive(:perform_request)
.with(Net::HTTP::Get, file_url, stream_body: true).and_yield(chunk_double)
- allow(Gitlab::HTTP).to receive(:perform_request)
- .with(Net::HTTP::Head, file_url, {}).and_return(headers_double)
end
context 'when file valid' do
@@ -71,12 +56,12 @@ RSpec.describe Gitlab::GithubImport::AttachmentsDownloader, feature_category: :i
end
context 'when file size exceeds limit' do
- let(:content_length) { 26.megabytes }
+ subject(:downloader) { described_class.new(file_url, file_size_limit: 1.byte) }
it 'raises expected exception' do
expect { downloader.perform }.to raise_exception(
Gitlab::GithubImport::AttachmentsDownloader::DownloadError,
- 'File size 26 MiB exceeds limit of 25 MiB'
+ 'File size 57 B exceeds limit of 1 B'
)
end
end
@@ -94,6 +79,33 @@ RSpec.describe Gitlab::GithubImport::AttachmentsDownloader, feature_category: :i
end
end
+ context 'when chunk download returns a redirect' do
+ let(:chunk_double) { instance_double(HTTParty::ResponseFragment, code: 302, http_response: {}) }
+
+ it 'skips the redirect and continues' do
+ allow(Gitlab::HTTP).to receive(:perform_request)
+ .with(Net::HTTP::Get, file_url, stream_body: true).and_yield(chunk_double)
+
+ file = downloader.perform
+
+ expect(File.exist?(file.path)).to eq(true)
+ end
+ end
+
+ context 'when chunk download returns an error' do
+ let(:chunk_double) { instance_double(HTTParty::ResponseFragment, code: 500, http_response: {}) }
+
+ it 'raises expected exception' do
+ allow(Gitlab::HTTP).to receive(:perform_request)
+ .with(Net::HTTP::Get, file_url, stream_body: true).and_yield(chunk_double)
+
+ expect { downloader.perform }.to raise_exception(
+ Gitlab::GithubImport::AttachmentsDownloader::DownloadError,
+ "Error downloading file from #{file_url}. Error code: #{chunk_double.code}"
+ )
+ end
+ end
+
context 'when attachment is behind a github asset endpoint' do
let(:file_url) { "https://github.com/test/project/assets/142635249/4b9f9c90-f060-4845-97cf-b24c558bcb11" }
let(:redirect_url) { "https://github-production-user-asset-6210df.s3.amazonaws.com/142635249/740edb05293e.jpg" }
diff --git a/spec/lib/gitlab/github_import/events_cache_spec.rb b/spec/lib/gitlab/github_import/events_cache_spec.rb
new file mode 100644
index 00000000000..8637f236977
--- /dev/null
+++ b/spec/lib/gitlab/github_import/events_cache_spec.rb
@@ -0,0 +1,79 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::GithubImport::EventsCache, :clean_gitlab_redis_cache, feature_category: :importers do
+ let(:project) { build_stubbed(:project, id: 1) }
+ let(:issue) { build_stubbed(:issue, iid: 2) }
+
+ let(:event_cache) { described_class.new(project) }
+
+ def build_event(event)
+ Gitlab::GithubImport::Representation::IssueEvent.from_json_hash(event)
+ end
+
+ describe '#add' do
+ it 'adds event to cache' do
+ expect(Gitlab::Cache::Import::Caching).to receive(:list_add).with(
+ 'github-importer/events/1/Issue/2',
+ an_instance_of(String),
+ limit: described_class::MAX_NUMBER_OF_EVENTS
+ )
+
+ event_cache.add(issue, build_event({ event: 'closed' }))
+ end
+
+ context 'when events is too large to cache' do
+ before do
+ stub_const("#{described_class}::MAX_EVENT_SIZE", 1.byte)
+ end
+
+ it 'does not add event to cache' do
+ expect(Gitlab::Cache::Import::Caching).not_to receive(:list_add)
+ expect(Gitlab::GithubImport::Logger).to receive(:warn).with(
+ message: 'Event too large to cache',
+ project_id: project.id,
+ github_identifiers: {
+ event: 'closed',
+ id: '99',
+ issuable_iid: '2'
+ }
+ )
+
+ event_cache.add(issue, build_event({ event: 'closed', id: '99', issue: { number: '2' } }))
+ end
+ end
+ end
+
+ describe '#events' do
+ it 'retrieves the list of events from the cache in the correct order' do
+ key = 'github-importer/events/1/Issue/2'
+
+ Gitlab::Cache::Import::Caching.list_add(key, { event: 'merged', created_at: '2023-01-02T00:00:00Z' }.to_json)
+ Gitlab::Cache::Import::Caching.list_add(key, { event: 'closed', created_at: '2023-01-03T00:00:00Z' }.to_json)
+ Gitlab::Cache::Import::Caching.list_add(key, { event: 'commented', created_at: '2023-01-01T00:00:00Z' }.to_json)
+
+ events = event_cache.events(issue).map(&:to_hash)
+
+ expect(events).to match([
+ a_hash_including(event: 'commented', created_at: '2023-01-01 00:00:00 UTC'),
+ a_hash_including(event: 'merged', created_at: '2023-01-02 00:00:00 UTC'),
+ a_hash_including(event: 'closed', created_at: '2023-01-03 00:00:00 UTC')
+ ])
+ end
+
+ context 'when no event was added' do
+ it 'returns an empty array' do
+ expect(event_cache.events(issue)).to eq([])
+ end
+ end
+ end
+
+ describe '#delete' do
+ it 'deletes the list' do
+ expect(Gitlab::Cache::Import::Caching).to receive(:del).with('github-importer/events/1/Issue/2')
+
+ event_cache.delete(issue)
+ end
+ end
+end
diff --git a/spec/lib/gitlab/github_import/importer/attachments/base_importer_spec.rb b/spec/lib/gitlab/github_import/importer/attachments/base_importer_spec.rb
index 5e60be44621..bc1b32661b8 100644
--- a/spec/lib/gitlab/github_import/importer/attachments/base_importer_spec.rb
+++ b/spec/lib/gitlab/github_import/importer/attachments/base_importer_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Gitlab::GithubImport::Importer::Attachments::BaseImporter do
+RSpec.describe Gitlab::GithubImport::Importer::Attachments::BaseImporter, feature_category: :importers do
subject(:importer) { importer_class.new(project, client) }
let(:project) { instance_double(Project, id: 1) }
diff --git a/spec/lib/gitlab/github_import/importer/attachments/issues_importer_spec.rb b/spec/lib/gitlab/github_import/importer/attachments/issues_importer_spec.rb
index b44f1ec85f3..20152020897 100644
--- a/spec/lib/gitlab/github_import/importer/attachments/issues_importer_spec.rb
+++ b/spec/lib/gitlab/github_import/importer/attachments/issues_importer_spec.rb
@@ -10,39 +10,68 @@ RSpec.describe Gitlab::GithubImport::Importer::Attachments::IssuesImporter, feat
let(:client) { instance_double(Gitlab::GithubImport::Client) }
describe '#sequential_import', :clean_gitlab_redis_cache do
- let_it_be(:issue_1) { create(:issue, project: project) }
- let_it_be(:issue_2) { create(:issue, project: project) }
+ let_it_be(:issue) { create(:issue, project: project) }
- let(:importer_stub) { instance_double('Gitlab::GithubImport::Importer::NoteAttachmentsImporter') }
- let(:importer_attrs) { [instance_of(Gitlab::GithubImport::Representation::NoteText), project, client] }
+ let_it_be(:issue_with_attachment) do
+ create(:issue,
+ project: project,
+ description: "![image](https://user-images.githubusercontent.com/1/uuid-1.png)"
+ )
+ end
+
+ it 'selects both issues, and selects only properties it needs' do
+ stubbed_collection = class_double(Issue, each_batch: [])
+
+ expect(project.issues).to receive(:id_not_in).with([]).and_return(stubbed_collection)
+ expect(stubbed_collection).to receive(:select).with(:id, :description, :iid).and_return(stubbed_collection)
- it 'imports each project issue attachments' do
- expect(project.issues).to receive(:id_not_in).with([]).and_return(project.issues)
- expect(project.issues).to receive(:select).with(:id, :description, :iid).and_call_original
+ importer.sequential_import
+ end
- expect_next_instances_of(
- Gitlab::GithubImport::Importer::NoteAttachmentsImporter, 2, false, *importer_attrs
- ) do |note_attachments_importer|
- expect(note_attachments_importer).to receive(:execute)
+ it 'executes importer only for the issue with an attachment' do
+ expect_next_instance_of(
+ Gitlab::GithubImport::Importer::NoteAttachmentsImporter,
+ have_attributes(record_db_id: issue_with_attachment.id),
+ project,
+ client
+ ) do |importer|
+ expect(importer).to receive(:execute)
end
importer.sequential_import
end
- context 'when issue is already processed' do
- it "doesn't import this issue attachments" do
- importer.mark_as_imported(issue_1)
+ context 'when flag is disabled' do
+ before do
+ stub_feature_flags(github_importer_attachments: false)
+ end
- expect(project.issues).to receive(:id_not_in).with([issue_1.id.to_s]).and_call_original
- expect_next_instance_of(
- Gitlab::GithubImport::Importer::NoteAttachmentsImporter, *importer_attrs
- ) do |note_attachments_importer|
- expect(note_attachments_importer).to receive(:execute)
+ it 'executes importer for both issues' do
+ expect_next_instances_of(Gitlab::GithubImport::Importer::NoteAttachmentsImporter, 2) do |importer|
+ expect(importer).to receive(:execute)
end
importer.sequential_import
end
end
+
+ context 'when issue has already been processed' do
+ before do
+ importer.mark_as_imported(issue_with_attachment)
+ end
+
+ it 'does not select issues that were processed' do
+ expect(project.issues).to receive(:id_not_in).with([issue_with_attachment.id.to_s]).and_call_original
+
+ importer.sequential_import
+ end
+
+ it 'does not execute importer for the issue with an attachment' do
+ expect(Gitlab::GithubImport::Importer::NoteAttachmentsImporter).not_to receive(:new)
+
+ importer.sequential_import
+ end
+ end
end
describe '#sidekiq_worker_class' do
diff --git a/spec/lib/gitlab/github_import/importer/attachments/merge_requests_importer_spec.rb b/spec/lib/gitlab/github_import/importer/attachments/merge_requests_importer_spec.rb
index 381cb17bb52..5ed6dce8507 100644
--- a/spec/lib/gitlab/github_import/importer/attachments/merge_requests_importer_spec.rb
+++ b/spec/lib/gitlab/github_import/importer/attachments/merge_requests_importer_spec.rb
@@ -10,39 +10,69 @@ RSpec.describe Gitlab::GithubImport::Importer::Attachments::MergeRequestsImporte
let(:client) { instance_double(Gitlab::GithubImport::Client) }
describe '#sequential_import', :clean_gitlab_redis_cache do
- let_it_be(:merge_request_1) { create(:merge_request, source_project: project, target_branch: 'feature1') }
- let_it_be(:merge_request_2) { create(:merge_request, source_project: project, target_branch: 'feature2') }
+ let_it_be(:mr) { create(:merge_request, source_project: project, target_branch: 'feature1') }
+
+ let_it_be(:mr_with_attachment) do
+ create(:merge_request,
+ source_project: project,
+ target_branch: 'feature2',
+ description: "![image](https://user-images.githubusercontent.com/1/uuid-1.png)"
+ )
+ end
+
+ it 'selects both merge requests, and selects only properties it needs' do
+ stubbed_collection = class_double(MergeRequest, each_batch: [])
- let(:importer_stub) { instance_double('Gitlab::GithubImport::Importer::NoteAttachmentsImporter') }
- let(:importer_attrs) { [instance_of(Gitlab::GithubImport::Representation::NoteText), project, client] }
+ expect(project.merge_requests).to receive(:id_not_in).with([]).and_return(stubbed_collection)
+ expect(stubbed_collection).to receive(:select).with(:id, :description, :iid).and_return(stubbed_collection)
- it 'imports each project merge request attachments' do
- expect(project.merge_requests).to receive(:id_not_in).with([]).and_return(project.merge_requests)
- expect(project.merge_requests).to receive(:select).with(:id, :description, :iid).and_call_original
+ importer.sequential_import
+ end
- expect_next_instances_of(
- Gitlab::GithubImport::Importer::NoteAttachmentsImporter, 2, false, *importer_attrs
- ) do |note_attachments_importer|
- expect(note_attachments_importer).to receive(:execute)
+ it 'executes importer only for the merge request with an attachment' do
+ expect_next_instance_of(
+ Gitlab::GithubImport::Importer::NoteAttachmentsImporter,
+ have_attributes(record_db_id: mr_with_attachment.id),
+ project,
+ client
+ ) do |importer|
+ expect(importer).to receive(:execute)
end
importer.sequential_import
end
- context 'when merge request is already processed' do
- it "doesn't import this merge request attachments" do
- importer.mark_as_imported(merge_request_1)
+ context 'when flag is disabled' do
+ before do
+ stub_feature_flags(github_importer_attachments: false)
+ end
- expect(project.merge_requests).to receive(:id_not_in).with([merge_request_1.id.to_s]).and_call_original
- expect_next_instance_of(
- Gitlab::GithubImport::Importer::NoteAttachmentsImporter, *importer_attrs
- ) do |note_attachments_importer|
- expect(note_attachments_importer).to receive(:execute)
+ it 'executes importer for both merge requests' do
+ expect_next_instances_of(Gitlab::GithubImport::Importer::NoteAttachmentsImporter, 2) do |importer|
+ expect(importer).to receive(:execute)
end
importer.sequential_import
end
end
+
+ context 'when merge request has already been processed' do
+ before do
+ importer.mark_as_imported(mr_with_attachment)
+ end
+
+ it 'does not select merge requests that were processed' do
+ expect(project.merge_requests).to receive(:id_not_in).with([mr_with_attachment.id.to_s]).and_call_original
+
+ importer.sequential_import
+ end
+
+ it 'does not execute importer for the merge request with an attachment' do
+ expect(Gitlab::GithubImport::Importer::NoteAttachmentsImporter).not_to receive(:new)
+
+ importer.sequential_import
+ end
+ end
end
describe '#sidekiq_worker_class' do
diff --git a/spec/lib/gitlab/github_import/importer/attachments/notes_importer_spec.rb b/spec/lib/gitlab/github_import/importer/attachments/notes_importer_spec.rb
index 5b3ad032702..da0ee1ed0dd 100644
--- a/spec/lib/gitlab/github_import/importer/attachments/notes_importer_spec.rb
+++ b/spec/lib/gitlab/github_import/importer/attachments/notes_importer_spec.rb
@@ -10,30 +10,75 @@ RSpec.describe Gitlab::GithubImport::Importer::Attachments::NotesImporter, featu
let(:client) { instance_double(Gitlab::GithubImport::Client) }
describe '#sequential_import', :clean_gitlab_redis_cache do
- let_it_be(:note_1) { create(:note, project: project) }
- let_it_be(:note_2) { create(:note, project: project) }
- let_it_be(:system_note) { create(:note, :system, project: project) }
+ let_it_be(:note) { create(:note, project: project) }
- let(:importer_stub) { instance_double('Gitlab::GithubImport::Importer::NoteAttachmentsImporter') }
- let(:importer_attrs) { [instance_of(Gitlab::GithubImport::Representation::NoteText), project, client] }
+ let_it_be(:note_with_attachment) do
+ create(:note,
+ project: project,
+ note: "![image](https://user-images.githubusercontent.com/1/uuid-1.png)"
+ )
+ end
+
+ let_it_be(:system_note_with_attachment) do
+ create(:note,
+ :system,
+ project: project,
+ note: "![image](https://user-images.githubusercontent.com/1/uuid-1.png)"
+ )
+ end
- it 'imports each project user note' do
- expect(project.notes).to receive(:id_not_in).with([]).and_call_original
- expect(Gitlab::GithubImport::Importer::NoteAttachmentsImporter).to receive(:new)
- .with(*importer_attrs).twice.and_return(importer_stub)
- expect(importer_stub).to receive(:execute).twice
+ it 'selects only user notes, and selects only properties it needs' do
+ stubbed_collection = class_double(Note, each_batch: [])
+
+ expect(project.notes).to receive(:id_not_in).with([]).and_return(stubbed_collection)
+ expect(stubbed_collection).to receive(:user).and_return(stubbed_collection)
+ expect(stubbed_collection)
+ .to receive(:select).with(:id, :note, :system, :noteable_type)
+ .and_return(stubbed_collection)
importer.sequential_import
end
- context 'when note is already processed' do
- it "doesn't import this note" do
- importer.mark_as_imported(note_1)
+ it 'executes importer only for the note with an attachment' do
+ expect_next_instance_of(
+ Gitlab::GithubImport::Importer::NoteAttachmentsImporter,
+ have_attributes(record_db_id: note_with_attachment.id),
+ project,
+ client
+ ) do |importer|
+ expect(importer).to receive(:execute)
+ end
+
+ importer.sequential_import
+ end
+
+ context 'when flag is disabled' do
+ before do
+ stub_feature_flags(github_importer_attachments: false)
+ end
+
+ it 'executes importer for both user notes' do
+ expect_next_instances_of(Gitlab::GithubImport::Importer::NoteAttachmentsImporter, 2) do |importer|
+ expect(importer).to receive(:execute)
+ end
+
+ importer.sequential_import
+ end
+ end
+
+ context 'when note has already been processed' do
+ before do
+ importer.mark_as_imported(note_with_attachment)
+ end
+
+ it 'does not select notes that were processed' do
+ expect(project.notes).to receive(:id_not_in).with([note_with_attachment.id.to_s]).and_call_original
+
+ importer.sequential_import
+ end
- expect(project.notes).to receive(:id_not_in).with([note_1.id.to_s]).and_call_original
- expect(Gitlab::GithubImport::Importer::NoteAttachmentsImporter).to receive(:new)
- .with(*importer_attrs).once.and_return(importer_stub)
- expect(importer_stub).to receive(:execute).once
+ it 'does not execute importer for the note with an attachment' do
+ expect(Gitlab::GithubImport::Importer::NoteAttachmentsImporter).not_to receive(:new)
importer.sequential_import
end
diff --git a/spec/lib/gitlab/github_import/importer/attachments/releases_importer_spec.rb b/spec/lib/gitlab/github_import/importer/attachments/releases_importer_spec.rb
index c1c19c40afb..cf51760d966 100644
--- a/spec/lib/gitlab/github_import/importer/attachments/releases_importer_spec.rb
+++ b/spec/lib/gitlab/github_import/importer/attachments/releases_importer_spec.rb
@@ -10,31 +10,64 @@ RSpec.describe Gitlab::GithubImport::Importer::Attachments::ReleasesImporter, fe
let(:client) { instance_double(Gitlab::GithubImport::Client) }
describe '#sequential_import', :clean_gitlab_redis_cache do
- let_it_be(:release_1) { create(:release, project: project) }
- let_it_be(:release_2) { create(:release, project: project) }
+ let_it_be(:release) { create(:release, project: project) }
- let(:importer_stub) { instance_double('Gitlab::GithubImport::Importer::NoteAttachmentsImporter') }
- let(:importer_attrs) { [instance_of(Gitlab::GithubImport::Representation::NoteText), project, client] }
+ let_it_be(:release_with_attachment) do
+ create(:release,
+ project: project,
+ description: "![image](https://user-images.githubusercontent.com/1/uuid-1.png)"
+ )
+ end
- it 'imports each project release' do
- expect(project.releases).to receive(:id_not_in).with([]).and_return(project.releases)
- expect(project.releases).to receive(:select).with(:id, :description, :tag).and_call_original
+ it 'selects both releases, and selects only properties it needs' do
+ stubbed_collection = class_double(Release, each_batch: [])
- expect(Gitlab::GithubImport::Importer::NoteAttachmentsImporter).to receive(:new)
- .with(*importer_attrs).twice.and_return(importer_stub)
- expect(importer_stub).to receive(:execute).twice
+ expect(project.releases).to receive(:id_not_in).with([]).and_return(stubbed_collection)
+ expect(stubbed_collection).to receive(:select).with(:id, :description, :tag).and_return(stubbed_collection)
importer.sequential_import
end
- context 'when note is already processed' do
- it "doesn't import this release" do
- importer.mark_as_imported(release_1)
+ it 'executes importer only for the release with an attachment' do
+ expect_next_instance_of(
+ Gitlab::GithubImport::Importer::NoteAttachmentsImporter,
+ have_attributes(record_db_id: release_with_attachment.id),
+ project,
+ client
+ ) do |importer|
+ expect(importer).to receive(:execute)
+ end
+
+ importer.sequential_import
+ end
+
+ context 'when flag is disabled' do
+ before do
+ stub_feature_flags(github_importer_attachments: false)
+ end
+
+ it 'executes importer for both releases' do
+ expect_next_instances_of(Gitlab::GithubImport::Importer::NoteAttachmentsImporter, 2) do |importer|
+ expect(importer).to receive(:execute)
+ end
+
+ importer.sequential_import
+ end
+ end
+
+ context 'when release has already been processed' do
+ before do
+ importer.mark_as_imported(release_with_attachment)
+ end
+
+ it 'does not select releases that were processed' do
+ expect(project.releases).to receive(:id_not_in).with([release_with_attachment.id.to_s]).and_call_original
+
+ importer.sequential_import
+ end
- expect(project.releases).to receive(:id_not_in).with([release_1.id.to_s]).and_call_original
- expect(Gitlab::GithubImport::Importer::NoteAttachmentsImporter).to receive(:new)
- .with(*importer_attrs).once.and_return(importer_stub)
- expect(importer_stub).to receive(:execute).once
+ it 'does not execute importer for the release with an attachment' do
+ expect(Gitlab::GithubImport::Importer::NoteAttachmentsImporter).not_to receive(:new)
importer.sequential_import
end
diff --git a/spec/lib/gitlab/github_import/importer/collaborators_importer_spec.rb b/spec/lib/gitlab/github_import/importer/collaborators_importer_spec.rb
index c1e9bed5681..d0d3e6c6da8 100644
--- a/spec/lib/gitlab/github_import/importer/collaborators_importer_spec.rb
+++ b/spec/lib/gitlab/github_import/importer/collaborators_importer_spec.rb
@@ -83,7 +83,7 @@ RSpec.describe Gitlab::GithubImport::Importer::CollaboratorsImporter, feature_ca
it 'imports each collaborator in parallel' do
expect(Gitlab::GithubImport::ImportCollaboratorWorker).to receive(:perform_in)
- .with(1, project.id, an_instance_of(Hash), an_instance_of(String))
+ .with(an_instance_of(Float), project.id, an_instance_of(Hash), an_instance_of(String))
waiter = importer.parallel_import
diff --git a/spec/lib/gitlab/github_import/importer/diff_notes_importer_spec.rb b/spec/lib/gitlab/github_import/importer/diff_notes_importer_spec.rb
index 1eb146ea958..ed74e978f16 100644
--- a/spec/lib/gitlab/github_import/importer/diff_notes_importer_spec.rb
+++ b/spec/lib/gitlab/github_import/importer/diff_notes_importer_spec.rb
@@ -102,7 +102,7 @@ RSpec.describe Gitlab::GithubImport::Importer::DiffNotesImporter, feature_catego
.and_yield(github_comment)
expect(Gitlab::GithubImport::ImportDiffNoteWorker).to receive(:perform_in)
- .with(1, project.id, an_instance_of(Hash), an_instance_of(String))
+ .with(an_instance_of(Float), project.id, an_instance_of(Hash), an_instance_of(String))
waiter = importer.parallel_import
diff --git a/spec/lib/gitlab/github_import/importer/events/commented_spec.rb b/spec/lib/gitlab/github_import/importer/events/commented_spec.rb
new file mode 100644
index 00000000000..bd3bea87688
--- /dev/null
+++ b/spec/lib/gitlab/github_import/importer/events/commented_spec.rb
@@ -0,0 +1,69 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::GithubImport::Importer::Events::Commented, feature_category: :importers do
+ subject(:importer) { described_class.new(project, client) }
+
+ let_it_be(:project) { create(:project, :repository) }
+ let_it_be(:user) { create(:user) }
+
+ let(:client) { instance_double('Gitlab::GithubImport::Client') }
+ let(:issuable) { create(:issue, project: project) }
+
+ let(:issue_event) do
+ Gitlab::GithubImport::Representation::IssueEvent.new(
+ id: 1196850910,
+ actor: { id: user.id, login: user.username },
+ event: 'commented',
+ created_at: '2022-07-27T14:41:11Z',
+ updated_at: '2022-07-27T14:41:11Z',
+ body: 'This is my note',
+ issue: { number: issuable.iid, pull_request: issuable.is_a?(MergeRequest) }
+ )
+ end
+
+ let(:extended_events) { true }
+
+ before do
+ allow_next_instance_of(Gitlab::GithubImport::IssuableFinder) do |finder|
+ allow(finder).to receive(:database_id).and_return(issuable.id)
+ end
+ allow_next_instance_of(Gitlab::GithubImport::UserFinder) do |finder|
+ allow(finder).to receive(:find).with(user.id, user.username).and_return(user.id)
+ end
+ allow_next_instance_of(Gitlab::GithubImport::Settings) do |setting|
+ allow(setting).to receive(:extended_events?).and_return(extended_events)
+ end
+ end
+
+ shared_examples 'new note' do
+ it 'creates a note' do
+ expect { importer.execute(issue_event) }.to change { Note.count }.by(1)
+
+ expect(issuable.notes.last).to have_attributes(
+ note: 'This is my note',
+ author_id: user.id,
+ noteable_type: issuable.class.name.to_s
+ )
+ end
+
+ context 'when extended_events is disabled' do
+ let(:extended_events) { false }
+
+ it 'does not create a note' do
+ expect { importer.execute(issue_event) }.not_to change { Note.count }
+ end
+ end
+ end
+
+ context 'with Issue' do
+ it_behaves_like 'new note'
+ end
+
+ context 'with MergeRequest' do
+ let(:issuable) { create(:merge_request, source_project: project, target_project: project) }
+
+ it_behaves_like 'new note'
+ end
+end
diff --git a/spec/lib/gitlab/github_import/importer/events/merged_spec.rb b/spec/lib/gitlab/github_import/importer/events/merged_spec.rb
index 4ea62557dd6..30bc8aabe12 100644
--- a/spec/lib/gitlab/github_import/importer/events/merged_spec.rb
+++ b/spec/lib/gitlab/github_import/importer/events/merged_spec.rb
@@ -11,6 +11,7 @@ RSpec.describe Gitlab::GithubImport::Importer::Events::Merged, feature_category:
let(:client) { instance_double('Gitlab::GithubImport::Client') }
let(:merge_request) { create(:merge_request, source_project: project, target_project: project) }
let(:commit_id) { nil }
+ let(:extended_events) { false }
let(:issue_event) do
Gitlab::GithubImport::Representation::IssueEvent.from_json_hash(
@@ -32,6 +33,9 @@ RSpec.describe Gitlab::GithubImport::Importer::Events::Merged, feature_category:
allow_next_instance_of(Gitlab::GithubImport::UserFinder) do |finder|
allow(finder).to receive(:find).with(user.id, user.username).and_return(user.id)
end
+ allow_next_instance_of(Gitlab::GithubImport::Settings) do |setting|
+ allow(setting).to receive(:extended_events?).and_return(extended_events)
+ end
end
it 'creates expected event and state event' do
@@ -71,4 +75,27 @@ RSpec.describe Gitlab::GithubImport::Importer::Events::Merged, feature_category:
expect(state_event.source_commit).to eq commit_id[0..40]
end
end
+
+ describe 'extended events' do
+ context 'when using extended events' do
+ let(:extended_events) { true }
+
+ it 'creates a merged by note' do
+ expect { importer.execute(issue_event) }.to change { Note.count }.by(1)
+
+ last_note = merge_request.notes.last
+ expect(last_note.created_at).to eq(issue_event.created_at)
+ expect(last_note.author).to eq(project.owner)
+ expect(last_note.note).to eq("*Merged by: #{user.username} at #{issue_event.created_at}*")
+ end
+ end
+
+ context 'when not using extended events' do
+ let(:extended_events) { false }
+
+ it 'does not create a merged by note' do
+ expect { importer.execute(issue_event) }.not_to change { Note.count }
+ end
+ end
+ end
end
diff --git a/spec/lib/gitlab/github_import/importer/events/reviewed_spec.rb b/spec/lib/gitlab/github_import/importer/events/reviewed_spec.rb
new file mode 100644
index 00000000000..f60a9d65269
--- /dev/null
+++ b/spec/lib/gitlab/github_import/importer/events/reviewed_spec.rb
@@ -0,0 +1,85 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::GithubImport::Importer::Events::Reviewed, feature_category: :importers do
+ subject(:importer) { described_class.new(project, client) }
+
+ let_it_be(:project) { create(:project, :repository) }
+ let_it_be(:user) { create(:user) }
+
+ let(:client) { instance_double('Gitlab::GithubImport::Client') }
+ let(:merge_request) { create(:merge_request, source_project: project, target_project: project) }
+ let(:extended_events) { true }
+
+ let(:issue_event) do
+ Gitlab::GithubImport::Representation::IssueEvent.new(
+ id: 1196850910,
+ actor: { id: user.id, login: user.username },
+ event: 'reviewed',
+ submitted_at: '2022-07-27T14:41:11Z',
+ body: 'This is my review',
+ state: state,
+ issue: { number: merge_request.iid, pull_request: true }
+ )
+ end
+
+ let(:state) { 'commented' }
+
+ before do
+ allow_next_instance_of(Gitlab::GithubImport::IssuableFinder) do |finder|
+ allow(finder).to receive(:database_id).and_return(merge_request.id)
+ end
+ allow_next_instance_of(Gitlab::GithubImport::UserFinder) do |finder|
+ allow(finder).to receive(:find).with(user.id, user.username).and_return(user.id)
+ end
+ allow_next_instance_of(Gitlab::GithubImport::Settings) do |setting|
+ allow(setting).to receive(:extended_events?).and_return(extended_events)
+ end
+ end
+
+ it 'creates a review note', :aggregate_failures do
+ expect { importer.execute(issue_event) }.to change { Note.count }.by(1)
+
+ last_note = merge_request.notes.last
+ expect(last_note.note).to include("This is my review")
+ expect(last_note.author).to eq(user)
+ expect(last_note.created_at).to eq(issue_event.submitted_at)
+ end
+
+ it 'does not create a reviewer for the Merge Request', :aggregate_failures do
+ expect { importer.execute(issue_event) }.not_to change { MergeRequestReviewer.count }
+ end
+
+ context 'when stage is approved' do
+ let(:state) { 'approved' }
+
+ it 'creates an approval for the Merge Request', :aggregate_failures do
+ expect { importer.execute(issue_event) }.to change { Approval.count }.by(1).and change { Note.count }.by(2)
+
+ expect(merge_request.approved_by_users.reload).to include(user)
+ expect(merge_request.approvals.last.created_at).to eq(issue_event.submitted_at)
+
+ note = merge_request.notes.where(system: false).last
+ expect(note.note).to include("This is my review")
+ expect(note.author).to eq(user)
+ expect(note.created_at).to eq(issue_event.submitted_at)
+
+ system_note = merge_request.notes.where(system: true).last
+ expect(system_note.note).to eq('approved this merge request')
+ expect(system_note.author).to eq(user)
+ expect(system_note.created_at).to eq(issue_event.submitted_at)
+ expect(system_note.system_note_metadata.action).to eq('approved')
+ end
+ end
+
+ context 'when extended events is false' do
+ let(:extended_events) { false }
+
+ it 'does nothing' do
+ expect { importer.execute(issue_event) }
+ .to not_change { Note.count }
+ .and not_change { Approval.count }
+ end
+ end
+end
diff --git a/spec/lib/gitlab/github_import/importer/issue_event_importer_spec.rb b/spec/lib/gitlab/github_import/importer/issue_event_importer_spec.rb
index 2389489e867..ffe6c237506 100644
--- a/spec/lib/gitlab/github_import/importer/issue_event_importer_spec.rb
+++ b/spec/lib/gitlab/github_import/importer/issue_event_importer_spec.rb
@@ -115,6 +115,18 @@ RSpec.describe Gitlab::GithubImport::Importer::IssueEventImporter, :clean_gitlab
it_behaves_like 'triggers specific event importer', Gitlab::GithubImport::Importer::Events::Merged
end
+ context "when it's commented issue event" do
+ let(:event_name) { 'commented' }
+
+ it_behaves_like 'triggers specific event importer', Gitlab::GithubImport::Importer::Events::Commented
+ end
+
+ context "when it's reviewed issue event" do
+ let(:event_name) { 'reviewed' }
+
+ it_behaves_like 'triggers specific event importer', Gitlab::GithubImport::Importer::Events::Reviewed
+ end
+
context "when it's unknown issue event" do
let(:event_name) { 'fake' }
diff --git a/spec/lib/gitlab/github_import/importer/issue_events_importer_spec.rb b/spec/lib/gitlab/github_import/importer/issue_events_importer_spec.rb
index f7ee6fee6dc..7e926b3af46 100644
--- a/spec/lib/gitlab/github_import/importer/issue_events_importer_spec.rb
+++ b/spec/lib/gitlab/github_import/importer/issue_events_importer_spec.rb
@@ -13,7 +13,7 @@ RSpec.describe Gitlab::GithubImport::Importer::IssueEventsImporter, feature_cate
struct = Struct.new(
:id, :node_id, :url, :actor, :event, :commit_id, :commit_url, :label, :rename, :milestone, :source,
:assignee, :assigner, :review_requester, :requested_reviewer, :issue, :created_at, :performed_via_github_app,
- keyword_init: true
+ :body, :updated_at, :submitted_at, :state, keyword_init: true
)
struct.new(id: rand(10), event: 'closed', created_at: '2022-04-26 18:30:53 UTC')
end
@@ -82,7 +82,7 @@ RSpec.describe Gitlab::GithubImport::Importer::IssueEventsImporter, feature_cate
allow(importer).to receive(:each_object_to_import).and_yield(issue_event)
expect(Gitlab::GithubImport::ImportIssueEventWorker).to receive(:perform_in).with(
- 1, project.id, an_instance_of(Hash), an_instance_of(String)
+ an_instance_of(Float), project.id, an_instance_of(Hash), an_instance_of(String)
)
waiter = importer.parallel_import
diff --git a/spec/lib/gitlab/github_import/importer/issues_importer_spec.rb b/spec/lib/gitlab/github_import/importer/issues_importer_spec.rb
index 9451d1dfc37..93466497ceb 100644
--- a/spec/lib/gitlab/github_import/importer/issues_importer_spec.rb
+++ b/spec/lib/gitlab/github_import/importer/issues_importer_spec.rb
@@ -96,7 +96,7 @@ RSpec.describe Gitlab::GithubImport::Importer::IssuesImporter, feature_category:
expect(Gitlab::GithubImport::ImportIssueWorker)
.to receive(:perform_in)
- .with(1, project.id, an_instance_of(Hash), an_instance_of(String))
+ .with(an_instance_of(Float), project.id, an_instance_of(Hash), an_instance_of(String))
waiter = importer.parallel_import
diff --git a/spec/lib/gitlab/github_import/importer/lfs_objects_importer_spec.rb b/spec/lib/gitlab/github_import/importer/lfs_objects_importer_spec.rb
index a5ec39b4177..eaf4d41df43 100644
--- a/spec/lib/gitlab/github_import/importer/lfs_objects_importer_spec.rb
+++ b/spec/lib/gitlab/github_import/importer/lfs_objects_importer_spec.rb
@@ -123,7 +123,7 @@ RSpec.describe Gitlab::GithubImport::Importer::LfsObjectsImporter, feature_categ
end
expect(Gitlab::GithubImport::ImportLfsObjectWorker).to receive(:perform_in)
- .with(1, project.id, an_instance_of(Hash), an_instance_of(String))
+ .with(an_instance_of(Float), project.id, an_instance_of(Hash), an_instance_of(String))
waiter = importer.parallel_import
diff --git a/spec/lib/gitlab/github_import/importer/notes_importer_spec.rb b/spec/lib/gitlab/github_import/importer/notes_importer_spec.rb
index 92d3071c826..722470cbc1d 100644
--- a/spec/lib/gitlab/github_import/importer/notes_importer_spec.rb
+++ b/spec/lib/gitlab/github_import/importer/notes_importer_spec.rb
@@ -88,7 +88,7 @@ RSpec.describe Gitlab::GithubImport::Importer::NotesImporter, feature_category:
.and_yield(github_comment)
expect(Gitlab::GithubImport::ImportNoteWorker).to receive(:perform_in)
- .with(1, project.id, an_instance_of(Hash), an_instance_of(String))
+ .with(an_instance_of(Float), project.id, an_instance_of(Hash), an_instance_of(String))
waiter = importer.parallel_import
diff --git a/spec/lib/gitlab/github_import/importer/protected_branches_importer_spec.rb b/spec/lib/gitlab/github_import/importer/protected_branches_importer_spec.rb
index b0892767fb3..b2fc1bea39e 100644
--- a/spec/lib/gitlab/github_import/importer/protected_branches_importer_spec.rb
+++ b/spec/lib/gitlab/github_import/importer/protected_branches_importer_spec.rb
@@ -145,7 +145,7 @@ RSpec.describe Gitlab::GithubImport::Importer::ProtectedBranchesImporter, featur
it 'imports each protected branch in parallel' do
expect(Gitlab::GithubImport::ImportProtectedBranchWorker)
.to receive(:perform_in)
- .with(1, project.id, an_instance_of(Hash), an_instance_of(String))
+ .with(an_instance_of(Float), project.id, an_instance_of(Hash), an_instance_of(String))
expect(Gitlab::GithubImport::ObjectCounter)
.to receive(:increment).with(project, :protected_branch, :fetched)
@@ -166,7 +166,7 @@ RSpec.describe Gitlab::GithubImport::Importer::ProtectedBranchesImporter, featur
# when user has no admin rights on repo
let(:unknown_protection_branch) { branch_struct.new(name: 'development', protection: nil) }
- let(:page_counter) { instance_double(Gitlab::GithubImport::PageCounter) }
+ let(:page_counter) { instance_double(Gitlab::Import::PageCounter) }
before do
allow(client).to receive(:branches).with(project.import_source)
diff --git a/spec/lib/gitlab/github_import/importer/pull_requests/review_importer_spec.rb b/spec/lib/gitlab/github_import/importer/pull_requests/review_importer_spec.rb
index 6846c99fb63..1651774b5ce 100644
--- a/spec/lib/gitlab/github_import/importer/pull_requests/review_importer_spec.rb
+++ b/spec/lib/gitlab/github_import/importer/pull_requests/review_importer_spec.rb
@@ -30,6 +30,12 @@ RSpec.describe Gitlab::GithubImport::Importer::PullRequests::ReviewImporter,
expect(merge_request.reviewers).to contain_exactly(author)
end
+ context 'when add_reviewer option is false' do
+ it 'does not change Merge Request reviewers' do
+ expect { subject.execute(add_reviewer: false) }.not_to change { MergeRequestReviewer.count }
+ end
+ end
+
context 'when reviewer already exists' do
before do
create(
@@ -309,6 +315,7 @@ RSpec.describe Gitlab::GithubImport::Importer::PullRequests::ReviewImporter,
extra.reverse_merge(
author: { id: 999, login: 'author' },
merge_request_id: merge_request.id,
+ merge_request_iid: merge_request.iid,
review_type: type,
note: 'note',
submitted_at: submitted_at.to_s
diff --git a/spec/lib/gitlab/github_import/importer/pull_requests/review_requests_importer_spec.rb b/spec/lib/gitlab/github_import/importer/pull_requests/review_requests_importer_spec.rb
index 1977815e3a0..7ba88b4fa79 100644
--- a/spec/lib/gitlab/github_import/importer/pull_requests/review_requests_importer_spec.rb
+++ b/spec/lib/gitlab/github_import/importer/pull_requests/review_requests_importer_spec.rb
@@ -116,10 +116,10 @@ RSpec.describe Gitlab::GithubImport::Importer::PullRequests::ReviewRequestsImpor
it 'schedule import for each merge request reviewers' do
expect(Gitlab::GithubImport::PullRequests::ImportReviewRequestWorker)
- .to receive(:perform_in).with(1, *expected_worker_payload.first)
+ .to receive(:perform_in).with(an_instance_of(Float), *expected_worker_payload.first)
expect(Gitlab::GithubImport::PullRequests::ImportReviewRequestWorker)
- .to receive(:perform_in).with(1, *expected_worker_payload.second)
+ .to receive(:perform_in).with(an_instance_of(Float), *expected_worker_payload.second)
expect(Gitlab::GithubImport::ObjectCounter)
.to receive(:increment).twice.with(project, :pull_request_review_request, :fetched)
@@ -137,7 +137,7 @@ RSpec.describe Gitlab::GithubImport::Importer::PullRequests::ReviewRequestsImpor
it "doesn't schedule import this merge request reviewers" do
expect(Gitlab::GithubImport::PullRequests::ImportReviewRequestWorker)
- .to receive(:perform_in).with(1, *expected_worker_payload.second)
+ .to receive(:perform_in).with(an_instance_of(Float), *expected_worker_payload.second)
expect(Gitlab::GithubImport::ObjectCounter)
.to receive(:increment).once.with(project, :pull_request_review_request, :fetched)
diff --git a/spec/lib/gitlab/github_import/importer/pull_requests/reviews_importer_spec.rb b/spec/lib/gitlab/github_import/importer/pull_requests/reviews_importer_spec.rb
index f5779f300b8..94248f60a0b 100644
--- a/spec/lib/gitlab/github_import/importer/pull_requests/reviews_importer_spec.rb
+++ b/spec/lib/gitlab/github_import/importer/pull_requests/reviews_importer_spec.rb
@@ -69,7 +69,7 @@ RSpec.describe Gitlab::GithubImport::Importer::PullRequests::ReviewsImporter, fe
end
it 'skips cached pages' do
- Gitlab::GithubImport::PageCounter
+ Gitlab::Import::PageCounter
.new(project, "merge_request/#{merge_request.id}/pull_request_reviews")
.set(2)
diff --git a/spec/lib/gitlab/github_import/importer/pull_requests_importer_spec.rb b/spec/lib/gitlab/github_import/importer/pull_requests_importer_spec.rb
index 1a0adbbe3a3..4c6b6a81d35 100644
--- a/spec/lib/gitlab/github_import/importer/pull_requests_importer_spec.rb
+++ b/spec/lib/gitlab/github_import/importer/pull_requests_importer_spec.rb
@@ -106,7 +106,7 @@ RSpec.describe Gitlab::GithubImport::Importer::PullRequestsImporter, feature_cat
expect(Gitlab::GithubImport::ImportPullRequestWorker)
.to receive(:perform_in)
- .with(1, project.id, an_instance_of(Hash), an_instance_of(String))
+ .with(an_instance_of(Float), project.id, an_instance_of(Hash), an_instance_of(String))
waiter = importer.parallel_import
diff --git a/spec/lib/gitlab/github_import/importer/replay_events_importer_spec.rb b/spec/lib/gitlab/github_import/importer/replay_events_importer_spec.rb
new file mode 100644
index 00000000000..2b21232c642
--- /dev/null
+++ b/spec/lib/gitlab/github_import/importer/replay_events_importer_spec.rb
@@ -0,0 +1,139 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::GithubImport::Importer::ReplayEventsImporter, feature_category: :importers do
+ let_it_be(:association) { create(:merged_merge_request) }
+ let_it_be(:project) { association.project }
+  let(:user1) { build(:user, username: 'user1') }
+  let(:user2) { build(:user, username: 'user2') }
+  let(:user3) { build(:user, username: 'user3') }
+ let(:client) { instance_double(Gitlab::GithubImport::Client) }
+
+ let(:representation) do
+ Gitlab::GithubImport::Representation::ReplayEvent.new(
+ issuable_type: association.class.name.to_s, issuable_iid: association.iid
+ )
+ end
+
+ let(:events) do
+ [
+ {
+ requested_reviewer: { id: 1, login: 'user1' },
+ event: 'review_requested'
+ },
+ {
+ requested_reviewer: { id: 1, login: 'user1' },
+ event: 'review_request_removed'
+ },
+ {
+ requested_reviewer: { id: 2, login: 'user2' },
+ event: 'review_requested'
+ },
+ {
+ requested_reviewer: { id: 2, login: 'user2' },
+ event: 'review_request_removed'
+ },
+ {
+ requested_reviewer: { id: 2, login: 'user2' },
+ event: 'review_requested'
+ },
+ {
+ requested_reviewer: { id: 3, login: 'user3' },
+ event: 'review_requested'
+ }
+ ]
+ end
+
+ subject(:importer) { described_class.new(representation, project, client) }
+
+ describe '#execute' do
+ before do
+ representations = events.map { |e| Gitlab::GithubImport::Representation::IssueEvent.from_json_hash(e) }
+
+ allow_next_instance_of(Gitlab::GithubImport::EventsCache) do |events_cache|
+ allow(events_cache).to receive(:events).with(association).and_return(representations)
+ end
+ end
+
+ context 'when association is a MergeRequest' do
+ it 'imports reviewers' do
+ representation = instance_double(Gitlab::GithubImport::Representation::PullRequests::ReviewRequests)
+
+ expect(Gitlab::GithubImport::Representation::PullRequests::ReviewRequests).to receive(:from_json_hash).with(
+ merge_request_id: association.id,
+ merge_request_iid: association.iid,
+ users: [
+ { id: 2, login: 'user2' },
+ { id: 3, login: 'user3' }
+ ]
+ ).and_return(representation)
+
+ expect_next_instance_of(
+ Gitlab::GithubImport::Importer::PullRequests::ReviewRequestImporter, anything, project, client
+      ) do |review_importer|
+        expect(review_importer).to receive(:execute)
+ end
+
+ importer.execute
+ end
+
+ context 'when reviewer is a team' do
+ let(:events) do
+ [
+ {
+ event: 'review_requested',
+ requested_team: { name: 'backend-team' }
+ },
+ {
+ event: 'review_requested',
+ requested_team: { name: 'frontend-team' }
+ },
+ {
+ event: 'review_request_removed',
+ requested_team: { name: 'frontend-team' }
+ }
+ ]
+ end
+
+      it 'ignores the events and does not assign the reviewers' do
+ expect(Gitlab::GithubImport::Representation::PullRequests::ReviewRequests).to receive(:from_json_hash).with(
+ merge_request_id: association.id,
+ merge_request_iid: association.iid,
+ users: []
+ ).and_call_original
+
+ importer.execute
+ end
+ end
+ end
+
+ context 'when association is not found' do
+ let(:representation) do
+ Gitlab::GithubImport::Representation::ReplayEvent.new(
+ issuable_type: association.class.name.to_s, issuable_iid: -1
+ )
+ end
+
+ it 'does not read events' do
+ expect(Gitlab::GithubImport::EventsCache).not_to receive(:new)
+
+ importer.execute
+ end
+ end
+
+    context 'when issuable type is not supported' do
+ let(:representation) do
+ Gitlab::GithubImport::Representation::ReplayEvent.new(
+ issuable_type: 'Issue', issuable_iid: association.iid
+ )
+ end
+
+ it 'does not read events' do
+ expect(Gitlab::GithubImport::EventsCache).not_to receive(:new)
+
+ importer.execute
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/github_import/importer/single_endpoint_diff_notes_importer_spec.rb b/spec/lib/gitlab/github_import/importer/single_endpoint_diff_notes_importer_spec.rb
index 6fe0494d7cd..d2e63eba954 100644
--- a/spec/lib/gitlab/github_import/importer/single_endpoint_diff_notes_importer_spec.rb
+++ b/spec/lib/gitlab/github_import/importer/single_endpoint_diff_notes_importer_spec.rb
@@ -53,7 +53,7 @@ RSpec.describe Gitlab::GithubImport::Importer::SingleEndpointDiffNotesImporter d
end
it 'skips cached pages' do
- Gitlab::GithubImport::PageCounter
+ Gitlab::Import::PageCounter
.new(project, "merge_request/#{merge_request.id}/pull_request_comments")
.set(2)
diff --git a/spec/lib/gitlab/github_import/importer/single_endpoint_issue_events_importer_spec.rb b/spec/lib/gitlab/github_import/importer/single_endpoint_issue_events_importer_spec.rb
index 91f89f0779c..19142b94519 100644
--- a/spec/lib/gitlab/github_import/importer/single_endpoint_issue_events_importer_spec.rb
+++ b/spec/lib/gitlab/github_import/importer/single_endpoint_issue_events_importer_spec.rb
@@ -3,9 +3,9 @@
require 'spec_helper'
RSpec.describe Gitlab::GithubImport::Importer::SingleEndpointIssueEventsImporter, feature_category: :importers do
- let(:client) { double }
+ let(:client) { Gitlab::GithubImport::Client.new('token') }
- let_it_be(:project) { create(:project, :import_started, import_source: 'http://somegithub.com') }
+ let_it_be(:project) { create(:project, :import_started, import_source: 'foo/bar') }
let!(:issuable) { create(:issue, project: project) }
@@ -88,23 +88,32 @@ RSpec.describe Gitlab::GithubImport::Importer::SingleEndpointIssueEventsImporter
describe '#each_object_to_import', :clean_gitlab_redis_cache do
let(:issue_event) do
struct = Struct.new(:id, :event, :created_at, :issue, keyword_init: true)
- struct.new(id: 1, event: 'closed', created_at: '2022-04-26 18:30:53 UTC')
+ struct.new(id: 1, event: event_name, created_at: '2022-04-26 18:30:53 UTC')
end
+ let(:event_name) { 'closed' }
+
+ let(:page_events) { [issue_event] }
+
let(:page) do
instance_double(
Gitlab::GithubImport::Client::Page,
- number: 1, objects: [issue_event]
+ number: 1, objects: page_events
)
end
- let(:page_counter) { instance_double(Gitlab::GithubImport::PageCounter) }
+ let(:page_counter) { instance_double(Gitlab::Import::PageCounter) }
+
+ let(:extended_events) { true }
before do
allow(Gitlab::Redis::SharedState).to receive(:with).and_return('OK')
allow(client).to receive(:each_page).once.with(:issue_timeline,
project.import_source, issuable.iid, { state: 'all', sort: 'created', direction: 'asc', page: 1 }
).and_yield(page)
+ allow_next_instance_of(Gitlab::GithubImport::Settings) do |setting|
+ allow(setting).to receive(:extended_events?).and_return(extended_events)
+ end
end
context 'with issues' do
@@ -152,7 +161,7 @@ RSpec.describe Gitlab::GithubImport::Importer::SingleEndpointIssueEventsImporter
end
it 'triggers page number increment' do
- expect(Gitlab::GithubImport::PageCounter)
+ expect(Gitlab::Import::PageCounter)
.to receive(:new).with(project, 'issues/1/issue_timeline')
.and_return(page_counter)
expect(page_counter).to receive(:current).and_return(1)
@@ -166,7 +175,7 @@ RSpec.describe Gitlab::GithubImport::Importer::SingleEndpointIssueEventsImporter
context 'when page is already processed' do
before do
- page_counter = Gitlab::GithubImport::PageCounter.new(
+ page_counter = Gitlab::Import::PageCounter.new(
project, subject.page_counter_id(issuable)
)
page_counter.set(page.number)
@@ -190,10 +199,7 @@ RSpec.describe Gitlab::GithubImport::Importer::SingleEndpointIssueEventsImporter
end
context 'when event is not supported' do
- let(:issue_event) do
- struct = Struct.new(:id, :event, :created_at, :issue, keyword_init: true)
- struct.new(id: 1, event: 'not_supported_event', created_at: '2022-04-26 18:30:53 UTC')
- end
+ let(:event_name) { 'not_supported_event' }
it "doesn't process this event" do
counter = 0
@@ -201,5 +207,188 @@ RSpec.describe Gitlab::GithubImport::Importer::SingleEndpointIssueEventsImporter
expect(counter).to eq 0
end
end
+
+ describe 'increment object counter' do
+ it 'increments counter' do
+ expect(Gitlab::GithubImport::ObjectCounter).to receive(:increment).with(project, :issue_event, :fetched)
+
+ subject.each_object_to_import { |event| event }
+ end
+
+ context 'when event should increment a mapped fetched counter' do
+ before do
+ stub_const('Gitlab::GithubImport::Importer::IssueEventImporter::EVENT_COUNTER_MAP', {
+ 'closed' => 'custom_type'
+ })
+ end
+
+ it 'increments the mapped fetched counter' do
+ expect(Gitlab::GithubImport::ObjectCounter).to receive(:increment).with(project, 'custom_type', :fetched)
+
+ subject.each_object_to_import { |event| event }
+ end
+
+ context 'when extended_events is disabled' do
+ let(:extended_events) { false }
+
+ it 'increments the issue_event fetched counter' do
+ expect(Gitlab::GithubImport::ObjectCounter).to receive(:increment).with(project, :issue_event, :fetched)
+
+ subject.each_object_to_import { |event| event }
+ end
+ end
+ end
+ end
+
+ describe 'save events' do
+ shared_examples 'saves event' do
+ it 'saves event' do
+ expect(Gitlab::GithubImport::Representation::IssueEvent).to receive(:from_api_response).with(issue_event.to_h)
+ .and_call_original
+
+ expect_next_instance_of(Gitlab::GithubImport::EventsCache) do |events_cache|
+ expect(events_cache).to receive(:add).with(
+ issuable,
+ an_instance_of(Gitlab::GithubImport::Representation::IssueEvent)
+ )
+ end
+
+ subject.each_object_to_import { |event| event }
+ end
+ end
+
+ context 'when event is review_requested' do
+ let(:event_name) { 'review_requested' }
+
+ it_behaves_like 'saves event'
+ end
+
+ context 'when event is review_request_removed' do
+ let(:event_name) { 'review_request_removed' }
+
+ it_behaves_like 'saves event'
+ end
+
+ context 'when event is closed' do
+ let(:event_name) { 'closed' }
+
+ it 'does not save event' do
+ expect_next_instance_of(Gitlab::GithubImport::EventsCache) do |events_cache|
+ expect(events_cache).not_to receive(:add)
+ end
+
+ subject.each_object_to_import { |event| event }
+ end
+ end
+
+ context 'when extended_events is disabled' do
+ let(:event_name) { 'review_requested' }
+ let(:extended_events) { false }
+
+ it 'does not save event' do
+ expect(Gitlab::GithubImport::EventsCache).not_to receive(:new)
+
+ subject.each_object_to_import { |event| event }
+ end
+ end
+ end
+
+ describe 'after batch processed' do
+ context 'when events should be replayed' do
+ let(:event_name) { 'review_requested' }
+
+ it 'enqueues worker to replay events' do
+ allow(Gitlab::JobWaiter).to receive(:generate_key).and_return('job_waiter_key')
+
+ expect(Gitlab::GithubImport::ReplayEventsWorker).to receive(:perform_async)
+ .with(
+ project.id,
+ { 'issuable_type' => issuable.class.name.to_s, 'issuable_iid' => issuable.iid },
+ 'job_waiter_key'
+ )
+
+ subject.each_object_to_import { |event| event }
+ end
+ end
+
+ context 'when events are not relevant' do
+ let(:event_name) { 'closed' }
+
+ it 'does not replay events' do
+ expect(Gitlab::GithubImport::ReplayEventsWorker).not_to receive(:perform_async)
+
+ subject.each_object_to_import { |event| event }
+ end
+ end
+
+ context 'when extended_events is disabled' do
+ let(:extended_events) { false }
+
+ it 'does not replay events' do
+ expect(Gitlab::GithubImport::ReplayEventsWorker).not_to receive(:perform_async)
+
+ subject.each_object_to_import { |event| event }
+ end
+ end
+ end
+ end
+
+ describe '#execute', :clean_gitlab_redis_cache do
+ let(:extended_events) { false }
+
+ before do
+ allow_next_instance_of(Gitlab::GithubImport::Settings) do |setting|
+ allow(setting).to receive(:extended_events?).and_return(extended_events)
+ end
+
+ stub_request(:get, 'https://api.github.com/rate_limit')
+ .to_return(status: 200, headers: { 'X-RateLimit-Limit' => 5000, 'X-RateLimit-Remaining' => 5000 })
+
+ events = [
+ {
+ id: 1,
+ event: 'review_requested',
+ created_at: '2022-04-26 18:30:53 UTC',
+ issue: {
+ number: issuable.iid,
+ pull_request: true
+ }
+ }
+ ]
+
+ endpoint = 'https://api.github.com/repos/foo/bar/issues/1/timeline' \
+ '?direction=asc&page=1&per_page=100&sort=created&state=all'
+
+ stub_request(:get, endpoint)
+ .to_return(status: 200, body: events.to_json, headers: { 'Content-Type' => 'application/json' })
+ end
+
+ context 'when extended_events is disabled' do
+ it 'enqueues importer worker' do
+ expect { subject.execute }.to change { Gitlab::GithubImport::ReplayEventsWorker.jobs.size }.by(0)
+ .and change { Gitlab::GithubImport::ImportIssueEventWorker.jobs.size }.by(1)
+ end
+
+ it 'returns job waiter with the correct remaining jobs count' do
+ job_waiter = subject.execute
+
+ expect(job_waiter.jobs_remaining).to eq(1)
+ end
+ end
+
+ context 'when extended_events is enabled' do
+ let(:extended_events) { true }
+
+ it 'enqueues importer worker and replay worker' do
+ expect { subject.execute }.to change { Gitlab::GithubImport::ReplayEventsWorker.jobs.size }.by(1)
+ .and change { Gitlab::GithubImport::ImportIssueEventWorker.jobs.size }.by(1)
+ end
+
+ it 'returns job waiter with the correct remaining jobs count' do
+ job_waiter = subject.execute
+
+ expect(job_waiter.jobs_remaining).to eq(2)
+ end
+ end
end
end
diff --git a/spec/lib/gitlab/github_import/importer/single_endpoint_issue_notes_importer_spec.rb b/spec/lib/gitlab/github_import/importer/single_endpoint_issue_notes_importer_spec.rb
index 88613244c8b..c0f0d86d625 100644
--- a/spec/lib/gitlab/github_import/importer/single_endpoint_issue_notes_importer_spec.rb
+++ b/spec/lib/gitlab/github_import/importer/single_endpoint_issue_notes_importer_spec.rb
@@ -52,7 +52,7 @@ RSpec.describe Gitlab::GithubImport::Importer::SingleEndpointIssueNotesImporter
end
it 'skips cached pages' do
- Gitlab::GithubImport::PageCounter
+ Gitlab::Import::PageCounter
.new(project, "issue/#{issue.id}/issue_comments")
.set(2)
diff --git a/spec/lib/gitlab/github_import/importer/single_endpoint_merge_request_notes_importer_spec.rb b/spec/lib/gitlab/github_import/importer/single_endpoint_merge_request_notes_importer_spec.rb
index 601cd7a8f15..2d981a3d14f 100644
--- a/spec/lib/gitlab/github_import/importer/single_endpoint_merge_request_notes_importer_spec.rb
+++ b/spec/lib/gitlab/github_import/importer/single_endpoint_merge_request_notes_importer_spec.rb
@@ -53,7 +53,7 @@ RSpec.describe Gitlab::GithubImport::Importer::SingleEndpointMergeRequestNotesIm
end
it 'skips cached pages' do
- Gitlab::GithubImport::PageCounter
+ Gitlab::Import::PageCounter
.new(project, "merge_request/#{merge_request.id}/issue_comments")
.set(2)
diff --git a/spec/lib/gitlab/github_import/markdown_text_spec.rb b/spec/lib/gitlab/github_import/markdown_text_spec.rb
index 3f771970588..ff3821dedec 100644
--- a/spec/lib/gitlab/github_import/markdown_text_spec.rb
+++ b/spec/lib/gitlab/github_import/markdown_text_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Gitlab::GithubImport::MarkdownText do
+RSpec.describe Gitlab::GithubImport::MarkdownText, feature_category: :importers do
describe '.format' do
it 'formats the text' do
author = double(:author, login: 'Alice')
@@ -103,6 +103,10 @@ RSpec.describe Gitlab::GithubImport::MarkdownText do
"https://github.com/nickname/public-test-repo/files/3/git-cheat-sheet.#{doc_extension}"
)
end
+
+ it 'returns an empty array when passed nil' do
+ expect(described_class.fetch_attachments(nil)).to be_empty
+ end
end
describe '#to_s' do
diff --git a/spec/lib/gitlab/github_import/parallel_scheduling_spec.rb b/spec/lib/gitlab/github_import/parallel_scheduling_spec.rb
index 3188206de5b..2418c2e08af 100644
--- a/spec/lib/gitlab/github_import/parallel_scheduling_spec.rb
+++ b/spec/lib/gitlab/github_import/parallel_scheduling_spec.rb
@@ -279,26 +279,53 @@ RSpec.describe Gitlab::GithubImport::ParallelScheduling, feature_category: :impo
allow(importer).to receive(:representation_class).and_return(repr_class)
allow(importer).to receive(:sidekiq_worker_class).and_return(worker_class)
allow(repr_class).to receive(:from_api_response).with(object, {})
- .and_return({ title: 'One' }, { title: 'Two' }, { title: 'Three' })
+ .and_return({ title: 'One' }, { title: 'Two' }, { title: 'Three' }, { title: 'Four' })
end
it 'imports data in parallel with delays respecting parallel_import_batch definition and return job waiter' do
- allow(::Gitlab::JobWaiter).to receive(:generate_key).and_return('waiter-key')
- allow(importer).to receive(:parallel_import_batch).and_return({ size: 2, delay: 1.minute })
-
- expect(importer).to receive(:each_object_to_import)
- .and_yield(object).and_yield(object).and_yield(object)
- expect(worker_class).to receive(:perform_in)
- .with(1, project.id, { 'title' => 'One' }, 'waiter-key').ordered
- expect(worker_class).to receive(:perform_in)
- .with(1, project.id, { 'title' => 'Two' }, 'waiter-key').ordered
- expect(worker_class).to receive(:perform_in)
- .with(61, project.id, { 'title' => 'Three' }, 'waiter-key').ordered
-
- job_waiter = importer.parallel_import
-
- expect(job_waiter.key).to eq('waiter-key')
- expect(job_waiter.jobs_remaining).to eq(3)
+ freeze_time do
+ allow(::Gitlab::JobWaiter).to receive(:generate_key).and_return('waiter-key')
+ allow(importer).to receive(:parallel_import_batch).and_return({ size: 2, delay: 1.minute })
+
+ expect(importer).to receive(:each_object_to_import)
+ .and_yield(object).and_yield(object).and_yield(object).and_yield(object)
+ expect(worker_class).to receive(:perform_in)
+ .with(1.0, project.id, { 'title' => 'One' }, 'waiter-key').ordered
+ expect(worker_class).to receive(:perform_in)
+ .with(31.0, project.id, { 'title' => 'Two' }, 'waiter-key').ordered
+ expect(worker_class).to receive(:perform_in)
+ .with(61.0, project.id, { 'title' => 'Three' }, 'waiter-key').ordered
+ expect(worker_class).to receive(:perform_in)
+ .with(91.0, project.id, { 'title' => 'Four' }, 'waiter-key').ordered
+
+ job_waiter = importer.parallel_import
+
+ expect(job_waiter.key).to eq('waiter-key')
+ expect(job_waiter.jobs_remaining).to eq(4)
+ end
+ end
+
+ context 'when job is running for a long time' do
+ it 'deducts the job runtime from the delay' do
+ freeze_time do
+ allow(::Gitlab::JobWaiter).to receive(:generate_key).and_return('waiter-key')
+ allow(importer).to receive(:parallel_import_batch).and_return({ size: 2, delay: 1.minute })
+ allow(importer).to receive(:job_started_at).and_return(45.seconds.ago)
+ allow(importer).to receive(:each_object_to_import)
+ .and_yield(object).and_yield(object).and_yield(object).and_yield(object)
+
+ expect(worker_class).to receive(:perform_in)
+ .with(1.0, project.id, { 'title' => 'One' }, 'waiter-key').ordered
+ expect(worker_class).to receive(:perform_in)
+ .with(1.0, project.id, { 'title' => 'Two' }, 'waiter-key').ordered
+ expect(worker_class).to receive(:perform_in)
+ .with(16.0, project.id, { 'title' => 'Three' }, 'waiter-key').ordered
+ expect(worker_class).to receive(:perform_in)
+ .with(46.0, project.id, { 'title' => 'Four' }, 'waiter-key').ordered
+
+ importer.parallel_import
+ end
+ end
end
context 'when job restarts due to API rate limit or Sidekiq interruption' do
@@ -313,21 +340,23 @@ RSpec.describe Gitlab::GithubImport::ParallelScheduling, feature_category: :impo
end
it "restores job waiter's key and jobs_remaining" do
- allow(importer).to receive(:parallel_import_batch).and_return({ size: 1, delay: 1.minute })
+ freeze_time do
+ allow(importer).to receive(:parallel_import_batch).and_return({ size: 1, delay: 1.minute })
- expect(importer).to receive(:each_object_to_import).and_yield(object).and_yield(object).and_yield(object)
+ expect(importer).to receive(:each_object_to_import).and_yield(object).and_yield(object).and_yield(object)
- expect(worker_class).to receive(:perform_in)
- .with(1, project.id, { 'title' => 'One' }, 'waiter-key').ordered
- expect(worker_class).to receive(:perform_in)
- .with(61, project.id, { 'title' => 'Two' }, 'waiter-key').ordered
- expect(worker_class).to receive(:perform_in)
- .with(121, project.id, { 'title' => 'Three' }, 'waiter-key').ordered
+ expect(worker_class).to receive(:perform_in)
+ .with(1.0, project.id, { 'title' => 'One' }, 'waiter-key').ordered
+ expect(worker_class).to receive(:perform_in)
+ .with(61.0, project.id, { 'title' => 'Two' }, 'waiter-key').ordered
+ expect(worker_class).to receive(:perform_in)
+ .with(121.0, project.id, { 'title' => 'Three' }, 'waiter-key').ordered
- job_waiter = importer.parallel_import
+ job_waiter = importer.parallel_import
- expect(job_waiter.key).to eq('waiter-key')
- expect(job_waiter.jobs_remaining).to eq(6)
+ expect(job_waiter.key).to eq('waiter-key')
+ expect(job_waiter.jobs_remaining).to eq(6)
+ end
end
end
end
diff --git a/spec/lib/gitlab/github_import/representation/issue_event_spec.rb b/spec/lib/gitlab/github_import/representation/issue_event_spec.rb
index 6620dee0fd0..de0509c3e5e 100644
--- a/spec/lib/gitlab/github_import/representation/issue_event_spec.rb
+++ b/spec/lib/gitlab/github_import/representation/issue_event_spec.rb
@@ -168,8 +168,8 @@ RSpec.describe Gitlab::GithubImport::Representation::IssueEvent do
describe '.from_api_response' do
let(:response) do
event_resource = Struct.new(
- :id, :node_id, :url, :actor, :event, :commit_id, :commit_url, :label, :rename, :milestone,
- :source, :assignee, :requested_reviewer, :review_requester, :issue, :created_at,
+ :id, :node_id, :url, :actor, :event, :commit_id, :commit_url, :label, :rename, :milestone, :state, :body,
+ :source, :assignee, :requested_reviewer, :review_requester, :issue, :created_at, :updated_at, :submitted_at,
:performed_via_github_app,
keyword_init: true
)
diff --git a/spec/lib/gitlab/github_import/representation/note_text_spec.rb b/spec/lib/gitlab/github_import/representation/note_text_spec.rb
index 7aa458a1c33..b1ca1512855 100644
--- a/spec/lib/gitlab/github_import/representation/note_text_spec.rb
+++ b/spec/lib/gitlab/github_import/representation/note_text_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Gitlab::GithubImport::Representation::NoteText do
+RSpec.describe Gitlab::GithubImport::Representation::NoteText, feature_category: :importers do
shared_examples 'a Note text data' do |match_record_type|
it 'returns an instance of NoteText' do
expect(representation).to be_an_instance_of(described_class)
@@ -153,4 +153,36 @@ RSpec.describe Gitlab::GithubImport::Representation::NoteText do
end
end
end
+
+ describe '#has_attachments?' do
+ subject { described_class.new({ text: text }).has_attachments? }
+
+ context 'when text has attachments' do
+ let(:text) { 'See ![image](https://user-images.githubusercontent.com/1/uuid-1.png) for details' }
+
+ it { is_expected.to eq(true) }
+ end
+
+ context 'when text does not have attachments' do
+ let(:text) { 'Some text here' }
+
+ it { is_expected.to eq(false) }
+ end
+ end
+
+ describe '#attachments' do
+ subject { described_class.new({ text: text }).attachments }
+
+ context 'when text has attachments' do
+ let(:text) { 'See ![image](https://user-images.githubusercontent.com/1/uuid-1.png) for details' }
+
+ it { is_expected.to contain_exactly(instance_of(Gitlab::GithubImport::Markdown::Attachment)) }
+ end
+
+ context 'when text does not have attachments' do
+ let(:text) { 'Some text here' }
+
+ it { is_expected.to be_empty }
+ end
+ end
end
diff --git a/spec/lib/gitlab/github_import/representation/replay_event_spec.rb b/spec/lib/gitlab/github_import/representation/replay_event_spec.rb
new file mode 100644
index 00000000000..1afefb76c6a
--- /dev/null
+++ b/spec/lib/gitlab/github_import/representation/replay_event_spec.rb
@@ -0,0 +1,24 @@
+# frozen_string_literal: true
+
+require 'fast_spec_helper'
+
+RSpec.describe Gitlab::GithubImport::Representation::ReplayEvent, feature_category: :importers do
+ describe '.from_json_hash' do
+ it 'returns an instance of ReplayEvent' do
+ representation = described_class.from_json_hash(issuable_iid: 1, issuable_type: 'MergeRequest')
+
+ expect(representation).to be_an_instance_of(described_class)
+ end
+ end
+
+ describe '#github_identifiers' do
+ it 'returns a hash with needed identifiers' do
+ representation = described_class.new(issuable_type: 'MergeRequest', issuable_iid: 1)
+
+ expect(representation.github_identifiers).to eq({
+ issuable_type: 'MergeRequest',
+ issuable_iid: 1
+ })
+ end
+ end
+end
diff --git a/spec/lib/gitlab/github_import/settings_spec.rb b/spec/lib/gitlab/github_import/settings_spec.rb
index ea1526ca25f..d268f3a8650 100644
--- a/spec/lib/gitlab/github_import/settings_spec.rb
+++ b/spec/lib/gitlab/github_import/settings_spec.rb
@@ -21,12 +21,6 @@ RSpec.describe Gitlab::GithubImport::Settings, feature_category: :importers do
stages = described_class::OPTIONAL_STAGES
[
{
- name: 'single_endpoint_issue_events_import',
- label: stages[:single_endpoint_issue_events_import][:label],
- selected: false,
- details: stages[:single_endpoint_issue_events_import][:details]
- },
- {
name: 'single_endpoint_notes_import',
label: stages[:single_endpoint_notes_import][:label],
selected: false,
@@ -48,7 +42,31 @@ RSpec.describe Gitlab::GithubImport::Settings, feature_category: :importers do
end
it 'returns stages list as array' do
- expect(described_class.stages_array).to match_array(expected_list)
+ expect(described_class.stages_array(project.owner)).to match_array(expected_list)
+ end
+
+ context 'when `github_import_extended_events` feature flag is disabled' do
+ let(:expected_list_with_deprecated_options) do
+ stages = described_class::OPTIONAL_STAGES
+
+ expected_list.concat(
+ [
+ {
+ name: 'single_endpoint_issue_events_import',
+ label: stages[:single_endpoint_issue_events_import][:label],
+ selected: false,
+ details: stages[:single_endpoint_issue_events_import][:details]
+ }
+ ])
+ end
+
+ before do
+ stub_feature_flags(github_import_extended_events: false)
+ end
+
+ it 'returns stages list as array' do
+ expect(described_class.stages_array(project.owner)).to match_array(expected_list_with_deprecated_options)
+ end
end
end
@@ -99,4 +117,24 @@ RSpec.describe Gitlab::GithubImport::Settings, feature_category: :importers do
expect(settings.disabled?(:collaborators_import)).to eq true
end
end
+
+ describe '#extended_events?' do
+ it 'when extended_events is set to true' do
+ project.build_or_assign_import_data(data: { extended_events: true })
+
+ expect(settings.extended_events?).to eq(true)
+ end
+
+ it 'when extended_events is set to false' do
+ project.build_or_assign_import_data(data: { extended_events: false })
+
+ expect(settings.extended_events?).to eq(false)
+ end
+
+ it 'when extended_events is not present' do
+ project.build_or_assign_import_data(data: {})
+
+ expect(settings.extended_events?).to eq(false)
+ end
+ end
end
diff --git a/spec/lib/gitlab/github_import/user_finder_spec.rb b/spec/lib/gitlab/github_import/user_finder_spec.rb
index a394b4eba13..998fa8b2c9f 100644
--- a/spec/lib/gitlab/github_import/user_finder_spec.rb
+++ b/spec/lib/gitlab/github_import/user_finder_spec.rb
@@ -211,6 +211,7 @@ RSpec.describe Gitlab::GithubImport::UserFinder, :clean_gitlab_redis_cache, feat
let(:username) { 'kittens' }
let(:user) { {} }
let(:etag) { 'etag' }
+ let(:lease_name) { "gitlab:github_import:user_finder:#{project.id}" }
let(:cache_key) { described_class::EMAIL_FOR_USERNAME_CACHE_KEY % username }
let(:etag_cache_key) { described_class::USERNAME_ETAG_CACHE_KEY % username }
let(:email_fetched_for_project_key) do
@@ -305,6 +306,9 @@ RSpec.describe Gitlab::GithubImport::UserFinder, :clean_gitlab_redis_cache, feat
it 'makes an API call' do
expect(client).to receive(:user).with(username, { headers: {} }).and_return({ email: email }).once
+ expect(finder).to receive(:in_lock).with(
+ lease_name, ttl: 3.minutes, sleep_sec: 1.second, retries: 30
+ ).and_call_original
email_for_github_username
end
@@ -315,6 +319,14 @@ RSpec.describe Gitlab::GithubImport::UserFinder, :clean_gitlab_redis_cache, feat
end
it_behaves_like 'returns and caches the email'
+
+ context 'when retried' do
+ before do
+ allow(finder).to receive(:in_lock).and_yield(true)
+ end
+
+ it_behaves_like 'returns and caches the email'
+ end
end
context 'if the response does not contain an email' do
@@ -344,6 +356,9 @@ RSpec.describe Gitlab::GithubImport::UserFinder, :clean_gitlab_redis_cache, feat
it 'makes a non-rate-limited API call' do
expect(client).to receive(:user).with(username, { headers: { 'If-None-Match' => etag } }).once
+ expect(finder).to receive(:in_lock).with(
+ lease_name, ttl: 3.minutes, sleep_sec: 1.second, retries: 30
+ ).and_call_original
email_for_github_username
end
@@ -413,6 +428,9 @@ RSpec.describe Gitlab::GithubImport::UserFinder, :clean_gitlab_redis_cache, feat
it 'makes a non-rate-limited API call' do
expect(client).to receive(:user).with(username, { headers: { 'If-None-Match' => etag } }).once
+ expect(finder).to receive(:in_lock).with(
+ lease_name, ttl: 3.minutes, sleep_sec: 1.second, retries: 30
+ ).and_call_original
email_for_github_username
end
@@ -443,6 +461,30 @@ RSpec.describe Gitlab::GithubImport::UserFinder, :clean_gitlab_redis_cache, feat
it_behaves_like 'a user resource not found on GitHub'
end
+
+ context 'if the cached etag is nil' do
+ context 'when lock was executed by another process and an email was fetched' do
+ it 'does not fetch user detail' do
+ expect(finder).to receive(:read_email_from_cache).ordered.and_return('')
+ expect(finder).to receive(:read_email_from_cache).ordered.and_return(email)
+ expect(finder).to receive(:in_lock).and_yield(true)
+ expect(client).not_to receive(:user)
+
+ email_for_github_username
+ end
+ end
+
+ context 'when lock was executed by another process and an email in cache is still blank' do
+      it 'fetches user detail' do
+ expect(finder).to receive(:read_email_from_cache).ordered.and_return('')
+ expect(finder).to receive(:read_email_from_cache).ordered.and_return('')
+ expect(finder).to receive(:in_lock).and_yield(true)
+ expect(client).to receive(:user).with(username, { headers: {} }).and_return({ email: email }).once
+
+ email_for_github_username
+ end
+ end
+ end
end
context 'if the email has been checked for the project' do
diff --git a/spec/lib/gitlab/gon_helper_spec.rb b/spec/lib/gitlab/gon_helper_spec.rb
index e4684597ddf..d9dcae3cdc7 100644
--- a/spec/lib/gitlab/gon_helper_spec.rb
+++ b/spec/lib/gitlab/gon_helper_spec.rb
@@ -205,7 +205,6 @@ RSpec.describe Gitlab::GonHelper do
context 'when feature flag is false' do
before do
- stub_feature_flags(browsersdk_tracking: false)
stub_feature_flags(gl_analytics_tracking: false)
end
diff --git a/spec/lib/gitlab/highlight_spec.rb b/spec/lib/gitlab/highlight_spec.rb
index ef3765e479f..cd596555107 100644
--- a/spec/lib/gitlab/highlight_spec.rb
+++ b/spec/lib/gitlab/highlight_spec.rb
@@ -116,7 +116,7 @@ RSpec.describe Gitlab::Highlight do
it 'links dependencies via DependencyLinker' do
expect(Gitlab::DependencyLinker).to receive(:link)
- .with('file.name', 'Contents', anything).and_call_original
+ .with('file.name', 'Contents', anything, used_on: :blob).and_call_original
described_class.highlight('file.name', 'Contents')
end
@@ -133,5 +133,32 @@ RSpec.describe Gitlab::Highlight do
highlight
end
end
+
+ it 'increments usage counter', :prometheus do
+ described_class.highlight(file_name, content)
+
+ gitlab_highlight_usage_counter = Gitlab::Metrics.registry.get(:gitlab_highlight_usage)
+
+ expect(gitlab_highlight_usage_counter.get(used_on: :blob)).to eq(1)
+ expect(gitlab_highlight_usage_counter.get(used_on: :diff)).to eq(0)
+ end
+
+ context 'when used_on is specified' do
+ it 'increments usage counter', :prometheus do
+ described_class.highlight(file_name, content, used_on: :diff)
+
+ gitlab_highlight_usage_counter = Gitlab::Metrics.registry.get(:gitlab_highlight_usage)
+
+ expect(gitlab_highlight_usage_counter.get(used_on: :diff)).to eq(1)
+ expect(gitlab_highlight_usage_counter.get(used_on: :blob)).to eq(0)
+ end
+
+ it 'links dependencies via DependencyLinker' do
+ expect(Gitlab::DependencyLinker).to receive(:link)
+ .with(file_name, content, anything, used_on: :diff).and_call_original
+
+ described_class.highlight(file_name, content, used_on: :diff)
+ end
+ end
end
end
diff --git a/spec/lib/gitlab/http_spec.rb b/spec/lib/gitlab/http_spec.rb
index 3fc486a8984..c23f4ea8ffa 100644
--- a/spec/lib/gitlab/http_spec.rb
+++ b/spec/lib/gitlab/http_spec.rb
@@ -55,8 +55,19 @@ RSpec.describe Gitlab::HTTP, feature_category: :shared do
end
context 'when there is a DB call in the concurrent thread' do
- it 'raises Gitlab::Utils::ConcurrentRubyThreadIsUsedError error',
- quarantine: 'https://gitlab.com/gitlab-org/gitlab/-/issues/432145' do
+ before do
+ # Simulating Sentry is active and configured.
+ # More info: https://gitlab.com/gitlab-org/gitlab/-/issues/432145#note_1671305713
+ stub_sentry_settings
+ allow(Gitlab::ErrorTracking).to receive(:sentry_configurable?).and_return(true)
+ Gitlab::ErrorTracking.configure
+ end
+
+ after do
+ clear_sentry_settings
+ end
+
+ it 'raises Gitlab::Utils::ConcurrentRubyThreadIsUsedError error' do
stub_request(:get, 'http://example.org').to_return(status: 200, body: 'hello world')
result = described_class.get('http://example.org', async: true) do |_fragment|
@@ -104,40 +115,4 @@ RSpec.describe Gitlab::HTTP, feature_category: :shared do
end
end
end
-
- context 'when the FF use_gitlab_http_v2 is disabled' do
- before do
- stub_feature_flags(use_gitlab_http_v2: false)
- end
-
- describe '.get' do
- it 'calls Gitlab::LegacyHTTP.get with default options' do
- expect(Gitlab::LegacyHTTP).to receive(:get).with('/path', {})
-
- described_class.get('/path')
- end
- end
-
- describe '.try_get' do
- it 'calls .get' do
- expect(described_class).to receive(:get).with('/path', {})
-
- described_class.try_get('/path')
- end
-
- it 'returns nil when .get raises an error' do
- expect(described_class).to receive(:get).and_raise(SocketError)
-
- expect(described_class.try_get('/path')).to be_nil
- end
- end
-
- describe '.perform_request' do
- it 'calls Gitlab::LegacyHTTP.perform_request with default options' do
- expect(Gitlab::LegacyHTTP).to receive(:perform_request).with(Net::HTTP::Get, '/path', {})
-
- described_class.perform_request(Net::HTTP::Get, '/path', {})
- end
- end
- end
end
diff --git a/spec/lib/gitlab/github_import/page_counter_spec.rb b/spec/lib/gitlab/import/page_counter_spec.rb
index ddb62cc8fad..a7a4e301aa3 100644
--- a/spec/lib/gitlab/github_import/page_counter_spec.rb
+++ b/spec/lib/gitlab/import/page_counter_spec.rb
@@ -2,8 +2,8 @@
require 'spec_helper'
-RSpec.describe Gitlab::GithubImport::PageCounter, :clean_gitlab_redis_cache, feature_category: :importers do
- let(:project) { double(:project, id: 1) }
+RSpec.describe Gitlab::Import::PageCounter, :clean_gitlab_redis_cache, feature_category: :importers do
+ let(:project) { instance_double(Project, id: 1) }
let(:counter) { described_class.new(project, :issues) }
describe '#initialize' do
diff --git a/spec/lib/gitlab/import_export/all_models.yml b/spec/lib/gitlab/import_export/all_models.yml
index 688487df778..8da05ed7b7e 100644
--- a/spec/lib/gitlab/import_export/all_models.yml
+++ b/spec/lib/gitlab/import_export/all_models.yml
@@ -439,6 +439,7 @@ builds:
- dast_scanner_profile
- job_annotations
- job_artifacts_annotations
+- project_mirror
bridges:
- user
- pipeline
@@ -500,6 +501,7 @@ protected_branches:
- push_access_levels
- unprotect_access_levels
- approval_project_rules
+- approval_project_rules_with_unique_policies
- external_status_checks
- required_code_owners_sections
protected_tags:
@@ -588,6 +590,7 @@ project:
- hangouts_chat_integration
- unify_circuit_integration
- buildkite_integration
+- diffblue_cover_integration
- bamboo_integration
- teamcity_integration
- pushover_integration
@@ -848,6 +851,7 @@ timelogs:
- merge_request
- user
- note
+- timelog_category
push_event_payload:
- event
issue_assignees:
@@ -1081,3 +1085,8 @@ approval_project_rules_users:
approval_project_rules_protected_branches:
- protected_branch
- approval_project_rule
+timelog_category:
+ - name
+ - description
+ - billable
+ - billing_rate
diff --git a/spec/lib/gitlab/import_export/group/relation_tree_restorer_spec.rb b/spec/lib/gitlab/import_export/group/relation_tree_restorer_spec.rb
index 9852f6c9652..8ab99875a0a 100644
--- a/spec/lib/gitlab/import_export/group/relation_tree_restorer_spec.rb
+++ b/spec/lib/gitlab/import_export/group/relation_tree_restorer_spec.rb
@@ -52,10 +52,10 @@ RSpec.describe Gitlab::ImportExport::Group::RelationTreeRestorer, feature_catego
)
end
- subject { relation_tree_restorer.restore }
+ subject(:restore_relations) { relation_tree_restorer.restore }
it 'restores group tree' do
- expect(subject).to eq(true)
+ expect(restore_relations).to eq(true)
end
it 'logs top-level relation creation' do
@@ -64,7 +64,7 @@ RSpec.describe Gitlab::ImportExport::Group::RelationTreeRestorer, feature_catego
.with(hash_including(message: '[Project/Group Import] Created new object relation'))
.at_least(:once)
- subject
+ restore_relations
end
describe 'relation object saving' do
@@ -100,7 +100,7 @@ RSpec.describe Gitlab::ImportExport::Group::RelationTreeRestorer, feature_catego
error_messages: "Label can't be blank, Position can't be blank, and Position is not a number"
)
- subject
+ restore_relations
board = importable.boards.last
failure = importable.import_failures.first
@@ -115,6 +115,33 @@ RSpec.describe Gitlab::ImportExport::Group::RelationTreeRestorer, feature_catego
end
end
+ context 'when invalid relation object has a loggable external identifier' do
+ before do
+ allow(relation_reader)
+ .to receive(:consume_relation)
+ .with(importable_name, 'milestones')
+ .and_return([
+ [invalid_milestone, 0],
+ [invalid_milestone_with_no_iid, 1]
+ ])
+ end
+
+ let(:invalid_milestone) { build(:milestone, iid: 123, name: nil) }
+ let(:invalid_milestone_with_no_iid) { build(:milestone, iid: nil, name: nil) }
+
+ it 'logs invalid record with external identifier' do
+ restore_relations
+
+ iids_for_failures = importable.import_failures.collect { |f| [f.relation_key, f.external_identifiers] }
+ expected_iids = [
+ ["milestones", { "iid" => invalid_milestone.iid }],
+ ["milestones", {}]
+ ]
+
+ expect(iids_for_failures).to match_array(expected_iids)
+ end
+ end
+
context 'when relation object is persisted' do
before do
allow(relation_reader)
@@ -129,7 +156,7 @@ RSpec.describe Gitlab::ImportExport::Group::RelationTreeRestorer, feature_catego
it 'saves import failure with nested errors' do
label.priorities << [LabelPriority.new, LabelPriority.new]
- subject
+ restore_relations
failure = importable.import_failures.first
diff --git a/spec/lib/gitlab/import_export/import_failure_service_spec.rb b/spec/lib/gitlab/import_export/import_failure_service_spec.rb
index 30d16347828..9628e9fbf4d 100644
--- a/spec/lib/gitlab/import_export/import_failure_service_spec.rb
+++ b/spec/lib/gitlab/import_export/import_failure_service_spec.rb
@@ -21,7 +21,8 @@ RSpec.describe Gitlab::ImportExport::ImportFailureService, feature_category: :im
relation_key: relation_key,
relation_index: relation_index,
exception: exception,
- retry_count: retry_count)
+ retry_count: retry_count,
+ external_identifiers: { iid: 1234 })
end
before do
diff --git a/spec/lib/gitlab/import_export/project/tree_restorer_spec.rb b/spec/lib/gitlab/import_export/project/tree_restorer_spec.rb
index 14af3028a6e..d565f3f3150 100644
--- a/spec/lib/gitlab/import_export/project/tree_restorer_spec.rb
+++ b/spec/lib/gitlab/import_export/project/tree_restorer_spec.rb
@@ -1114,9 +1114,10 @@ RSpec.describe Gitlab::ImportExport::Project::TreeRestorer, feature_category: :i
let(:user) { create(:user) }
let!(:project) { create(:project, :builds_disabled, :issues_disabled, name: 'project', path: 'project') }
let(:project_tree_restorer) { described_class.new(user: user, shared: shared, project: project) }
+ let(:project_fixture) { 'with_invalid_records' }
before do
- setup_import_export_config('with_invalid_records')
+ setup_import_export_config(project_fixture)
setup_reader
subject
@@ -1142,6 +1143,21 @@ RSpec.describe Gitlab::ImportExport::Project::TreeRestorer, feature_category: :i
expect(import_failure.correlation_id_value).not_to be_empty
expect(import_failure.created_at).to be_present
end
+
+ context 'when there are a mix of invalid milestones and issues with IIDs' do
+ let(:project_fixture) { 'with_invalid_issues_and_milestones' }
+
+ it 'tracks the relation IID if present' do
+ iids_for_failures = project.import_failures.collect { |f| [f.relation_key, f.external_identifiers] }
+ expected_iids = [
+ ["milestones", { "iid" => 1 }],
+ ["issues", { "iid" => 9 }],
+ ["issues", {}]
+ ]
+
+ expect(iids_for_failures).to match_array(expected_iids)
+ end
+ end
end
end
diff --git a/spec/lib/gitlab/import_export/safe_model_attributes.yml b/spec/lib/gitlab/import_export/safe_model_attributes.yml
index 3efa33d8879..73b945d4274 100644
--- a/spec/lib/gitlab/import_export/safe_model_attributes.yml
+++ b/spec/lib/gitlab/import_export/safe_model_attributes.yml
@@ -765,6 +765,7 @@ Timelog:
- created_at
- updated_at
- summary
+- timelog_category_id
ProjectAutoDevops:
- id
- enabled
@@ -1087,3 +1088,9 @@ ApprovalProjectRulesProtectedBranch:
- branch_name
WorkItems::Type:
- base_type
+TimeTracking::TimelogCategories:
+ - id
+ - name
+ - description
+ - billable
+ - billing_rate
diff --git a/spec/lib/gitlab/instrumentation/redis_client_middleware_spec.rb b/spec/lib/gitlab/instrumentation/redis_client_middleware_spec.rb
new file mode 100644
index 00000000000..eca75d93c80
--- /dev/null
+++ b/spec/lib/gitlab/instrumentation/redis_client_middleware_spec.rb
@@ -0,0 +1,224 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+require 'rspec-parameterized'
+require 'support/helpers/rails_helpers'
+
+RSpec.describe Gitlab::Instrumentation::RedisClientMiddleware, :request_store, feature_category: :scalability do
+ using RSpec::Parameterized::TableSyntax
+ include RedisHelpers
+
+ let_it_be(:redis_store_class) { define_helper_redis_store_class }
+ let_it_be(:redis_client) { RedisClient.new(redis_store_class.redis_client_params) }
+
+ before do
+ redis_client.call("flushdb")
+ end
+
+ describe 'read and write' do
+ where(:setup, :command, :expect_write, :expect_read) do
+ # The response is 'OK', the request size is the combined size of array
+ # elements. Exercise counting of a status reply.
+ [] | [:set, 'foo', 'bar'] | (3 + 3 + 3) | 2
+
+ # The response is 1001, so 4 bytes. Exercise counting an integer reply.
+ [[:set, 'foobar', 1000]] | [:incr, 'foobar'] | (4 + 6) | 4
+
+ # Exercise counting empty multi bulk reply. Returns an empty hash `{}`
+ [] | [:hgetall, 'foobar'] | (7 + 6) | 2
+
+ # Hgetall response length is combined length of keys and values in the
+ # hash. Exercises counting of a multi bulk reply
+ # Returns `{"field"=>"hello world"}`, 5 for field, 11 for hello world, 8 for {, }, 4 "s, =, >
+ [[:hset, 'myhash', 'field', 'hello world']] | [:hgetall, 'myhash'] | (7 + 6) | (5 + 11 + 8)
+
+ # Exercise counting of a bulk reply
+ [[:set, 'foo', 'bar' * 100]] | [:get, 'foo'] | (3 + 3) | (3 * 100)
+
+ # Nested array response: [['foo', 0.0], ['bar', 1.0]]. Returns scores as float.
+ [[:zadd, 'myset', 0, 'foo'],
+ [:zadd, 'myset', 1, 'bar']] | [:zrange, 'myset', 0, -1, 'withscores'] | (6 + 5 + 1 + 2 + 10) | (3 + 3 + 3 + 3)
+ end
+
+ with_them do
+ it 'counts bytes read and written' do
+ setup.each { |cmd| redis_client.call(*cmd) }
+ RequestStore.clear!
+ redis_client.call(*command)
+
+ expect(Gitlab::Instrumentation::Redis.read_bytes).to eq(expect_read)
+ expect(Gitlab::Instrumentation::Redis.write_bytes).to eq(expect_write)
+ end
+ end
+ end
+
+ describe 'counting' do
+ let(:instrumentation_class) { redis_store_class.instrumentation_class }
+
+ it 'counts successful requests' do
+ expect(instrumentation_class).to receive(:instance_count_request).with(1).and_call_original
+
+ redis_client.call(:get, 'foobar')
+ end
+
+ it 'counts successful pipelined requests' do
+ expect(instrumentation_class).to receive(:instance_count_request).with(2).and_call_original
+ expect(instrumentation_class).to receive(:instance_count_pipelined_request).with(2).and_call_original
+
+ redis_client.pipelined do |pipeline|
+ pipeline.call(:get, '{foobar}buz')
+ pipeline.call(:get, '{foobar}baz')
+ end
+ end
+
+ context 'when encountering exceptions' do
+ before do
+ allow(redis_client.instance_variable_get(:@raw_connection)).to receive(:call).and_raise(
+ RedisClient::ConnectionError, 'Connection was closed or lost')
+ end
+
+ it 'counts exception' do
+ expect(instrumentation_class).to receive(:instance_count_exception)
+ .with(instance_of(RedisClient::ConnectionError)).and_call_original
+ expect(instrumentation_class).to receive(:log_exception)
+ .with(instance_of(RedisClient::ConnectionError)).and_call_original
+ expect(instrumentation_class).to receive(:instance_count_request).and_call_original
+
+ expect do
+ redis_client.call(:auth, 'foo', 'bar')
+ end.to raise_error(RedisClient::Error)
+ end
+ end
+
+ context 'in production environment' do
+ before do
+ stub_rails_env('production') # to avoid raising CrossSlotError
+ end
+
+ it 'counts disallowed cross-slot requests' do
+ expect(instrumentation_class).to receive(:increment_cross_slot_request_count).and_call_original
+ expect(instrumentation_class).not_to receive(:increment_allowed_cross_slot_request_count).and_call_original
+
+ redis_client.call(:mget, 'foo', 'bar')
+ end
+
+ it 'does not count allowed cross-slot requests' do
+ expect(instrumentation_class).not_to receive(:increment_cross_slot_request_count).and_call_original
+ expect(instrumentation_class).to receive(:increment_allowed_cross_slot_request_count).and_call_original
+
+ Gitlab::Instrumentation::RedisClusterValidator.allow_cross_slot_commands do
+ redis_client.call(:mget, 'foo', 'bar')
+ end
+ end
+
+ it 'does not count allowed non-cross-slot requests' do
+ expect(instrumentation_class).not_to receive(:increment_cross_slot_request_count).and_call_original
+ expect(instrumentation_class).not_to receive(:increment_allowed_cross_slot_request_count).and_call_original
+
+ Gitlab::Instrumentation::RedisClusterValidator.allow_cross_slot_commands do
+ redis_client.call(:mget, 'bar')
+ end
+ end
+
+ it 'skips count for non-cross-slot requests' do
+ expect(instrumentation_class).not_to receive(:increment_cross_slot_request_count).and_call_original
+ expect(instrumentation_class).not_to receive(:increment_allowed_cross_slot_request_count).and_call_original
+
+ redis_client.call(:mget, '{foo}bar', '{foo}baz')
+ end
+ end
+
+ context 'without active RequestStore' do
+ before do
+ ::RequestStore.end!
+ end
+
+ it 'still runs cross-slot validation' do
+ expect do
+ redis_client.call('mget', 'foo', 'bar')
+ end.to raise_error(instance_of(Gitlab::Instrumentation::RedisClusterValidator::CrossSlotError))
+ end
+ end
+ end
+
+ describe 'latency' do
+ let(:instrumentation_class) { redis_store_class.instrumentation_class }
+
+ describe 'commands in the apdex' do
+ where(:command) do
+ [
+ [[:get, 'foobar']],
+ [%w[GET foobar]]
+ ]
+ end
+
+ with_them do
+ it 'measures requests we want in the apdex' do
+ expect(instrumentation_class).to receive(:instance_observe_duration).with(a_value > 0)
+ .and_call_original
+
+ redis_client.call(*command)
+ end
+ end
+
+ context 'with pipelined commands' do
+ it 'measures requests that do not have blocking commands' do
+ expect(instrumentation_class).to receive(:instance_observe_duration).twice.with(a_value > 0)
+ .and_call_original
+
+ redis_client.pipelined do |pipeline|
+ pipeline.call(:get, '{foobar}buz')
+ pipeline.call(:get, '{foobar}baz')
+ end
+ end
+
+ it 'raises error when keys are not from the same slot' do
+ expect do
+ redis_client.pipelined do |pipeline|
+ pipeline.call(:get, 'foo')
+ pipeline.call(:get, 'bar')
+ end
+ end.to raise_error(instance_of(Gitlab::Instrumentation::RedisClusterValidator::CrossSlotError))
+ end
+ end
+ end
+
+ describe 'commands not in the apdex' do
+ where(:setup, :command) do
+ [['rpush', 'foobar', 1]] | ['brpop', 'foobar', 0]
+ [['rpush', 'foobar', 1]] | ['blpop', 'foobar', 0]
+ [['rpush', '{abc}foobar', 1]] | ['brpoplpush', '{abc}foobar', '{abc}bazqux', 0]
+ [['rpush', '{abc}foobar', 1]] | ['brpoplpush', '{abc}foobar', '{abc}bazqux', 0]
+ [['zadd', 'foobar', 1, 'a']] | ['bzpopmin', 'foobar', 0]
+ [['zadd', 'foobar', 1, 'a']] | ['bzpopmax', 'foobar', 0]
+ [['xadd', 'mystream', 1, 'myfield', 'mydata']] | ['xread', 'block', 1, 'streams', 'mystream', '0-0']
+ [['xadd', 'foobar', 1, 'myfield', 'mydata'],
+ ['xgroup', 'create', 'foobar', 'mygroup',
+ 0]] | ['xreadgroup', 'group', 'mygroup', 'myconsumer', 'block', 1, 'streams', 'foobar', '0-0']
+ [] | ['command']
+ end
+
+ with_them do
+ it 'skips requests we do not want in the apdex' do
+ setup.each { |cmd| redis_client.call(*cmd) }
+
+ expect(instrumentation_class).not_to receive(:instance_observe_duration)
+
+ redis_client.call(*command)
+ end
+ end
+
+ context 'with pipelined commands' do
+ it 'skips requests that have blocking commands' do
+ expect(instrumentation_class).not_to receive(:instance_observe_duration)
+
+ redis_client.pipelined do |pipeline|
+ pipeline.call(:get, '{foobar}buz')
+ pipeline.call(:rpush, '{foobar}baz', 1)
+ pipeline.call(:brpop, '{foobar}baz', 0)
+ end
+ end
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/legacy_github_import/comment_formatter_spec.rb b/spec/lib/gitlab/legacy_github_import/comment_formatter_spec.rb
index 8d6415b8179..8b6d628833e 100644
--- a/spec/lib/gitlab/legacy_github_import/comment_formatter_spec.rb
+++ b/spec/lib/gitlab/legacy_github_import/comment_formatter_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Gitlab::LegacyGithubImport::CommentFormatter do
+RSpec.describe Gitlab::LegacyGithubImport::CommentFormatter, feature_category: :importers do
let_it_be(:project) { create(:project) }
let(:client) { double }
let(:octocat) { { id: 123456, login: 'octocat', email: 'octocat@example.com' } }
@@ -76,12 +76,6 @@ RSpec.describe Gitlab::LegacyGithubImport::CommentFormatter do
context 'when author is a GitLab user' do
let(:raw) { base.merge(user: octocat) }
- it 'returns GitLab user id associated with GitHub id as author_id' do
- gl_user = create(:omniauth_user, extern_uid: octocat[:id], provider: 'github')
-
- expect(comment.attributes.fetch(:author_id)).to eq gl_user.id
- end
-
it 'returns GitLab user id associated with GitHub email as author_id' do
gl_user = create(:user, email: octocat[:email])
@@ -89,7 +83,7 @@ RSpec.describe Gitlab::LegacyGithubImport::CommentFormatter do
end
it 'returns note without created at tag line' do
- create(:omniauth_user, extern_uid: octocat[:id], provider: 'github')
+ create(:user, email: octocat[:email])
expect(comment.attributes.fetch(:note)).to eq("I'm having a problem with this.")
end
diff --git a/spec/lib/gitlab/legacy_github_import/issue_formatter_spec.rb b/spec/lib/gitlab/legacy_github_import/issue_formatter_spec.rb
index d3548fecbcd..9baf234b14b 100644
--- a/spec/lib/gitlab/legacy_github_import/issue_formatter_spec.rb
+++ b/spec/lib/gitlab/legacy_github_import/issue_formatter_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Gitlab::LegacyGithubImport::IssueFormatter do
+RSpec.describe Gitlab::LegacyGithubImport::IssueFormatter, feature_category: :importers do
let_it_be(:project) { create(:project, namespace: create(:namespace, path: 'octocat')) }
let(:client) { double }
let(:octocat) { { id: 123456, login: 'octocat', email: 'octocat@example.com' } }
@@ -82,12 +82,6 @@ RSpec.describe Gitlab::LegacyGithubImport::IssueFormatter do
expect(issue.attributes.fetch(:assignee_ids)).to be_empty
end
- it 'returns GitLab user id associated with GitHub id as assignee_id' do
- gl_user = create(:omniauth_user, extern_uid: octocat[:id], provider: 'github')
-
- expect(issue.attributes.fetch(:assignee_ids)).to eq [gl_user.id]
- end
-
it 'returns GitLab user id associated with GitHub email as assignee_id' do
gl_user = create(:user, email: octocat[:email])
@@ -117,12 +111,6 @@ RSpec.describe Gitlab::LegacyGithubImport::IssueFormatter do
expect(issue.attributes.fetch(:author_id)).to eq project.creator_id
end
- it 'returns GitLab user id associated with GitHub id as author_id' do
- gl_user = create(:omniauth_user, extern_uid: octocat[:id], provider: 'github')
-
- expect(issue.attributes.fetch(:author_id)).to eq gl_user.id
- end
-
it 'returns GitLab user id associated with GitHub email as author_id' do
gl_user = create(:user, email: octocat[:email])
@@ -130,7 +118,7 @@ RSpec.describe Gitlab::LegacyGithubImport::IssueFormatter do
end
it 'returns description without created at tag line' do
- create(:omniauth_user, extern_uid: octocat[:id], provider: 'github')
+ create(:user, email: octocat[:email])
expect(issue.attributes.fetch(:description)).to eq("I'm having a problem with this.")
end
diff --git a/spec/lib/gitlab/legacy_github_import/pull_request_formatter_spec.rb b/spec/lib/gitlab/legacy_github_import/pull_request_formatter_spec.rb
index 90469693820..1555e3e0d4c 100644
--- a/spec/lib/gitlab/legacy_github_import/pull_request_formatter_spec.rb
+++ b/spec/lib/gitlab/legacy_github_import/pull_request_formatter_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Gitlab::LegacyGithubImport::PullRequestFormatter do
+RSpec.describe Gitlab::LegacyGithubImport::PullRequestFormatter, feature_category: :importers do
let_it_be(:project) { create(:project, :repository) }
let(:client) { double }
let(:source_sha) { create(:commit, project: project).id }
@@ -136,12 +136,6 @@ RSpec.describe Gitlab::LegacyGithubImport::PullRequestFormatter do
expect(pull_request.attributes.fetch(:assignee_id)).to be_nil
end
- it 'returns GitLab user id associated with GitHub id as assignee_id' do
- gl_user = create(:omniauth_user, extern_uid: octocat[:id], provider: 'github')
-
- expect(pull_request.attributes.fetch(:assignee_id)).to eq gl_user.id
- end
-
it 'returns GitLab user id associated with GitHub email as assignee_id' do
gl_user = create(:user, email: octocat[:email])
@@ -156,12 +150,6 @@ RSpec.describe Gitlab::LegacyGithubImport::PullRequestFormatter do
expect(pull_request.attributes.fetch(:author_id)).to eq project.creator_id
end
- it 'returns GitLab user id associated with GitHub id as author_id' do
- gl_user = create(:omniauth_user, extern_uid: octocat[:id], provider: 'github')
-
- expect(pull_request.attributes.fetch(:author_id)).to eq gl_user.id
- end
-
it 'returns GitLab user id associated with GitHub email as author_id' do
gl_user = create(:user, email: octocat[:email])
@@ -169,7 +157,7 @@ RSpec.describe Gitlab::LegacyGithubImport::PullRequestFormatter do
end
it 'returns description without created at tag line' do
- create(:omniauth_user, extern_uid: octocat[:id], provider: 'github')
+ create(:user, email: octocat[:email])
expect(pull_request.attributes.fetch(:description)).to eq('Please pull these awesome changes')
end
diff --git a/spec/lib/gitlab/legacy_github_import/user_formatter_spec.rb b/spec/lib/gitlab/legacy_github_import/user_formatter_spec.rb
index 0844ab7eccc..d387d79aa30 100644
--- a/spec/lib/gitlab/legacy_github_import/user_formatter_spec.rb
+++ b/spec/lib/gitlab/legacy_github_import/user_formatter_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Gitlab::LegacyGithubImport::UserFormatter do
+RSpec.describe Gitlab::LegacyGithubImport::UserFormatter, feature_category: :importers do
let(:client) { double }
let(:octocat) { { id: 123456, login: 'octocat', email: 'octocat@example.com' } }
let(:gitea_ghost) { { id: -1, login: 'Ghost', email: '' } }
@@ -15,12 +15,6 @@ RSpec.describe Gitlab::LegacyGithubImport::UserFormatter do
end
context 'when GitHub user is a GitLab user' do
- it 'return GitLab user id when user associated their account with GitHub' do
- gl_user = create(:omniauth_user, extern_uid: octocat[:id], provider: 'github')
-
- expect(user.gitlab_id).to eq gl_user.id
- end
-
it 'returns GitLab user id when user confirmed primary email matches GitHub email' do
gl_user = create(:user, email: octocat[:email])
diff --git a/spec/lib/gitlab/legacy_http_spec.rb b/spec/lib/gitlab/legacy_http_spec.rb
deleted file mode 100644
index 07a30b194b6..00000000000
--- a/spec/lib/gitlab/legacy_http_spec.rb
+++ /dev/null
@@ -1,448 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Gitlab::LegacyHTTP, feature_category: :shared do
- include StubRequests
-
- let(:default_options) { Gitlab::HTTP::DEFAULT_TIMEOUT_OPTIONS }
-
- context 'when allow_local_requests' do
- it 'sends the request to the correct URI' do
- stub_full_request('https://example.org:8080', ip_address: '8.8.8.8').to_return(status: 200)
-
- described_class.get('https://example.org:8080', allow_local_requests: false)
-
- expect(WebMock).to have_requested(:get, 'https://8.8.8.8:8080').once
- end
- end
-
- context 'when not allow_local_requests' do
- it 'sends the request to the correct URI' do
- stub_full_request('https://example.org:8080')
-
- described_class.get('https://example.org:8080', allow_local_requests: true)
-
- expect(WebMock).to have_requested(:get, 'https://8.8.8.9:8080').once
- end
- end
-
- context 'when reading the response is too slow' do
- before_all do
- # Override Net::HTTP to add a delay between sending each response chunk
- mocked_http = Class.new(Net::HTTP) do
- def request(*)
- super do |response|
- response.instance_eval do
- def read_body(*)
- mock_stream = @body.split(' ')
- mock_stream.each do |fragment|
- sleep 0.002.seconds
-
- yield fragment if block_given?
- end
-
- @body
- end
- end
-
- yield response if block_given?
-
- response
- end
- end
- end
-
- @original_net_http = Net.send(:remove_const, :HTTP)
- @webmock_net_http = WebMock::HttpLibAdapters::NetHttpAdapter.instance_variable_get(:@webMockNetHTTP)
-
- Net.send(:const_set, :HTTP, mocked_http)
- WebMock::HttpLibAdapters::NetHttpAdapter.instance_variable_set(:@webMockNetHTTP, mocked_http)
-
- # Reload Gitlab::NetHttpAdapter
- Gitlab.send(:remove_const, :NetHttpAdapter)
- load "#{Rails.root}/lib/gitlab/net_http_adapter.rb"
- end
-
- before do
- stub_const("Gitlab::HTTP::DEFAULT_READ_TOTAL_TIMEOUT", 0.001.seconds)
-
- WebMock.stub_request(:post, /.*/).to_return do
- { body: "chunk-1 chunk-2", status: 200 }
- end
- end
-
- after(:all) do
- Net.send(:remove_const, :HTTP)
- Net.send(:const_set, :HTTP, @original_net_http)
- WebMock::HttpLibAdapters::NetHttpAdapter.instance_variable_set(:@webMockNetHTTP, @webmock_net_http)
-
- # Reload Gitlab::NetHttpAdapter
- Gitlab.send(:remove_const, :NetHttpAdapter)
- load "#{Rails.root}/lib/gitlab/net_http_adapter.rb"
- end
-
- let(:options) { {} }
-
- subject(:request_slow_responder) { described_class.post('http://example.org', **options) }
-
- it 'raises an error' do
- expect { request_slow_responder }.to raise_error(
- Gitlab::HTTP::ReadTotalTimeout, /Request timed out after ?([0-9]*[.])?[0-9]+ seconds/)
- end
-
- context 'and timeout option is greater than DEFAULT_READ_TOTAL_TIMEOUT' do
- let(:options) { { timeout: 10.seconds } }
-
- it 'does not raise an error' do
- expect { request_slow_responder }.not_to raise_error
- end
- end
-
- context 'and stream_body option is truthy' do
- let(:options) { { stream_body: true } }
-
- it 'does not raise an error' do
- expect { request_slow_responder }.not_to raise_error
- end
- end
- end
-
- it 'calls a block' do
- WebMock.stub_request(:post, /.*/)
-
- expect { |b| described_class.post('http://example.org', &b) }.to yield_with_args
- end
-
- describe 'allow_local_requests_from_web_hooks_and_services is' do
- before do
- WebMock.stub_request(:get, /.*/).to_return(status: 200, body: 'Success')
- end
-
- context 'disabled' do
- before do
- allow(Gitlab::CurrentSettings).to receive(:allow_local_requests_from_web_hooks_and_services?).and_return(false)
- end
-
- it 'deny requests to localhost' do
- expect { described_class.get('http://localhost:3003') }.to raise_error(Gitlab::HTTP::BlockedUrlError)
- end
-
- it 'deny requests to private network' do
- expect { described_class.get('http://192.168.1.2:3003') }.to raise_error(Gitlab::HTTP::BlockedUrlError)
- end
-
- context 'if allow_local_requests set to true' do
- it 'override the global value and allow requests to localhost or private network' do
- stub_full_request('http://localhost:3003')
-
- expect { described_class.get('http://localhost:3003', allow_local_requests: true) }.not_to raise_error
- end
- end
- end
-
- context 'enabled' do
- before do
- allow(Gitlab::CurrentSettings).to receive(:allow_local_requests_from_web_hooks_and_services?).and_return(true)
- end
-
- it 'allow requests to localhost' do
- stub_full_request('http://localhost:3003')
-
- expect { described_class.get('http://localhost:3003') }.not_to raise_error
- end
-
- it 'allow requests to private network' do
- expect { described_class.get('http://192.168.1.2:3003') }.not_to raise_error
- end
-
- context 'if allow_local_requests set to false' do
- it 'override the global value and ban requests to localhost or private network' do
- expect { described_class.get('http://localhost:3003', allow_local_requests: false) }.to raise_error(
- Gitlab::HTTP::BlockedUrlError)
- end
- end
- end
- end
-
- describe 'handle redirect loops' do
- before do
- stub_full_request("http://example.org", method: :any).to_raise(
- HTTParty::RedirectionTooDeep.new("Redirection Too Deep"))
- end
-
- it 'handles GET requests' do
- expect { described_class.get('http://example.org') }.to raise_error(Gitlab::HTTP::RedirectionTooDeep)
- end
-
- it 'handles POST requests' do
- expect { described_class.post('http://example.org') }.to raise_error(Gitlab::HTTP::RedirectionTooDeep)
- end
-
- it 'handles PUT requests' do
- expect { described_class.put('http://example.org') }.to raise_error(Gitlab::HTTP::RedirectionTooDeep)
- end
-
- it 'handles DELETE requests' do
- expect { described_class.delete('http://example.org') }.to raise_error(Gitlab::HTTP::RedirectionTooDeep)
- end
-
- it 'handles HEAD requests' do
- expect { described_class.head('http://example.org') }.to raise_error(Gitlab::HTTP::RedirectionTooDeep)
- end
- end
-
- describe 'setting default timeouts' do
- before do
- stub_full_request('http://example.org', method: :any)
- end
-
- context 'when no timeouts are set' do
- it 'sets default open and read and write timeouts' do
- expect(described_class).to receive(:httparty_perform_request).with(
- Net::HTTP::Get, 'http://example.org', default_options
- ).and_call_original
-
- described_class.get('http://example.org')
- end
- end
-
- context 'when :timeout is set' do
- it 'does not set any default timeouts' do
- expect(described_class).to receive(:httparty_perform_request).with(
- Net::HTTP::Get, 'http://example.org', { timeout: 1 }
- ).and_call_original
-
- described_class.get('http://example.org', { timeout: 1 })
- end
- end
-
- context 'when :open_timeout is set' do
- it 'only sets default read and write timeout' do
- expect(described_class).to receive(:httparty_perform_request).with(
- Net::HTTP::Get, 'http://example.org', default_options.merge(open_timeout: 1)
- ).and_call_original
-
- described_class.get('http://example.org', open_timeout: 1)
- end
- end
-
- context 'when :read_timeout is set' do
- it 'only sets default open and write timeout' do
- expect(described_class).to receive(:httparty_perform_request).with(
- Net::HTTP::Get, 'http://example.org', default_options.merge(read_timeout: 1)
- ).and_call_original
-
- described_class.get('http://example.org', read_timeout: 1)
- end
- end
-
- context 'when :write_timeout is set' do
- it 'only sets default open and read timeout' do
- expect(described_class).to receive(:httparty_perform_request).with(
- Net::HTTP::Put, 'http://example.org', default_options.merge(write_timeout: 1)
- ).and_call_original
-
- described_class.put('http://example.org', write_timeout: 1)
- end
- end
- end
-
- describe '.try_get' do
- let(:path) { 'http://example.org' }
-
- let(:extra_log_info_proc) do
- proc do |error, url, options|
- { klass: error.class, url: url, options: options }
- end
- end
-
- let(:request_options) do
- default_options.merge({
- verify: false,
- basic_auth: { username: 'user', password: 'pass' }
- })
- end
-
- Gitlab::HTTP::HTTP_ERRORS.each do |exception_class|
- context "with #{exception_class}" do
- let(:klass) { exception_class }
-
- context 'with path' do
- before do
- expect(described_class).to receive(:httparty_perform_request)
- .with(Net::HTTP::Get, path, default_options)
- .and_raise(klass)
- end
-
- it 'handles requests without extra_log_info' do
- expect(Gitlab::ErrorTracking)
- .to receive(:log_exception)
- .with(instance_of(klass), {})
-
- expect(described_class.try_get(path)).to be_nil
- end
-
- it 'handles requests with extra_log_info as hash' do
- expect(Gitlab::ErrorTracking)
- .to receive(:log_exception)
- .with(instance_of(klass), { a: :b })
-
- expect(described_class.try_get(path, extra_log_info: { a: :b })).to be_nil
- end
-
- it 'handles requests with extra_log_info as proc' do
- expect(Gitlab::ErrorTracking)
- .to receive(:log_exception)
- .with(instance_of(klass), { url: path, klass: klass, options: {} })
-
- expect(described_class.try_get(path, extra_log_info: extra_log_info_proc)).to be_nil
- end
- end
-
- context 'with path and options' do
- before do
- expect(described_class).to receive(:httparty_perform_request)
- .with(Net::HTTP::Get, path, request_options)
- .and_raise(klass)
- end
-
- it 'handles requests without extra_log_info' do
- expect(Gitlab::ErrorTracking)
- .to receive(:log_exception)
- .with(instance_of(klass), {})
-
- expect(described_class.try_get(path, request_options)).to be_nil
- end
-
- it 'handles requests with extra_log_info as hash' do
- expect(Gitlab::ErrorTracking)
- .to receive(:log_exception)
- .with(instance_of(klass), { a: :b })
-
- expect(described_class.try_get(path, **request_options, extra_log_info: { a: :b })).to be_nil
- end
-
- it 'handles requests with extra_log_info as proc' do
- expect(Gitlab::ErrorTracking)
- .to receive(:log_exception)
- .with(instance_of(klass), { klass: klass, url: path, options: request_options })
-
- expect(described_class.try_get(path, **request_options, extra_log_info: extra_log_info_proc)).to be_nil
- end
- end
-
- context 'with path, options, and block' do
- let(:block) do
- proc {}
- end
-
- before do
- expect(described_class).to receive(:httparty_perform_request)
- .with(Net::HTTP::Get, path, request_options, &block)
- .and_raise(klass)
- end
-
- it 'handles requests without extra_log_info' do
- expect(Gitlab::ErrorTracking)
- .to receive(:log_exception)
- .with(instance_of(klass), {})
-
- expect(described_class.try_get(path, request_options, &block)).to be_nil
- end
-
- it 'handles requests with extra_log_info as hash' do
- expect(Gitlab::ErrorTracking)
- .to receive(:log_exception)
- .with(instance_of(klass), { a: :b })
-
- expect(described_class.try_get(path, **request_options, extra_log_info: { a: :b }, &block)).to be_nil
- end
-
- it 'handles requests with extra_log_info as proc' do
- expect(Gitlab::ErrorTracking)
- .to receive(:log_exception)
- .with(instance_of(klass), { klass: klass, url: path, options: request_options })
-
- expect(
- described_class.try_get(path, **request_options, extra_log_info: extra_log_info_proc, &block)
- ).to be_nil
- end
- end
- end
- end
- end
-
- describe 'silent mode', feature_category: :geo_replication do
- before do
- stub_full_request("http://example.org", method: :any)
- stub_application_setting(silent_mode_enabled: silent_mode)
- end
-
- context 'when silent mode is enabled' do
- let(:silent_mode) { true }
-
- it 'allows GET requests' do
- expect { described_class.get('http://example.org') }.not_to raise_error
- end
-
- it 'allows HEAD requests' do
- expect { described_class.head('http://example.org') }.not_to raise_error
- end
-
- it 'allows OPTIONS requests' do
- expect { described_class.options('http://example.org') }.not_to raise_error
- end
-
- it 'blocks POST requests' do
- expect { described_class.post('http://example.org') }.to raise_error(Gitlab::HTTP::SilentModeBlockedError)
- end
-
- it 'blocks PUT requests' do
- expect { described_class.put('http://example.org') }.to raise_error(Gitlab::HTTP::SilentModeBlockedError)
- end
-
- it 'blocks DELETE requests' do
- expect { described_class.delete('http://example.org') }.to raise_error(Gitlab::HTTP::SilentModeBlockedError)
- end
-
- it 'logs blocked requests' do
- expect(::Gitlab::AppJsonLogger).to receive(:info).with(
- message: "Outbound HTTP request blocked",
- outbound_http_request_method: 'Net::HTTP::Post',
- silent_mode_enabled: true
- )
-
- expect { described_class.post('http://example.org') }.to raise_error(Gitlab::HTTP::SilentModeBlockedError)
- end
- end
-
- context 'when silent mode is disabled' do
- let(:silent_mode) { false }
-
- it 'allows GET requests' do
- expect { described_class.get('http://example.org') }.not_to raise_error
- end
-
- it 'allows HEAD requests' do
- expect { described_class.head('http://example.org') }.not_to raise_error
- end
-
- it 'allows OPTIONS requests' do
- expect { described_class.options('http://example.org') }.not_to raise_error
- end
-
- it 'blocks POST requests' do
- expect { described_class.post('http://example.org') }.not_to raise_error
- end
-
- it 'blocks PUT requests' do
- expect { described_class.put('http://example.org') }.not_to raise_error
- end
-
- it 'blocks DELETE requests' do
- expect { described_class.delete('http://example.org') }.not_to raise_error
- end
- end
- end
-end
diff --git a/spec/lib/gitlab/memory/watchdog/handlers/sidekiq_handler_spec.rb b/spec/lib/gitlab/memory/watchdog/handlers/sidekiq_handler_spec.rb
index 68dd784fb7e..1c62f5679d0 100644
--- a/spec/lib/gitlab/memory/watchdog/handlers/sidekiq_handler_spec.rb
+++ b/spec/lib/gitlab/memory/watchdog/handlers/sidekiq_handler_spec.rb
@@ -12,7 +12,7 @@ RSpec.describe Gitlab::Memory::Watchdog::Handlers::SidekiqHandler, feature_categ
before do
allow(Gitlab::Metrics::System).to receive(:monotonic_time)
- .and_return(0, 1, shutdown_timeout_seconds, 0, 1, Sidekiq[:timeout] + 2)
+ .and_return(0, 1, shutdown_timeout_seconds, 0, 1, Sidekiq.default_configuration[:timeout] + 2)
allow(Process).to receive(:kill)
allow(::Sidekiq).to receive(:logger).and_return(logger)
allow(logger).to receive(:warn)
@@ -81,7 +81,7 @@ RSpec.describe Gitlab::Memory::Watchdog::Handlers::SidekiqHandler, feature_categ
let(:signal_params) do
[
[:TSTP, pid, 'stop fetching new jobs', shutdown_timeout_seconds],
- [:TERM, pid, 'gracefully shut down', Sidekiq[:timeout] + 2]
+ [:TERM, pid, 'gracefully shut down', Sidekiq.default_configuration[:timeout] + 2]
]
end
@@ -95,7 +95,7 @@ RSpec.describe Gitlab::Memory::Watchdog::Handlers::SidekiqHandler, feature_categ
let(:signal_params) do
[
[:TSTP, pid, 'stop fetching new jobs', shutdown_timeout_seconds],
- [:TERM, pid, 'gracefully shut down', Sidekiq[:timeout] + 2],
+ [:TERM, pid, 'gracefully shut down', Sidekiq.default_configuration[:timeout] + 2],
[:KILL, kill_pid, 'hard shut down', nil]
]
end
diff --git a/spec/lib/gitlab/namespaced_session_store_spec.rb b/spec/lib/gitlab/namespaced_session_store_spec.rb
index 2c258ce3da6..4e9b35e6859 100644
--- a/spec/lib/gitlab/namespaced_session_store_spec.rb
+++ b/spec/lib/gitlab/namespaced_session_store_spec.rb
@@ -8,19 +8,28 @@ RSpec.describe Gitlab::NamespacedSessionStore do
context 'current session' do
subject { described_class.new(key) }
- it 'stores data under the specified key' do
- Gitlab::Session.with_session({}) do
- subject[:new_data] = 123
-
- expect(Thread.current[:session_storage][key]).to eq(new_data: 123)
- end
- end
-
it 'retrieves data from the given key' do
Thread.current[:session_storage] = { key => { existing_data: 123 } }
expect(subject[:existing_data]).to eq 123
end
+
+ context 'when namespace key does not exist' do
+ before do
+ Thread.current[:session_storage] = {}
+ end
+
+ it 'does not create namespace key when reading a value' do
+ expect(subject[:non_existent_key]).to eq(nil)
+ expect(Thread.current[:session_storage]).to eq({})
+ end
+
+ it 'stores data under the specified key' do
+ subject[:new_data] = 123
+
+ expect(Thread.current[:session_storage][key]).to eq(new_data: 123)
+ end
+ end
end
context 'passed in session' do
diff --git a/spec/lib/gitlab/pagination/keyset/iterator_spec.rb b/spec/lib/gitlab/pagination/keyset/iterator_spec.rb
index afaad48d363..326f3c6d344 100644
--- a/spec/lib/gitlab/pagination/keyset/iterator_spec.rb
+++ b/spec/lib/gitlab/pagination/keyset/iterator_spec.rb
@@ -46,12 +46,6 @@ RSpec.describe Gitlab::Pagination::Keyset::Iterator do
end
end
- it 'raises error when ordering configuration cannot be automatically determined' do
- expect do
- described_class.new(scope: MergeRequestDiffCommit.order(:merge_request_diff_id, :relative_order))
- end.to raise_error /The order on the scope does not support keyset pagination/
- end
-
it 'accepts a custom batch size' do
count = 0
diff --git a/spec/lib/gitlab/pagination/keyset/paginator_spec.rb b/spec/lib/gitlab/pagination/keyset/paginator_spec.rb
index 230ac01af31..16c5b3ab748 100644
--- a/spec/lib/gitlab/pagination/keyset/paginator_spec.rb
+++ b/spec/lib/gitlab/pagination/keyset/paginator_spec.rb
@@ -3,11 +3,11 @@
require 'spec_helper'
RSpec.describe Gitlab::Pagination::Keyset::Paginator do
- let_it_be(:project_1) { create(:project, created_at: 10.weeks.ago) }
- let_it_be(:project_2) { create(:project, created_at: 2.weeks.ago) }
- let_it_be(:project_3) { create(:project, created_at: 3.weeks.ago) }
- let_it_be(:project_4) { create(:project, created_at: 5.weeks.ago) }
- let_it_be(:project_5) { create(:project, created_at: 2.weeks.ago) }
+ let_it_be(:project_1) { create(:project, :public, name: 'Project A', created_at: 10.weeks.ago) }
+ let_it_be(:project_2) { create(:project, :public, name: 'Project E', created_at: 2.weeks.ago) }
+ let_it_be(:project_3) { create(:project, :private, name: 'Project C', created_at: 3.weeks.ago) }
+ let_it_be(:project_4) { create(:project, :private, name: 'Project B', created_at: 5.weeks.ago) }
+ let_it_be(:project_5) { create(:project, :private, name: 'Project B', created_at: 2.weeks.ago) }
describe 'pagination' do
let(:per_page) { 10 }
@@ -98,6 +98,13 @@ RSpec.describe Gitlab::Pagination::Keyset::Paginator do
end
end
+ context 'when the relation is ordered by more than 2 columns' do
+ let(:scope) { Project.order(visibility_level: :asc, name: :asc, id: :asc) }
+ let(:expected_order) { [project_4, project_5, project_3, project_1, project_2] }
+
+ it { expect(paginator.records).to eq(expected_order) }
+ end
+
describe 'default keyset direction parameter' do
let(:cursor_converter_class) { Gitlab::Pagination::Keyset::Paginator::Base64CursorConverter }
let(:per_page) { 2 }
@@ -110,14 +117,6 @@ RSpec.describe Gitlab::Pagination::Keyset::Paginator do
end
end
- context 'when unsupported order is given' do
- it 'raises error' do
- scope = Project.order(path: :asc, name: :asc, id: :desc) # Cannot build 3 column order automatically
-
- expect { scope.keyset_paginate }.to raise_error(/does not support keyset pagination/)
- end
- end
-
context 'when use_union_optimization option is true and ordering by two columns' do
let(:scope) { Project.order(name: :asc, id: :desc) }
diff --git a/spec/lib/gitlab/pagination/keyset/simple_order_builder_spec.rb b/spec/lib/gitlab/pagination/keyset/simple_order_builder_spec.rb
index e85b0354ff6..fd38fff2b81 100644
--- a/spec/lib/gitlab/pagination/keyset/simple_order_builder_spec.rb
+++ b/spec/lib/gitlab/pagination/keyset/simple_order_builder_spec.rb
@@ -146,7 +146,7 @@ RSpec.describe Gitlab::Pagination::Keyset::SimpleOrderBuilder do
context 'when more than 2 columns are given for the order' do
let(:scope) { Project.order(created_at: :asc, updated_at: :desc, id: :asc) }
- it { is_expected.to eq(false) }
+ it { is_expected.to eq(true) }
end
end
end
diff --git a/spec/lib/gitlab/quick_actions/extractor_spec.rb b/spec/lib/gitlab/quick_actions/extractor_spec.rb
index bb0adbc87f1..ef76b1e5fdf 100644
--- a/spec/lib/gitlab/quick_actions/extractor_spec.rb
+++ b/spec/lib/gitlab/quick_actions/extractor_spec.rb
@@ -185,21 +185,21 @@ RSpec.describe Gitlab::QuickActions::Extractor, feature_category: :team_planning
context 'at the start of content' do
it_behaves_like 'command with a single argument' do
let(:original_msg) { "/assign @joe\nworld" }
- let(:final_msg) { "\n/assign @joe\n\nworld" }
+ let(:final_msg) { "<p>/assign @joe</p>\nworld" }
end
end
context 'in the middle of content' do
it_behaves_like 'command with a single argument' do
let(:original_msg) { "hello\n/assign @joe\nworld" }
- let(:final_msg) { "hello\n\n/assign @joe\n\nworld" }
+ let(:final_msg) { "hello\n<p>/assign @joe</p>\nworld" }
end
end
context 'at the end of content' do
it_behaves_like 'command with a single argument' do
let(:original_msg) { "hello\n/assign @joe" }
- let(:final_msg) { "hello\n\n/assign @joe" }
+ let(:final_msg) { "hello\n<p>/assign @joe</p>" }
end
end
end
@@ -282,7 +282,7 @@ RSpec.describe Gitlab::QuickActions::Extractor, feature_category: :team_planning
msg, commands = extractor.extract_commands(msg)
expect(commands).to match_array [['reopen'], ['substitution', 'wow this is a thing.']]
- expect(msg).to eq "hello\nworld\n\n/reopen\n\nfoo"
+ expect(msg).to eq "hello\nworld\n<p>/reopen</p>\nfoo"
end
it 'extracts multiple commands' do
diff --git a/spec/lib/gitlab/redis/cluster_util_spec.rb b/spec/lib/gitlab/redis/cluster_util_spec.rb
index 3993004518d..f167065fd3f 100644
--- a/spec/lib/gitlab/redis/cluster_util_spec.rb
+++ b/spec/lib/gitlab/redis/cluster_util_spec.rb
@@ -29,10 +29,15 @@ RSpec.describe Gitlab::Redis::ClusterUtil, feature_category: :scalability do
with_them do
it 'returns expected value' do
- primary_store = pri_store == :cluster ? ::Redis.new(cluster: ['redis://localhost:6000']) : ::Redis.new
- secondary_store = sec_store == :cluster ? ::Redis.new(cluster: ['redis://localhost:6000']) : ::Redis.new
- multistore = Gitlab::Redis::MultiStore.new(primary_store, secondary_store, 'teststore')
- expect(described_class.cluster?(multistore)).to eq(expected_val)
+ primary_redis = pri_store == :cluster ? ::Redis.new(cluster: ['redis://localhost:6000']) : ::Redis.new
+ secondary_redis = sec_store == :cluster ? ::Redis.new(cluster: ['redis://localhost:6000']) : ::Redis.new
+ primary_pool = ConnectionPool.new { primary_redis }
+ secondary_pool = ConnectionPool.new { secondary_redis }
+ multistore = Gitlab::Redis::MultiStore.new(primary_pool, secondary_pool, 'teststore')
+
+ multistore.with_borrowed_connection do
+ expect(described_class.cluster?(multistore)).to eq(expected_val)
+ end
end
end
end
diff --git a/spec/lib/gitlab/redis/cross_slot_spec.rb b/spec/lib/gitlab/redis/cross_slot_spec.rb
index e2f5fcf7694..ccf2de1f28f 100644
--- a/spec/lib/gitlab/redis/cross_slot_spec.rb
+++ b/spec/lib/gitlab/redis/cross_slot_spec.rb
@@ -38,7 +38,9 @@ RSpec.describe Gitlab::Redis::CrossSlot, feature_category: :redis do
let_it_be(:secondary_db) { 2 }
let_it_be(:primary_store) { create_redis_store(redis_store_class.params, db: primary_db, serializer: nil) }
let_it_be(:secondary_store) { create_redis_store(redis_store_class.params, db: secondary_db, serializer: nil) }
- let_it_be(:multistore) { Gitlab::Redis::MultiStore.new(primary_store, secondary_store, 'testing') }
+ let_it_be(:primary_pool) { ConnectionPool.new { primary_store } }
+ let_it_be(:secondary_pool) { ConnectionPool.new { secondary_store } }
+ let_it_be(:multistore) { Gitlab::Redis::MultiStore.new(primary_pool, secondary_pool, 'testing') }
before do
primary_store.set('a', 1)
@@ -52,9 +54,11 @@ RSpec.describe Gitlab::Redis::CrossSlot, feature_category: :redis do
expect(
Gitlab::Instrumentation::RedisClusterValidator.allow_cross_slot_commands do
- described_class::Pipeline.new(multistore).pipelined do |p|
- p.get('a')
- p.set('b', 1)
+ multistore.with_borrowed_connection do
+ described_class::Pipeline.new(multistore).pipelined do |p|
+ p.get('a')
+ p.set('b', 1)
+ end
end
end
).to eq(%w[1 OK])
diff --git a/spec/lib/gitlab/redis/multi_store_spec.rb b/spec/lib/gitlab/redis/multi_store_spec.rb
index 6b1c0fb2e81..125bfec990c 100644
--- a/spec/lib/gitlab/redis/multi_store_spec.rb
+++ b/spec/lib/gitlab/redis/multi_store_spec.rb
@@ -10,11 +10,15 @@ RSpec.describe Gitlab::Redis::MultiStore, feature_category: :redis do
let_it_be(:secondary_db) { 2 }
let_it_be(:primary_store) { create_redis_store(redis_store_class.params, db: primary_db, serializer: nil) }
let_it_be(:secondary_store) { create_redis_store(redis_store_class.params, db: secondary_db, serializer: nil) }
+ let_it_be(:primary_pool) { ConnectionPool.new { primary_store } }
+ let_it_be(:secondary_pool) { ConnectionPool.new { secondary_store } }
let_it_be(:instance_name) { 'TestStore' }
- let_it_be(:multi_store) { described_class.new(primary_store, secondary_store, instance_name) }
+ let_it_be(:multi_store) { described_class.new(primary_pool, secondary_pool, instance_name) }
subject do
- multi_store.send(name, *args)
+ multi_store.with_borrowed_connection do
+ multi_store.send(name, *args)
+ end
end
before do
@@ -23,12 +27,12 @@ RSpec.describe Gitlab::Redis::MultiStore, feature_category: :redis do
end
after(:all) do
- primary_store.flushdb
- secondary_store.flushdb
+ primary_store.with(&:flushdb)
+ secondary_store.with(&:flushdb)
end
context 'when primary_store is nil' do
- let(:multi_store) { described_class.new(nil, secondary_store, instance_name) }
+ let(:multi_store) { described_class.new(nil, secondary_pool, instance_name) }
it 'fails with exception' do
expect { multi_store }.to raise_error(ArgumentError, /primary_store is required/)
@@ -36,7 +40,7 @@ RSpec.describe Gitlab::Redis::MultiStore, feature_category: :redis do
end
context 'when secondary_store is nil' do
- let(:multi_store) { described_class.new(primary_store, nil, instance_name) }
+ let(:multi_store) { described_class.new(primary_pool, nil, instance_name) }
it 'fails with exception' do
expect { multi_store }.to raise_error(ArgumentError, /secondary_store is required/)
@@ -45,7 +49,7 @@ RSpec.describe Gitlab::Redis::MultiStore, feature_category: :redis do
context 'when instance_name is nil' do
let(:instance_name) { nil }
- let(:multi_store) { described_class.new(primary_store, secondary_store, instance_name) }
+ let(:multi_store) { described_class.new(primary_pool, secondary_pool, instance_name) }
it 'fails with exception' do
expect { multi_store }.to raise_error(ArgumentError, /instance_name is required/)
@@ -58,7 +62,7 @@ RSpec.describe Gitlab::Redis::MultiStore, feature_category: :redis do
end
it 'fails with exception' do
- expect { described_class.new(primary_store, secondary_store, instance_name) }
+ expect { described_class.new(primary_pool, secondary_pool, instance_name) }
.to raise_error(ArgumentError, /invalid primary_store/)
end
end
@@ -69,7 +73,7 @@ RSpec.describe Gitlab::Redis::MultiStore, feature_category: :redis do
end
it 'fails with exception' do
- expect { described_class.new(primary_store, secondary_store, instance_name) }
+ expect { described_class.new(primary_pool, secondary_pool, instance_name) }
.to raise_error(ArgumentError, /invalid secondary_store/)
end
end
@@ -77,7 +81,9 @@ RSpec.describe Gitlab::Redis::MultiStore, feature_category: :redis do
# rubocop:disable RSpec/MultipleMemoizedHelpers
context 'with READ redis commands' do
subject do
- multi_store.send(name, *args, **kwargs)
+ multi_store.with_borrowed_connection do
+ multi_store.send(name, *args, **kwargs)
+ end
end
let(:args) { 'args' }
@@ -117,7 +123,9 @@ RSpec.describe Gitlab::Redis::MultiStore, feature_category: :redis do
context 'when reading from default instance is raising an exception' do
before do
- allow(multi_store.default_store).to receive(name).with(*expected_args).and_raise(StandardError)
+ multi_store.with_borrowed_connection do
+ allow(multi_store.default_store).to receive(name).with(*expected_args).and_raise(StandardError)
+ end
allow(Gitlab::ErrorTracking).to receive(:log_exception)
end
@@ -132,8 +140,10 @@ RSpec.describe Gitlab::Redis::MultiStore, feature_category: :redis do
context 'when the command is executed within pipelined block' do
subject do
- multi_store.pipelined do |pipeline|
- pipeline.send(name, *args, **kwargs)
+ multi_store.with_borrowed_connection do
+ multi_store.pipelined do |pipeline|
+ pipeline.send(name, *args, **kwargs)
+ end
end
end
@@ -153,7 +163,9 @@ RSpec.describe Gitlab::Redis::MultiStore, feature_category: :redis do
context 'when block provided' do
subject do
- multi_store.send(name, expected_args) { nil }
+ multi_store.with_borrowed_connection do
+ multi_store.send(name, expected_args) { nil }
+ end
end
it 'only default store to execute' do
@@ -167,7 +179,9 @@ RSpec.describe Gitlab::Redis::MultiStore, feature_category: :redis do
context 'with both primary and secondary store using same redis instance' do
let(:primary_store) { create_redis_store(redis_store_class.params, db: primary_db, serializer: nil) }
let(:secondary_store) { create_redis_store(redis_store_class.params, db: primary_db, serializer: nil) }
- let(:multi_store) { described_class.new(primary_store, secondary_store, instance_name) }
+ let(:primary_pool) { ConnectionPool.new { primary_store } }
+ let(:secondary_pool) { ConnectionPool.new { secondary_store } }
+ let(:multi_store) { described_class.new(primary_pool, secondary_pool, instance_name) }
it_behaves_like 'secondary store'
end
@@ -219,8 +233,10 @@ RSpec.describe Gitlab::Redis::MultiStore, feature_category: :redis do
end
subject do
- multi_store.mget(values) do |v|
- multi_store.sadd(skey, v)
+ multi_store.with_borrowed_connection do
+ multi_store.mget(values) do |v|
+ multi_store.sadd(skey, v)
+ end
end
end
@@ -335,19 +351,27 @@ RSpec.describe Gitlab::Redis::MultiStore, feature_category: :redis do
context 'when executing on the default instance is raising an exception' do
before do
- allow(multi_store.default_store).to receive(name).with(*args).and_raise(StandardError)
+ multi_store.with_borrowed_connection do
+ allow(multi_store.default_store).to receive(name).with(*args).and_raise(StandardError)
+ end
+
allow(Gitlab::ErrorTracking).to receive(:log_exception)
end
it 'raises error and does not execute on non default instance', :aggregate_failures do
- expect(multi_store.non_default_store).not_to receive(name).with(*args)
+ multi_store.with_borrowed_connection do
+ expect(multi_store.non_default_store).not_to receive(name).with(*args)
+ end
+
expect { subject }.to raise_error(StandardError)
end
end
context 'when executing on the non default instance is raising an exception' do
before do
- allow(multi_store.non_default_store).to receive(name).with(*args).and_raise(StandardError)
+ multi_store.with_borrowed_connection do
+ allow(multi_store.non_default_store).to receive(name).with(*args).and_raise(StandardError)
+ end
allow(Gitlab::ErrorTracking).to receive(:log_exception)
end
@@ -355,7 +379,9 @@ RSpec.describe Gitlab::Redis::MultiStore, feature_category: :redis do
expect(Gitlab::ErrorTracking).to receive(:log_exception).with(an_instance_of(StandardError),
hash_including(:multi_store_error_message,
command_name: name, instance_name: instance_name))
- expect(multi_store.default_store).to receive(name).with(*args)
+ multi_store.with_borrowed_connection do
+ expect(multi_store.default_store).to receive(name).with(*args)
+ end
subject
end
@@ -363,8 +389,10 @@ RSpec.describe Gitlab::Redis::MultiStore, feature_category: :redis do
context 'when the command is executed within pipelined block' do
subject do
- multi_store.pipelined do |pipeline|
- pipeline.send(name, *args)
+ multi_store.with_borrowed_connection do
+ multi_store.pipelined do |pipeline|
+ pipeline.send(name, *args)
+ end
end
end
@@ -390,7 +418,7 @@ RSpec.describe Gitlab::Redis::MultiStore, feature_category: :redis do
it "#{store} redis store contains correct values", :aggregate_failures do
subject
- redis_store = multi_store.send(store)
+ redis_store = multi_store.with_borrowed_connection { multi_store.send(store) }
if expected_value.is_a?(Array)
# :smembers does not guarantee the order it will return the values
@@ -425,8 +453,10 @@ RSpec.describe Gitlab::Redis::MultiStore, feature_category: :redis do
end
subject do
- multi_store.send(name) do |redis|
- redis.set(key1, value1)
+ multi_store.with_borrowed_connection do
+ multi_store.send(name) do |redis|
+ redis.set(key1, value1)
+ end
end
end
@@ -444,11 +474,15 @@ RSpec.describe Gitlab::Redis::MultiStore, feature_category: :redis do
context 'when executing on the default instance is raising an exception' do
before do
- allow(multi_store.default_store).to receive(name).and_raise(StandardError)
+ multi_store.with_borrowed_connection do
+ allow(multi_store.default_store).to receive(name).and_raise(StandardError)
+ end
end
it 'raises error and does not execute on non default instance', :aggregate_failures do
- expect(multi_store.non_default_store).not_to receive(name)
+ multi_store.with_borrowed_connection do
+ expect(multi_store.non_default_store).not_to receive(name)
+ end
expect { subject }.to raise_error(StandardError)
end
@@ -456,14 +490,18 @@ RSpec.describe Gitlab::Redis::MultiStore, feature_category: :redis do
context 'when executing on the non default instance is raising an exception' do
before do
- allow(multi_store.non_default_store).to receive(name).and_raise(StandardError)
+ multi_store.with_borrowed_connection do
+ allow(multi_store.non_default_store).to receive(name).and_raise(StandardError)
+ end
allow(Gitlab::ErrorTracking).to receive(:log_exception)
end
it 'logs the exception and execute on default instance', :aggregate_failures do
expect(Gitlab::ErrorTracking).to receive(:log_exception).with(an_instance_of(StandardError),
hash_including(:multi_store_error_message, command_name: name))
- expect(multi_store.default_store).to receive(name).and_call_original
+ multi_store.with_borrowed_connection do
+ expect(multi_store.default_store).to receive(name).and_call_original
+ end
subject
end
@@ -481,8 +519,10 @@ RSpec.describe Gitlab::Redis::MultiStore, feature_category: :redis do
end
subject do
- multi_store.send(name) do |redis|
- redis.get(key1)
+ multi_store.with_borrowed_connection do
+ multi_store.send(name) do |redis|
+ redis.get(key1)
+ end
end
end
@@ -501,8 +541,10 @@ RSpec.describe Gitlab::Redis::MultiStore, feature_category: :redis do
context 'when the value exists on both but differ' do
before do
- multi_store.non_default_store.set(key1, value1)
- multi_store.default_store.set(key1, value2)
+ multi_store.with_borrowed_connection do
+ multi_store.non_default_store.set(key1, value1)
+ multi_store.default_store.set(key1, value2)
+ end
end
it 'returns the value from the secondary store, logging an error' do
@@ -522,7 +564,7 @@ RSpec.describe Gitlab::Redis::MultiStore, feature_category: :redis do
context 'when the value does not exist on the non-default store but it does on the default' do
before do
- multi_store.default_store.set(key1, value2)
+ multi_store.with_borrowed_connection { multi_store.default_store.set(key1, value2) }
end
it 'returns the value from the secondary store, logging an error' do
@@ -584,18 +626,22 @@ RSpec.describe Gitlab::Redis::MultiStore, feature_category: :redis do
before do
allow(client).to receive(:instance_of?).with(::Redis::Cluster).and_return(true)
allow(pipeline).to receive(:pipelined)
- allow(multi_store.default_store).to receive(:_client).and_return(client)
+ multi_store.with_borrowed_connection do
+ allow(multi_store.default_store).to receive(:_client).and_return(client)
+ end
end
it 'calls cross-slot pipeline within multistore' do
if name == :pipelined
# we intentionally exclude `.and_call_original` since primary_store/secondary_store
# may not be running on a proper Redis Cluster.
- expect(Gitlab::Redis::CrossSlot::Pipeline).to receive(:new)
- .with(multi_store.default_store)
- .exactly(:once)
- .and_return(pipeline)
- expect(Gitlab::Redis::CrossSlot::Pipeline).not_to receive(:new).with(multi_store.non_default_store)
+ multi_store.with_borrowed_connection do
+ expect(Gitlab::Redis::CrossSlot::Pipeline).to receive(:new)
+ .with(multi_store.default_store)
+ .exactly(:once)
+ .and_return(pipeline)
+ expect(Gitlab::Redis::CrossSlot::Pipeline).not_to receive(:new).with(multi_store.non_default_store)
+ end
end
subject
@@ -637,7 +683,7 @@ RSpec.describe Gitlab::Redis::MultiStore, feature_category: :redis do
end
describe '#ping' do
- subject { multi_store.ping }
+ subject { multi_store.with_borrowed_connection { multi_store.ping } }
context 'when using both stores' do
before do
@@ -652,7 +698,7 @@ RSpec.describe Gitlab::Redis::MultiStore, feature_category: :redis do
context 'with message' do
it 'returns the same message' do
- expect(multi_store.ping('hello world')).to eq('hello world')
+ expect(multi_store.with_borrowed_connection { multi_store.ping('hello world') }).to eq('hello world')
end
end
@@ -757,11 +803,13 @@ RSpec.describe Gitlab::Redis::MultiStore, feature_category: :redis do
describe '#blpop' do
let_it_be(:key) { "mylist" }
- subject { multi_store.blpop(key, timeout: 0.1) }
+ subject { multi_store.with_borrowed_connection { multi_store.blpop(key, timeout: 0.1) } }
shared_examples 'calls blpop on default_store' do
it 'calls blpop on default_store' do
- expect(multi_store.default_store).to receive(:blpop).with(key, { timeout: 0.1 })
+ multi_store.with_borrowed_connection do
+ expect(multi_store.default_store).to receive(:blpop).with(key, { timeout: 0.1 })
+ end
subject
end
@@ -769,7 +817,9 @@ RSpec.describe Gitlab::Redis::MultiStore, feature_category: :redis do
shared_examples 'does not call lpop on non_default_store' do
it 'does not call blpop on non_default_store' do
- expect(multi_store.non_default_store).not_to receive(:blpop)
+ multi_store.with_borrowed_connection do
+ expect(multi_store.non_default_store).not_to receive(:blpop)
+ end
subject
end
@@ -784,11 +834,13 @@ RSpec.describe Gitlab::Redis::MultiStore, feature_category: :redis do
context "when an element exists in the default_store" do
before do
- multi_store.default_store.lpush(key, 'abc')
+ multi_store.with_borrowed_connection { multi_store.default_store.lpush(key, 'abc') }
end
it 'calls lpop on non_default_store' do
- expect(multi_store.non_default_store).to receive(:blpop).with(key, { timeout: 1 })
+ multi_store.with_borrowed_connection do
+ expect(multi_store.non_default_store).to receive(:blpop).with(key, { timeout: 1 })
+ end
subject
end
@@ -818,7 +870,7 @@ RSpec.describe Gitlab::Redis::MultiStore, feature_category: :redis do
allow(Gitlab::Metrics).to receive(:counter).and_return(counter)
end
- subject { multi_store.command }
+ subject { multi_store.with_borrowed_connection { multi_store.command } }
context 'when in test environment' do
it 'raises error' do
@@ -868,7 +920,7 @@ RSpec.describe Gitlab::Redis::MultiStore, feature_category: :redis do
end
context 'when command is in SKIP_LOG_METHOD_MISSING_FOR_COMMANDS' do
- subject { multi_store.info }
+ subject { multi_store.with_borrowed_connection { multi_store.info } }
it 'does not log MethodMissingError' do
expect(Gitlab::ErrorTracking).not_to receive(:log_exception)
@@ -907,7 +959,7 @@ RSpec.describe Gitlab::Redis::MultiStore, feature_category: :redis do
context 'when the command is executed within pipelined block' do
subject do
- multi_store.pipelined(&:command)
+ multi_store.with_borrowed_connection { multi_store.pipelined(&:command) }
end
it 'is executed only 1 time on each instance', :aggregate_failures do
@@ -927,7 +979,7 @@ RSpec.describe Gitlab::Redis::MultiStore, feature_category: :redis do
end
describe '#to_s' do
- subject { multi_store.to_s }
+ subject { multi_store.with_borrowed_connection { multi_store.to_s } }
it 'returns same value as primary_store' do
is_expected.to eq(primary_store.to_s)
@@ -936,13 +988,17 @@ RSpec.describe Gitlab::Redis::MultiStore, feature_category: :redis do
describe '#is_a?' do
it 'returns true for ::Redis::Store' do
- expect(multi_store.is_a?(::Redis::Store)).to be true
+ expect(multi_store.with_borrowed_connection { multi_store.is_a?(::Redis::Store) }).to be true
end
end
describe '#use_primary_and_secondary_stores?' do
+ subject(:use_both) do
+ multi_store.with_borrowed_connection { multi_store.use_primary_and_secondary_stores? }
+ end
+
it 'multi store is enabled' do
- expect(multi_store.use_primary_and_secondary_stores?).to be true
+ expect(use_both).to be true
end
context 'with empty DB' do
@@ -951,7 +1007,7 @@ RSpec.describe Gitlab::Redis::MultiStore, feature_category: :redis do
end
it 'multi store is disabled' do
- expect(multi_store.use_primary_and_secondary_stores?).to be false
+ expect(use_both).to be false
end
end
@@ -961,14 +1017,18 @@ RSpec.describe Gitlab::Redis::MultiStore, feature_category: :redis do
end
it 'multi store is disabled' do
- expect(multi_store.use_primary_and_secondary_stores?).to be false
+ expect(use_both).to be false
end
end
end
describe '#use_primary_store_as_default?' do
+ subject(:primary_default) do
+ multi_store.with_borrowed_connection { multi_store.use_primary_store_as_default? }
+ end
+
it 'multi store is disabled' do
- expect(multi_store.use_primary_store_as_default?).to be true
+ expect(primary_default).to be true
end
context 'with empty DB' do
@@ -977,7 +1037,7 @@ RSpec.describe Gitlab::Redis::MultiStore, feature_category: :redis do
end
it 'multi store is disabled' do
- expect(multi_store.use_primary_and_secondary_stores?).to be false
+ expect(primary_default).to be false
end
end
@@ -987,7 +1047,7 @@ RSpec.describe Gitlab::Redis::MultiStore, feature_category: :redis do
end
it 'multi store is disabled' do
- expect(multi_store.use_primary_and_secondary_stores?).to be false
+ expect(primary_default).to be false
end
end
end
@@ -1003,7 +1063,7 @@ RSpec.describe Gitlab::Redis::MultiStore, feature_category: :redis do
it 'publishes to one or more stores' do
expect(stores).to all(receive(:publish))
- multi_store.publish(channel_name, message)
+ multi_store.with_borrowed_connection { multi_store.publish(channel_name, message) }
end
end
@@ -1012,14 +1072,14 @@ RSpec.describe Gitlab::Redis::MultiStore, feature_category: :redis do
expect(default_store).to receive(:subscribe)
expect(non_default_store).not_to receive(:subscribe)
- multi_store.subscribe(channel_name)
+ multi_store.with_borrowed_connection { multi_store.subscribe(channel_name) }
end
it 'unsubscribes to the default store' do
expect(default_store).to receive(:unsubscribe)
expect(non_default_store).not_to receive(:unsubscribe)
- multi_store.unsubscribe
+ multi_store.with_borrowed_connection { multi_store.unsubscribe }
end
end
@@ -1106,4 +1166,30 @@ RSpec.describe Gitlab::Redis::MultiStore, feature_category: :redis do
expect(duplicated_commands).to be_empty, "commands #{duplicated_commands} defined more than once"
end
end
+
+ describe '.with_borrowed_connection' do
+ it 'permits nested borrows' do
+ multi_store.with_borrowed_connection do
+ expect(Thread.current[multi_store.borrow_counter]).to eq(1)
+
+ multi_store.with_borrowed_connection do
+ multi_store.ping
+
+ expect(Thread.current[multi_store.borrow_counter]).to eq(2)
+ expect(multi_store.primary_store).not_to eq(nil)
+ expect(multi_store.secondary_store).not_to eq(nil)
+ end
+
+ multi_store.ping
+
+ expect(Thread.current[multi_store.borrow_counter]).to eq(1)
+ expect(multi_store.primary_store).not_to eq(nil)
+ expect(multi_store.secondary_store).not_to eq(nil)
+ end
+
+ expect(Thread.current[multi_store.borrow_counter]).to eq(0)
+ expect(multi_store.primary_store).to eq(nil)
+ expect(multi_store.secondary_store).to eq(nil)
+ end
+ end
end
diff --git a/spec/lib/gitlab/redis/sessions_spec.rb b/spec/lib/gitlab/redis/sessions_spec.rb
index b02864cb73d..874822e3e6a 100644
--- a/spec/lib/gitlab/redis/sessions_spec.rb
+++ b/spec/lib/gitlab/redis/sessions_spec.rb
@@ -5,27 +5,6 @@ require 'spec_helper'
RSpec.describe Gitlab::Redis::Sessions do
it_behaves_like "redis_new_instance_shared_examples", 'sessions', Gitlab::Redis::SharedState
- describe 'redis instance used in connection pool' do
- around do |example|
- clear_pool
- example.run
- ensure
- clear_pool
- end
-
- it 'uses ::Redis instance' do
- described_class.pool.with do |redis_instance|
- expect(redis_instance).to be_instance_of(::Redis)
- end
- end
-
- def clear_pool
- described_class.remove_instance_variable(:@pool)
- rescue NameError
- # raised if @pool was not set; ignore
- end
- end
-
describe '#store' do
subject(:store) { described_class.store(namespace: described_class::SESSION_NAMESPACE) }
diff --git a/spec/lib/gitlab/redis/shared_state_spec.rb b/spec/lib/gitlab/redis/shared_state_spec.rb
index a5247903d50..fe8f42cf8eb 100644
--- a/spec/lib/gitlab/redis/shared_state_spec.rb
+++ b/spec/lib/gitlab/redis/shared_state_spec.rb
@@ -6,4 +6,5 @@ RSpec.describe Gitlab::Redis::SharedState do
let(:instance_specific_config_file) { "config/redis.shared_state.yml" }
include_examples "redis_shared_examples"
+ include_examples "multi_store_wrapper_shared_examples"
end
diff --git a/spec/lib/gitlab/runtime_spec.rb b/spec/lib/gitlab/runtime_spec.rb
index 05bcdf2fc96..bd5914c9df8 100644
--- a/spec/lib/gitlab/runtime_spec.rb
+++ b/spec/lib/gitlab/runtime_spec.rb
@@ -127,10 +127,10 @@ RSpec.describe Gitlab::Runtime, feature_category: :cloud_connector do
before do
stub_const('::Sidekiq', sidekiq_type)
allow(sidekiq_type).to receive(:server?).and_return(true)
- allow(sidekiq_type).to receive(:[]).with(:concurrency).and_return(2)
+ allow(sidekiq_type).to receive(:default_configuration).and_return({ concurrency: 2 })
end
- it_behaves_like "valid runtime", :sidekiq, 5
+ it_behaves_like "valid runtime", :sidekiq, 2
it 'identifies as an application runtime' do
expect(described_class.application?).to be true
diff --git a/spec/lib/gitlab/security/scan_configuration_spec.rb b/spec/lib/gitlab/security/scan_configuration_spec.rb
index 9151db3c5ff..491be85584b 100644
--- a/spec/lib/gitlab/security/scan_configuration_spec.rb
+++ b/spec/lib/gitlab/security/scan_configuration_spec.rb
@@ -86,4 +86,105 @@ RSpec.describe ::Gitlab::Security::ScanConfiguration do
it { is_expected.to be_falsey }
end
end
+
+ describe '#security_features' do
+ subject { scan.security_features }
+
+ using RSpec::Parameterized::TableSyntax
+
+ where(:scan_type, :features_hash) do
+ :sast | { name: "Static Application Security Testing (SAST)",
+ short_name: "SAST",
+ description: "Analyze your source code for known vulnerabilities.",
+ help_path: "/help/user/application_security/sast/index",
+ configuration_help_path: "/help/user/application_security/sast/index#configuration",
+ type: "sast" }
+ :sast_iac | { name: "Infrastructure as Code (IaC) Scanning",
+ short_name: "SAST IaC",
+ description: "Analyze your infrastructure as code configuration files for known vulnerabilities.",
+ help_path: "/help/user/application_security/iac_scanning/index",
+ configuration_help_path: "/help/user/application_security/iac_scanning/index#configuration",
+ type: "sast_iac" }
+ :dast | {
+ badge: { text: "Available on demand",
+ tooltip_text: "On-demand scans run outside of the DevOps " \
+ "cycle and find vulnerabilities in your projects",
+ variant: "info" },
+ secondary: {
+ type: "dast_profiles",
+ name: "DAST profiles",
+ description: "Manage profiles for use by DAST scans.",
+ configuration_text: "Manage profiles"
+ },
+ name: "Dynamic Application Security Testing (DAST)",
+ short_name: "DAST",
+ description: "Analyze a deployed version of your web application for known " \
+ "vulnerabilities by examining it from the outside in. DAST works by simulating " \
+ "external attacks on your application while it is running.",
+ help_path: "/help/user/application_security/dast/index",
+ configuration_help_path: "/help/user/application_security/dast/index#enable-automatic-dast-run",
+ type: "dast",
+ anchor: "dast"
+ }
+ :dependency_scanning | { name: "Dependency Scanning",
+ description: "Analyze your dependencies for known vulnerabilities.",
+ help_path: "/help/user/application_security/dependency_scanning/index",
+ configuration_help_path: "/help/user/application_security/dependency_scanning/index#configuration",
+ type: "dependency_scanning",
+ anchor: "dependency-scanning" }
+ :container_scanning | { name: "Container Scanning",
+ description: "Check your Docker images for known vulnerabilities.",
+ help_path: "/help/user/application_security/container_scanning/index",
+ configuration_help_path: "/help/user/application_security/container_scanning/index#configuration",
+ type: "container_scanning" }
+ :secret_detection | { name: "Secret Detection",
+ description: "Analyze your source code and git history for secrets.",
+ help_path: "/help/user/application_security/secret_detection/index",
+ configuration_help_path: "/help/user/application_security/secret_detection/index#configuration",
+ type: "secret_detection" }
+ :api_fuzzing | { name: "API Fuzzing",
+ description: "Find bugs in your code with API fuzzing.",
+ help_path: "/help/user/application_security/api_fuzzing/index",
+ type: "api_fuzzing" }
+ :coverage_fuzzing | { name: "Coverage Fuzzing",
+ description: "Find bugs in your code with coverage-guided fuzzing.",
+ help_path: "/help/user/application_security/coverage_fuzzing/index",
+ configuration_help_path: \
+ "/help/user/application_security/coverage_fuzzing/index#enable-coverage-guided-fuzz-testing",
+ type: "coverage_fuzzing",
+ secondary: { type: "corpus_management",
+ name: "Corpus Management",
+ description: "Manage corpus files used as " \
+ "seed inputs with coverage-guided fuzzing.",
+ configuration_text: "Manage corpus" } }
+ :breach_and_attack_simulation | { anchor: "bas",
+ badge: { always_display: true,
+ text: "Incubating feature",
+ tooltip_text: "Breach and Attack Simulation is an incubating feature " \
+ "extending existing security " \
+ "testing by simulating adversary activity.",
+ variant: "info" },
+ description: "Simulate breach and attack scenarios against your running " \
+ "application by attempting to detect " \
+ "and exploit known vulnerabilities.",
+ name: "Breach and Attack Simulation (BAS)",
+ help_path: "/help/user/application_security/breach_and_attack_simulation/index",
+ secondary: { configuration_help_path: "/help/user/application_security/breach_and_attack_simulation/" \
+ "index#extend-dynamic-application-security-testing-dast",
+ description: "Enable incubating Breach and " \
+ "Attack Simulation focused features " \
+ "such as callback attacks in your DAST scans.",
+ name: "Out-of-Band Application Security Testing (OAST)" },
+ short_name: "BAS",
+ type: "breach_and_attack_simulation" }
+ :invalid | {}
+ end
+
+ with_them do
+ let(:type) { scan_type }
+ let(:configured) { true }
+
+ it { is_expected.to eq features_hash }
+ end
+ end
end
diff --git a/spec/lib/gitlab/sidekiq_config_spec.rb b/spec/lib/gitlab/sidekiq_config_spec.rb
index 5885151ecb5..f741fd8fae9 100644
--- a/spec/lib/gitlab/sidekiq_config_spec.rb
+++ b/spec/lib/gitlab/sidekiq_config_spec.rb
@@ -186,7 +186,8 @@ RSpec.describe Gitlab::SidekiqConfig do
allow(::Gitlab::SidekiqConfig::WorkerRouter)
.to receive(:global).and_return(::Gitlab::SidekiqConfig::WorkerRouter.new(test_routes))
- allow(Sidekiq).to receive(:[]).with(:queues).and_return(%w[default background_migration])
+ allow(Sidekiq).to receive_message_chain(:default_configuration, :queues)
+ .and_return(%w[default background_migration])
mappings = described_class.current_worker_queue_mappings
diff --git a/spec/lib/gitlab/sidekiq_logging/structured_logger_spec.rb b/spec/lib/gitlab/sidekiq_logging/structured_logger_spec.rb
index 2e07fa100e8..b1a8a9f4da3 100644
--- a/spec/lib/gitlab/sidekiq_logging/structured_logger_spec.rb
+++ b/spec/lib/gitlab/sidekiq_logging/structured_logger_spec.rb
@@ -492,7 +492,7 @@ RSpec.describe Gitlab::SidekiqLogging::StructuredLogger do
'completed_at' => current_utc_time.to_i }
end
- subject { described_class.new }
+ subject { described_class.new(Sidekiq.logger) }
it 'update payload correctly' do
travel_to(current_utc_time) do
diff --git a/spec/lib/gitlab/sidekiq_middleware/server_metrics_spec.rb b/spec/lib/gitlab/sidekiq_middleware/server_metrics_spec.rb
index 9cf9901007c..e1662903fa4 100644
--- a/spec/lib/gitlab/sidekiq_middleware/server_metrics_spec.rb
+++ b/spec/lib/gitlab/sidekiq_middleware/server_metrics_spec.rb
@@ -10,7 +10,7 @@ RSpec.describe Gitlab::SidekiqMiddleware::ServerMetrics, feature_category: :shar
describe '.initialize_process_metrics' do
it 'sets concurrency metrics' do
- expect(concurrency_metric).to receive(:set).with({}, Sidekiq[:concurrency].to_i)
+ expect(concurrency_metric).to receive(:set).with({}, Sidekiq.default_configuration[:concurrency].to_i)
described_class.initialize_process_metrics
end
@@ -122,7 +122,7 @@ RSpec.describe Gitlab::SidekiqMiddleware::ServerMetrics, feature_category: :shar
end
it 'sets the concurrency metric' do
- expect(concurrency_metric).to receive(:set).with({}, Sidekiq[:concurrency].to_i)
+ expect(concurrency_metric).to receive(:set).with({}, Sidekiq.default_configuration[:concurrency].to_i)
described_class.initialize_process_metrics
end
diff --git a/spec/lib/gitlab/sidekiq_migrate_jobs_spec.rb b/spec/lib/gitlab/sidekiq_migrate_jobs_spec.rb
index bf379d9cb0d..96d4042b1e6 100644
--- a/spec/lib/gitlab/sidekiq_migrate_jobs_spec.rb
+++ b/spec/lib/gitlab/sidekiq_migrate_jobs_spec.rb
@@ -22,7 +22,7 @@ RSpec.describe Gitlab::SidekiqMigrateJobs, :clean_gitlab_redis_queues,
let(:migrator) { described_class.new(mappings) }
let(:set_after) do
- Sidekiq.redis { |c| c.zrange(set_name, 0, -1, with_scores: true) }
+ Sidekiq.redis { |c| c.call("ZRANGE", set_name, 0, -1, "WITHSCORES") }
.map { |item, score| [Gitlab::Json.load(item), score] }
end
@@ -226,8 +226,9 @@ RSpec.describe Gitlab::SidekiqMigrateJobs, :clean_gitlab_redis_queues,
let(:logger) { nil }
def list_queues
- queues = Sidekiq.redis do |conn|
- conn.scan_each(match: "queue:*").to_a
+ queues = []
+ Sidekiq.redis do |conn|
+ conn.scan("MATCH", "queue:*") { |key| queues << key }
end
queues.uniq.map { |queue| queue.split(':', 2).last }
end
diff --git a/spec/lib/gitlab/sidekiq_status_spec.rb b/spec/lib/gitlab/sidekiq_status_spec.rb
index 55e3885d257..ecdab2651a2 100644
--- a/spec/lib/gitlab/sidekiq_status_spec.rb
+++ b/spec/lib/gitlab/sidekiq_status_spec.rb
@@ -174,7 +174,7 @@ RSpec.describe Gitlab::SidekiqStatus, :clean_gitlab_redis_queues,
context 'when both multi-store feature flags are off' do
def with_redis(&block)
- Sidekiq.redis(&block)
+ Gitlab::Redis::Queues.with(&block)
end
before do
diff --git a/spec/lib/gitlab/ssh/commit_spec.rb b/spec/lib/gitlab/ssh/commit_spec.rb
index 3b53ed9d1db..0d2621a16c0 100644
--- a/spec/lib/gitlab/ssh/commit_spec.rb
+++ b/spec/lib/gitlab/ssh/commit_spec.rb
@@ -28,7 +28,7 @@ RSpec.describe Gitlab::Ssh::Commit, feature_category: :source_code_management do
})
allow(Gitlab::Ssh::Signature).to receive(:new)
- .with(signature_text, signed_text, signer, commit.committer_email)
+ .with(signature_text, signed_text, signer, commit)
.and_return(verifier)
end
diff --git a/spec/lib/gitlab/ssh/signature_spec.rb b/spec/lib/gitlab/ssh/signature_spec.rb
index cb0b1ff049c..608fa9d71d0 100644
--- a/spec/lib/gitlab/ssh/signature_spec.rb
+++ b/spec/lib/gitlab/ssh/signature_spec.rb
@@ -8,7 +8,9 @@ RSpec.describe Gitlab::Ssh::Signature, feature_category: :source_code_management
let_it_be(:public_key_text) { 'ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIHZ8NHEnCIpC4mnot+BRxv6L+fq+TnN1CgsRrHWLmfwb' }
let_it_be_with_reload(:user) { create(:user, email: committer_email) }
let_it_be_with_reload(:key) { create(:key, usage_type: :signing, key: public_key_text, user: user) }
+ let_it_be_with_reload(:project) { create(:project, :repository, :in_group) }
+ let(:commit) { project.commit }
let(:signed_text) { 'This message was signed by an ssh key' }
let(:signer) { :SIGNER_USER }
@@ -24,12 +26,16 @@ RSpec.describe Gitlab::Ssh::Signature, feature_category: :source_code_management
SIG
end
+ before do
+ allow(commit).to receive(:committer_email).and_return(committer_email)
+ end
+
subject(:signature) do
described_class.new(
signature_text,
signed_text,
signer,
- committer_email
+ commit
)
end
@@ -283,5 +289,30 @@ RSpec.describe Gitlab::Ssh::Signature, feature_category: :source_code_management
it 'returns the pubkey sha256 fingerprint' do
expect(signature.key_fingerprint).to eq('dw7gPSvYtkCBU+BbTolbbckUEX3sL6NsGIJTQ4PYEnM')
end
+
+ context 'when a signature has been created with a certificate' do
+ let(:signature_text) do
+ # ssh-keygen -Y sign -n git -f id_test-cert.pub message.txt
+ <<~SIG
+ -----BEGIN SSH SIGNATURE-----
+ U1NIU0lHAAAAAQAAAb0AAAAgc3NoLWVkMjU1MTktY2VydC12MDFAb3BlbnNzaC5jb20AAA
+ AgWbXlnjWbxTzOlRPcnSMlQQnnJTCsEv2y2ij5o7yVbcUAAAAgYAsBVqgfGrvGdSPjqY0H
+ t8yljpOS4VumZHnAh+wCvdEAAAAAAAAAAAAAAAEAAAARYWRtaW5AZXhhbXBsZS5jb20AAA
+ AAAAAAAGV9kqgAAAAAZX7kiwAAAAAAAACCAAAAFXBlcm1pdC1YMTEtZm9yd2FyZGluZwAA
+ AAAAAAAXcGVybWl0LWFnZW50LWZvcndhcmRpbmcAAAAAAAAAFnBlcm1pdC1wb3J0LWZvcn
+ dhcmRpbmcAAAAAAAAACnBlcm1pdC1wdHkAAAAAAAAADnBlcm1pdC11c2VyLXJjAAAAAAAA
+ AAAAAAAzAAAAC3NzaC1lZDI1NTE5AAAAIINudhvW7P4c36bBwlWTaxnCCOaSfMrUbXHcP7
+ 7zH6LyAAAAUwAAAAtzc2gtZWQyNTUxOQAAAEBp9J9YQhaz+tNIKtNpZe5sAxcqvMgcYlB+
+ fVaDsYNOj445Bz7TBoFqjrs95yaF6pwARK11IEQTcwtrihLGzGkNAAAAA2dpdAAAAAAAAA
+ AGc2hhNTEyAAAAUwAAAAtzc2gtZWQyNTUxOQAAAECfVh7AzwqRBMbnHBApCnMpu9Y1qpGM
+ sOSL1EeV3SIOlrThNTCerUpcaizcSY9L8WwP2TXlqw2Sq1BGM+PPSN0C
+ -----END SSH SIGNATURE-----
+ SIG
+ end
+
+ it 'returns public key fingerprint' do
+ expect(signature.key_fingerprint).to eq('3dNIFKfIAXZb/JL30KKv95cps+mZwVAuAYQhIWxAb+8')
+ end
+ end
end
end
diff --git a/spec/lib/gitlab/themes_spec.rb b/spec/lib/gitlab/themes_spec.rb
index a41f7d927fe..a5d42945101 100644
--- a/spec/lib/gitlab/themes_spec.rb
+++ b/spec/lib/gitlab/themes_spec.rb
@@ -47,18 +47,4 @@ RSpec.describe Gitlab::Themes, lib: true do
expect(ids).not_to be_empty
end
end
-
- describe 'theme.css_filename' do
- described_class.each do |theme|
- next unless theme.css_filename
-
- context "for #{theme.name}" do
- it 'returns an existing CSS filename' do
- css_file_path = Rails.root.join('app/assets/stylesheets/themes', theme.css_filename + '.scss')
-
- expect(File.exist?(css_file_path)).to eq(true)
- end
- end
- end
- end
end
diff --git a/spec/lib/gitlab/tracking/destinations/database_events_snowplow_spec.rb b/spec/lib/gitlab/tracking/destinations/database_events_snowplow_spec.rb
deleted file mode 100644
index 5a5c7123971..00000000000
--- a/spec/lib/gitlab/tracking/destinations/database_events_snowplow_spec.rb
+++ /dev/null
@@ -1,140 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Gitlab::Tracking::Destinations::DatabaseEventsSnowplow, :do_not_stub_snowplow_by_default, feature_category: :application_instrumentation do
- let(:emitter) { SnowplowTracker::Emitter.new(endpoint: 'localhost', options: { buffer_size: 1 }) }
-
- let(:tracker) do
- SnowplowTracker::Tracker
- .new(
- emitters: [emitter],
- subject: SnowplowTracker::Subject.new,
- namespace: 'namespace',
- app_id: 'app_id'
- )
- end
-
- before do
- stub_application_setting(snowplow_app_id: '_abc123_')
- end
-
- around do |example|
- freeze_time { example.run }
- end
-
- context 'when snowplow is enabled' do
- before do
- allow(SnowplowTracker::AsyncEmitter)
- .to receive(:new)
- .with(endpoint: endpoint,
- options:
- {
- protocol: 'https',
- on_success: subject.method(:increment_successful_events_emissions),
- on_failure: subject.method(:failure_callback)
- }
- ).and_return(emitter)
-
- allow(SnowplowTracker::Tracker)
- .to receive(:new)
- .with(
- emitters: [emitter],
- subject: an_instance_of(SnowplowTracker::Subject),
- namespace: described_class::SNOWPLOW_NAMESPACE,
- app_id: '_abc123_'
- ).and_return(tracker)
- end
-
- describe '#event' do
- let(:endpoint) { 'localhost:9091' }
- let(:event_params) do
- {
- category: 'category',
- action: 'action',
- label: 'label',
- property: 'property',
- value: 1.5,
- context: nil,
- tstamp: (Time.now.to_f * 1000).to_i
- }
- end
-
- context 'when on gitlab.com environment' do
- let(:endpoint) { 'db-snowplow.trx.gitlab.net' }
-
- before do
- stub_application_setting(snowplow_database_collector_hostname: endpoint)
- end
-
- it 'sends event to tracker' do
- allow(Gitlab).to receive(:com?).and_return(true)
- allow(tracker).to receive(:track_struct_event).and_call_original
-
- subject.event('category', 'action', label: 'label', property: 'property', value: 1.5)
-
- expect(tracker).to have_received(:track_struct_event).with(event_params)
- end
- end
-
- it 'sends event to tracker' do
- allow(tracker).to receive(:track_struct_event).and_call_original
-
- subject.event('category', 'action', label: 'label', property: 'property', value: 1.5)
-
- expect(tracker).to have_received(:track_struct_event).with(event_params)
- end
-
- it 'increase total snowplow events counter' do
- counter = double
-
- expect(counter).to receive(:increment)
- expect(Gitlab::Metrics).to receive(:counter)
- .with(:gitlab_db_events_snowplow_events_total, 'Number of Snowplow events')
- .and_return(counter)
-
- subject.event('category', 'action', label: 'label', property: 'property', value: 1.5)
- end
- end
- end
-
- context 'for callbacks' do
- describe 'on success' do
- it 'increase gitlab_successful_snowplow_events_total counter' do
- counter = double
-
- expect(counter).to receive(:increment).with({}, 2)
- expect(Gitlab::Metrics).to receive(:counter)
- .with(
- :gitlab_db_events_snowplow_successful_events_total,
- 'Number of successful Snowplow events emissions').and_return(counter)
-
- subject.method(:increment_successful_events_emissions).call(2)
- end
- end
-
- describe 'on failure' do
- it 'increase gitlab_failed_snowplow_events_total counter and logs failures', :aggregate_failures do
- counter = double
- error_message = "Issue database_event_update failed to be reported to collector at localhost:9091"
- failures = [{ "e" => "se",
- "se_ca" => "Issue",
- "se_la" => "issues",
- "se_ac" => "database_event_update" }]
- allow(Gitlab::Metrics).to receive(:counter)
- .with(
- :gitlab_db_events_snowplow_successful_events_total,
- 'Number of successful Snowplow events emissions').and_call_original
-
- expect(Gitlab::AppLogger).to receive(:error).with(error_message)
- expect(counter).to receive(:increment).with({}, 1)
- expect(Gitlab::Metrics).to receive(:counter)
- .with(
- :gitlab_db_events_snowplow_failed_events_total,
- 'Number of failed Snowplow events emissions').and_return(counter)
-
- subject.method(:failure_callback).call(2, failures)
- end
- end
- end
-end
diff --git a/spec/lib/gitlab/tracking/event_definition_spec.rb b/spec/lib/gitlab/tracking/event_definition_spec.rb
index 5e41c691da8..7c5047dc0c6 100644
--- a/spec/lib/gitlab/tracking/event_definition_spec.rb
+++ b/spec/lib/gitlab/tracking/event_definition_spec.rb
@@ -31,10 +31,6 @@ RSpec.describe Gitlab::Tracking::EventDefinition do
File.write(path, content)
end
- it 'has all definitions valid' do
- expect { described_class.definitions }.not_to raise_error
- end
-
it 'has no duplicated actions in InternalEventTracking events', :aggregate_failures do
definitions_by_action = described_class.definitions
.select { |d| d.category == 'InternalEventTracking' }
@@ -85,10 +81,8 @@ RSpec.describe Gitlab::Tracking::EventDefinition do
attributes[attribute] = value
end
- it 'raise exception' do
- expect(Gitlab::ErrorTracking).to receive(:track_and_raise_for_dev_exception).at_least(:once).with(instance_of(Gitlab::Tracking::InvalidEventError))
-
- described_class.new(path, attributes).validate!
+ it 'has validation errors' do
+ expect(described_class.new(path, attributes).validation_errors).not_to be_empty
end
end
end
diff --git a/spec/lib/gitlab/tracking/event_definition_validate_all_spec.rb b/spec/lib/gitlab/tracking/event_definition_validate_all_spec.rb
new file mode 100644
index 00000000000..cc2ccc511bb
--- /dev/null
+++ b/spec/lib/gitlab/tracking/event_definition_validate_all_spec.rb
@@ -0,0 +1,12 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Tracking::EventDefinition, feature_category: :product_analytics_data_management do
+ it 'only has valid event definitions', :aggregate_failures do
+ described_class.definitions.each do |definition|
+ validation_errors = definition.validation_errors
+ expect(validation_errors).to be_empty, validation_errors.join
+ end
+ end
+end
diff --git a/spec/lib/gitlab/tracking_spec.rb b/spec/lib/gitlab/tracking_spec.rb
index 46213532071..6e4ab00d04f 100644
--- a/spec/lib/gitlab/tracking_spec.rb
+++ b/spec/lib/gitlab/tracking_spec.rb
@@ -165,38 +165,6 @@ RSpec.describe Gitlab::Tracking, feature_category: :application_instrumentation
end
end
- describe '.database_event' do
- context 'when the action is not passed in as a string' do
- it 'allows symbols' do
- expect(Gitlab::ErrorTracking).not_to receive(:track_and_raise_for_dev_exception)
-
- described_class.database_event('category', :some_action)
- end
-
- it 'allows nil' do
- expect(Gitlab::ErrorTracking).not_to receive(:track_and_raise_for_dev_exception)
-
- described_class.database_event('category', nil)
- end
-
- it 'allows integers' do
- expect(Gitlab::ErrorTracking).not_to receive(:track_and_raise_for_dev_exception)
-
- described_class.database_event('category', 1)
- end
- end
-
- it_behaves_like 'rescued error raised by destination class' do
- let(:category) { 'Issue' }
- let(:action) { 'created' }
- let(:destination_class) { Gitlab::Tracking::Destinations::DatabaseEventsSnowplow }
-
- subject(:tracking_method) { described_class.database_event(category, action) }
- end
-
- it_behaves_like 'delegates to destination', Gitlab::Tracking::Destinations::DatabaseEventsSnowplow, :database_event
- end
-
describe '.event' do
context 'when the action is not passed in as a string' do
it 'allows symbols' do
diff --git a/spec/lib/gitlab/usage/metric_definition_spec.rb b/spec/lib/gitlab/usage/metric_definition_spec.rb
index fb46d48c1bb..caa114cb00f 100644
--- a/spec/lib/gitlab/usage/metric_definition_spec.rb
+++ b/spec/lib/gitlab/usage/metric_definition_spec.rb
@@ -33,6 +33,14 @@ RSpec.describe Gitlab::Usage::MetricDefinition, feature_category: :service_ping
described_class.instance_variable_set(:@definitions, nil)
end
+ def expect_validation_errors
+ expect(described_class.new(path, attributes).validation_errors).not_to be_empty
+ end
+
+ def expect_no_validation_errors
+ expect(described_class.new(path, attributes).validation_errors).to be_empty
+ end
+
def write_metric(metric, path, content)
path = File.join(metric, path)
dir = File.dirname(path)
@@ -40,12 +48,6 @@ RSpec.describe Gitlab::Usage::MetricDefinition, feature_category: :service_ping
File.write(path, content)
end
- it 'has only valid definitions' do
- described_class.all.each do |definition|
- expect { definition.validate! }.not_to raise_error
- end
- end
-
describe '.instrumentation_class' do
context 'for non internal events' do
let(:attributes) { { key_path: 'metric1', instrumentation_class: 'RedisHLLMetric', data_source: 'redis_hll' } }
@@ -197,10 +199,8 @@ RSpec.describe Gitlab::Usage::MetricDefinition, feature_category: :service_ping
attributes[attribute] = value
end
- it 'raise exception' do
- expect(Gitlab::ErrorTracking).to receive(:track_and_raise_for_dev_exception).at_least(:once).with(instance_of(Gitlab::Usage::MetricDefinition::InvalidError))
-
- described_class.new(path, attributes).validate!
+ it 'has validation errors' do
+ expect_validation_errors
end
end
@@ -210,9 +210,7 @@ RSpec.describe Gitlab::Usage::MetricDefinition, feature_category: :service_ping
attributes[:status] = 'broken'
attributes.delete(:repair_issue_url)
- expect(Gitlab::ErrorTracking).to receive(:track_and_raise_for_dev_exception).at_least(:once).with(instance_of(Gitlab::Usage::MetricDefinition::InvalidError))
-
- described_class.new(path, attributes).validate!
+ expect_validation_errors
end
end
@@ -221,20 +219,16 @@ RSpec.describe Gitlab::Usage::MetricDefinition, feature_category: :service_ping
attributes[:status] = 'removed'
end
- it 'raise dev exception when removed_by_url is not provided' do
+ it 'has validation errors when removed_by_url is not provided' do
attributes.delete(:removed_by_url)
- expect(Gitlab::ErrorTracking).to receive(:track_and_raise_for_dev_exception).at_least(:once).with(instance_of(Gitlab::Usage::MetricDefinition::InvalidError))
-
- described_class.new(path, attributes).validate!
+ expect_validation_errors
end
- it 'raises dev exception when milestone_removed is not provided' do
+ it 'has validation errors when milestone_removed is not provided' do
attributes.delete(:milestone_removed)
- expect(Gitlab::ErrorTracking).to receive(:track_and_raise_for_dev_exception).at_least(:once).with(instance_of(Gitlab::Usage::MetricDefinition::InvalidError))
-
- described_class.new(path, attributes).validate!
+ expect_validation_errors
end
end
@@ -251,18 +245,16 @@ RSpec.describe Gitlab::Usage::MetricDefinition, feature_category: :service_ping
end
with_them do
- it 'raises dev exception when invalid' do
+ it 'has validation errors when invalid' do
attributes[:instrumentation_class] = instrumentation_class if instrumentation_class
attributes[:options] = options if options
attributes[:events] = events if events
if is_valid
- expect(Gitlab::ErrorTracking).not_to receive(:track_and_raise_for_dev_exception)
+ expect_no_validation_errors
else
- expect(Gitlab::ErrorTracking).to receive(:track_and_raise_for_dev_exception).at_least(:once).with(instance_of(Gitlab::Usage::MetricDefinition::InvalidError))
+ expect_validation_errors
end
-
- described_class.new(path, attributes).validate!
end
end
end
@@ -294,12 +286,10 @@ RSpec.describe Gitlab::Usage::MetricDefinition, feature_category: :service_ping
attributes[:options] = options
if is_valid
- expect(Gitlab::ErrorTracking).not_to receive(:track_and_raise_for_dev_exception)
+ expect_no_validation_errors
else
- expect(Gitlab::ErrorTracking).to receive(:track_and_raise_for_dev_exception).at_least(:once).with(instance_of(Gitlab::Usage::MetricDefinition::InvalidError))
+ expect_validation_errors
end
-
- described_class.new(path, attributes).validate!
end
end
end
@@ -340,12 +330,10 @@ RSpec.describe Gitlab::Usage::MetricDefinition, feature_category: :service_ping
attributes[:options] = options
if is_valid
- expect(Gitlab::ErrorTracking).not_to receive(:track_and_raise_for_dev_exception)
+ expect_no_validation_errors
else
- expect(Gitlab::ErrorTracking).to receive(:track_and_raise_for_dev_exception).at_least(:once).with(instance_of(Gitlab::Usage::MetricDefinition::InvalidError))
+ expect_validation_errors
end
-
- described_class.new(path, attributes).validate!
end
end
end
diff --git a/spec/lib/gitlab/usage/metric_definition_validate_all_spec.rb b/spec/lib/gitlab/usage/metric_definition_validate_all_spec.rb
new file mode 100644
index 00000000000..d6255b54068
--- /dev/null
+++ b/spec/lib/gitlab/usage/metric_definition_validate_all_spec.rb
@@ -0,0 +1,14 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Usage::MetricDefinition, feature_category: :product_analytics_data_management do
+ # rubocop:disable Rails/FindEach -- The all method invoked here is unrelated to the ActiveRecord scope all
+ it 'only has valid metric definitions', :aggregate_failures do
+ described_class.all.each do |definition|
+ validation_errors = definition.validation_errors
+ expect(validation_errors).to be_empty, validation_errors.join
+ end
+ end
+ # rubocop:enable Rails/FindEach
+end
diff --git a/spec/lib/gitlab/usage/service_ping_report_spec.rb b/spec/lib/gitlab/usage/service_ping_report_spec.rb
index a848c286fa9..09866198639 100644
--- a/spec/lib/gitlab/usage/service_ping_report_spec.rb
+++ b/spec/lib/gitlab/usage/service_ping_report_spec.rb
@@ -168,11 +168,6 @@ RSpec.describe Gitlab::Usage::ServicePingReport, :use_clean_rails_memory_store_c
memoized_constatns += Gitlab::UsageData::EE_MEMOIZED_VALUES if defined? Gitlab::UsageData::EE_MEMOIZED_VALUES
memoized_constatns.each { |v| Gitlab::UsageData.clear_memoization(v) }
stub_database_flavor_check('Cloud SQL for PostgreSQL')
-
- # in_product_marketing_email metrics values are extracted from a single group by query
- # to check if the queries for individual metrics return the same value as group by when the value is non-zero
- create(:in_product_marketing_email, track: :create, series: 0, cta_clicked_at: Time.current)
- create(:in_product_marketing_email, track: :verify, series: 0)
end
let(:service_ping_payload) { described_class.for(output: :all_metrics_values) }
diff --git a/spec/lib/gitlab/usage_data_counters/ci_template_unique_counter_spec.rb b/spec/lib/gitlab/usage_data_counters/ci_template_unique_counter_spec.rb
index 2c9506dd498..05938fa08cd 100644
--- a/spec/lib/gitlab/usage_data_counters/ci_template_unique_counter_spec.rb
+++ b/spec/lib/gitlab/usage_data_counters/ci_template_unique_counter_spec.rb
@@ -50,18 +50,6 @@ RSpec.describe Gitlab::UsageDataCounters::CiTemplateUniqueCounter, feature_categ
end
end
- context 'with implicit includes', :snowplow do
- let(:config_source) { :auto_devops_source }
-
- described_class.all_included_templates('Auto-DevOps.gitlab-ci.yml').each do |template_name|
- context "for #{template_name}" do
- let(:template_path) { Gitlab::Template::GitlabCiYmlTemplate.find(template_name.delete_suffix('.gitlab-ci.yml')).full_name }
-
- include_examples 'tracks template'
- end
- end
- end
-
it 'expands short template names' do
expect do
described_class.track_unique_project_event(project: project, template: 'Dependency-Scanning.gitlab-ci.yml', config_source: :repository_source, user: user)
diff --git a/spec/lib/gitlab/usage_data_counters/hll_redis_counter_spec.rb b/spec/lib/gitlab/usage_data_counters/hll_redis_counter_spec.rb
index a7dc0b6a060..da8098bfee1 100644
--- a/spec/lib/gitlab/usage_data_counters/hll_redis_counter_spec.rb
+++ b/spec/lib/gitlab/usage_data_counters/hll_redis_counter_spec.rb
@@ -168,6 +168,54 @@ RSpec.describe Gitlab::UsageDataCounters::HLLRedisCounter, :clean_gitlab_redis_s
end
end
end
+
+ describe "redis key overrides" do
+ let(:event_name) { "g_analytics_contribution" }
+
+ before do
+ allow(File).to receive(:read).and_call_original
+ allow(File).to receive(:read).with(described_class::KEY_OVERRIDES_PATH).and_return(overrides_file_content)
+ end
+
+ after do
+ described_class.clear_memoization(:key_overrides)
+ end
+
+ context "with an empty file" do
+ let(:overrides_file_content) { "{}" }
+
+ it "tracks the events using original Redis key" do
+ expected_key = "{hll_counters}_#{event_name}-2020-23"
+ expect(Gitlab::Redis::HLL).to receive(:add).with(hash_including(key: expected_key))
+
+ described_class.track_event(event_name, values: entity1)
+ end
+ end
+
+ context "with the file including overrides" do
+ let(:overrides_file_content) { "#{event_name}1: new_key2\n#{event_name}: new_key" }
+
+ context "when the event is included in overrides file" do
+ it "tracks the events using overridden Redis key" do
+ expected_key = "{hll_counters}_new_key-2020-23"
+ expect(Gitlab::Redis::HLL).to receive(:add).with(hash_including(key: expected_key))
+
+ described_class.track_event(:g_analytics_contribution, values: entity1)
+ end
+ end
+
+ context "when the event is not included in overrides file" do
+ let(:not_overridden_name) { "g_compliance_dashboard" }
+
+ it "tracks the events using original Redis key" do
+ expected_key = "{hll_counters}_#{not_overridden_name}-2020-23"
+ expect(Gitlab::Redis::HLL).to receive(:add).with(hash_including(key: expected_key))
+
+ described_class.track_event(not_overridden_name, values: entity1)
+ end
+ end
+ end
+ end
end
describe '.unique_events' do
@@ -236,6 +284,16 @@ RSpec.describe Gitlab::UsageDataCounters::HLLRedisCounter, :clean_gitlab_redis_s
end
end
end
+
+ describe 'key overrides file' do
+ let(:key_overrides) { YAML.safe_load(File.read(described_class::KEY_OVERRIDES_PATH)) }
+
+ it "has a valid structure", :aggregate_failures do
+ expect(key_overrides).to be_a(Hash)
+
+ expect(key_overrides.keys + key_overrides.values).to all(be_a(String))
+ end
+ end
end
describe '.keys_for_aggregation' do
diff --git a/spec/lib/gitlab/usage_data_counters/merge_request_activity_unique_counter_spec.rb b/spec/lib/gitlab/usage_data_counters/merge_request_activity_unique_counter_spec.rb
index c3a718e669a..5c03ccb0d71 100644
--- a/spec/lib/gitlab/usage_data_counters/merge_request_activity_unique_counter_spec.rb
+++ b/spec/lib/gitlab/usage_data_counters/merge_request_activity_unique_counter_spec.rb
@@ -55,14 +55,6 @@ RSpec.describe Gitlab::UsageDataCounters::MergeRequestActivityUniqueCounter, :cl
let(:merge_request) { create(:merge_request) }
let(:target_project) { merge_request.target_project }
- it_behaves_like 'a tracked merge request unique event' do
- let(:action) { described_class::MR_USER_CREATE_ACTION }
- end
-
- it_behaves_like 'a tracked merge request unique event' do
- let(:action) { described_class::MR_CREATE_ACTION }
- end
-
it_behaves_like 'internal event tracking' do
let(:event) { described_class::MR_USER_CREATE_ACTION }
let(:project) { target_project }
diff --git a/spec/lib/gitlab/usage_data_counters/quick_action_activity_unique_counter_spec.rb b/spec/lib/gitlab/usage_data_counters/quick_action_activity_unique_counter_spec.rb
index 1940442d2ad..903ae64cf33 100644
--- a/spec/lib/gitlab/usage_data_counters/quick_action_activity_unique_counter_spec.rb
+++ b/spec/lib/gitlab/usage_data_counters/quick_action_activity_unique_counter_spec.rb
@@ -31,6 +31,14 @@ RSpec.describe Gitlab::UsageDataCounters::QuickActionActivityUniqueCounter, :cle
end
end
+ context 'when tracking react' do
+ let(:quickaction_name) { 'react' }
+
+ it_behaves_like 'a tracked quick action unique event' do
+ let(:action) { 'i_quickactions_award' }
+ end
+ end
+
context 'tracking assigns' do
let(:quickaction_name) { 'assign' }
diff --git a/spec/lib/gitlab/usage_data_counters/work_item_activity_unique_counter_spec.rb b/spec/lib/gitlab/usage_data_counters/work_item_activity_unique_counter_spec.rb
index aaf509b6f81..0ab5dec8ecf 100644
--- a/spec/lib/gitlab/usage_data_counters/work_item_activity_unique_counter_spec.rb
+++ b/spec/lib/gitlab/usage_data_counters/work_item_activity_unique_counter_spec.rb
@@ -44,4 +44,12 @@ RSpec.describe Gitlab::UsageDataCounters::WorkItemActivityUniqueCounter, :clean_
it_behaves_like 'work item unique counter'
end
+
+ describe '.track_work_item_todo_marked_action' do
+ subject(:track_event) { described_class.track_work_item_mark_todo_action(author: user) }
+
+ let(:event_name) { described_class::WORK_ITEM_TODO_MARKED }
+
+ it_behaves_like 'work item unique counter'
+ end
end
diff --git a/spec/lib/integrations/google_cloud_platform/artifact_registry/client_spec.rb b/spec/lib/integrations/google_cloud_platform/artifact_registry/client_spec.rb
index 36fa350e46f..a258518953a 100644
--- a/spec/lib/integrations/google_cloud_platform/artifact_registry/client_spec.rb
+++ b/spec/lib/integrations/google_cloud_platform/artifact_registry/client_spec.rb
@@ -26,6 +26,10 @@ RSpec.describe Integrations::GoogleCloudPlatform::ArtifactRegistry::Client, feat
describe '#list_docker_images' do
let(:page_token) { nil }
+ let(:expected_url) do
+ "#{described_class::GLGO_BASE_URL}/gcp/ar/projects/#{gcp_project_id}/" \
+ "locations/#{gcp_location}/repositories/#{gcp_repository}/docker"
+ end
subject(:list) { client.list_docker_images(page_token: page_token) }
@@ -36,6 +40,13 @@ RSpec.describe Integrations::GoogleCloudPlatform::ArtifactRegistry::Client, feat
it 'calls glgo list docker images API endpoint' do
stub_list_docker_image(body: dummy_list_body)
expect(client).to receive(:encoded_jwt).with(wlif: gcp_wlif)
+ expect(::Gitlab::HTTP).to receive(:get).with(
+ expected_url,
+ headers: an_instance_of(Hash),
+ query: an_instance_of(Hash),
+ format: :plain,
+ extra_allowed_uris: [URI(described_class::GLGO_BASE_URL)]
+ ).and_call_original
expect(list).to include(images: an_instance_of(Array), next_page_token: an_instance_of(String))
end
diff --git a/spec/lib/sidebars/organizations/menus/scope_menu_spec.rb b/spec/lib/sidebars/organizations/menus/scope_menu_spec.rb
index 999889a72ee..974531a2dec 100644
--- a/spec/lib/sidebars/organizations/menus/scope_menu_spec.rb
+++ b/spec/lib/sidebars/organizations/menus/scope_menu_spec.rb
@@ -3,7 +3,8 @@
require 'spec_helper'
RSpec.describe Sidebars::Organizations::Menus::ScopeMenu, feature_category: :navigation do
- let_it_be(:organization) { build(:organization) }
+ let_it_be(:organization_detail) { build(:organization_detail) }
+ let_it_be(:organization) { organization_detail.organization }
let_it_be(:user) { build(:user) }
let_it_be(:context) { Sidebars::Context.new(current_user: user, container: organization) }
@@ -11,7 +12,7 @@ RSpec.describe Sidebars::Organizations::Menus::ScopeMenu, feature_category: :nav
let(:menu) { described_class.new(context) }
let(:extra_attrs) do
{
- avatar: nil,
+ avatar: organization.avatar_url(size: 48),
entity_id: organization.id,
super_sidebar_parent: ::Sidebars::StaticMenu,
item_id: :organization_overview
diff --git a/spec/lib/sidebars/projects/menus/packages_registries_menu_spec.rb b/spec/lib/sidebars/projects/menus/packages_registries_menu_spec.rb
index 0cf95391a26..85c109615c8 100644
--- a/spec/lib/sidebars/projects/menus/packages_registries_menu_spec.rb
+++ b/spec/lib/sidebars/projects/menus/packages_registries_menu_spec.rb
@@ -39,7 +39,7 @@ RSpec.describe Sidebars::Projects::Menus::PackagesRegistriesMenu, feature_catego
before do
stub_container_registry_config(enabled: registry_enabled)
stub_config(packages: { enabled: packages_enabled })
- stub_feature_flags(ml_experiment_tracking: false)
+ stub_feature_flags(ml_experiment_tracking: false, model_registry: false)
end
context 'when Packages Registry is visible' do
@@ -195,5 +195,32 @@ RSpec.describe Sidebars::Projects::Menus::PackagesRegistriesMenu, feature_catego
end
end
end
+
+ describe 'Model registry' do
+ let(:item_id) { :model_registry }
+
+ before do
+ allow(Ability).to receive(:allowed?).and_call_original
+ allow(Ability).to receive(:allowed?)
+ .with(user, :read_model_registry, project)
+ .and_return(model_registry_enabled)
+ end
+
+ context 'when user can read model registry' do
+ let(:model_registry_enabled) { true }
+
+ it 'shows the menu item' do
+ is_expected.not_to be_nil
+ end
+ end
+
+ context 'when user can not read model registry' do
+ let(:model_registry_enabled) { false }
+
+ it 'does not show the menu item' do
+ is_expected.to be_nil
+ end
+ end
+ end
end
end
diff --git a/spec/lib/sidebars/projects/super_sidebar_menus/deploy_menu_spec.rb b/spec/lib/sidebars/projects/super_sidebar_menus/deploy_menu_spec.rb
index 98d62948ac3..f23aaad71f9 100644
--- a/spec/lib/sidebars/projects/super_sidebar_menus/deploy_menu_spec.rb
+++ b/spec/lib/sidebars/projects/super_sidebar_menus/deploy_menu_spec.rb
@@ -18,7 +18,8 @@ RSpec.describe Sidebars::Projects::SuperSidebarMenus::DeployMenu, feature_catego
:releases,
:feature_flags,
:packages_registry,
- :container_registry
+ :container_registry,
+ :model_registry
])
end
end
diff --git a/spec/mailers/notify_spec.rb b/spec/mailers/notify_spec.rb
index 21878bc9b6d..34311a8ae22 100644
--- a/spec/mailers/notify_spec.rb
+++ b/spec/mailers/notify_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
require 'email_spec'
-RSpec.describe Notify do
+RSpec.describe Notify, feature_category: :code_review_workflow do
include EmailSpec::Helpers
include EmailSpec::Matchers
include EmailHelpers
@@ -20,7 +20,10 @@ RSpec.describe Notify do
let_it_be(:assignee, reload: true) { create(:user, email: 'assignee@example.com', name: 'John Doe') }
let_it_be(:reviewer, reload: true) { create(:user, email: 'reviewer@example.com', name: 'Jane Doe') }
- let_it_be(:merge_request) do
+ let(:previous_assignee1) { create(:user, name: 'Previous Assignee 1') }
+ let(:previous_assignee_ids) { [previous_assignee1.id] }
+
+ let_it_be(:merge_request, reload: true) do
create(
:merge_request,
source_project: project,
@@ -84,11 +87,43 @@ RSpec.describe Notify do
end
end
+ shared_examples 'an assignee email with previous assignees' do
+ context 'when all assignees are removed' do
+ before do
+ resource.update!(assignees: [])
+ end
+
+ it_behaves_like 'email with default notification reason'
+
+ it 'uses fixed copy "All assignees were removed"' do
+ is_expected.to have_body_text("<p> All assignees were removed. </p>")
+ is_expected.to have_plain_text_content("All assignees were removed.")
+ end
+ end
+
+ context 'with multiple previous assignees' do
+ let(:previous_assignee2) { create(:user, name: 'Previous Assignee 2') }
+ let(:previous_assignee_ids) { [previous_assignee1.id, previous_assignee2.id] }
+
+ it_behaves_like 'email with default notification reason'
+
+ it 'has the correct subject and body' do
+ aggregate_failures do
+ is_expected.to have_referable_subject(resource, reply: true)
+ is_expected.to have_body_text("<p> <strong>#{assignee.name}</strong> was added as an assignee. </p> <p> <strong>#{previous_assignee1.name} and #{previous_assignee2.name}</strong> were removed as assignees. </p>")
+ is_expected.to have_plain_text_content("#{assignee.name} was added as an assignee.")
+ is_expected.to have_plain_text_content("#{previous_assignee1.name} and #{previous_assignee2.name} were removed as assignees.")
+ end
+ end
+ end
+ end
+
context 'for issues', feature_category: :team_planning do
describe 'that are new' do
subject { described_class.new_issue_email(issue.assignees.first.id, issue.id) }
it_behaves_like 'an assignee email'
+
it_behaves_like 'an email starting a new thread with reply-by-email enabled' do
let(:model) { issue }
end
@@ -142,9 +177,7 @@ RSpec.describe Notify do
end
describe 'that are reassigned' do
- let(:previous_assignee) { create(:user, name: 'Previous Assignee') }
-
- subject { described_class.reassigned_issue_email(recipient.id, issue.id, [previous_assignee.id], current_user.id) }
+ subject { described_class.reassigned_issue_email(recipient.id, issue.id, previous_assignee_ids, current_user.id) }
it_behaves_like 'a multiple recipients email'
it_behaves_like 'an answer to an existing thread with reply-by-email enabled' do
@@ -165,23 +198,14 @@ RSpec.describe Notify do
it 'has the correct subject and body' do
aggregate_failures do
is_expected.to have_referable_subject(issue, reply: true)
- is_expected.to have_body_text("Assignee changed from <strong>#{previous_assignee.name}</strong> to <strong>#{assignee.name}</strong>")
- is_expected.to have_plain_text_content("Assignee changed from #{previous_assignee.name} to #{assignee.name}")
+ is_expected.to have_body_text("<p> <strong>#{assignee.name}</strong> was added as an assignee. </p> <p> <strong>#{previous_assignee1.name}</strong> was removed as an assignee. </p>")
+ is_expected.to have_plain_text_content("#{assignee.name} was added as an assignee.")
+ is_expected.to have_plain_text_content("#{previous_assignee1.name} was removed as an assignee.")
end
end
- context 'without new assignee' do
- before do
- issue.update!(assignees: [])
- end
-
- it_behaves_like 'email with default notification reason'
- it_behaves_like 'email with link to issue'
-
- it 'uses "Unassigned" placeholder' do
- is_expected.to have_body_text("Assignee changed from <strong>#{previous_assignee.name}</strong> to <strong>Unassigned</strong>")
- is_expected.to have_plain_text_content("Assignee changed from #{previous_assignee.name} to Unassigned")
- end
+ it_behaves_like 'an assignee email with previous assignees' do
+ let(:resource) { issue }
end
context 'without previous assignees' do
@@ -190,14 +214,14 @@ RSpec.describe Notify do
it_behaves_like 'email with default notification reason'
it_behaves_like 'email with link to issue'
- it 'uses short text' do
- is_expected.to have_body_text("Assignee changed to <strong>#{assignee.name}</strong>")
- is_expected.to have_plain_text_content("Assignee changed to #{assignee.name}")
+ it 'does not mention any previous assignees' do
+ is_expected.to have_body_text("<p> <strong>#{assignee.name}</strong> was added as an assignee. </p>")
+ is_expected.to have_plain_text_content("#{assignee.name} was added as an assignee.")
end
end
context 'when sent with a reason' do
- subject { described_class.reassigned_issue_email(recipient.id, issue.id, [previous_assignee.id], current_user.id, NotificationReason::ASSIGNED) }
+ subject { described_class.reassigned_issue_email(recipient.id, issue.id, [previous_assignee1.id], current_user.id, NotificationReason::ASSIGNED) }
it_behaves_like 'appearance header and footer enabled'
it_behaves_like 'appearance header and footer not enabled'
@@ -211,10 +235,10 @@ RSpec.describe Notify do
let(:email_obj) { create(:email, :confirmed, user_id: recipient.id, email: '123@abc') }
let(:recipient) { create(:user, preferred_language: :zh_CN) }
- it 'is translated into zh_CN' do
+ it 'is sent with html lang attribute set to the user\'s preferred language' do
recipient.notification_email = email_obj.email
recipient.save!
- is_expected.to have_body_text '指派人从 <strong>Previous Assignee</strong> 更改为 <strong>John Doe</strong>'
+ is_expected.to have_body_text '<html lang="zh-CN">'
end
end
end
@@ -450,6 +474,7 @@ RSpec.describe Notify do
subject { described_class.new_merge_request_email(merge_request.assignee_ids.first, merge_request.id) }
it_behaves_like 'an assignee email'
+
it_behaves_like 'an email starting a new thread with reply-by-email enabled' do
let(:model) { merge_request }
end
@@ -495,9 +520,7 @@ RSpec.describe Notify do
end
describe 'that are reassigned' do
- let(:previous_assignee) { create(:user, name: 'Previous Assignee') }
-
- subject { described_class.reassigned_merge_request_email(recipient.id, merge_request.id, [previous_assignee.id], current_user.id) }
+ subject { described_class.reassigned_merge_request_email(recipient.id, merge_request.id, previous_assignee_ids, current_user.id) }
it_behaves_like 'a multiple recipients email'
it_behaves_like 'an answer to an existing thread with reply-by-email enabled' do
@@ -509,6 +532,10 @@ RSpec.describe Notify do
it_behaves_like 'appearance header and footer enabled'
it_behaves_like 'appearance header and footer not enabled'
+ it_behaves_like 'an assignee email with previous assignees' do
+ let(:resource) { merge_request }
+ end
+
it 'is sent as the author' do
expect_sender(current_user)
end
@@ -516,14 +543,14 @@ RSpec.describe Notify do
it 'has the correct subject and body' do
aggregate_failures do
is_expected.to have_referable_subject(merge_request, reply: true)
- is_expected.to have_body_text(previous_assignee.name)
+ is_expected.to have_body_text(previous_assignee1.name)
is_expected.to have_body_text(project_merge_request_path(project, merge_request))
is_expected.to have_body_text(assignee.name)
end
end
context 'when sent with a reason', type: :helper do
- subject { described_class.reassigned_merge_request_email(recipient.id, merge_request.id, [previous_assignee.id], current_user.id, NotificationReason::ASSIGNED) }
+ subject { described_class.reassigned_merge_request_email(recipient.id, merge_request.id, [previous_assignee1.id], current_user.id, NotificationReason::ASSIGNED) }
it_behaves_like 'appearance header and footer enabled'
it_behaves_like 'appearance header and footer not enabled'
@@ -536,11 +563,11 @@ RSpec.describe Notify do
text = EmailsHelper.instance_method(:notification_reason_text).bind_call(self, reason: NotificationReason::ASSIGNED, format: :html)
is_expected.to have_body_text(text)
- new_subject = described_class.reassigned_merge_request_email(recipient.id, merge_request.id, [previous_assignee.id], current_user.id, NotificationReason::MENTIONED)
+ new_subject = described_class.reassigned_merge_request_email(recipient.id, merge_request.id, [previous_assignee1.id], current_user.id, NotificationReason::MENTIONED)
text = EmailsHelper.instance_method(:notification_reason_text).bind_call(self, reason: NotificationReason::MENTIONED, format: :html)
expect(new_subject).to have_body_text(text)
- new_subject = described_class.reassigned_merge_request_email(recipient.id, merge_request.id, [previous_assignee.id], current_user.id, nil)
+ new_subject = described_class.reassigned_merge_request_email(recipient.id, merge_request.id, [previous_assignee1.id], current_user.id, nil)
text = EmailsHelper.instance_method(:notification_reason_text).bind_call(self, format: :html)
expect(new_subject).to have_body_text(text)
end
@@ -2434,23 +2461,27 @@ RSpec.describe Notify do
end
it 'avoids N+1 cached queries when rendering html', :use_sql_query_cache, :request_store do
- control_count = ActiveRecord::QueryRecorder.new(query_recorder_debug: true, skip_cached: false) do
+ control = ActiveRecord::QueryRecorder.new(query_recorder_debug: true, skip_cached: false) do
subject.html_part
end
create_list(:diff_note_on_merge_request, 3, review: review, project: project, author: review.author, noteable: merge_request)
- expect { described_class.new_review_email(recipient.id, review.id).html_part }.not_to exceed_all_query_limit(control_count)
+ expect do
+ described_class.new_review_email(recipient.id, review.id).html_part
+ end.not_to exceed_all_query_limit(control)
end
it 'avoids N+1 cached queries when rendering text', :use_sql_query_cache, :request_store do
- control_count = ActiveRecord::QueryRecorder.new(query_recorder_debug: true, skip_cached: false) do
+ control = ActiveRecord::QueryRecorder.new(query_recorder_debug: true, skip_cached: false) do
subject.text_part
end
create_list(:diff_note_on_merge_request, 3, review: review, project: project, author: review.author, noteable: merge_request)
- expect { described_class.new_review_email(recipient.id, review.id).text_part }.not_to exceed_all_query_limit(control_count)
+ expect do
+ described_class.new_review_email(recipient.id, review.id).text_part
+ end.not_to exceed_all_query_limit(control)
end
end
diff --git a/spec/migrations/20230228142350_add_notifications_work_item_widget_spec.rb b/spec/migrations/20230228142350_add_notifications_work_item_widget_spec.rb
index 7161ca35edd..a459ef96737 100644
--- a/spec/migrations/20230228142350_add_notifications_work_item_widget_spec.rb
+++ b/spec/migrations/20230228142350_add_notifications_work_item_widget_spec.rb
@@ -4,5 +4,7 @@ require 'spec_helper'
require_migration!
RSpec.describe AddNotificationsWorkItemWidget, :migration, feature_category: :team_planning do
- it_behaves_like 'migration that adds widget to work items definitions', widget_name: 'Notifications'
+ it_behaves_like 'migration that adds widget to work items definitions',
+ widget_name: described_class::WIDGET_NAME,
+ work_item_types: described_class::WORK_ITEM_TYPES
end
diff --git a/spec/migrations/20230317162059_add_current_user_todos_work_item_widget_spec.rb b/spec/migrations/20230317162059_add_current_user_todos_work_item_widget_spec.rb
index 1df80a519f2..c202638aa27 100644
--- a/spec/migrations/20230317162059_add_current_user_todos_work_item_widget_spec.rb
+++ b/spec/migrations/20230317162059_add_current_user_todos_work_item_widget_spec.rb
@@ -4,5 +4,7 @@ require 'spec_helper'
require_migration!
RSpec.describe AddCurrentUserTodosWorkItemWidget, :migration, feature_category: :team_planning do
- it_behaves_like 'migration that adds widget to work items definitions', widget_name: 'Current user todos'
+ it_behaves_like 'migration that adds widget to work items definitions',
+ widget_name: described_class::WIDGET_NAME,
+ work_item_types: described_class::WORK_ITEM_TYPES
end
diff --git a/spec/migrations/20230323101138_add_award_emoji_work_item_widget_spec.rb b/spec/migrations/20230323101138_add_award_emoji_work_item_widget_spec.rb
index 16a205c5da5..7987f0257ee 100644
--- a/spec/migrations/20230323101138_add_award_emoji_work_item_widget_spec.rb
+++ b/spec/migrations/20230323101138_add_award_emoji_work_item_widget_spec.rb
@@ -4,5 +4,7 @@ require 'spec_helper'
require_migration!
RSpec.describe AddAwardEmojiWorkItemWidget, :migration, feature_category: :team_planning do
- it_behaves_like 'migration that adds widget to work items definitions', widget_name: 'Award emoji'
+ it_behaves_like 'migration that adds widget to work items definitions',
+ widget_name: described_class::WIDGET_NAME,
+ work_item_types: described_class::WORK_ITEM_TYPES
end
diff --git a/spec/migrations/20230807083334_add_linked_items_work_item_widget_spec.rb b/spec/migrations/20230807083334_add_linked_items_work_item_widget_spec.rb
index cd6da15403f..eae4910eef3 100644
--- a/spec/migrations/20230807083334_add_linked_items_work_item_widget_spec.rb
+++ b/spec/migrations/20230807083334_add_linked_items_work_item_widget_spec.rb
@@ -4,7 +4,7 @@ require 'spec_helper'
require_migration!
RSpec.describe AddLinkedItemsWorkItemWidget, :migration, feature_category: :portfolio_management do
- it_behaves_like 'migration that adds widget to work items definitions', widget_name: 'Linked items' do
- let(:work_item_type_count) { 8 }
- end
+ it_behaves_like 'migration that adds widget to work items definitions',
+ widget_name: described_class::WIDGET_NAME,
+ work_item_types: described_class::WORK_ITEM_TYPES
end
diff --git a/spec/migrations/20231207145335_cleanup_group_level_work_items_spec.rb b/spec/migrations/20231207145335_cleanup_group_level_work_items_spec.rb
new file mode 100644
index 00000000000..a6b547b091a
--- /dev/null
+++ b/spec/migrations/20231207145335_cleanup_group_level_work_items_spec.rb
@@ -0,0 +1,216 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+require_migration!
+
+RSpec.describe CleanupGroupLevelWorkItems, feature_category: :team_planning do
+ include MigrationHelpers::WorkItemTypesHelper
+
+ let(:users) { table(:users) }
+ let(:projects) { table(:projects) }
+ let(:namespaces) { table(:namespaces) }
+ let(:issues) { table(:issues) }
+ let(:notes) { table(:notes) }
+ let(:labels) { table(:labels) }
+ let(:label_links) { table(:label_links) }
+ let(:todos) { table(:todos) }
+ let(:work_item_types) { table(:work_item_types) }
+
+ let!(:user) { users.create!(name: 'Test User', email: 'test@example.com', projects_limit: 5) }
+
+ let!(:group1) { namespaces.create!(name: 'group1', path: 'group1', type: 'Group') }
+ let!(:group2) { namespaces.create!(name: 'group2', path: 'group2', type: 'Group') }
+ let!(:project_namespace) { namespaces.create!(name: 'project1', path: 'project1', type: 'Project') }
+ let!(:project) do
+ projects.create!(
+ name: 'project1', path: 'project1', namespace_id: group1.id, project_namespace_id: project_namespace.id
+ )
+ end
+
+ let!(:issue_type) do
+ ensure_work_item_type_exists
+ work_item_types.first
+ end
+
+ let!(:group1_issue1) { issues.create!(title: 'Issue1-1', namespace_id: group1.id, work_item_type_id: issue_type.id) }
+ let!(:group1_issue2) { issues.create!(title: 'Issue1-2', namespace_id: group1.id, work_item_type_id: issue_type.id) }
+ let!(:group2_issue1) { issues.create!(title: 'Issue2-1', namespace_id: group2.id, work_item_type_id: issue_type.id) }
+ let!(:group2_issue2) { issues.create!(title: 'Issue2-2', namespace_id: group2.id, work_item_type_id: issue_type.id) }
+ let!(:project_issue) do
+ issues.create!(
+ title: 'Issue2', project_id: project.id, namespace_id: project_namespace.id, work_item_type_id: issue_type.id
+ )
+ end
+
+ # associated labels
+ let!(:label1) { labels.create!(title: 'label1', group_id: group1.id) }
+ let!(:label2) { labels.create!(title: 'label2', group_id: group2.id) }
+
+ after(:all) do
+ # Make sure base types are recreated after running the migration
+ # because migration specs are not run in a transaction
+ reset_work_item_types
+ end
+
+ describe '#up' do
+ before do
+ # stub batch to make sure we are also testing the batching deletion
+ stub_const("#{described_class}::BATCH_SIZE", 2)
+
+ # Project label_link that must not be deleted
+ label_links.create!(label_id: label1.id, target_id: project_issue.id, target_type: 'Issue')
+
+ label_links.create!(label_id: label1.id, target_id: group1_issue1.id, target_type: 'Issue')
+ label_links.create!(label_id: label2.id, target_id: group1_issue1.id, target_type: 'Issue')
+ label_links.create!(label_id: label1.id, target_id: group1_issue2.id, target_type: 'Issue')
+ label_links.create!(label_id: label2.id, target_id: group1_issue2.id, target_type: 'Issue')
+ label_links.create!(label_id: label1.id, target_id: group2_issue1.id, target_type: 'Issue')
+ label_links.create!(label_id: label2.id, target_id: group2_issue1.id, target_type: 'Issue')
+ label_links.create!(label_id: label1.id, target_id: group2_issue2.id, target_type: 'Issue')
+ label_links.create!(label_id: label2.id, target_id: group2_issue2.id, target_type: 'Issue')
+
+ # associated notes
+
+ # Project issue note that must not be deleted
+ notes.create!(
+ noteable_id: project_issue.id,
+ noteable_type: 'Issue',
+ project_id: project.id,
+ namespace_id: project_namespace.id,
+ note: "project issue 1 note 1"
+ )
+
+ notes.create!(
+ noteable_id: group1_issue1.id, noteable_type: 'Issue', namespace_id: group1.id, note: "group1 issue 1 note 1"
+ )
+ notes.create!(
+ noteable_id: group1_issue1.id, noteable_type: 'Issue', namespace_id: group1.id, note: "group1 issue 1 note 2"
+ )
+ notes.create!(
+ noteable_id: group1_issue2.id, noteable_type: 'Issue', namespace_id: group1.id, note: "group1 issue 2 note 1"
+ )
+ notes.create!(
+ noteable_id: group1_issue2.id, noteable_type: 'Issue', namespace_id: group1.id, note: "group1 issue 2 note 2"
+ )
+ notes.create!(
+ noteable_id: group2_issue1.id, noteable_type: 'Issue', namespace_id: group2.id, note: "group2 issue 1 note 1"
+ )
+ notes.create!(
+ noteable_id: group2_issue1.id, noteable_type: 'Issue', namespace_id: group2.id, note: "group2 issue 1 note 2"
+ )
+ notes.create!(
+ noteable_id: group2_issue2.id, noteable_type: 'Issue', namespace_id: group2.id, note: "group2 issue 2 note 1"
+ )
+ notes.create!(
+ noteable_id: group2_issue2.id, noteable_type: 'Issue', namespace_id: group2.id, note: "group2 issue 2 note 2"
+ )
+
+ # associated todos
+
+ # Project issue todo that must not be deleted
+ todos.create!(
+ target_id: project_issue.id,
+ target_type: 'Issue',
+ project_id: project.id,
+ user_id: user.id,
+ author_id: user.id,
+ action: 1,
+ state: 'pending'
+ )
+
+ todos.create!(
+ target_id: group1_issue1.id,
+ target_type: 'Issue',
+ group_id: group1.id,
+ user_id: user.id,
+ author_id: user.id,
+ action: 1,
+ state: 'pending'
+ )
+ todos.create!(
+ target_id: group1_issue1.id,
+ target_type: 'Issue',
+ group_id: group1.id,
+ user_id: user.id,
+ author_id: user.id,
+ action: 1,
+ state: 'pending'
+ )
+ todos.create!(
+ target_id: group1_issue2.id,
+ target_type: 'Issue',
+ group_id: group1.id,
+ user_id: user.id,
+ author_id: user.id,
+ action: 1,
+ state: 'pending'
+ )
+ todos.create!(
+ target_id: group1_issue2.id,
+ target_type: 'Issue',
+ group_id: group1.id,
+ user_id: user.id,
+ author_id: user.id,
+ action: 1,
+ state: 'pending'
+ )
+ todos.create!(
+ target_id: group2_issue1.id,
+ target_type: 'Issue',
+ group_id: group2.id,
+ user_id: user.id,
+ author_id: user.id,
+ action: 1,
+ state: 'pending'
+ )
+ todos.create!(
+ target_id: group2_issue1.id,
+ target_type: 'Issue',
+ group_id: group2.id,
+ user_id: user.id,
+ author_id: user.id,
+ action: 1,
+ state: 'pending'
+ )
+ todos.create!(
+ target_id: group2_issue2.id,
+ target_type: 'Issue',
+ group_id: group2.id,
+ user_id: user.id,
+ author_id: user.id,
+ action: 1,
+ state: 'pending'
+ )
+ todos.create!(
+ target_id: group2_issue2.id,
+ target_type: 'Issue',
+ group_id: group2.id,
+ user_id: user.id,
+ author_id: user.id,
+ action: 1,
+ state: 'pending'
+ )
+ end
+
+ it 'removes group level issues' do
+ # We have 1 record of each table that should not be deleted
+ expect do
+ migrate!
+ end.to change { issues.count }.from(5).to(1).and(
+ change { label_links.count }.from(9).to(1)
+ ).and(
+ change { notes.count }.from(9).to(1)
+ ).and(
+ change { todos.count }.from(9).to(1)
+ )
+ end
+ end
+
+ def ensure_work_item_type_exists
+ # We need to make sure at least one work item type exists for this spec and they might have been deleted
+ # by other migrations
+ work_item_types.find_or_create_by!(
+ name: 'Issue', namespace_id: nil, base_type: 0, icon_name: 'issue-type-issue'
+ )
+ end
+end
diff --git a/spec/migrations/20231211154654_add_work_items_rolledup_dates_widget_spec.rb b/spec/migrations/20231211154654_add_work_items_rolledup_dates_widget_spec.rb
new file mode 100644
index 00000000000..b7bae52eed5
--- /dev/null
+++ b/spec/migrations/20231211154654_add_work_items_rolledup_dates_widget_spec.rb
@@ -0,0 +1,10 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+require_migration!
+
+RSpec.describe AddWorkItemsRolledupDatesWidget, :migration, feature_category: :team_planning do
+ it_behaves_like 'migration that adds widget to work items definitions',
+ widget_name: described_class::WIDGET_NAME,
+ work_item_types: described_class::WORK_ITEM_TYPES
+end
diff --git a/spec/migrations/20231212135235_queue_backfill_vs_code_settings_version_spec.rb b/spec/migrations/20231212135235_queue_backfill_vs_code_settings_version_spec.rb
new file mode 100644
index 00000000000..e3e08720950
--- /dev/null
+++ b/spec/migrations/20231212135235_queue_backfill_vs_code_settings_version_spec.rb
@@ -0,0 +1,26 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+require_migration!
+
+RSpec.describe QueueBackfillVsCodeSettingsVersion, feature_category: :web_ide do
+ let!(:batched_migration) { described_class::MIGRATION }
+
+ it 'schedules a new batched migration' do
+ reversible_migration do |migration|
+ migration.before -> {
+ expect(batched_migration).not_to have_scheduled_batched_migration
+ }
+
+ migration.after -> {
+ expect(batched_migration).to have_scheduled_batched_migration(
+ table_name: :vs_code_settings,
+ column_name: :id,
+ interval: described_class::DELAY_INTERVAL,
+ batch_size: described_class::BATCH_SIZE,
+ sub_batch_size: described_class::SUB_BATCH_SIZE
+ )
+ }
+ end
+ end
+end
diff --git a/spec/migrations/20231214111617_queue_backfill_owasp_top_ten_of_vulnerability_reads_spec.rb b/spec/migrations/20231214111617_queue_backfill_owasp_top_ten_of_vulnerability_reads_spec.rb
new file mode 100644
index 00000000000..9ee11715da4
--- /dev/null
+++ b/spec/migrations/20231214111617_queue_backfill_owasp_top_ten_of_vulnerability_reads_spec.rb
@@ -0,0 +1,26 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+require_migration!
+
+RSpec.describe QueueBackfillOwaspTopTenOfVulnerabilityReads, feature_category: :vulnerability_management do
+ let!(:batched_migration) { described_class::MIGRATION }
+
+ it 'schedules a new batched migration' do
+ reversible_migration do |migration|
+ migration.before -> {
+ expect(batched_migration).not_to have_scheduled_batched_migration
+ }
+
+ migration.after -> {
+ expect(batched_migration).to have_scheduled_batched_migration(
+ table_name: :vulnerability_reads,
+ column_name: :vulnerability_id,
+ interval: described_class::DELAY_INTERVAL,
+ batch_size: described_class::BATCH_SIZE,
+ sub_batch_size: described_class::SUB_BATCH_SIZE
+ )
+ }
+ end
+ end
+end
diff --git a/spec/migrations/20231218092401_queue_backfill_partition_id_ci_pipeline_chat_data_spec.rb b/spec/migrations/20231218092401_queue_backfill_partition_id_ci_pipeline_chat_data_spec.rb
new file mode 100644
index 00000000000..bd99dd9998a
--- /dev/null
+++ b/spec/migrations/20231218092401_queue_backfill_partition_id_ci_pipeline_chat_data_spec.rb
@@ -0,0 +1,56 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+require_migration!
+
+RSpec.describe QueueBackfillPartitionIdCiPipelineChatData, migration: :gitlab_ci, feature_category: :continuous_integration do
+ let!(:batched_migrations) { table(:batched_background_migrations) }
+ let!(:migration) { described_class::MIGRATION }
+
+ describe '#up' do
+ context 'with migration present' do
+ let!(:ci_backfill_partition_id_ci_pipeline_chat_data_migration) do
+ batched_migrations.create!(
+ job_class_name: 'BackfillPartitionIdCiPipelineChatData',
+ table_name: :ci_pipeline_chat_data,
+ column_name: :id,
+ job_arguments: [],
+ interval: 2.minutes,
+ min_value: 1,
+ max_value: 2,
+ batch_size: 1000,
+ sub_batch_size: 100,
+ gitlab_schema: :gitlab_ci,
+ status: 3 # finished
+ )
+ end
+
+ context 'when migration finished successfully' do
+ it 'does not raise exception' do
+ expect { migrate! }.not_to raise_error
+ end
+
+ it 'schedules background jobs for each batch of ci_pipeline_chat_data' do
+ migrate!
+
+ expect(migration).to have_scheduled_batched_migration(
+ gitlab_schema: :gitlab_ci,
+ table_name: :ci_pipeline_chat_data,
+ column_name: :id,
+ batch_size: described_class::BATCH_SIZE,
+ sub_batch_size: described_class::SUB_BATCH_SIZE
+ )
+ end
+ end
+ end
+ end
+
+ describe '#down' do
+ it 'deletes all batched migration records' do
+ migrate!
+ schema_migrate_down!
+
+ expect(migration).not_to have_scheduled_batched_migration
+ end
+ end
+end
diff --git a/spec/migrations/20231221223259_queue_drop_vulnerabilities_without_finding_id_spec.rb b/spec/migrations/20231221223259_queue_drop_vulnerabilities_without_finding_id_spec.rb
new file mode 100644
index 00000000000..473b9b065bc
--- /dev/null
+++ b/spec/migrations/20231221223259_queue_drop_vulnerabilities_without_finding_id_spec.rb
@@ -0,0 +1,26 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+require_migration!
+
+RSpec.describe QueueDropVulnerabilitiesWithoutFindingId, feature_category: :vulnerability_management do
+ let!(:batched_migration) { described_class::MIGRATION }
+
+ it 'schedules a new batched migration' do
+ reversible_migration do |migration|
+ migration.before -> {
+ expect(batched_migration).not_to have_scheduled_batched_migration
+ }
+
+ migration.after -> {
+ expect(batched_migration).to have_scheduled_batched_migration(
+ table_name: :vulnerabilities,
+ column_name: :id,
+ interval: described_class::DELAY_INTERVAL,
+ batch_size: described_class::BATCH_SIZE,
+ sub_batch_size: described_class::SUB_BATCH_SIZE
+ )
+ }
+ end
+ end
+end
diff --git a/spec/migrations/20240105144908_queue_backfill_issue_search_data_namespace_id_spec.rb b/spec/migrations/20240105144908_queue_backfill_issue_search_data_namespace_id_spec.rb
new file mode 100644
index 00000000000..461a6897051
--- /dev/null
+++ b/spec/migrations/20240105144908_queue_backfill_issue_search_data_namespace_id_spec.rb
@@ -0,0 +1,26 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+require_migration!
+
+RSpec.describe QueueBackfillIssueSearchDataNamespaceId, feature_category: :team_planning do
+ let!(:batched_migration) { described_class::MIGRATION }
+
+ it 'schedules a new batched migration' do
+ reversible_migration do |migration|
+ migration.before -> {
+ expect(batched_migration).not_to have_scheduled_batched_migration
+ }
+
+ migration.after -> {
+ expect(batched_migration).to have_scheduled_batched_migration(
+ table_name: :issues,
+ column_name: :id,
+ interval: described_class::DELAY_INTERVAL,
+ batch_size: described_class::BATCH_SIZE,
+ sub_batch_size: described_class::SUB_BATCH_SIZE
+ )
+ }
+ end
+ end
+end
diff --git a/spec/migrations/20240108082419_queue_backfill_partition_id_ci_pipeline_metadata_spec.rb b/spec/migrations/20240108082419_queue_backfill_partition_id_ci_pipeline_metadata_spec.rb
new file mode 100644
index 00000000000..625225b1f3e
--- /dev/null
+++ b/spec/migrations/20240108082419_queue_backfill_partition_id_ci_pipeline_metadata_spec.rb
@@ -0,0 +1,56 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+require_migration!
+
+RSpec.describe QueueBackfillPartitionIdCiPipelineMetadata, migration: :gitlab_ci, feature_category: :continuous_integration do
+ let!(:batched_migrations) { table(:batched_background_migrations) }
+ let!(:migration) { described_class::MIGRATION }
+
+ describe '#up' do
+ context 'with migration present' do
+ let!(:ci_backfill_partition_id_ci_pipeline_metadata_migration) do
+ batched_migrations.create!(
+ job_class_name: 'QueueBackfillPartitionIdCiPipelineMetadata',
+ table_name: :ci_pipeline_metadata,
+ column_name: :pipeline_id,
+ job_arguments: [],
+ interval: 2.minutes,
+ min_value: 1,
+ max_value: 2,
+ batch_size: 1000,
+ sub_batch_size: 100,
+ gitlab_schema: :gitlab_ci,
+ status: 3 # finished
+ )
+ end
+
+ context 'when migration finished successfully' do
+ it 'does not raise exception' do
+ expect { migrate! }.not_to raise_error
+ end
+
+ it 'schedules background jobs for each batch of ci_pipeline_metadata' do
+ migrate!
+
+ expect(migration).to have_scheduled_batched_migration(
+ gitlab_schema: :gitlab_ci,
+ table_name: :ci_pipeline_metadata,
+ column_name: :pipeline_id,
+ batch_size: described_class::BATCH_SIZE,
+ sub_batch_size: described_class::SUB_BATCH_SIZE
+ )
+ end
+ end
+ end
+ end
+
+ describe '#down' do
+ it 'deletes all batched migration records' do
+ migrate!
+ schema_migrate_down!
+
+ expect(migration).not_to have_scheduled_batched_migration
+ end
+ end
+end
diff --git a/spec/migrations/20240108121335_copy_internal_ids_for_epics_and_issues_usage_on_groups_spec.rb b/spec/migrations/20240108121335_copy_internal_ids_for_epics_and_issues_usage_on_groups_spec.rb
new file mode 100644
index 00000000000..c54f1f06d43
--- /dev/null
+++ b/spec/migrations/20240108121335_copy_internal_ids_for_epics_and_issues_usage_on_groups_spec.rb
@@ -0,0 +1,69 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+require_migration!
+
+RSpec.describe CopyInternalIdsForEpicsAndIssuesUsageOnGroups, feature_category: :team_planning do
+ let(:internal_ids) { table(:internal_ids) }
+ let(:namespaces) { table(:namespaces) }
+
+ let!(:group1) { namespaces.create!(name: 'group1', path: 'group1', type: 'Group') }
+ let!(:group2) { namespaces.create!(name: 'group2', path: 'group2', type: 'Group') }
+ let!(:group3) { namespaces.create!(name: 'group3', path: 'group3', type: 'Group') }
+ let!(:project_namespace) { namespaces.create!(name: 'project1', path: 'project1', type: 'Project') }
+
+ let!(:project_issue_iid) { internal_ids.create!(namespace_id: project_namespace.id, usage: 0, last_value: 100) }
+ let!(:group1_epic_iid) { internal_ids.create!(namespace_id: group1.id, usage: 4, last_value: 101) }
+ let!(:group2_issue_iid) { internal_ids.create!(namespace_id: group2.id, usage: 0, last_value: 102) }
+ let!(:group2_epic_iid) { internal_ids.create!(namespace_id: group2.id, usage: 4, last_value: 103) }
+
+ describe '#on_update' do
+ it 'updates corresponding usage record between epics and issues' do
+ # create the triggers
+ migrate!
+
+ # initially there is no record for issues usage for group1
+ expect(internal_ids.where(usage: 0, namespace_id: group1.id).count).to eq(0)
+ # when epics usage record is updated the issues usage record is created and last_value is copied
+ group1_epic_iid.update!(last_value: 1000)
+ expect(internal_ids.where(usage: 0, namespace_id: group1.id).first.last_value).to eq(1000)
+
+ # when there is an issues usage record:
+ expect(internal_ids.where(usage: 0, namespace_id: group2.id).first.last_value).to eq(102)
+ # updates the issues usage record when epics usage record is updated
+ group2_epic_iid.update!(last_value: 1000)
+ expect(internal_ids.where(usage: 0, namespace_id: group2.id).first.last_value).to eq(1000)
+
+ expect(internal_ids.where(usage: 4, namespace_id: group2.id).first.last_value).to eq(1000)
+ group2_issue_iid.update!(last_value: 2000)
+ expect(internal_ids.where(usage: 4, namespace_id: group2.id).first.last_value).to eq(2000)
+ end
+ end
+
+ describe '#on_insert' do
+ it 'inserts corresponding usage record between epics and issues' do
+ migrate!
+
+ expect(internal_ids.where(usage: 0, namespace_id: group3.id).count).to eq(0)
+ expect(internal_ids.where(usage: 4, namespace_id: group3.id).count).to eq(0)
+
+ # create record for epics usage
+ internal_ids.create!(namespace_id: group3.id, usage: 4, last_value: 1000)
+
+ expect(internal_ids.where(usage: 0, namespace_id: group3.id).first.last_value).to eq(1000)
+ expect(internal_ids.where(usage: 4, namespace_id: group3.id).first.last_value).to eq(1000)
+
+ # cleanup records for group3
+ internal_ids.where(namespace_id: group3.id).delete_all
+
+ expect(internal_ids.where(usage: 0, namespace_id: group3.id).count).to eq(0)
+ expect(internal_ids.where(usage: 4, namespace_id: group3.id).count).to eq(0)
+
+ # create record for issues usage
+ internal_ids.create!(namespace_id: group3.id, usage: 0, last_value: 1000)
+
+ expect(internal_ids.where(usage: 0, namespace_id: group3.id).first.last_value).to eq(1000)
+ expect(internal_ids.where(usage: 4, namespace_id: group3.id).first.last_value).to eq(1000)
+ end
+ end
+end
diff --git a/spec/migrations/20240108185335_backfill_internal_ids_with_issues_usage_for_epics_spec.rb b/spec/migrations/20240108185335_backfill_internal_ids_with_issues_usage_for_epics_spec.rb
new file mode 100644
index 00000000000..31cc9c3dd39
--- /dev/null
+++ b/spec/migrations/20240108185335_backfill_internal_ids_with_issues_usage_for_epics_spec.rb
@@ -0,0 +1,52 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+require_migration!
+
+RSpec.describe BackfillInternalIdsWithIssuesUsageForEpics, feature_category: :team_planning do
+ let(:internal_ids) { table(:internal_ids) }
+ let(:users) { table(:users) }
+ let(:namespaces) { table(:namespaces) }
+ let(:epics) { table(:epics) }
+
+ let!(:author) { users.create!(projects_limit: 0, email: 'human@example.com') }
+
+ let!(:group1) { namespaces.create!(name: 'group1', path: 'group1', type: 'Group') }
+ let!(:group2) { namespaces.create!(name: 'group2', path: 'group2', type: 'Group') }
+ let!(:group3) { namespaces.create!(name: 'group3', path: 'group3', type: 'Group') }
+ let!(:group4) { namespaces.create!(name: 'group4', path: 'group4', type: 'Group') }
+ let!(:project_namespace) { namespaces.create!(name: 'project1', path: 'project1', type: 'Project') }
+
+ let!(:project_issue_iid) { internal_ids.create!(namespace_id: project_namespace.id, usage: 0, last_value: 100) }
+ let!(:group1_epic_iid) { internal_ids.create!(namespace_id: group1.id, usage: 4, last_value: 100) }
+
+ # when there are issues and epics usage records for same namespace and EPICS usage last_value is higher
+ let!(:group2_issue_iid) { internal_ids.create!(namespace_id: group2.id, usage: 0, last_value: 100) }
+ let!(:group2_epic_iid) { internal_ids.create!(namespace_id: group2.id, usage: 4, last_value: 110) }
+
+ # when there are issues and epics usage records for same namespace and ISSUES usage last_value is higher
+ let!(:group3_issue_iid) { internal_ids.create!(namespace_id: group3.id, usage: 0, last_value: 100) }
+ let!(:group3_epic_iid) { internal_ids.create!(namespace_id: group3.id, usage: 4, last_value: 110) }
+
+ let!(:group4_epic) do
+ epics.create!(title: 'Epic99', title_html: 'Epic99', group_id: group4.id, iid: 99, author_id: author.id)
+ end
+
+ describe '#up' do
+ it 'backfills internal_ids for epics as group level issues' do
+ issues_iid_namespaces = [group1.id, group2.id, group3.id, group4.id, project_namespace.id]
+ # project, group2, group3
+ expect(internal_ids.where(usage: 0).count).to eq(3)
+ # group1, group2, group3
+ expect(internal_ids.where(usage: 4).count).to eq(3)
+ migrate!
+
+ # project1, group1, group2, group3, group4(this just had the epics record but not the internal_ids record)
+ expect(internal_ids.where(usage: 0).count).to eq(5)
+ expect(internal_ids.where(usage: 0).pluck(:namespace_id)).to match_array(issues_iid_namespaces)
+ expect(internal_ids.where(usage: 0, namespace_id: group2.id).first.last_value).to eq(110)
+ expect(internal_ids.where(usage: 0, namespace_id: group3.id).first.last_value).to eq(110)
+ expect(internal_ids.where(usage: 4).count).to eq(0)
+ end
+ end
+end
diff --git a/spec/migrations/20240109090354_queue_backfill_partition_id_ci_pipeline_artifact_spec.rb b/spec/migrations/20240109090354_queue_backfill_partition_id_ci_pipeline_artifact_spec.rb
new file mode 100644
index 00000000000..b2764cfc882
--- /dev/null
+++ b/spec/migrations/20240109090354_queue_backfill_partition_id_ci_pipeline_artifact_spec.rb
@@ -0,0 +1,56 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+require_migration!
+
+RSpec.describe QueueBackfillPartitionIdCiPipelineArtifact, migration: :gitlab_ci, feature_category: :continuous_integration do
+ let!(:batched_migrations) { table(:batched_background_migrations) }
+ let!(:migration) { described_class::MIGRATION }
+
+ describe '#up' do
+ context 'with migration present' do
+ let!(:ci_backfill_partition_id_ci_pipeline_artifact_migration) do
+ batched_migrations.create!(
+ job_class_name: 'QueueBackfillPartitionIdCiPipelineArtifact',
+ table_name: :ci_pipeline_artifacts,
+ column_name: :id,
+ job_arguments: [],
+ interval: 2.minutes,
+ min_value: 1,
+ max_value: 2,
+ batch_size: 1000,
+ sub_batch_size: 100,
+ gitlab_schema: :gitlab_ci,
+ status: 3 # finished
+ )
+ end
+
+ context 'when migration finished successfully' do
+ it 'does not raise exception' do
+ expect { migrate! }.not_to raise_error
+ end
+
+ it 'schedules background jobs for each batch of ci_pipeline_artifacts' do
+ migrate!
+
+ expect(migration).to have_scheduled_batched_migration(
+ gitlab_schema: :gitlab_ci,
+ table_name: :ci_pipeline_artifacts,
+ column_name: :id,
+ batch_size: described_class::BATCH_SIZE,
+ sub_batch_size: described_class::SUB_BATCH_SIZE
+ )
+ end
+ end
+ end
+ end
+
+ describe '#down' do
+ it 'deletes all batched migration records' do
+ migrate!
+ schema_migrate_down!
+
+ expect(migration).not_to have_scheduled_batched_migration
+ end
+ end
+end
diff --git a/spec/migrations/20240110090352_queue_backfill_partition_id_ci_pipeline_config_spec.rb b/spec/migrations/20240110090352_queue_backfill_partition_id_ci_pipeline_config_spec.rb
new file mode 100644
index 00000000000..9d64708eb29
--- /dev/null
+++ b/spec/migrations/20240110090352_queue_backfill_partition_id_ci_pipeline_config_spec.rb
@@ -0,0 +1,56 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+require_migration!
+
+RSpec.describe QueueBackfillPartitionIdCiPipelineConfig, migration: :gitlab_ci, feature_category: :continuous_integration do
+ let!(:batched_migrations) { table(:batched_background_migrations) }
+ let!(:migration) { described_class::MIGRATION }
+
+ describe '#up' do
+ context 'with migration present' do
+ let!(:ci_backfill_partition_id_ci_pipeline_config_migration) do
+ batched_migrations.create!(
+ job_class_name: 'QueueBackfillPartitionIdCiPipelineConfig',
+ table_name: :ci_pipelines_config,
+ column_name: :pipeline_id,
+ job_arguments: [],
+ interval: 2.minutes,
+ min_value: 1,
+ max_value: 2,
+ batch_size: 1000,
+ sub_batch_size: 100,
+ gitlab_schema: :gitlab_ci,
+ status: 3 # finished
+ )
+ end
+
+ context 'when migration finished successfully' do
+ it 'does not raise exception' do
+ expect { migrate! }.not_to raise_error
+ end
+
+ it 'schedules background jobs for each batch of ci_pipelines_config' do
+ migrate!
+
+ expect(migration).to have_scheduled_batched_migration(
+ gitlab_schema: :gitlab_ci,
+ table_name: :ci_pipelines_config,
+ column_name: :pipeline_id,
+ batch_size: described_class::BATCH_SIZE,
+ sub_batch_size: described_class::SUB_BATCH_SIZE
+ )
+ end
+ end
+ end
+ end
+
+ describe '#down' do
+ it 'deletes all batched migration records' do
+ migrate!
+ schema_migrate_down!
+
+ expect(migration).not_to have_scheduled_batched_migration
+ end
+ end
+end
diff --git a/spec/migrations/sent_notifications_self_install_id_swap_spec.rb b/spec/migrations/sent_notifications_self_install_id_swap_spec.rb
new file mode 100644
index 00000000000..db66b72d2ec
--- /dev/null
+++ b/spec/migrations/sent_notifications_self_install_id_swap_spec.rb
@@ -0,0 +1,81 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+require_migration!
+
+RSpec.describe SentNotificationsSelfInstallIdSwap, feature_category: :database do
+ let(:connection) { described_class.new.connection }
+
+ describe '#up' do
+ before do
+ # rubocop: disable RSpec/AnyInstanceOf -- This mixin is only used for migrations, it's okay to use this
+ allow_any_instance_of(described_class).to receive(:com_or_dev_or_test_but_not_jh?).and_return(dot_com?)
+ # rubocop: enable RSpec/AnyInstanceOf
+ end
+
+ context 'when we are NOT GitLab.com, dev, or test' do
+ let(:dot_com?) { false }
+
+ context 'when sent_notifications.id is not a bigint' do
+ around do |example|
+ connection.execute('ALTER TABLE sent_notifications ALTER COLUMN id TYPE integer')
+ example.run
+ connection.execute('ALTER TABLE sent_notifications ALTER COLUMN id TYPE bigint')
+ end
+
+ context 'when id_convert_to_bigint exists' do
+ around do |example|
+ connection.execute('ALTER TABLE sent_notifications ADD COLUMN IF NOT EXISTS id_convert_to_bigint bigint')
+ Gitlab::Database::UnidirectionalCopyTrigger.on_table(:sent_notifications, connection: connection).create(
+ :id, :id_convert_to_bigint)
+ example.run
+ connection.execute('ALTER TABLE sent_notifications DROP COLUMN id_convert_to_bigint')
+ end
+
+ it 'swaps the integer and bigint columns' do
+ sent_notifications = table(:sent_notifications)
+ disable_migrations_output do
+ reversible_migration do |migration|
+ migration.before -> {
+ sent_notifications.reset_column_information
+ expect(sent_notifications.columns.find { |c| c.name == 'id' }.sql_type).to eq('integer')
+ expect(sent_notifications.columns.find do |c|
+ c.name == 'id_convert_to_bigint'
+ end.sql_type).to eq('bigint')
+ }
+
+ migration.after -> {
+ sent_notifications.reset_column_information
+ expect(sent_notifications.columns.find { |c| c.name == 'id' }.sql_type).to eq('bigint')
+ expect(sent_notifications.columns.find do |c|
+ c.name == 'id_convert_to_bigint'
+ end.sql_type).to eq('integer')
+ }
+ end
+ end
+ end
+ end
+ end
+ end
+
+ context 'when any other condition' do
+ let(:dot_com?) { true }
+
+ it 'does not do anything' do
+ sent_notifications = table(:sent_notifications)
+
+ disable_migrations_output do
+ reversible_migration do |migration|
+ migration.before -> {
+ expect(sent_notifications.columns.find { |c| c.name == 'id' }.sql_type).to eq('bigint')
+ }
+
+ migration.after -> {
+ expect(sent_notifications.columns.find { |c| c.name == 'id' }.sql_type).to eq('bigint')
+ }
+ end
+ end
+ end
+ end
+ end
+end
diff --git a/spec/models/analytics/cycle_analytics/aggregation_spec.rb b/spec/models/analytics/cycle_analytics/aggregation_spec.rb
index e69093f454a..5494df84d68 100644
--- a/spec/models/analytics/cycle_analytics/aggregation_spec.rb
+++ b/spec/models/analytics/cycle_analytics/aggregation_spec.rb
@@ -131,39 +131,52 @@ RSpec.describe Analytics::CycleAnalytics::Aggregation, type: :model, feature_cat
end
describe '#safe_create_for_namespace' do
- let_it_be(:group) { create(:group) }
- let_it_be(:subgroup) { create(:group, parent: group) }
+ context 'when group namespace is provided' do
+ let_it_be(:group) { create(:group) }
+ let_it_be(:subgroup) { create(:group, parent: group) }
- it 'creates the aggregation record' do
- record = described_class.safe_create_for_namespace(group)
-
- expect(record).to be_persisted
- end
-
- context 'when non top-level group is given' do
- it 'creates the aggregation record for the top-level group' do
- record = described_class.safe_create_for_namespace(subgroup)
+ it 'creates the aggregation record' do
+ record = described_class.safe_create_for_namespace(group)
expect(record).to be_persisted
end
- end
- context 'when the record is already present' do
- it 'does nothing' do
- described_class.safe_create_for_namespace(group)
+ context 'when non top-level group is given' do
+ it 'creates the aggregation record for the top-level group' do
+ record = described_class.safe_create_for_namespace(subgroup)
+
+ expect(record).to be_persisted
+ end
+ end
- expect do
+ context 'when the record is already present' do
+ it 'does nothing' do
described_class.safe_create_for_namespace(group)
- described_class.safe_create_for_namespace(subgroup)
- end.not_to change { described_class.count }
+
+ expect do
+ described_class.safe_create_for_namespace(group)
+ described_class.safe_create_for_namespace(subgroup)
+ end.not_to change { described_class.count }
+ end
+ end
+
+ context 'when the aggregation was disabled for some reason' do
+ it 're-enables the aggregation' do
+ create(:cycle_analytics_aggregation, enabled: false, namespace: group)
+
+ aggregation = described_class.safe_create_for_namespace(group)
+
+ expect(aggregation).to be_enabled
+ end
end
end
- context 'when the aggregation was disabled for some reason' do
- it 're-enables the aggregation' do
- create(:cycle_analytics_aggregation, enabled: false, namespace: group)
+ context 'when personal namespace is provided' do
+ let_it_be(:user2) { create(:user) }
+ let_it_be(:project) { create(:project, :public, namespace: user2.namespace) }
- aggregation = described_class.safe_create_for_namespace(group)
+ it 'is successful' do
+ aggregation = described_class.safe_create_for_namespace(user2.namespace)
expect(aggregation).to be_enabled
end
diff --git a/spec/models/analytics/cycle_analytics/stage_spec.rb b/spec/models/analytics/cycle_analytics/stage_spec.rb
index 54ae0feca2c..abe041ae5d6 100644
--- a/spec/models/analytics/cycle_analytics/stage_spec.rb
+++ b/spec/models/analytics/cycle_analytics/stage_spec.rb
@@ -81,20 +81,4 @@ RSpec.describe Analytics::CycleAnalytics::Stage, feature_category: :value_stream
expect(current_event_pairs).to eq(expected_event_pairs)
end
end
-
- it_behaves_like 'database events tracking' do
- let(:namespace) { create(:group) }
- let(:value_stream) { create(:cycle_analytics_value_stream) }
- let(:record) { described_class.create!(stage_params) }
- let(:update_params) { { name: 'st 2' } }
- let(:stage_params) do
- {
- namespace: namespace,
- name: 'st1',
- start_event_identifier: :merge_request_created,
- end_event_identifier: :merge_request_merged,
- group_value_stream_id: value_stream.id
- }
- end
- end
end
diff --git a/spec/models/application_setting_spec.rb b/spec/models/application_setting_spec.rb
index d16a78be533..b4003469ebb 100644
--- a/spec/models/application_setting_spec.rb
+++ b/spec/models/application_setting_spec.rb
@@ -27,6 +27,8 @@ RSpec.describe ApplicationSetting, feature_category: :shared, type: :model do
it { expect(setting.max_decompressed_archive_size).to eq(25600) }
it { expect(setting.decompress_archive_file_timeout).to eq(210) }
it { expect(setting.bulk_import_concurrent_pipeline_batch_limit).to eq(25) }
+ it { expect(setting.allow_project_creation_for_guest_and_below).to eq(true) }
+ it { expect(setting.members_delete_limit).to eq(60) }
end
describe 'validations' do
@@ -57,6 +59,8 @@ RSpec.describe ApplicationSetting, feature_category: :shared, type: :model do
}
end
+ it { expect(described_class).to validate_jsonb_schema(['application_setting_rate_limits']) }
+
it { is_expected.to allow_value(nil).for(:home_page_url) }
it { is_expected.to allow_value(http).for(:home_page_url) }
it { is_expected.to allow_value(https).for(:home_page_url) }
@@ -101,65 +105,18 @@ RSpec.describe ApplicationSetting, feature_category: :shared, type: :model do
it { is_expected.not_to allow_value(nil).for(:protected_paths_for_get_request) }
it { is_expected.to allow_value([]).for(:protected_paths_for_get_request) }
- it { is_expected.to allow_value(3).for(:push_event_hooks_limit) }
- it { is_expected.not_to allow_value('three').for(:push_event_hooks_limit) }
- it { is_expected.not_to allow_value(nil).for(:push_event_hooks_limit) }
-
- it { is_expected.to allow_value(3).for(:push_event_activities_limit) }
- it { is_expected.not_to allow_value('three').for(:push_event_activities_limit) }
- it { is_expected.not_to allow_value(nil).for(:push_event_activities_limit) }
-
- it { is_expected.to validate_numericality_of(:container_registry_delete_tags_service_timeout).only_integer.is_greater_than_or_equal_to(0) }
- it { is_expected.to validate_numericality_of(:container_registry_cleanup_tags_service_max_list_size).only_integer.is_greater_than_or_equal_to(0) }
- it { is_expected.to validate_numericality_of(:container_registry_data_repair_detail_worker_max_concurrency).only_integer.is_greater_than_or_equal_to(0) }
- it { is_expected.to validate_numericality_of(:container_registry_expiration_policies_worker_capacity).only_integer.is_greater_than_or_equal_to(0) }
it { is_expected.to validate_inclusion_of(:container_registry_expiration_policies_caching).in_array([true, false]) }
- it { is_expected.to validate_numericality_of(:container_registry_import_max_tags_count).only_integer.is_greater_than_or_equal_to(0) }
- it { is_expected.to validate_numericality_of(:container_registry_import_max_retries).only_integer.is_greater_than_or_equal_to(0) }
- it { is_expected.to validate_numericality_of(:container_registry_import_start_max_retries).only_integer.is_greater_than_or_equal_to(0) }
- it { is_expected.to validate_numericality_of(:container_registry_import_max_step_duration).only_integer.is_greater_than_or_equal_to(0) }
- it { is_expected.to validate_numericality_of(:container_registry_pre_import_timeout).only_integer.is_greater_than_or_equal_to(0) }
- it { is_expected.to validate_numericality_of(:container_registry_import_timeout).only_integer.is_greater_than_or_equal_to(0) }
it { is_expected.to validate_numericality_of(:container_registry_pre_import_tags_rate).is_greater_than_or_equal_to(0) }
- it { is_expected.not_to allow_value(nil).for(:container_registry_data_repair_detail_worker_max_concurrency) }
- it { is_expected.not_to allow_value(nil).for(:container_registry_import_max_tags_count) }
- it { is_expected.not_to allow_value(nil).for(:container_registry_import_max_retries) }
- it { is_expected.not_to allow_value(nil).for(:container_registry_import_start_max_retries) }
- it { is_expected.not_to allow_value(nil).for(:container_registry_import_max_step_duration) }
- it { is_expected.not_to allow_value(nil).for(:container_registry_pre_import_timeout) }
- it { is_expected.not_to allow_value(nil).for(:container_registry_import_timeout) }
it { is_expected.not_to allow_value(nil).for(:container_registry_pre_import_tags_rate) }
it { is_expected.to allow_value(1.5).for(:container_registry_pre_import_tags_rate) }
it { is_expected.to validate_presence_of(:container_registry_import_target_plan) }
it { is_expected.to validate_presence_of(:container_registry_import_created_before) }
- it { is_expected.to validate_numericality_of(:decompress_archive_file_timeout).only_integer.is_greater_than_or_equal_to(0) }
- it { is_expected.not_to allow_value(nil).for(:decompress_archive_file_timeout) }
-
- it { is_expected.to validate_numericality_of(:dependency_proxy_ttl_group_policy_worker_capacity).only_integer.is_greater_than_or_equal_to(0) }
- it { is_expected.not_to allow_value(nil).for(:dependency_proxy_ttl_group_policy_worker_capacity) }
-
- it { is_expected.to validate_numericality_of(:packages_cleanup_package_file_worker_capacity).only_integer.is_greater_than_or_equal_to(0) }
- it { is_expected.not_to allow_value(nil).for(:packages_cleanup_package_file_worker_capacity) }
-
- it { is_expected.to validate_numericality_of(:package_registry_cleanup_policies_worker_capacity).only_integer.is_greater_than_or_equal_to(0) }
- it { is_expected.not_to allow_value(nil).for(:package_registry_cleanup_policies_worker_capacity) }
-
- it { is_expected.to validate_numericality_of(:snippet_size_limit).only_integer.is_greater_than(0) }
it { is_expected.to validate_numericality_of(:wiki_page_max_content_bytes).only_integer.is_greater_than_or_equal_to(1024) }
it { is_expected.to validate_inclusion_of(:wiki_asciidoc_allow_uri_includes).in_array([true, false]) }
- it { is_expected.to validate_presence_of(:max_artifacts_size) }
- it { is_expected.to validate_numericality_of(:max_artifacts_size).only_integer.is_greater_than(0) }
- it { is_expected.to validate_presence_of(:max_yaml_size_bytes) }
- it { is_expected.to validate_numericality_of(:max_yaml_size_bytes).only_integer.is_greater_than(0) }
- it { is_expected.to validate_presence_of(:max_yaml_depth) }
- it { is_expected.to validate_numericality_of(:max_yaml_depth).only_integer.is_greater_than(0) }
it { is_expected.to validate_presence_of(:max_pages_size) }
- it { is_expected.to validate_presence_of(:max_pages_custom_domains_per_project) }
- it { is_expected.to validate_presence_of(:max_terraform_state_size_bytes) }
- it { is_expected.to validate_numericality_of(:max_terraform_state_size_bytes).only_integer.is_greater_than_or_equal_to(0) }
it { is_expected.to validate_inclusion_of(:user_defaults_to_private_profile).in_array([true, false]) }
@@ -174,40 +131,12 @@ RSpec.describe ApplicationSetting, feature_category: :shared, type: :model do
.is_less_than(::Gitlab::Pages::MAX_SIZE / 1.megabyte)
end
- it 'ensures max_pages_custom_domains_per_project is an integer greater than 0 (or equal to 0 to indicate unlimited/maximum)' do
- is_expected
- .to validate_numericality_of(:max_pages_custom_domains_per_project)
- .only_integer
- .is_greater_than_or_equal_to(0)
- end
-
- it { is_expected.to validate_presence_of(:jobs_per_stage_page_size) }
- it { is_expected.to validate_numericality_of(:jobs_per_stage_page_size).only_integer.is_greater_than_or_equal_to(0) }
-
it { is_expected.not_to allow_value(7).for(:minimum_password_length) }
it { is_expected.not_to allow_value(129).for(:minimum_password_length) }
it { is_expected.not_to allow_value(nil).for(:minimum_password_length) }
it { is_expected.not_to allow_value('abc').for(:minimum_password_length) }
it { is_expected.to allow_value(10).for(:minimum_password_length) }
- it { is_expected.to allow_value(300).for(:issues_create_limit) }
- it { is_expected.not_to allow_value('three').for(:issues_create_limit) }
- it { is_expected.not_to allow_value(nil).for(:issues_create_limit) }
- it { is_expected.not_to allow_value(10.5).for(:issues_create_limit) }
- it { is_expected.not_to allow_value(-1).for(:issues_create_limit) }
-
- it { is_expected.to allow_value(0).for(:raw_blob_request_limit) }
- it { is_expected.not_to allow_value('abc').for(:raw_blob_request_limit) }
- it { is_expected.not_to allow_value(nil).for(:raw_blob_request_limit) }
- it { is_expected.not_to allow_value(10.5).for(:raw_blob_request_limit) }
- it { is_expected.not_to allow_value(-1).for(:raw_blob_request_limit) }
-
- it { is_expected.to allow_value(0).for(:pipeline_limit_per_project_user_sha) }
- it { is_expected.not_to allow_value('abc').for(:pipeline_limit_per_project_user_sha) }
- it { is_expected.not_to allow_value(nil).for(:pipeline_limit_per_project_user_sha) }
- it { is_expected.not_to allow_value(10.5).for(:pipeline_limit_per_project_user_sha) }
- it { is_expected.not_to allow_value(-1).for(:pipeline_limit_per_project_user_sha) }
-
it { is_expected.not_to allow_value(false).for(:hashed_storage_enabled) }
it { is_expected.to allow_value('default' => 0).for(:repository_storages_weighted) }
@@ -219,15 +148,6 @@ RSpec.describe ApplicationSetting, feature_category: :shared, type: :model do
it { is_expected.not_to allow_value('default' => 101).for(:repository_storages_weighted).with_message("value for 'default' must be between 0 and 100") }
it { is_expected.not_to allow_value('default' => 100, shouldntexist: 50).for(:repository_storages_weighted).with_message("can't include: shouldntexist") }
- %i[notes_create_limit search_rate_limit search_rate_limit_unauthenticated users_get_by_id_limit
- projects_api_rate_limit_unauthenticated gitlab_shell_operation_limit].each do |setting|
- it { is_expected.to allow_value(400).for(setting) }
- it { is_expected.not_to allow_value('two').for(setting) }
- it { is_expected.not_to allow_value(nil).for(setting) }
- it { is_expected.not_to allow_value(5.5).for(setting) }
- it { is_expected.not_to allow_value(-2).for(setting) }
- end
-
def many_usernames(num = 100)
Array.new(num) { |i| "username#{i}" }
end
@@ -280,23 +200,132 @@ RSpec.describe ApplicationSetting, feature_category: :shared, type: :model do
it { is_expected.to validate_inclusion_of(:silent_mode_enabled).in_array([true, false]) }
- it { is_expected.to allow_value(0).for(:ci_max_includes) }
- it { is_expected.to allow_value(200).for(:ci_max_includes) }
- it { is_expected.not_to allow_value('abc').for(:ci_max_includes) }
- it { is_expected.not_to allow_value(nil).for(:ci_max_includes) }
- it { is_expected.not_to allow_value(10.5).for(:ci_max_includes) }
- it { is_expected.not_to allow_value(-1).for(:ci_max_includes) }
+ context 'for non-null integer attributes starting from 0' do
+ where(:attribute) do
+ %i[
+ bulk_import_max_download_file_size
+ ci_max_includes
+ ci_max_total_yaml_size_bytes
+ container_registry_cleanup_tags_service_max_list_size
+ container_registry_data_repair_detail_worker_max_concurrency
+ container_registry_delete_tags_service_timeout
+ container_registry_expiration_policies_worker_capacity
+ container_registry_import_max_retries
+ container_registry_import_max_step_duration
+ container_registry_import_max_tags_count
+ container_registry_import_start_max_retries
+ container_registry_import_timeout
+ container_registry_pre_import_timeout
+ decompress_archive_file_timeout
+ dependency_proxy_ttl_group_policy_worker_capacity
+ gitlab_shell_operation_limit
+ inactive_projects_min_size_mb
+ issues_create_limit
+ jobs_per_stage_page_size
+ max_decompressed_archive_size
+ max_export_size
+ max_import_remote_file_size
+ max_import_size
+ max_pages_custom_domains_per_project
+ max_terraform_state_size_bytes
+ members_delete_limit
+ notes_create_limit
+ package_registry_cleanup_policies_worker_capacity
+ packages_cleanup_package_file_worker_capacity
+ pipeline_limit_per_project_user_sha
+ projects_api_rate_limit_unauthenticated
+ raw_blob_request_limit
+ search_rate_limit
+ search_rate_limit_unauthenticated
+ session_expire_delay
+ sidekiq_job_limiter_compression_threshold_bytes
+ sidekiq_job_limiter_limit_bytes
+ terminal_max_session_time
+ users_get_by_id_limit
+ ]
+ end
- it { is_expected.to allow_value(0).for(:ci_max_total_yaml_size_bytes) }
- it { is_expected.to allow_value(200).for(:ci_max_total_yaml_size_bytes) }
- it { is_expected.not_to allow_value('abc').for(:ci_max_total_yaml_size_bytes) }
- it { is_expected.not_to allow_value(nil).for(:ci_max_total_yaml_size_bytes) }
- it { is_expected.not_to allow_value(10.5).for(:ci_max_total_yaml_size_bytes) }
- it { is_expected.not_to allow_value(-1).for(:ci_max_total_yaml_size_bytes) }
+ with_them do
+ it { is_expected.to validate_numericality_of(attribute).only_integer.is_greater_than_or_equal_to(0) }
+ it { is_expected.not_to allow_value(nil).for(attribute) }
+ end
+ end
- it { is_expected.to validate_inclusion_of(:remember_me_enabled).in_array([true, false]) }
+ context 'for non-null numerical attributes starting from 0' do
+ where(:attribute) do
+ %i[
+ push_event_hooks_limit
+ push_event_activities_limit
+ ]
+ end
- it { is_expected.to validate_numericality_of(:namespace_aggregation_schedule_lease_duration_in_seconds).only_integer.is_greater_than(0) }
+ with_them do
+ it { is_expected.to validate_numericality_of(attribute).is_greater_than_or_equal_to(0) }
+ it { is_expected.not_to allow_value(nil).for(attribute) }
+ end
+ end
+
+ context 'for non-null integer attributes starting from 1' do
+ where(:attribute) do
+ %i[
+ max_attachment_size
+ max_artifacts_size
+ container_registry_token_expire_delay
+ housekeeping_optimize_repository_period
+ bulk_import_concurrent_pipeline_batch_limit
+ snippet_size_limit
+ max_yaml_size_bytes
+ max_yaml_depth
+ namespace_aggregation_schedule_lease_duration_in_seconds
+ throttle_unauthenticated_api_requests_per_period
+ throttle_unauthenticated_api_period_in_seconds
+ throttle_unauthenticated_requests_per_period
+ throttle_unauthenticated_period_in_seconds
+ throttle_unauthenticated_packages_api_requests_per_period
+ throttle_unauthenticated_packages_api_period_in_seconds
+ throttle_unauthenticated_files_api_requests_per_period
+ throttle_unauthenticated_files_api_period_in_seconds
+ throttle_unauthenticated_deprecated_api_requests_per_period
+ throttle_unauthenticated_deprecated_api_period_in_seconds
+ throttle_authenticated_api_requests_per_period
+ throttle_authenticated_api_period_in_seconds
+ throttle_authenticated_git_lfs_requests_per_period
+ throttle_authenticated_git_lfs_period_in_seconds
+ throttle_authenticated_web_requests_per_period
+ throttle_authenticated_web_period_in_seconds
+ throttle_authenticated_packages_api_requests_per_period
+ throttle_authenticated_packages_api_period_in_seconds
+ throttle_authenticated_files_api_requests_per_period
+ throttle_authenticated_files_api_period_in_seconds
+ throttle_authenticated_deprecated_api_requests_per_period
+ throttle_authenticated_deprecated_api_period_in_seconds
+ throttle_protected_paths_requests_per_period
+ throttle_protected_paths_period_in_seconds
+ project_jobs_api_rate_limit
+ ]
+ end
+
+ with_them do
+ it { is_expected.to validate_numericality_of(attribute).only_integer.is_greater_than(0) }
+ it { is_expected.not_to allow_value(nil).for(attribute) }
+ end
+ end
+
+ context 'for null integer attributes starting from 1' do
+ where(:attribute) do
+ %i[
+ failed_login_attempts_unlock_period_in_minutes
+ external_pipeline_validation_service_timeout
+ max_login_attempts
+ ]
+ end
+
+ with_them do
+ it { is_expected.to validate_numericality_of(attribute).only_integer.is_greater_than(0).allow_nil }
+ end
+ end
+
+ it { is_expected.to validate_inclusion_of(:remember_me_enabled).in_array([true, false]) }
it { is_expected.to validate_inclusion_of(:instance_level_code_suggestions_enabled).in_array([true, false]) }
@@ -586,66 +615,6 @@ RSpec.describe ApplicationSetting, feature_category: :shared, type: :model do
end
end
- it { is_expected.to validate_presence_of(:max_attachment_size) }
-
- specify do
- is_expected.to validate_numericality_of(:max_attachment_size)
- .only_integer
- .is_greater_than(0)
- end
-
- it { is_expected.to validate_presence_of(:max_export_size) }
-
- specify do
- is_expected.to validate_numericality_of(:max_export_size)
- .only_integer
- .is_greater_than_or_equal_to(0)
- end
-
- it { is_expected.to validate_presence_of(:max_import_size) }
-
- specify do
- is_expected.to validate_numericality_of(:max_import_size)
- .only_integer
- .is_greater_than_or_equal_to(0)
- end
-
- it { is_expected.to validate_presence_of(:max_import_remote_file_size) }
-
- specify do
- is_expected.to validate_numericality_of(:max_import_remote_file_size)
- .only_integer
- .is_greater_than_or_equal_to(0)
- end
-
- it { is_expected.to validate_presence_of(:bulk_import_max_download_file_size) }
-
- specify do
- is_expected.to validate_numericality_of(:bulk_import_max_download_file_size)
- .only_integer
- .is_greater_than_or_equal_to(0)
- end
-
- it { is_expected.to validate_presence_of(:max_decompressed_archive_size) }
-
- specify do
- is_expected.to validate_numericality_of(:max_decompressed_archive_size)
- .only_integer
- .is_greater_than_or_equal_to(0)
- end
-
- specify do
- is_expected.to validate_numericality_of(:failed_login_attempts_unlock_period_in_minutes)
- .only_integer
- .is_greater_than(0)
- end
-
- specify do
- is_expected.to validate_numericality_of(:max_login_attempts)
- .only_integer
- .is_greater_than(0)
- end
-
specify do
is_expected.to validate_numericality_of(:local_markdown_version)
.only_integer
@@ -879,10 +848,6 @@ RSpec.describe ApplicationSetting, feature_category: :shared, type: :model do
end
end
- context 'housekeeping settings' do
- it { is_expected.not_to allow_value(0).for(:housekeeping_optimize_repository_period) }
- end
-
context 'gitaly timeouts' do
it "validates that the default_timeout is lower than the max_request_duration" do
is_expected.to validate_numericality_of(:gitaly_timeout_default)
@@ -1002,8 +967,8 @@ RSpec.describe ApplicationSetting, feature_category: :shared, type: :model do
it 'the credentials are valid when the private key can be read and matches the certificate' do
tls_attributes = [:external_auth_client_key_pass,
- :external_auth_client_key,
- :external_auth_client_cert]
+ :external_auth_client_key,
+ :external_auth_client_cert]
setting.external_auth_client_key = File.read('spec/fixtures/passphrase_x509_certificate_pk.key')
setting.external_auth_client_key_pass = '5iveL!fe'
@@ -1215,43 +1180,6 @@ RSpec.describe ApplicationSetting, feature_category: :shared, type: :model do
end
end
- context 'throttle_* settings' do
- where(:throttle_setting) do
- %i[
- throttle_unauthenticated_api_requests_per_period
- throttle_unauthenticated_api_period_in_seconds
- throttle_unauthenticated_requests_per_period
- throttle_unauthenticated_period_in_seconds
- throttle_authenticated_api_requests_per_period
- throttle_authenticated_api_period_in_seconds
- throttle_authenticated_web_requests_per_period
- throttle_authenticated_web_period_in_seconds
- throttle_unauthenticated_packages_api_requests_per_period
- throttle_unauthenticated_packages_api_period_in_seconds
- throttle_authenticated_packages_api_requests_per_period
- throttle_authenticated_packages_api_period_in_seconds
- throttle_unauthenticated_files_api_requests_per_period
- throttle_unauthenticated_files_api_period_in_seconds
- throttle_authenticated_files_api_requests_per_period
- throttle_authenticated_files_api_period_in_seconds
- throttle_unauthenticated_deprecated_api_requests_per_period
- throttle_unauthenticated_deprecated_api_period_in_seconds
- throttle_authenticated_deprecated_api_requests_per_period
- throttle_authenticated_deprecated_api_period_in_seconds
- throttle_authenticated_git_lfs_requests_per_period
- throttle_authenticated_git_lfs_period_in_seconds
- ]
- end
-
- with_them do
- it { is_expected.to allow_value(3).for(throttle_setting) }
- it { is_expected.not_to allow_value(-3).for(throttle_setting) }
- it { is_expected.not_to allow_value(0).for(throttle_setting) }
- it { is_expected.not_to allow_value('three').for(throttle_setting) }
- it { is_expected.not_to allow_value(nil).for(throttle_setting) }
- end
- end
-
context 'sidekiq job limiter settings' do
it 'has the right defaults', :aggregate_failures do
expect(setting.sidekiq_job_limiter_mode).to eq('compress')
@@ -1262,8 +1190,6 @@ RSpec.describe ApplicationSetting, feature_category: :shared, type: :model do
end
it { is_expected.to allow_value('track').for(:sidekiq_job_limiter_mode) }
- it { is_expected.to validate_numericality_of(:sidekiq_job_limiter_compression_threshold_bytes).only_integer.is_greater_than_or_equal_to(0) }
- it { is_expected.to validate_numericality_of(:sidekiq_job_limiter_limit_bytes).only_integer.is_greater_than_or_equal_to(0) }
end
context 'prometheus settings' do
@@ -1352,13 +1278,6 @@ RSpec.describe ApplicationSetting, feature_category: :shared, type: :model do
.with_message("must be a value between 0 and 1")
end
end
-
- describe 'bulk_import_concurrent_pipeline_batch_limit' do
- it do
- is_expected.to validate_numericality_of(:bulk_import_concurrent_pipeline_batch_limit)
- .is_greater_than(0)
- end
- end
end
context 'restrict creating duplicates' do
@@ -1714,8 +1633,6 @@ RSpec.describe ApplicationSetting, feature_category: :shared, type: :model do
it { is_expected.to validate_numericality_of(:inactive_projects_delete_after_months).is_greater_than(0) }
- it { is_expected.to validate_numericality_of(:inactive_projects_min_size_mb).is_greater_than_or_equal_to(0) }
-
it "deletes the redis key used for tracking inactive projects deletion warning emails when setting is updated",
:clean_gitlab_redis_shared_state do
Gitlab::Redis::SharedState.with do |redis|
diff --git a/spec/models/bulk_imports/entity_spec.rb b/spec/models/bulk_imports/entity_spec.rb
index ce143a1aa33..014b050a5b5 100644
--- a/spec/models/bulk_imports/entity_spec.rb
+++ b/spec/models/bulk_imports/entity_spec.rb
@@ -307,8 +307,7 @@ RSpec.describe BulkImports::Entity, type: :model, feature_category: :importers d
import = build(:bulk_import, source_version: '16.2.0')
entity = build(:bulk_import_entity, :project_entity, bulk_import: import)
- expect(entity.export_relations_url_path(batched: true))
- .to eq("/projects/#{entity.source_xid}/export_relations?batched=true")
+ expect(entity.export_relations_url_path).to eq("/projects/#{entity.source_xid}/export_relations?batched=true")
end
end
@@ -316,8 +315,7 @@ RSpec.describe BulkImports::Entity, type: :model, feature_category: :importers d
it 'returns export relations url' do
entity = build(:bulk_import_entity)
- expect(entity.export_relations_url_path(batched: true))
- .to eq("/groups/#{entity.source_xid}/export_relations")
+ expect(entity.export_relations_url_path).to eq("/groups/#{entity.source_xid}/export_relations")
end
end
end
diff --git a/spec/models/bulk_imports/failure_spec.rb b/spec/models/bulk_imports/failure_spec.rb
index 928f14aaced..bbb5ad52fe1 100644
--- a/spec/models/bulk_imports/failure_spec.rb
+++ b/spec/models/bulk_imports/failure_spec.rb
@@ -58,4 +58,20 @@ RSpec.describe BulkImports::Failure, type: :model, feature_category: :importers
expect(failure.exception_message.size).to eq(255)
end
end
+
+ describe '#source_title=' do
+ it 'truncates title to 255 characters' do
+ failure = described_class.new
+ failure.source_title = 'A' * 1000
+ expect(failure.source_title.size).to eq(255)
+ end
+ end
+
+ describe '#source_url=' do
+ it 'truncates url to 255 characters' do
+ failure = described_class.new
+ failure.source_url = 'A' * 1000
+ expect(failure.source_url.size).to eq(255)
+ end
+ end
end
diff --git a/spec/models/ci/build_spec.rb b/spec/models/ci/build_spec.rb
index 18c7e57d464..d7e91f44e75 100644
--- a/spec/models/ci/build_spec.rb
+++ b/spec/models/ci/build_spec.rb
@@ -68,10 +68,17 @@ RSpec.describe Ci::Build, feature_category: :continuous_integration, factory_def
it { is_expected.to delegate_method(:legacy_detached_merge_request_pipeline?).to(:pipeline) }
describe 'associations' do
+ it { is_expected.to belong_to(:project_mirror) }
+
it 'has a bidirectional relationship with projects' do
expect(described_class.reflect_on_association(:project).has_inverse?).to eq(:builds)
expect(Project.reflect_on_association(:builds).has_inverse?).to eq(:project)
end
+
+ it 'has a bidirectional relationship with project mirror' do
+ expect(described_class.reflect_on_association(:project_mirror).has_inverse?).to eq(:builds)
+ expect(Ci::ProjectMirror.reflect_on_association(:builds).has_inverse?).to eq(:project_mirror)
+ end
end
describe 'callbacks' do
@@ -325,14 +332,15 @@ RSpec.describe Ci::Build, feature_category: :continuous_integration, factory_def
describe '.with_exposed_artifacts' do
subject { described_class.with_exposed_artifacts }
- let!(:job1) { create(:ci_build, pipeline: pipeline) }
+ let_it_be(:job1) { create(:ci_build, pipeline: pipeline) }
+ let_it_be(:job3) { create(:ci_build, pipeline: pipeline) }
+
let!(:job2) { create(:ci_build, options: options, pipeline: pipeline) }
- let!(:job3) { create(:ci_build, pipeline: pipeline) }
- context 'when some jobs have exposed artifacs and some not' do
+ context 'when some jobs have exposed artifacts and some not' do
let(:options) { { artifacts: { expose_as: 'test', paths: ['test'] } } }
- before do
+ before_all do
job1.ensure_metadata.update!(has_exposed_artifacts: nil)
job3.ensure_metadata.update!(has_exposed_artifacts: false)
end
@@ -356,10 +364,10 @@ RSpec.describe Ci::Build, feature_category: :continuous_integration, factory_def
let(:artifact_scope) { Ci::JobArtifact.where(file_type: 'archive') }
- let!(:build_1) { create(:ci_build, :artifacts, pipeline: pipeline) }
- let!(:build_2) { create(:ci_build, :codequality_reports, pipeline: pipeline) }
- let!(:build_3) { create(:ci_build, :test_reports, pipeline: pipeline) }
- let!(:build_4) { create(:ci_build, :artifacts, pipeline: pipeline) }
+ let_it_be(:build_1) { create(:ci_build, :artifacts, pipeline: pipeline) }
+ let_it_be(:build_2) { create(:ci_build, :codequality_reports, pipeline: pipeline) }
+ let_it_be(:build_3) { create(:ci_build, :test_reports, pipeline: pipeline) }
+ let_it_be(:build_4) { create(:ci_build, :artifacts, pipeline: pipeline) }
it 'returns artifacts matching the given scope' do
expect(builds).to contain_exactly(build_1, build_4)
@@ -383,10 +391,10 @@ RSpec.describe Ci::Build, feature_category: :continuous_integration, factory_def
end
describe '.with_needs' do
- let!(:build) { create(:ci_build, pipeline: pipeline) }
- let!(:build_b) { create(:ci_build, pipeline: pipeline) }
- let!(:build_need_a) { create(:ci_build_need, build: build) }
- let!(:build_need_b) { create(:ci_build_need, build: build_b) }
+ let_it_be(:build) { create(:ci_build, pipeline: pipeline) }
+ let_it_be(:build_b) { create(:ci_build, pipeline: pipeline) }
+ let_it_be(:build_need_a) { create(:ci_build_need, build: build) }
+ let_it_be(:build_need_b) { create(:ci_build_need, build: build_b) }
context 'when passing build name' do
subject { described_class.with_needs(build_need_a.name) }
@@ -421,6 +429,33 @@ RSpec.describe Ci::Build, feature_category: :continuous_integration, factory_def
end
end
+ describe '.belonging_to_runner_manager' do
+ subject { described_class.belonging_to_runner_manager(runner_manager) }
+
+ let_it_be(:runner) { create(:ci_runner, :group, groups: [group]) }
+ let_it_be(:build_b) { create(:ci_build, :success) }
+
+ context 'with runner_manager of runner associated with build' do
+ let!(:runner_manager) { create(:ci_runner_machine, runner: runner) }
+ let!(:runner_manager_build) { create(:ci_runner_machine_build, build: build, runner_manager: runner_manager) }
+
+ it { is_expected.to contain_exactly(build) }
+ end
+
+ context 'with runner_manager of runner not associated with build' do
+ let!(:runner_manager) { create(:ci_runner_machine, runner: instance_runner) }
+ let!(:instance_runner) { create(:ci_runner, :with_runner_manager) }
+
+ it { is_expected.to be_empty }
+ end
+
+ context 'with nil runner_manager' do
+ let(:runner_manager) { nil }
+
+ it { is_expected.to be_empty }
+ end
+ end
+
describe '#stick_build_if_status_changed' do
it 'sticks the build if the status changed' do
job = create(:ci_build, :pending, pipeline: pipeline)
@@ -3262,6 +3297,34 @@ RSpec.describe Ci::Build, feature_category: :continuous_integration, factory_def
end
end
+ context 'for the diffblue_cover integration' do
+ context 'when active' do
+ let_it_be(:diffblue_cover_integration) { create(:diffblue_cover_integration, active: true) }
+
+ let(:diffblue_cover_variables) do
+ [
+ { key: 'DIFFBLUE_LICENSE_KEY', value: diffblue_cover_integration.diffblue_license_key, masked: true, public: false },
+ { key: 'DIFFBLUE_ACCESS_TOKEN_NAME', value: diffblue_cover_integration.diffblue_access_token_name, masked: true, public: false },
+ { key: 'DIFFBLUE_ACCESS_TOKEN', value: diffblue_cover_integration.diffblue_access_token_secret, masked: true, public: false }
+ ]
+ end
+
+ it 'includes diffblue_cover variables' do
+ is_expected.to include(*diffblue_cover_variables)
+ end
+ end
+
+ context 'when inactive' do
+ let_it_be(:diffblue_cover_integration) { create(:diffblue_cover_integration, active: false) }
+
+ it 'does not include diffblue_cover variables' do
+ expect(subject.find { |v| v[:key] == 'DIFFBLUE_LICENSE_KEY' }).to be_nil
+ expect(subject.find { |v| v[:key] == 'DIFFBLUE_ACCESS_TOKEN_NAME' }).to be_nil
+ expect(subject.find { |v| v[:key] == 'DIFFBLUE_ACCESS_TOKEN' }).to be_nil
+ end
+ end
+ end
+
context 'for the google_play integration' do
before do
allow(build.pipeline).to receive(:protected_ref?).and_return(pipeline_protected_ref)
@@ -5507,10 +5570,11 @@ RSpec.describe Ci::Build, feature_category: :continuous_integration, factory_def
stub_current_partition_id
end
- it 'includes partition_id as a token prefix' do
- prefix = ci_build.token.split('_').first.to_i(16)
+ it 'includes partition_id in the token prefix' do
+ prefix = ci_build.token.match(/^glcbt-([\h]+)_/)
+ partition_prefix = prefix[1].to_i(16)
- expect(prefix).to eq(ci_testing_partition_id)
+ expect(partition_prefix).to eq(ci_testing_partition_id)
end
end
@@ -5648,7 +5712,7 @@ RSpec.describe Ci::Build, feature_category: :continuous_integration, factory_def
it 'generates a token' do
expect { ci_build.enqueue }
- .to change { ci_build.token }.from(nil).to(a_string_starting_with(partition_id_prefix_in_16_bit_encode))
+ .to change { ci_build.token }.from(nil).to(a_string_starting_with("glcbt-#{partition_id_prefix_in_16_bit_encode}"))
end
end
@@ -5665,4 +5729,51 @@ RSpec.describe Ci::Build, feature_category: :continuous_integration, factory_def
end
end
end
+
+ describe '#prefix_and_partition_for_token' do
+ # 100.to_s(16) -> 64
+ let(:ci_build) { described_class.new(partition_id: 100) }
+
+ shared_examples 'partition prefix' do
+ it 'is prefixed with partition_id' do
+ ci_build.ensure_token
+ expect(ci_build.token).to match(/^64_[\w-]{20}$/)
+ end
+ end
+
+ shared_examples 'static and partition prefixes' do
+ it 'is prefixed with static string and partition id' do
+ ci_build.ensure_token
+ expect(ci_build.token).to match(/^glcbt-64_[\w-]{20}$/)
+ end
+ end
+
+ it_behaves_like 'static and partition prefixes'
+
+ context 'when feature flag is globally disabled' do
+ before do
+ stub_feature_flags(prefix_ci_build_tokens: false)
+ end
+
+ it_behaves_like 'partition prefix'
+
+ context 'when enabled for a different project' do
+ let_it_be(:project) { create(:project) }
+
+ before do
+ stub_feature_flags(prefix_ci_build_tokens: project)
+ end
+
+ it_behaves_like 'partition prefix'
+ end
+
+ context 'when enabled for the project' do
+ before do
+ stub_feature_flags(prefix_ci_build_tokens: ci_build.project)
+ end
+
+ it_behaves_like 'static and partition prefixes'
+ end
+ end
+ end
end
diff --git a/spec/models/ci/catalog/resources/version_spec.rb b/spec/models/ci/catalog/resources/version_spec.rb
index aafd51699b5..bacaa6355fe 100644
--- a/spec/models/ci/catalog/resources/version_spec.rb
+++ b/spec/models/ci/catalog/resources/version_spec.rb
@@ -28,6 +28,23 @@ RSpec.describe Ci::Catalog::Resources::Version, type: :model, feature_category:
end
end
+ describe '.by_name' do
+ it 'returns the version that matches the name' do
+ versions = described_class.by_name('v1.0')
+
+ expect(versions.count).to eq(1)
+ expect(versions.first.name).to eq('v1.0')
+ end
+
+ context 'when no version matches the name' do
+ it 'returns empty response' do
+ versions = described_class.by_name('does_not_exist')
+
+ expect(versions).to be_empty
+ end
+ end
+ end
+
describe '.order_by_created_at_asc' do
it 'returns versions ordered by created_at ascending' do
versions = described_class.order_by_created_at_asc
@@ -127,9 +144,9 @@ RSpec.describe Ci::Catalog::Resources::Version, type: :model, feature_category:
describe '#name' do
it 'is equivalent to release.tag' do
- release_v1_0.update!(name: 'Release v1.0')
+ v1_0.release.update!(name: 'Release v1.0')
- expect(v1_0.name).to eq(release_v1_0.tag)
+ expect(v1_0.name).to eq(v1_0.release.tag)
end
end
@@ -142,10 +159,17 @@ RSpec.describe Ci::Catalog::Resources::Version, type: :model, feature_category:
context 'when the sha is nil' do
it 'returns nil' do
- release_v1_0.update!(sha: nil)
+ v1_0.release.update!(sha: nil)
is_expected.to be_nil
end
end
end
+
+ describe '#readme' do
+ it 'returns the correct readme for the version' do
+ expect(v1_0.readme.data).to include('Readme v1.0')
+ expect(v1_1.readme.data).to include('Readme v1.1')
+ end
+ end
end
diff --git a/spec/models/ci/instance_variable_spec.rb b/spec/models/ci/instance_variable_spec.rb
index 0ef1dfbd55c..9a6a78ee5f4 100644
--- a/spec/models/ci/instance_variable_spec.rb
+++ b/spec/models/ci/instance_variable_spec.rb
@@ -113,4 +113,10 @@ RSpec.describe Ci::InstanceVariable do
end
end
end
+
+ describe "description" do
+ it { is_expected.to allow_values('').for(:description) }
+ it { is_expected.to allow_values(nil).for(:description) }
+ it { is_expected.to validate_length_of(:description).is_at_most(255) }
+ end
end
diff --git a/spec/models/ci/namespace_mirror_spec.rb b/spec/models/ci/namespace_mirror_spec.rb
index 63e6e9e6b26..8db8fd4e067 100644
--- a/spec/models/ci/namespace_mirror_spec.rb
+++ b/spec/models/ci/namespace_mirror_spec.rb
@@ -21,6 +21,16 @@ RSpec.describe Ci::NamespaceMirror do
)
end
+ describe 'associations' do
+ it { is_expected.to belong_to(:namespace) }
+ it { is_expected.to have_many(:project_mirrors) }
+
+ it 'has a bidirectional relationship with project mirrors' do
+ expect(described_class.reflect_on_association(:project_mirrors).has_inverse?).to eq(:namespace_mirror)
+ expect(Ci::ProjectMirror.reflect_on_association(:namespace_mirror).has_inverse?).to eq(:project_mirrors)
+ end
+ end
+
context 'scopes' do
describe '.by_group_and_descendants' do
let_it_be(:another_group) { create(:group) }
diff --git a/spec/models/ci/pipeline_artifact_spec.rb b/spec/models/ci/pipeline_artifact_spec.rb
index eb89c7af208..1cb99ec22b9 100644
--- a/spec/models/ci/pipeline_artifact_spec.rb
+++ b/spec/models/ci/pipeline_artifact_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Ci::PipelineArtifact, type: :model do
+RSpec.describe Ci::PipelineArtifact, type: :model, feature_category: :build_artifacts do
let(:coverage_report) { create(:ci_pipeline_artifact, :with_coverage_report) }
describe 'associations' do
@@ -309,4 +309,19 @@ RSpec.describe Ci::PipelineArtifact, type: :model do
let!(:model) { create(:ci_pipeline_artifact, project: parent) }
end
end
+
+ describe 'partitioning', :ci_partitionable do
+ include Ci::PartitioningHelpers
+
+ let(:pipeline) { create(:ci_pipeline) }
+ let(:pipeline_artifact) { create(:ci_pipeline_artifact, pipeline: pipeline) }
+
+ before do
+ stub_current_partition_id
+ end
+
+ it 'assigns the same partition id as the one that pipeline has' do
+ expect(pipeline_artifact.partition_id).to eq(ci_testing_partition_id)
+ end
+ end
end
diff --git a/spec/models/ci/pipeline_chat_data_spec.rb b/spec/models/ci/pipeline_chat_data_spec.rb
new file mode 100644
index 00000000000..4c9dc7edd88
--- /dev/null
+++ b/spec/models/ci/pipeline_chat_data_spec.rb
@@ -0,0 +1,27 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Ci::PipelineChatData, type: :model, feature_category: :continuous_integration do
+ it { is_expected.to belong_to(:chat_name) }
+ it { is_expected.to belong_to(:pipeline) }
+
+ it { is_expected.to validate_presence_of(:pipeline_id) }
+ it { is_expected.to validate_presence_of(:chat_name_id) }
+ it { is_expected.to validate_presence_of(:response_url) }
+
+ describe 'partitioning', :ci_partitionable do
+ include Ci::PartitioningHelpers
+
+ let(:pipeline) { create(:ci_pipeline) }
+ let(:pipeline_chat_data) { create(:ci_pipeline_chat_data, pipeline: pipeline) }
+
+ before do
+ stub_current_partition_id
+ end
+
+ it 'assigns the same partition id as the one that pipeline has' do
+ expect(pipeline_chat_data.partition_id).to eq(ci_testing_partition_id)
+ end
+ end
+end
diff --git a/spec/models/ci/pipeline_config_spec.rb b/spec/models/ci/pipeline_config_spec.rb
index 3d033d33df3..3368c40fb57 100644
--- a/spec/models/ci/pipeline_config_spec.rb
+++ b/spec/models/ci/pipeline_config_spec.rb
@@ -2,9 +2,24 @@
require 'spec_helper'
-RSpec.describe Ci::PipelineConfig, type: :model do
+RSpec.describe Ci::PipelineConfig, type: :model, feature_category: :continuous_integration do
it { is_expected.to belong_to(:pipeline) }
it { is_expected.to validate_presence_of(:pipeline) }
it { is_expected.to validate_presence_of(:content) }
+
+ describe 'partitioning', :ci_partitionable do
+ include Ci::PartitioningHelpers
+
+ let(:pipeline) { create(:ci_pipeline) }
+ let(:pipeline_config) { create(:ci_pipeline_config, pipeline: pipeline) }
+
+ before do
+ stub_current_partition_id
+ end
+
+ it 'assigns the same partition id as the one that pipeline has' do
+ expect(pipeline_config.partition_id).to eq(ci_testing_partition_id)
+ end
+ end
end
diff --git a/spec/models/ci/pipeline_metadata_spec.rb b/spec/models/ci/pipeline_metadata_spec.rb
index 1a426118063..efe180b1db3 100644
--- a/spec/models/ci/pipeline_metadata_spec.rb
+++ b/spec/models/ci/pipeline_metadata_spec.rb
@@ -15,7 +15,7 @@ RSpec.describe Ci::PipelineMetadata, feature_category: :pipeline_composition do
is_expected.to define_enum_for(
:auto_cancel_on_new_commit
).with_values(
- conservative: 0, interruptible: 1, disabled: 2
+ conservative: 0, interruptible: 1, none: 2
).with_prefix
end
@@ -27,4 +27,19 @@ RSpec.describe Ci::PipelineMetadata, feature_category: :pipeline_composition do
).with_prefix
end
end
+
+ describe 'partitioning', :ci_partitionable do
+ include Ci::PartitioningHelpers
+
+ let(:pipeline) { create(:ci_pipeline) }
+ let(:pipeline_metadata) { create(:ci_pipeline_metadata, pipeline: pipeline) }
+
+ before do
+ stub_current_partition_id
+ end
+
+ it 'assigns the same partition id as the one that pipeline has' do
+ expect(pipeline_metadata.partition_id).to eq(ci_testing_partition_id)
+ end
+ end
end
diff --git a/spec/models/ci/pipeline_spec.rb b/spec/models/ci/pipeline_spec.rb
index 024d3ae4240..52c3792ac93 100644
--- a/spec/models/ci/pipeline_spec.rb
+++ b/spec/models/ci/pipeline_spec.rb
@@ -2349,14 +2349,14 @@ RSpec.describe Ci::Pipeline, :mailer, factory_default: :keep, feature_category:
end
it 'runs on a branch update push' do
- expect(pipeline.before_sha).not_to be Gitlab::Git::BLANK_SHA
+ expect(pipeline.before_sha).not_to be Gitlab::Git::SHA1_BLANK_SHA
expect(pipeline.branch_updated?).to be true
end
end
context 'when pipeline does not have before SHA' do
before do
- pipeline.update!(before_sha: Gitlab::Git::BLANK_SHA)
+ pipeline.update!(before_sha: Gitlab::Git::SHA1_BLANK_SHA)
end
it 'does not run on a branch updating push' do
@@ -2384,7 +2384,7 @@ RSpec.describe Ci::Pipeline, :mailer, factory_default: :keep, feature_category:
context 'when either old or new revision is missing' do
before do
- pipeline.update!(before_sha: Gitlab::Git::BLANK_SHA)
+ pipeline.update!(before_sha: Gitlab::Git::SHA1_BLANK_SHA)
end
it 'returns nil' do
@@ -5727,4 +5727,36 @@ RSpec.describe Ci::Pipeline, :mailer, factory_default: :keep, feature_category:
end
end
end
+
+ describe '#auto_cancel_on_new_commit' do
+ let_it_be_with_reload(:pipeline) { create(:ci_pipeline, project: project) }
+
+ subject(:auto_cancel_on_new_commit) { pipeline.auto_cancel_on_new_commit }
+
+ context 'when pipeline_metadata is not present' do
+ it { is_expected.to eq('conservative') }
+ end
+
+ context 'when pipeline_metadata is present' do
+ before_all do
+ create(:ci_pipeline_metadata, project: pipeline.project, pipeline: pipeline)
+ end
+
+ context 'when auto_cancel_on_new_commit is nil' do
+ before do
+ pipeline.pipeline_metadata.auto_cancel_on_new_commit = nil
+ end
+
+ it { is_expected.to eq('conservative') }
+ end
+
+ context 'when auto_cancel_on_new_commit is a valid value' do
+ before do
+ pipeline.pipeline_metadata.auto_cancel_on_new_commit = 'interruptible'
+ end
+
+ it { is_expected.to eq('interruptible') }
+ end
+ end
+ end
end
diff --git a/spec/models/ci/processable_spec.rb b/spec/models/ci/processable_spec.rb
index 5d457c4f213..d74441f93a6 100644
--- a/spec/models/ci/processable_spec.rb
+++ b/spec/models/ci/processable_spec.rb
@@ -83,7 +83,7 @@ RSpec.describe Ci::Processable, feature_category: :continuous_integration do
let(:ignore_accessors) do
%i[type namespace lock_version target_url base_tags trace_sections
- commit_id deployment erased_by_id project_id
+ commit_id deployment erased_by_id project_id project_mirror
runner_id tag_taggings taggings tags trigger_request_id
user_id auto_canceled_by_id retried failure_reason
sourced_pipelines sourced_pipeline artifacts_file_store artifacts_metadata_store
diff --git a/spec/models/ci/project_mirror_spec.rb b/spec/models/ci/project_mirror_spec.rb
index 5ef520b4230..491ae353ffe 100644
--- a/spec/models/ci/project_mirror_spec.rb
+++ b/spec/models/ci/project_mirror_spec.rb
@@ -8,6 +8,22 @@ RSpec.describe Ci::ProjectMirror do
let!(:project) { create(:project, namespace: group2) }
+ describe 'associations' do
+ it { is_expected.to belong_to(:project) }
+ it { is_expected.to belong_to(:namespace_mirror) }
+ it { is_expected.to have_many(:builds) }
+
+ it 'has a bidirectional relationship with namespace mirror' do
+ expect(described_class.reflect_on_association(:namespace_mirror).has_inverse?).to eq(:project_mirrors)
+ expect(Ci::NamespaceMirror.reflect_on_association(:project_mirrors).has_inverse?).to eq(:namespace_mirror)
+ end
+
+ it 'has a bidirectional relationship with builds' do
+ expect(described_class.reflect_on_association(:builds).has_inverse?).to eq(:project_mirror)
+ expect(Ci::Build.reflect_on_association(:project_mirror).has_inverse?).to eq(:builds)
+ end
+ end
+
context 'scopes' do
let_it_be(:another_project) { create(:project, namespace: group1) }
diff --git a/spec/models/ci/runner_manager_spec.rb b/spec/models/ci/runner_manager_spec.rb
index 02a72afe0c6..4fdfbae997d 100644
--- a/spec/models/ci/runner_manager_spec.rb
+++ b/spec/models/ci/runner_manager_spec.rb
@@ -41,20 +41,70 @@ RSpec.describe Ci::RunnerManager, feature_category: :fleet_visibility, type: :mo
end
end
- describe '.stale', :freeze_time do
- subject { described_class.stale.ids }
+ describe 'status scopes' do
+ let_it_be(:runner) { create(:ci_runner, :instance) }
- let!(:runner_manager1) { create(:ci_runner_machine, :stale) }
- let!(:runner_manager2) { create(:ci_runner_machine, :stale, contacted_at: nil) }
- let!(:runner_manager3) { create(:ci_runner_machine, created_at: 6.months.ago, contacted_at: Time.current) }
- let!(:runner_manager4) { create(:ci_runner_machine, created_at: 5.days.ago) }
- let!(:runner_manager5) do
- create(:ci_runner_machine, created_at: (7.days - 1.second).ago, contacted_at: (7.days - 1.second).ago)
+ let_it_be(:offline_runner_manager) { create(:ci_runner_machine, runner: runner, contacted_at: 2.hours.ago) }
+ let_it_be(:online_runner_manager) { create(:ci_runner_machine, runner: runner, contacted_at: 1.second.ago) }
+ let_it_be(:never_contacted_runner_manager) { create(:ci_runner_machine, runner: runner, contacted_at: nil) }
+
+ describe '.online' do
+ subject(:runner_managers) { described_class.online }
+
+ it 'returns online runner managers' do
+ expect(runner_managers).to contain_exactly(online_runner_manager)
+ end
+ end
+
+ describe '.offline' do
+ subject(:runner_managers) { described_class.offline }
+
+ it 'returns offline runner managers' do
+ expect(runner_managers).to contain_exactly(offline_runner_manager)
+ end
end
- it 'returns stale runner managers' do
- is_expected.to match_array([runner_manager1.id, runner_manager2.id])
+ describe '.never_contacted' do
+ subject(:runner_managers) { described_class.never_contacted }
+
+ it 'returns never contacted runner managers' do
+ expect(runner_managers).to contain_exactly(never_contacted_runner_manager)
+ end
end
+
+ describe '.stale', :freeze_time do
+ subject { described_class.stale }
+
+ let!(:stale_runner_manager1) do
+ create(
+ :ci_runner_machine,
+ runner: runner,
+ created_at: described_class.stale_deadline - 1.second,
+ contacted_at: nil
+ )
+ end
+
+ let!(:stale_runner_manager2) do
+ create(
+ :ci_runner_machine,
+ runner: runner,
+ created_at: 8.days.ago,
+ contacted_at: described_class.stale_deadline - 1.second
+ )
+ end
+
+ it 'returns stale runner managers' do
+ is_expected.to contain_exactly(stale_runner_manager1, stale_runner_manager2)
+ end
+ end
+
+ include_examples 'runner with status scope'
+ end
+
+ describe '.available_statuses' do
+ subject { described_class.available_statuses }
+
+ it { is_expected.to eq(%w[online offline never_contacted stale]) }
end
describe '.online_contact_time_deadline', :freeze_time do
@@ -72,23 +122,60 @@ RSpec.describe Ci::RunnerManager, feature_category: :fleet_visibility, type: :mo
describe '.for_runner' do
subject(:runner_managers) { described_class.for_runner(runner_arg) }
- let_it_be(:runner1) { create(:ci_runner) }
- let_it_be(:runner_manager11) { create(:ci_runner_machine, runner: runner1) }
- let_it_be(:runner_manager12) { create(:ci_runner_machine, runner: runner1) }
+ let_it_be(:runner_a) { create(:ci_runner) }
+ let_it_be(:runner_manager_a1) { create(:ci_runner_machine, runner: runner_a) }
+ let_it_be(:runner_manager_a2) { create(:ci_runner_machine, runner: runner_a) }
context 'with single runner' do
- let(:runner_arg) { runner1 }
+ let(:runner_arg) { runner_a }
- it { is_expected.to contain_exactly(runner_manager11, runner_manager12) }
+ it { is_expected.to contain_exactly(runner_manager_a1, runner_manager_a2) }
end
context 'with multiple runners' do
- let(:runner_arg) { [runner1, runner2] }
+ let(:runner_arg) { [runner_a, runner_b] }
- let_it_be(:runner2) { create(:ci_runner) }
- let_it_be(:runner_manager2) { create(:ci_runner_machine, runner: runner2) }
+ let_it_be(:runner_b) { create(:ci_runner) }
+ let_it_be(:runner_manager_b1) { create(:ci_runner_machine, runner: runner_b) }
- it { is_expected.to contain_exactly(runner_manager11, runner_manager12, runner_manager2) }
+ it { is_expected.to contain_exactly(runner_manager_a1, runner_manager_a2, runner_manager_b1) }
+ end
+ end
+
+ describe '.with_system_xid' do
+ subject(:runner_managers) { described_class.with_system_xid(system_xid) }
+
+ let_it_be(:runner_a) { create(:ci_runner) }
+ let_it_be(:runner_b) { create(:ci_runner) }
+ let_it_be(:runner_manager_a1) { create(:ci_runner_machine, runner: runner_a, system_xid: 'id1') }
+ let_it_be(:runner_manager_a2) { create(:ci_runner_machine, runner: runner_a, system_xid: 'id2') }
+ let_it_be(:runner_manager_b1) { create(:ci_runner_machine, runner: runner_b, system_xid: 'id1') }
+
+ context 'with single system id' do
+ let(:system_xid) { 'id2' }
+
+ it { is_expected.to contain_exactly(runner_manager_a2) }
+ end
+
+ context 'with multiple system ids' do
+ let(:system_xid) { %w[id1 id2] }
+
+ it { is_expected.to contain_exactly(runner_manager_a1, runner_manager_a2, runner_manager_b1) }
+ end
+
+ context 'when chained with another scope' do
+ subject(:runner_managers) { described_class.for_runner(runner).with_system_xid(system_xid) }
+
+ let(:runner) { runner_a }
+ let(:system_xid) { 'id1' }
+
+ it { is_expected.to contain_exactly(runner_manager_a1) }
+
+ context 'with another runner' do
+ let(:runner) { runner_b }
+
+ it { is_expected.to contain_exactly(runner_manager_b1) }
+ end
end
end
@@ -96,18 +183,18 @@ RSpec.describe Ci::RunnerManager, feature_category: :fleet_visibility, type: :mo
let!(:runner_version1) { create(:ci_runner_version, version: '16.0.0', status: :recommended) }
let!(:runner_version2) { create(:ci_runner_version, version: '16.0.1', status: :available) }
- let!(:runner1) { create(:ci_runner) }
- let!(:runner2) { create(:ci_runner) }
- let!(:runner_manager11) { create(:ci_runner_machine, runner: runner1, version: runner_version1.version) }
- let!(:runner_manager12) { create(:ci_runner_machine, runner: runner1, version: runner_version2.version) }
- let!(:runner_manager2) { create(:ci_runner_machine, runner: runner2, version: runner_version2.version) }
+ let!(:runner_a) { create(:ci_runner) }
+ let!(:runner_b) { create(:ci_runner) }
+ let!(:runner_manager_a1) { create(:ci_runner_machine, runner: runner_a, version: runner_version1.version) }
+ let!(:runner_manager_a2) { create(:ci_runner_machine, runner: runner_a, version: runner_version2.version) }
+ let!(:runner_manager_b1) { create(:ci_runner_machine, runner: runner_b, version: runner_version2.version) }
subject { described_class.aggregate_upgrade_status_by_runner_id }
it 'contains aggregate runner upgrade status by runner ID' do
is_expected.to eq({
- runner1.id => :recommended,
- runner2.id => :available
+ runner_a.id => :recommended,
+ runner_b.id => :available
})
end
end
@@ -139,6 +226,108 @@ RSpec.describe Ci::RunnerManager, feature_category: :fleet_visibility, type: :mo
it { is_expected.to eq([runner_manager2, runner_manager1]) }
end
+ describe '.with_upgrade_status' do
+ subject(:scope) { described_class.with_upgrade_status(upgrade_status) }
+
+ let_it_be(:runner_manager_14_0_0) { create(:ci_runner_machine, version: '14.0.0') }
+ let_it_be(:runner_manager_14_1_0) { create(:ci_runner_machine, version: '14.1.0') }
+ let_it_be(:runner_manager_14_1_1) { create(:ci_runner_machine, version: '14.1.1') }
+
+ before_all do
+ create(:ci_runner_version, version: '14.0.0', status: :available)
+ create(:ci_runner_version, version: '14.1.0', status: :recommended)
+ create(:ci_runner_version, version: '14.1.1', status: :unavailable)
+ end
+
+ context 'as :unavailable' do
+ let(:upgrade_status) { :unavailable }
+
+ it 'returns runners with runner managers whose version is assigned :unavailable' do
+ is_expected.to contain_exactly(runner_manager_14_1_1)
+ end
+ end
+
+ context 'as :available' do
+ let(:upgrade_status) { :available }
+
+ it 'returns runners with runner managers whose version is assigned :available' do
+ is_expected.to contain_exactly(runner_manager_14_0_0)
+ end
+ end
+
+ context 'as :recommended' do
+ let(:upgrade_status) { :recommended }
+
+ it 'returns runners with runner managers whose version is assigned :recommended' do
+ is_expected.to contain_exactly(runner_manager_14_1_0)
+ end
+ end
+ end
+
+ describe '.with_version_prefix' do
+ subject { described_class.with_version_prefix(version_prefix) }
+
+ let_it_be(:runner_manager1) { create(:ci_runner_machine, version: '15.11.0') }
+ let_it_be(:runner_manager2) { create(:ci_runner_machine, version: '15.9.0') }
+ let_it_be(:runner_manager3) { create(:ci_runner_machine, version: '15.11.5') }
+
+ context 'with a prefix string of "15."' do
+ let(:version_prefix) { "15." }
+
+ it 'returns runner managers' do
+ is_expected.to contain_exactly(runner_manager1, runner_manager2, runner_manager3)
+ end
+ end
+
+ context 'with a prefix string of "15"' do
+ let(:version_prefix) { "15" }
+
+ it 'returns runner managers' do
+ is_expected.to contain_exactly(runner_manager1, runner_manager2, runner_manager3)
+ end
+ end
+
+ context 'with a prefix string of "15.11."' do
+ let(:version_prefix) { "15.11." }
+
+ it 'returns runner managers' do
+ is_expected.to contain_exactly(runner_manager1, runner_manager3)
+ end
+ end
+
+ context 'with a prefix string of "15.11"' do
+ let(:version_prefix) { "15.11" }
+
+ it 'returns runner managers' do
+ is_expected.to contain_exactly(runner_manager1, runner_manager3)
+ end
+ end
+
+ context 'with a prefix string of "15.9"' do
+ let(:version_prefix) { "15.9" }
+
+ it 'returns runner managers' do
+ is_expected.to contain_exactly(runner_manager2)
+ end
+ end
+
+ context 'with a prefix string of "15.11.5"' do
+ let(:version_prefix) { "15.11.5" }
+
+ it 'returns runner managers' do
+ is_expected.to contain_exactly(runner_manager3)
+ end
+ end
+
+ context 'with a malformed prefix of "V2"' do
+ let(:version_prefix) { "V2" }
+
+ it 'returns no runner managers' do
+ is_expected.to be_empty
+ end
+ end
+ end
+
describe '#status', :freeze_time do
let(:runner_manager) { build(:ci_runner_machine, created_at: 8.days.ago) }
@@ -375,106 +564,4 @@ RSpec.describe Ci::RunnerManager, feature_category: :fleet_visibility, type: :mo
it { is_expected.to contain_exactly build }
end
end
-
- describe '.with_upgrade_status' do
- subject(:scope) { described_class.with_upgrade_status(upgrade_status) }
-
- let_it_be(:runner_manager_14_0_0) { create(:ci_runner_machine, version: '14.0.0') }
- let_it_be(:runner_manager_14_1_0) { create(:ci_runner_machine, version: '14.1.0') }
- let_it_be(:runner_manager_14_1_1) { create(:ci_runner_machine, version: '14.1.1') }
-
- before_all do
- create(:ci_runner_version, version: '14.0.0', status: :available)
- create(:ci_runner_version, version: '14.1.0', status: :recommended)
- create(:ci_runner_version, version: '14.1.1', status: :unavailable)
- end
-
- context 'as :unavailable' do
- let(:upgrade_status) { :unavailable }
-
- it 'returns runners with runner managers whose version is assigned :unavailable' do
- is_expected.to contain_exactly(runner_manager_14_1_1)
- end
- end
-
- context 'as :available' do
- let(:upgrade_status) { :available }
-
- it 'returns runners with runner managers whose version is assigned :available' do
- is_expected.to contain_exactly(runner_manager_14_0_0)
- end
- end
-
- context 'as :recommended' do
- let(:upgrade_status) { :recommended }
-
- it 'returns runners with runner managers whose version is assigned :recommended' do
- is_expected.to contain_exactly(runner_manager_14_1_0)
- end
- end
- end
-
- describe '.with_version_prefix' do
- subject { described_class.with_version_prefix(version_prefix) }
-
- let_it_be(:runner_manager1) { create(:ci_runner_machine, version: '15.11.0') }
- let_it_be(:runner_manager2) { create(:ci_runner_machine, version: '15.9.0') }
- let_it_be(:runner_manager3) { create(:ci_runner_machine, version: '15.11.5') }
-
- context 'with a prefix string of "15."' do
- let(:version_prefix) { "15." }
-
- it 'returns runner managers' do
- is_expected.to contain_exactly(runner_manager1, runner_manager2, runner_manager3)
- end
- end
-
- context 'with a prefix string of "15"' do
- let(:version_prefix) { "15" }
-
- it 'returns runner managers' do
- is_expected.to contain_exactly(runner_manager1, runner_manager2, runner_manager3)
- end
- end
-
- context 'with a prefix string of "15.11."' do
- let(:version_prefix) { "15.11." }
-
- it 'returns runner managers' do
- is_expected.to contain_exactly(runner_manager1, runner_manager3)
- end
- end
-
- context 'with a prefix string of "15.11"' do
- let(:version_prefix) { "15.11" }
-
- it 'returns runner managers' do
- is_expected.to contain_exactly(runner_manager1, runner_manager3)
- end
- end
-
- context 'with a prefix string of "15.9"' do
- let(:version_prefix) { "15.9" }
-
- it 'returns runner managers' do
- is_expected.to contain_exactly(runner_manager2)
- end
- end
-
- context 'with a prefix string of "15.11.5"' do
- let(:version_prefix) { "15.11.5" }
-
- it 'returns runner managers' do
- is_expected.to contain_exactly(runner_manager3)
- end
- end
-
- context 'with a malformed prefix of "V2"' do
- let(:version_prefix) { "V2" }
-
- it 'returns no runner managers' do
- is_expected.to be_empty
- end
- end
- end
end
diff --git a/spec/models/ci/runner_spec.rb b/spec/models/ci/runner_spec.rb
index bb9ac084ed6..d4f7db3bddd 100644
--- a/spec/models/ci/runner_spec.rb
+++ b/spec/models/ci/runner_spec.rb
@@ -532,7 +532,7 @@ RSpec.describe Ci::Runner, type: :model, feature_category: :runner do
let_it_be(:runner3) { create(:ci_runner, creator_id: 1) }
let_it_be(:runner4) { create(:ci_runner, creator_id: nil) }
- it 'returns runners with creator_id \'1\'' do
+ it "returns runners with creator_id '1'" do
is_expected.to contain_exactly(runner2, runner3)
end
end
@@ -557,19 +557,6 @@ RSpec.describe Ci::Runner, type: :model, feature_category: :runner do
end
end
- describe '.stale', :freeze_time do
- subject { described_class.stale }
-
- let!(:runner1) { create(:ci_runner, :instance, created_at: 4.months.ago, contacted_at: 3.months.ago + 1.second) }
- let!(:runner2) { create(:ci_runner, :instance, created_at: 4.months.ago, contacted_at: 3.months.ago) }
- let!(:runner3) { create(:ci_runner, :instance, created_at: 3.months.ago, contacted_at: nil) }
- let!(:runner4) { create(:ci_runner, :instance, created_at: 2.months.ago, contacted_at: nil) }
-
- it 'returns stale runners' do
- is_expected.to match_array([runner2, runner3])
- end
- end
-
describe '#stale?', :clean_gitlab_redis_cache, :freeze_time do
let(:runner) { build(:ci_runner, :instance) }
@@ -632,15 +619,6 @@ RSpec.describe Ci::Runner, type: :model, feature_category: :runner do
end
end
- describe '.online', :freeze_time do
- subject { described_class.online }
-
- let!(:runner1) { create(:ci_runner, :instance, contacted_at: 2.hours.ago) }
- let!(:runner2) { create(:ci_runner, :instance, contacted_at: 1.second.ago) }
-
- it { is_expected.to match_array([runner2]) }
- end
-
describe '#online?', :clean_gitlab_redis_cache, :freeze_time do
let(:runner) { build(:ci_runner, :instance) }
@@ -715,15 +693,6 @@ RSpec.describe Ci::Runner, type: :model, feature_category: :runner do
end
end
- describe '.offline' do
- subject { described_class.offline }
-
- let!(:runner1) { create(:ci_runner, :instance, contacted_at: 2.hours.ago) }
- let!(:runner2) { create(:ci_runner, :instance, contacted_at: 1.second.ago) }
-
- it { is_expected.to eq([runner1]) }
- end
-
describe '.with_running_builds' do
subject { described_class.with_running_builds }
@@ -1229,6 +1198,46 @@ RSpec.describe Ci::Runner, type: :model, feature_category: :runner do
end
end
+ describe '#clear_heartbeat', :freeze_time do
+ let!(:runner) { create(:ci_runner, :project, version: '15.0.0') }
+ let(:heartbeat_values) do
+ {
+ version: '15.0.1',
+ platform: 'darwin',
+ architecture: '18-bit',
+ ip_address: '1.1.1.1',
+ executor: 'shell',
+ revision: 'sha',
+ config: { 'gpus' => 'all' }
+ }
+ end
+
+ let(:expected_attributes) { heartbeat_values.except(:executor).merge(executor_type: 'shell') }
+ let(:expected_cleared_attributes) { expected_attributes.to_h { |key, _| [key, nil] }.merge(config: {}) }
+
+ it 'clears contacted at and other attributes' do
+ expect do
+ runner.heartbeat(heartbeat_values)
+ end.to change { runner.reload.contacted_at }.from(nil).to(Time.current)
+ .and change { runner.reload.uncached_contacted_at }.from(nil).to(Time.current)
+
+ expected_attributes.each do |key, value|
+ expect(runner.public_send(key)).to eq(value)
+ expect(runner.read_attribute(key)).to eq(value)
+ end
+
+ expect do
+ runner.clear_heartbeat
+ end.to change { runner.reload.contacted_at }.from(Time.current).to(nil)
+ .and change { runner.reload.uncached_contacted_at }.from(Time.current).to(nil)
+
+ expected_cleared_attributes.each do |key, value|
+ expect(runner.public_send(key)).to eq(value)
+ expect(runner.read_attribute(key)).to eq(value)
+ end
+ end
+ end
+
describe '#destroy' do
let(:runner) { create(:ci_runner) }
@@ -2126,4 +2135,102 @@ RSpec.describe Ci::Runner, type: :model, feature_category: :runner do
end
end
end
+
+ describe 'status scopes' do
+ let_it_be(:online_runner) { create(:ci_runner, :instance, contacted_at: 1.second.ago) }
+ let_it_be(:offline_runner) { create(:ci_runner, :instance, contacted_at: 2.hours.ago) }
+ let_it_be(:never_contacted_runner) { create(:ci_runner, :instance, contacted_at: nil) }
+
+ describe '.online' do
+ subject(:runners) { described_class.online }
+
+ it 'returns online runners' do
+ expect(runners).to contain_exactly(online_runner)
+ end
+ end
+
+ describe '.offline' do
+ subject(:runners) { described_class.offline }
+
+ it 'returns offline runners' do
+ expect(runners).to contain_exactly(offline_runner)
+ end
+ end
+
+ describe '.never_contacted' do
+ subject(:runners) { described_class.never_contacted }
+
+ it 'returns never contacted runners' do
+ expect(runners).to contain_exactly(never_contacted_runner)
+ end
+ end
+
+ describe '.stale', :freeze_time do
+ subject { described_class.stale }
+
+ let!(:stale_runner1) do
+ create(:ci_runner, :instance, created_at: described_class.stale_deadline - 1.second, contacted_at: nil)
+ end
+
+ let!(:stale_runner2) do
+ create(:ci_runner, :instance, created_at: 4.months.ago, contacted_at: described_class.stale_deadline - 1.second)
+ end
+
+ it 'returns stale runners' do
+ is_expected.to contain_exactly(stale_runner1, stale_runner2)
+ end
+ end
+
+ include_examples 'runner with status scope'
+ end
+
+ describe '.available_statuses' do
+ subject { described_class.available_statuses }
+
+ it { is_expected.to eq(%w[active paused online offline never_contacted stale]) }
+ end
+
+ describe '.online_contact_time_deadline', :freeze_time do
+ subject { described_class.online_contact_time_deadline }
+
+ it { is_expected.to eq(2.hours.ago) }
+ end
+
+ describe '.stale_deadline', :freeze_time do
+ subject { described_class.stale_deadline }
+
+ it { is_expected.to eq(3.months.ago) }
+ end
+
+ describe '.with_runner_type' do
+ subject { described_class.with_runner_type(runner_type) }
+
+ let_it_be(:instance_runner) { create(:ci_runner, :instance) }
+ let_it_be(:group_runner) { create(:ci_runner, :group) }
+ let_it_be(:project_runner) { create(:ci_runner, :project) }
+
+ context 'with instance_type' do
+ let(:runner_type) { 'instance_type' }
+
+ it { is_expected.to contain_exactly(instance_runner) }
+ end
+
+ context 'with group_type' do
+ let(:runner_type) { 'group_type' }
+
+ it { is_expected.to contain_exactly(group_runner) }
+ end
+
+ context 'with project_type' do
+ let(:runner_type) { 'project_type' }
+
+ it { is_expected.to contain_exactly(project_runner) }
+ end
+
+ context 'with invalid runner type' do
+ let(:runner_type) { 'invalid runner type' }
+
+ it { is_expected.to contain_exactly(instance_runner, group_runner, project_runner) }
+ end
+ end
end
diff --git a/spec/models/clusters/cluster_spec.rb b/spec/models/clusters/cluster_spec.rb
index 5fc5bbd41ff..a95f56ea714 100644
--- a/spec/models/clusters/cluster_spec.rb
+++ b/spec/models/clusters/cluster_spec.rb
@@ -576,9 +576,9 @@ RSpec.describe Clusters::Cluster, :use_clean_rails_memory_store_caching,
it 'avoids N+1 queries' do
another_project = create(:project)
- control_count = ActiveRecord::QueryRecorder.new do
+ control = ActiveRecord::QueryRecorder.new do
described_class.ancestor_clusters_for_clusterable(another_project, hierarchy_order: hierarchy_order)
- end.count
+ end
cluster2 = create(:cluster, :provided_by_gcp, :group)
child2 = cluster2.group
@@ -587,7 +587,7 @@ RSpec.describe Clusters::Cluster, :use_clean_rails_memory_store_caching,
expect do
described_class.ancestor_clusters_for_clusterable(project, hierarchy_order: hierarchy_order)
- end.not_to exceed_query_limit(control_count)
+ end.not_to exceed_query_limit(control)
end
context 'for a group' do
diff --git a/spec/models/commit_collection_spec.rb b/spec/models/commit_collection_spec.rb
index be80aced3fd..5db417f8032 100644
--- a/spec/models/commit_collection_spec.rb
+++ b/spec/models/commit_collection_spec.rb
@@ -210,7 +210,7 @@ RSpec.describe CommitCollection, feature_category: :source_code_management do
it 'returns the original commit if the commit could not be lazy loaded' do
collection = described_class.new(project, [hash_commit])
- unexisting_lazy_commit = Commit.lazy(project, Gitlab::Git::BLANK_SHA)
+ unexisting_lazy_commit = Commit.lazy(project, Gitlab::Git::SHA1_BLANK_SHA)
expect(Commit).to receive(:lazy).with(project, hash_commit.id).and_return(unexisting_lazy_commit)
diff --git a/spec/models/commit_status_spec.rb b/spec/models/commit_status_spec.rb
index 618dd3a3f77..7c4917596a0 100644
--- a/spec/models/commit_status_spec.rb
+++ b/spec/models/commit_status_spec.rb
@@ -583,7 +583,7 @@ RSpec.describe CommitStatus, feature_category: :continuous_integration do
end
it 'returns blank sha' do
- is_expected.to eq(Gitlab::Git::BLANK_SHA)
+ is_expected.to eq(Gitlab::Git::SHA1_BLANK_SHA)
end
end
diff --git a/spec/models/concerns/commit_signature_spec.rb b/spec/models/concerns/commit_signature_spec.rb
index 4bba5a6ee41..9a4ac165ac1 100644
--- a/spec/models/concerns/commit_signature_spec.rb
+++ b/spec/models/concerns/commit_signature_spec.rb
@@ -2,15 +2,15 @@
require 'spec_helper'
-RSpec.describe CommitSignature do
+RSpec.describe CommitSignature, feature_category: :source_code_management do
+ subject(:implementation) do
+ Class.new(ActiveRecord::Base) do
+ self.table_name = 'ssh_signatures'
+ end.include(described_class).new
+ end
+
describe '#signed_by_user' do
context 'when class does not define the signed_by_user method' do
- subject(:implementation) do
- Class.new(ActiveRecord::Base) do
- self.table_name = 'ssh_signatures'
- end.include(described_class).new
- end
-
it 'raises a NoMethodError with custom message' do
expect do
implementation.signed_by_user
@@ -18,4 +18,12 @@ RSpec.describe CommitSignature do
end
end
end
+
+ describe 'enums' do
+ it 'defines enums for verification statuses' do
+ is_expected.to define_enum_for(:verification_status).with_values(
+ ::Enums::CommitSignature.verification_statuses
+ )
+ end
+ end
end
diff --git a/spec/models/concerns/database_event_tracking_spec.rb b/spec/models/concerns/database_event_tracking_spec.rb
deleted file mode 100644
index a99b4737537..00000000000
--- a/spec/models/concerns/database_event_tracking_spec.rb
+++ /dev/null
@@ -1,85 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe DatabaseEventTracking, :snowplow, feature_category: :service_ping do
- before do
- allow(Gitlab::Tracking).to receive(:database_event).and_call_original
- end
-
- let(:test_class) do
- Class.new(ActiveRecord::Base) do
- include DatabaseEventTracking
-
- self.table_name = 'application_setting_terms'
-
- self::SNOWPLOW_ATTRIBUTES = %w[id].freeze # rubocop:disable RSpec/LeakyConstantDeclaration
- end
- end
-
- subject(:create_test_class_record) { test_class.create!(id: 1, terms: "") }
-
- context 'if event emmiter failed' do
- before do
- allow(Gitlab::Tracking).to receive(:database_event).and_raise(StandardError) # rubocop:disable RSpec/ExpectGitlabTracking
- end
-
- it 'tracks the exception' do
- expect(Gitlab::ErrorTracking).to receive(:track_and_raise_for_dev_exception)
-
- create_test_class_record
- end
- end
-
- describe 'event tracking' do
- let(:category) { test_class.to_s }
- let(:event) { 'database_event' }
-
- it 'when created' do
- create_test_class_record
-
- expect_snowplow_event(
- tracking_method: :database_event,
- category: category,
- action: "#{event}_create",
- label: 'application_setting_terms',
- property: 'create',
- namespace: nil,
- project: nil,
- "id" => 1
- )
- end
-
- it 'when updated' do
- create_test_class_record
- test_class.first.update!(id: 3)
-
- expect_snowplow_event(
- tracking_method: :database_event,
- category: category,
- action: "#{event}_update",
- label: 'application_setting_terms',
- property: 'update',
- namespace: nil,
- project: nil,
- "id" => 3
- )
- end
-
- it 'when destroyed' do
- create_test_class_record
- test_class.first.destroy!
-
- expect_snowplow_event(
- tracking_method: :database_event,
- category: category,
- action: "#{event}_destroy",
- label: 'application_setting_terms',
- property: 'destroy',
- namespace: nil,
- project: nil,
- "id" => 1
- )
- end
- end
-end
diff --git a/spec/models/concerns/routable_spec.rb b/spec/models/concerns/routable_spec.rb
index e71392f7bbc..a9149b0eebe 100644
--- a/spec/models/concerns/routable_spec.rb
+++ b/spec/models/concerns/routable_spec.rb
@@ -82,15 +82,15 @@ RSpec.shared_examples 'routable resource' do
end
end
- context 'on the usage of `use_includes` parameter' do
+ context 'on the usage of `preload_routes` parameter' do
let_it_be(:klass) { record.class.to_s.downcase }
let_it_be(:record_3) { create(:"#{klass}") }
let_it_be(:record_4) { create(:"#{klass}") }
- context 'when use_includes: true' do
+ context 'when preload_routes: true' do
it 'includes route information when loading records' do
- control_count = ActiveRecord::QueryRecorder.new do
- described_class.where_full_path_in([record.full_path, record_2.full_path], use_includes: true)
+ control = ActiveRecord::QueryRecorder.new do
+ described_class.where_full_path_in([record.full_path, record_2.full_path], preload_routes: true)
.map(&:route)
end
@@ -101,16 +101,16 @@ RSpec.shared_examples 'routable resource' do
record_2.full_path,
record_3.full_path,
record_4.full_path
- ], use_includes: true)
+ ], preload_routes: true)
.map(&:route)
- end.to issue_same_number_of_queries_as(control_count)
+ end.to issue_same_number_of_queries_as(control)
end
end
- context 'when use_includes: false' do
+ context 'when preload_routes: false' do
it 'does not include route information when loading records' do
control_count = ActiveRecord::QueryRecorder.new do
- described_class.where_full_path_in([record.full_path, record_2.full_path], use_includes: false)
+ described_class.where_full_path_in([record.full_path, record_2.full_path], preload_routes: false)
.map(&:route)
end
@@ -121,7 +121,7 @@ RSpec.shared_examples 'routable resource' do
record_2.full_path,
record_3.full_path,
record_4.full_path
- ], use_includes: false)
+ ], preload_routes: false)
.map(&:route)
end.not_to issue_same_number_of_queries_as(control_count)
end
@@ -130,14 +130,6 @@ RSpec.shared_examples 'routable resource' do
end
it_behaves_like '.where_full_path_in', :aggregate_failures
-
- context 'when the `optimize_where_full_path_in` feature flag is turned OFF' do
- before do
- stub_feature_flags(optimize_where_full_path_in: false)
- end
-
- it_behaves_like '.where_full_path_in', :aggregate_failures
- end
end
RSpec.shared_examples 'routable resource with parent' do
diff --git a/spec/models/container_registry/protection/rule_spec.rb b/spec/models/container_registry/protection/rule_spec.rb
index 1706fcf76ae..848b844ed64 100644
--- a/spec/models/container_registry/protection/rule_spec.rb
+++ b/spec/models/container_registry/protection/rule_spec.rb
@@ -3,6 +3,8 @@
require 'spec_helper'
RSpec.describe ContainerRegistry::Protection::Rule, type: :model, feature_category: :container_registry do
+ using RSpec::Parameterized::TableSyntax
+
it_behaves_like 'having unique enum values'
describe 'relationships' do
@@ -51,4 +53,192 @@ RSpec.describe ContainerRegistry::Protection::Rule, type: :model, feature_catego
it { is_expected.to validate_presence_of(:push_protected_up_to_access_level) }
end
end
+
+ describe '.for_repository_path' do
+ let_it_be(:container_registry_protection_rule) do
+ create(:container_registry_protection_rule, repository_path_pattern: 'my-scope/my_container')
+ end
+
+ let_it_be(:protection_rule_with_wildcard_start) do
+ create(:container_registry_protection_rule, repository_path_pattern: '*my-scope/my_container-with-wildcard-start')
+ end
+
+ let_it_be(:protection_rule_with_wildcard_end) do
+ create(:container_registry_protection_rule, repository_path_pattern: 'my-scope/my_container-with-wildcard-end*')
+ end
+
+ let_it_be(:protection_rule_with_wildcard_middle) do
+ create(:container_registry_protection_rule,
+ repository_path_pattern: 'my-scope/*my_container-with-wildcard-middle')
+ end
+
+ let_it_be(:protection_rule_with_wildcard_double) do
+ create(:container_registry_protection_rule,
+ repository_path_pattern: '**my-scope/**my_container-with-wildcard-double**')
+ end
+
+ let_it_be(:protection_rule_with_underscore) do
+ create(:container_registry_protection_rule, repository_path_pattern: 'my-scope/my_container-with_____underscore')
+ end
+
+ let_it_be(:protection_rule_with_regex_chars) do
+ create(:container_registry_protection_rule, repository_path_pattern: 'my-scope/my_container-with-regex-chars.+')
+ end
+
+ let(:repository_path) { container_registry_protection_rule.repository_path_pattern }
+
+ subject { described_class.for_repository_path(repository_path) }
+
+ context 'with several container registry protection rule scenarios' do
+ where(:repository_path, :expected_container_registry_protection_rules) do
+ 'my-scope/my_container' | [ref(:container_registry_protection_rule)]
+ 'my-scope/my2container' | []
+ 'my-scope/my_container-2' | []
+
+ # With wildcard pattern at the start
+ 'my-scope/my_container-with-wildcard-start' | [ref(:protection_rule_with_wildcard_start)]
+ 'my-scope/my_container-with-wildcard-start-any' | []
+ 'prefix-my-scope/my_container-with-wildcard-start' | [ref(:protection_rule_with_wildcard_start)]
+ 'prefix-my-scope/my_container-with-wildcard-start-any' | []
+
+ # With wildcard pattern at the end
+ 'my-scope/my_container-with-wildcard-end' | [ref(:protection_rule_with_wildcard_end)]
+ 'my-scope/my_container-with-wildcard-end:1234567890' | [ref(:protection_rule_with_wildcard_end)]
+ 'prefix-my-scope/my_container-with-wildcard-end' | []
+ 'prefix-my-scope/my_container-with-wildcard-end:1234567890' | []
+
+ # With wildcard pattern in the middle
+ 'my-scope/my_container-with-wildcard-middle' | [ref(:protection_rule_with_wildcard_middle)]
+ 'my-scope/any-my_container-with-wildcard-middle' | [ref(:protection_rule_with_wildcard_middle)]
+ 'my-scope/any-my_container-my_container-wildcard-middle-any' | []
+
+ # With double wildcard pattern
+ 'my-scope/my_container-with-wildcard-double' | [ref(:protection_rule_with_wildcard_double)]
+ 'prefix-my-scope/any-my_container-with-wildcard-double-any' | [ref(:protection_rule_with_wildcard_double)]
+ '****my-scope/****my_container-with-wildcard-double****' | [ref(:protection_rule_with_wildcard_double)]
+ 'prefix-@other-scope/any-my_container-with-wildcard-double-any' | []
+
+ # With underscore
+ 'my-scope/my_container-with_____underscore' | [ref(:protection_rule_with_underscore)]
+ 'my-scope/my_container-with_any_underscore' | []
+
+ 'my-scope/my_container-with-regex-chars.+' | [ref(:protection_rule_with_regex_chars)]
+ 'my-scope/my_container-with-regex-chars.' | []
+ 'my-scope/my_container-with-regex-chars' | []
+ 'my-scope/my_container-with-regex-chars-any' | []
+
+ # Special cases
+ nil | []
+ '' | []
+ 'any_container' | []
+ end
+
+ with_them do
+ it { is_expected.to match_array(expected_container_registry_protection_rules) }
+ end
+ end
+
+ context 'with multiple matching container registry protection rules' do
+ let!(:container_registry_protection_rule_second_match) do
+ create(:container_registry_protection_rule, repository_path_pattern: "#{repository_path}*")
+ end
+
+ it {
+ is_expected.to contain_exactly(container_registry_protection_rule_second_match,
+ container_registry_protection_rule)
+ }
+ end
+ end
+
+ describe '.for_push_exists?' do
+ subject do
+ project
+ .container_registry_protection_rules
+ .for_push_exists?(
+ access_level: access_level,
+ repository_path: repository_path
+ )
+ end
+
+ context 'when the repository path matches multiple protection rules' do
+ # The abbreviation `crpr` stands for container registry protection rule
+ let_it_be(:project_with_crpr) { create(:project) }
+ let_it_be(:project_without_crpr) { create(:project) }
+
+ let_it_be(:protection_rule_for_developer) do
+ create(:container_registry_protection_rule,
+ repository_path_pattern: 'my-scope/my-container-stage*',
+ project: project_with_crpr,
+ push_protected_up_to_access_level: :developer
+ )
+ end
+
+ let_it_be(:protection_rule_for_maintainer) do
+ create(:container_registry_protection_rule,
+ repository_path_pattern: 'my-scope/my-container-prod*',
+ project: project_with_crpr,
+ push_protected_up_to_access_level: :maintainer
+ )
+ end
+
+ let_it_be(:protection_rule_for_owner) do
+ create(:container_registry_protection_rule,
+ repository_path_pattern: 'my-scope/my-container-release*',
+ project: project_with_crpr,
+ push_protected_up_to_access_level: :owner
+ )
+ end
+
+ let_it_be(:protection_rule_overlapping_for_developer) do
+ create(:container_registry_protection_rule,
+ repository_path_pattern: 'my-scope/my-container-*',
+ project: project_with_crpr,
+ push_protected_up_to_access_level: :developer
+ )
+ end
+
+ where(:project, :access_level, :repository_path, :for_push_exists) do
+ ref(:project_with_crpr) | Gitlab::Access::REPORTER | 'my-scope/my-container-stage-sha-1234' | true
+ ref(:project_with_crpr) | Gitlab::Access::DEVELOPER | 'my-scope/my-container-stage-sha-1234' | true
+ ref(:project_with_crpr) | Gitlab::Access::MAINTAINER | 'my-scope/my-container-stage-sha-1234' | false
+ ref(:project_with_crpr) | Gitlab::Access::MAINTAINER | 'my-scope/my-container-stage-sha-1234' | false
+ ref(:project_with_crpr) | Gitlab::Access::OWNER | 'my-scope/my-container-stage-sha-1234' | false
+ ref(:project_with_crpr) | Gitlab::Access::ADMIN | 'my-scope/my-container-stage-sha-1234' | false
+
+ ref(:project_with_crpr) | Gitlab::Access::DEVELOPER | 'my-scope/my-container-prod-sha-1234' | true
+ ref(:project_with_crpr) | Gitlab::Access::MAINTAINER | 'my-scope/my-container-prod-sha-1234' | true
+ ref(:project_with_crpr) | Gitlab::Access::OWNER | 'my-scope/my-container-prod-sha-1234' | false
+ ref(:project_with_crpr) | Gitlab::Access::ADMIN | 'my-scope/my-container-prod-sha-1234' | false
+
+ ref(:project_with_crpr) | Gitlab::Access::DEVELOPER | 'my-scope/my-container-release-v1' | true
+ ref(:project_with_crpr) | Gitlab::Access::OWNER | 'my-scope/my-container-release-v1' | true
+ ref(:project_with_crpr) | Gitlab::Access::ADMIN | 'my-scope/my-container-release-v1' | false
+
+ ref(:project_with_crpr) | Gitlab::Access::DEVELOPER | 'my-scope/my-container-any-suffix' | true
+ ref(:project_with_crpr) | Gitlab::Access::MAINTAINER | 'my-scope/my-container-any-suffix' | false
+ ref(:project_with_crpr) | Gitlab::Access::OWNER | 'my-scope/my-container-any-suffix' | false
+
+ # For non-matching repository_path
+ ref(:project_with_crpr) | Gitlab::Access::DEVELOPER | 'my-scope/non-matching-container' | false
+
+ # For no access level
+ ref(:project_with_crpr) | Gitlab::Access::NO_ACCESS | 'my-scope/my-container-prod-sha-1234' | true
+
+ # Edge cases
+ ref(:project_with_crpr) | 0 | '' | false
+ ref(:project_with_crpr) | nil | nil | false
+ ref(:project_with_crpr) | Gitlab::Access::DEVELOPER | nil | false
+ ref(:project_with_crpr) | nil | 'my-scope/non-matching-container' | false
+
+ # For projects that have no container registry protection rules
+ ref(:project_without_crpr) | Gitlab::Access::DEVELOPER | 'my-scope/my-container-prod-sha-1234' | false
+ ref(:project_without_crpr) | Gitlab::Access::MAINTAINER | 'my-scope/my-container-prod-sha-1234' | false
+ ref(:project_without_crpr) | Gitlab::Access::OWNER | 'my-scope/my-container-prod-sha-1234' | false
+ end
+
+ with_them do
+ it { is_expected.to eq for_push_exists }
+ end
+ end
+ end
end
diff --git a/spec/models/container_repository_spec.rb b/spec/models/container_repository_spec.rb
index fb32c796016..084e2dd7bd5 100644
--- a/spec/models/container_repository_spec.rb
+++ b/spec/models/container_repository_spec.rb
@@ -693,12 +693,16 @@ RSpec.describe ContainerRepository, :aggregate_failures, feature_category: :cont
it 'calls GitlabApiClient#tags and passes parameters' do
allow(repository.gitlab_api_client).to receive(:tags).and_return({})
expect(repository.gitlab_api_client).to receive(:tags).with(
- repository.path, page_size: page_size, before: before, last: last, sort: sort, name: name)
+ repository.path, page_size: page_size, before: before, last: last, sort: sort, name: name, referrers: nil)
subject
end
context 'with a call to tags' do
+ let_it_be(:created_at) { 15.minutes.ago }
+ let_it_be(:updated_at) { 10.minutes.ago }
+ let_it_be(:published_at) { 5.minutes.ago }
+
let_it_be(:tags_response) do
[
{
@@ -707,8 +711,15 @@ RSpec.describe ContainerRepository, :aggregate_failures, feature_category: :cont
config_digest: 'sha256:66b1132a0173910b01ee69583bbf2f7f1e4462c99efbe1b9ab5bf',
media_type: 'application/vnd.oci.image.manifest.v1+json',
size_bytes: 1234567890,
- created_at: 5.minutes.ago,
- updated_at: 5.minutes.ago
+ created_at: created_at,
+ updated_at: updated_at,
+ published_at: published_at,
+ referrers: [
+ {
+ artifactType: 'application/vnd.example+type',
+ digest: 'sha256:57d3be92c2f857566ecc7f9306a80021c0a7fa631e0ef5146957235aea859961'
+ }
+ ]
},
{
name: 'latest',
@@ -716,8 +727,9 @@ RSpec.describe ContainerRepository, :aggregate_failures, feature_category: :cont
config_digest: nil,
media_type: 'application/vnd.oci.image.manifest.v1+json',
size_bytes: 1234567892,
- created_at: 10.minutes.ago,
- updated_at: 10.minutes.ago
+ created_at: created_at,
+ updated_at: updated_at,
+ published_at: published_at
}
]
end
@@ -753,8 +765,17 @@ RSpec.describe ContainerRepository, :aggregate_failures, feature_category: :cont
revision: expected_revision,
short_revision: expected_revision[0..8],
created_at: DateTime.rfc3339(tags_response[index][:created_at].rfc3339),
- updated_at: DateTime.rfc3339(tags_response[index][:updated_at].rfc3339)
+ updated_at: DateTime.rfc3339(tags_response[index][:updated_at].rfc3339),
+ published_at: DateTime.rfc3339(tags_response[index][:published_at].rfc3339)
)
+
+ Array(tag.referrers).each_with_index do |ref, ref_index|
+ expect(ref.is_a?(ContainerRegistry::Referrer)).to eq(true)
+ expect(ref).to have_attributes(
+ artifact_type: tags_response[index][:referrers][ref_index][:artifactType],
+ digest: tags_response[index][:referrers][ref_index][:digest]
+ )
+ end
end
end
end
@@ -1148,9 +1169,9 @@ RSpec.describe ContainerRepository, :aggregate_failures, feature_category: :cont
end
end
- describe '.find_or_create_from_path' do
+ describe '.find_or_create_from_path!' do
let(:repository) do
- described_class.find_or_create_from_path(ContainerRegistry::Path.new(path))
+ described_class.find_or_create_from_path!(ContainerRegistry::Path.new(path))
end
let(:repository_path) { ContainerRegistry::Path.new(path) }
@@ -1239,7 +1260,7 @@ RSpec.describe ContainerRepository, :aggregate_failures, feature_category: :cont
Thread.new do
true while wait_for_it
- described_class.find_or_create_from_path(path)
+ described_class.find_or_create_from_path!(path)
end
end
wait_for_it = false
diff --git a/spec/models/deployment_spec.rb b/spec/models/deployment_spec.rb
index cb2c38c15e0..d260e75871d 100644
--- a/spec/models/deployment_spec.rb
+++ b/spec/models/deployment_spec.rb
@@ -56,6 +56,8 @@ RSpec.describe Deployment, feature_category: :continuous_delivery do
let(:scope_attrs) { { project: project } }
let(:usage) { :deployments }
end
+
+ it { is_expected.to include_module(EachBatch) }
end
describe '.stoppable' do
@@ -878,7 +880,7 @@ RSpec.describe Deployment, feature_category: :continuous_delivery do
context 'when the SHA for the deployment does not exist in the repo' do
it 'returns false' do
- deployment.update!(sha: Gitlab::Git::BLANK_SHA)
+ deployment.update!(sha: Gitlab::Git::SHA1_BLANK_SHA)
commit = project.commit
expect(deployment.includes_commit?(commit.id)).to be false
@@ -1537,6 +1539,18 @@ RSpec.describe Deployment, feature_category: :continuous_delivery do
expect(project.commit(deployment.ref_path)).not_to be_nil
end
end
+
+ it 'does not trigger N+1 queries' do
+ project = create(:project, :repository)
+ environment = create(:environment, project: project)
+ create(:deployment, environment: environment, project: project)
+
+ control = ActiveRecord::QueryRecorder.new { project.deployments.fast_destroy_all }
+
+ create_list(:deployment, 2, environment: environment, project: project)
+
+ expect { project.deployments.fast_destroy_all }.not_to exceed_query_limit(control)
+ end
end
describe '#update_merge_request_metrics!' do
diff --git a/spec/models/design_management/design_spec.rb b/spec/models/design_management/design_spec.rb
index 98e5399f737..882aaffa818 100644
--- a/spec/models/design_management/design_spec.rb
+++ b/spec/models/design_management/design_spec.rb
@@ -447,7 +447,7 @@ RSpec.describe DesignManagement::Design, feature_category: :design_management do
let(:versions_count) { 1 }
it 'builds diff refs based on the empty tree if there was only one version' do
- expect(design.diff_refs.base_sha).to eq(Gitlab::Git::BLANK_SHA)
+ expect(design.diff_refs.base_sha).to eq(Gitlab::Git::SHA1_BLANK_SHA)
expect(design.diff_refs.head_sha).to eq(design.diff_refs.head_sha)
end
end
diff --git a/spec/models/group_spec.rb b/spec/models/group_spec.rb
index 1fafa64a535..26a9a364ea6 100644
--- a/spec/models/group_spec.rb
+++ b/spec/models/group_spec.rb
@@ -1603,6 +1603,27 @@ RSpec.describe Group, feature_category: :groups_and_projects do
end
end
+ describe '#owned_by?' do
+ let!(:invited_group_member) { create(:group_member, :owner, :invited, group: group) }
+
+ before do
+ @members = setup_group_members(group)
+ end
+
+ it 'returns true for owner' do
+ expect(group.owned_by?(@members[:owner])).to eq(true)
+ end
+
+ it 'returns false for developer' do
+ expect(group.owned_by?(@members[:developer])).to eq(false)
+ end
+
+ it 'returns false when nil is passed' do
+ expect(invited_group_member.user).to eq(nil)
+ expect(group.owned_by?(invited_group_member.user)).to eq(false)
+ end
+ end
+
def setup_group_members(group)
members = {
owner: create(:user),
@@ -1642,6 +1663,54 @@ RSpec.describe Group, feature_category: :groups_and_projects do
it { expect(subject.parent).to be_kind_of(described_class) }
end
+ describe '#has_user?' do
+ let_it_be(:group) { create(:group) }
+ let_it_be(:subgroup) { create(:group, parent: group) }
+ let_it_be(:user) { create(:user) }
+ let_it_be(:user2) { create(:user) }
+ let_it_be(:invited_group_member) { create(:group_member, :owner, :invited, group: group) }
+
+ subject { group.has_user?(user) }
+
+ context 'when the user is a member' do
+ before_all do
+ group.add_developer(user)
+ end
+
+ it { is_expected.to be_truthy }
+ it { expect(group.has_user?(user2)).to be_falsey }
+
+ it 'returns false for subgroup' do
+ expect(subgroup.has_user?(user)).to be_falsey
+ end
+ end
+
+ context 'when the user is a member with minimal access' do
+ before_all do
+ group.add_member(user, GroupMember::MINIMAL_ACCESS)
+ end
+
+ it { is_expected.to be_falsey }
+ end
+
+ context 'when the user has requested membership' do
+ before_all do
+ create(:group_member, :developer, :access_request, user: user, source: group)
+ end
+
+ it 'returns false' do
+ expect(subject).to be_falsey
+ end
+ end
+
+ context 'when the user is an invited member' do
+ it 'returns false when nil is passed' do
+ expect(invited_group_member.user).to eq(nil)
+ expect(group.has_user?(invited_group_member.user)).to be_falsey
+ end
+ end
+ end
+
describe '#member?' do
let_it_be(:group) { create(:group) }
let_it_be(:user) { create(:user) }
@@ -2442,9 +2511,15 @@ RSpec.describe Group, feature_category: :groups_and_projects do
subject(:highest_group_member) { nested_group_2.highest_group_member(user) }
context 'when the user is not a member of any group in the hierarchy' do
- it 'returns nil' do
- expect(highest_group_member).to be_nil
+ it { is_expected.to be_nil }
+ end
+
+ context 'when access request to group is pending' do
+ before do
+ create(:group_member, requested_at: Time.current.utc, source: nested_group, user: user)
end
+
+ it { is_expected.to be_nil }
end
context 'when the user is only a member of one group in the hierarchy' do
@@ -3168,6 +3243,48 @@ RSpec.describe Group, feature_category: :groups_and_projects do
end
end
+ describe '.descendant_groups_counts' do
+ let_it_be(:parent) { create(:group) }
+ let_it_be(:group) { create(:group, parent: parent) }
+ let_it_be(:project) { create(:project, namespace: parent) }
+
+ subject(:descendant_groups_counts) { described_class.id_in(parent).descendant_groups_counts }
+
+ it 'return a hash of group id and descendant groups count without projects' do
+ expect(descendant_groups_counts).to eq({ parent.id => 1 })
+ end
+ end
+
+ describe '.projects_counts' do
+ let_it_be(:parent) { create(:group) }
+ let_it_be(:group) { create(:group, parent: parent) }
+ let_it_be(:project) { create(:project, namespace: parent) }
+ let_it_be(:archived_project) { create(:project, :archived, namespace: parent) }
+
+ subject(:projects_counts) { described_class.id_in(parent).projects_counts }
+
+ it 'return a hash of group id and projects count without counting archived projects' do
+ expect(projects_counts).to eq({ parent.id => 1 })
+ end
+ end
+
+ describe '.group_members_counts' do
+ let_it_be(:parent) { create(:group) }
+ let_it_be(:group) { create(:group, parent: parent) }
+
+ before_all do
+ create(:group_member, group: parent)
+ create(:group_member, group: parent, requested_at: Time.current)
+ create(:group_member, group: group)
+ end
+
+ subject(:group_members_counts) { described_class.id_in(parent).group_members_counts }
+
+ it 'return a hash of group id and approved direct group members' do
+ expect(group_members_counts).to eq({ parent.id => 1 })
+ end
+ end
+
describe '#shared_with_group_links_visible_to_user' do
let_it_be(:admin) { create :admin }
let_it_be(:normal_user) { create :user }
diff --git a/spec/models/integrations/chat_message/push_message_spec.rb b/spec/models/integrations/chat_message/push_message_spec.rb
index a9d0f801406..ba619f655ec 100644
--- a/spec/models/integrations/chat_message/push_message_spec.rb
+++ b/spec/models/integrations/chat_message/push_message_spec.rb
@@ -72,7 +72,7 @@ RSpec.describe Integrations::ChatMessage::PushMessage do
let(:args) do
{
after: 'after',
- before: Gitlab::Git::BLANK_SHA,
+ before: Gitlab::Git::SHA1_BLANK_SHA,
project_name: 'project_name',
ref: 'refs/tags/new_tag',
user_name: 'test.user',
@@ -112,7 +112,7 @@ RSpec.describe Integrations::ChatMessage::PushMessage do
context 'removed tag' do
let(:args) do
{
- after: Gitlab::Git::BLANK_SHA,
+ after: Gitlab::Git::SHA1_BLANK_SHA,
before: 'before',
project_name: 'project_name',
ref: 'refs/tags/new_tag',
@@ -152,7 +152,7 @@ RSpec.describe Integrations::ChatMessage::PushMessage do
context 'new branch' do
before do
- args[:before] = Gitlab::Git::BLANK_SHA
+ args[:before] = Gitlab::Git::SHA1_BLANK_SHA
end
context 'without markdown' do
@@ -185,7 +185,7 @@ RSpec.describe Integrations::ChatMessage::PushMessage do
context 'removed branch' do
before do
- args[:after] = Gitlab::Git::BLANK_SHA
+ args[:after] = Gitlab::Git::SHA1_BLANK_SHA
end
context 'without markdown' do
diff --git a/spec/models/integrations/diffblue_cover_spec.rb b/spec/models/integrations/diffblue_cover_spec.rb
new file mode 100644
index 00000000000..c1a98cc2fbd
--- /dev/null
+++ b/spec/models/integrations/diffblue_cover_spec.rb
@@ -0,0 +1,74 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Integrations::DiffblueCover, feature_category: :integrations do
+ let_it_be(:project) { build(:project) }
+
+ subject(:integration) { build(:diffblue_cover_integration, project: project) }
+
+ describe 'Validations' do
+ context 'when active' do
+ before do
+ integration.active = true
+ end
+
+ it { is_expected.to validate_presence_of(:diffblue_license_key) }
+ it { is_expected.to validate_presence_of(:diffblue_access_token_name) }
+ it { is_expected.to validate_presence_of(:diffblue_access_token_secret) }
+ end
+
+ context 'when inactive' do
+ before do
+ integration.active = false
+ end
+
+ it { is_expected.not_to validate_presence_of(:diffblue_license_key) }
+ it { is_expected.not_to validate_presence_of(:diffblue_access_token_name) }
+ it { is_expected.not_to validate_presence_of(:diffblue_access_token_secret) }
+ end
+ end
+
+ describe '#avatar_url' do
+ it 'returns the avatar image path' do
+ expect(integration.avatar_url).to eq(ActionController::Base.helpers.image_path(
+ 'illustrations/third-party-logos/integrations-logos/diffblue.svg'
+ ))
+ end
+ end
+
+ describe '#ci-vars' do
+ let(:ci_vars) do
+ [
+ { key: 'DIFFBLUE_LICENSE_KEY', value: '1234-ABCD-DCBA-4321', public: false, masked: true },
+ { key: 'DIFFBLUE_ACCESS_TOKEN_NAME', value: 'Diffblue CI', public: false, masked: true },
+ { key: 'DIFFBLUE_ACCESS_TOKEN',
+ value: 'glpat-00112233445566778899', public: false, masked: true } # gitleaks:allow
+ ]
+ end
+
+ context 'when active' do
+ before do
+ integration.active = true
+ end
+
+ it 'returns the required pipeline vars' do
+ expect(integration.ci_variables).to match_array(ci_vars)
+ end
+ end
+
+ context 'when inactive' do
+ before do
+ integration.active = false
+ end
+
+ it 'does not return the required pipeline vars' do
+ expect(integration.ci_variables).to be_empty
+ end
+ end
+ end
+
+ describe '#diffblue_link' do
+ it { expect(described_class.diffblue_link).to include("https://www.diffblue.com/try-cover/gitlab/") }
+ end
+end
diff --git a/spec/models/integrations/teamcity_spec.rb b/spec/models/integrations/teamcity_spec.rb
index 1537b10ba03..2294e687296 100644
--- a/spec/models/integrations/teamcity_spec.rb
+++ b/spec/models/integrations/teamcity_spec.rb
@@ -246,7 +246,7 @@ RSpec.describe Integrations::Teamcity, :use_clean_rails_memory_store_caching do
end
it 'returns nil when ref is blank' do
- data[:after] = Gitlab::Git::BLANK_SHA
+ data[:after] = Gitlab::Git::SHA1_BLANK_SHA
expect(integration.execute(data)).to be_nil
end
diff --git a/spec/models/issue_email_participant_spec.rb b/spec/models/issue_email_participant_spec.rb
index 8ddc9a5f478..760af974275 100644
--- a/spec/models/issue_email_participant_spec.rb
+++ b/spec/models/issue_email_participant_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe IssueEmailParticipant do
+RSpec.describe IssueEmailParticipant, feature_category: :service_desk do
describe "Associations" do
it { is_expected.to belong_to(:issue) }
end
@@ -27,4 +27,18 @@ RSpec.describe IssueEmailParticipant do
expect(subject).to be_invalid
end
end
+
+ describe 'Scopes' do
+ describe '.with_emails' do
+ let!(:participant) { create(:issue_email_participant, email: 'user@example.com') }
+ let!(:participant1) { create(:issue_email_participant, email: 'user1@example.com') }
+ let!(:participant2) { create(:issue_email_participant, email: 'user2@example.com') }
+
+ it 'returns only participant with matching emails' do
+ expect(described_class.with_emails([participant.email, participant1.email])).to match_array(
+ [participant, participant1]
+ )
+ end
+ end
+ end
end
diff --git a/spec/models/label_spec.rb b/spec/models/label_spec.rb
index 4862b0b0453..2c73178ee63 100644
--- a/spec/models/label_spec.rb
+++ b/spec/models/label_spec.rb
@@ -208,20 +208,6 @@ RSpec.describe Label, feature_category: :team_planning do
end
describe 'scopes' do
- describe '.on_board' do
- let(:board) { create(:board, project: project) }
- let!(:list1) { create(:list, board: board, label: development) }
- let!(:list2) { create(:list, board: board, label: testing) }
-
- let!(:development) { create(:label, project: project, name: 'Development') }
- let!(:testing) { create(:label, project: project, name: 'Testing') }
- let!(:regression) { create(:label, project: project, name: 'Regression') }
-
- it 'returns only the board labels' do
- expect(described_class.on_board(board.id)).to match_array([development, testing])
- end
- end
-
describe '.with_lock_on_merge' do
let(:label) { create(:label, project: project, name: 'Label') }
let(:label_locked) { create(:label, project: project, name: 'Label locked', lock_on_merge: true) }
diff --git a/spec/models/member_spec.rb b/spec/models/member_spec.rb
index db2ae319bc9..bd74af9b7e8 100644
--- a/spec/models/member_spec.rb
+++ b/spec/models/member_spec.rb
@@ -735,6 +735,30 @@ RSpec.describe Member, feature_category: :groups_and_projects do
it { is_expected.to respond_to(:user_email) }
end
+ describe 'callbacks' do
+ describe '#send_invite' do
+ context 'with an invited group member' do
+ it 'sends an invite email' do
+ expect_next_instance_of(NotificationService) do |instance|
+ expect(instance).to receive(:invite_member)
+ end
+
+ create(:group_member, :invited)
+ end
+ end
+
+ context 'with an uninvited member' do
+ it 'does not send an invite email' do
+ expect_next_instance_of(NotificationService) do |instance|
+ expect(instance).not_to receive(:invite_member)
+ end
+
+ create(:group_member)
+ end
+ end
+ end
+ end
+
describe '.valid_email?' do
it 'is a valid email format' do
expect(described_class.valid_email?('foo')).to eq(false)
@@ -898,6 +922,40 @@ RSpec.describe Member, feature_category: :groups_and_projects do
expect(member.invite_token).not_to be_nil
expect_any_instance_of(described_class).not_to receive(:after_accept_invite)
end
+
+ context 'when after accepting invite' do
+ include NotificationHelpers
+
+ let_it_be(:group) { create(:group, require_two_factor_authentication: true) }
+ let_it_be(:member, reload: true) { create(:group_member, :invited, source: group) }
+ let_it_be(:email) { member.invite_email }
+ let(:user) { build(:user, email: email) }
+
+ it 'enqueues an email to user' do
+ member.accept_invite!(user)
+
+ expect_enqueud_email(member.real_source_type, member.id, mail: 'member_invite_accepted_email')
+ end
+
+ it 'calls updates the two factor requirement' do
+ expect(user).to receive(:require_two_factor_authentication_from_group).and_call_original
+
+ member.accept_invite!(user)
+
+ expect(user.require_two_factor_authentication_from_group).to be_truthy
+ end
+
+ context 'when member source is a project' do
+ let_it_be(:project) { create(:project, namespace: group) }
+ let_it_be(:member) { create(:project_member, :invited, source: project, invite_email: email) }
+
+ it 'calls updates the two factor requirement' do
+ expect(user).not_to receive(:require_two_factor_authentication_from_group)
+
+ member.accept_invite!(user)
+ end
+ end
+ end
end
describe '#decline_invite!' do
@@ -1026,6 +1084,110 @@ RSpec.describe Member, feature_category: :groups_and_projects do
end
end
+ context 'for updating organization_users' do
+ let_it_be(:group) { create(:group, :with_organization) }
+ let(:member) { create(:group_member, source: group) }
+ let(:update_organization_users_enabled) { true }
+
+ before do
+ stub_feature_flags(update_organization_users: update_organization_users_enabled)
+ end
+
+ context 'when update_organization_users is enabled' do
+ it 'inserts new record on member creation' do
+ expect { member }.to change { Organizations::OrganizationUser.count }.by(1)
+ record_attrs = { organization: group.organization, user: member.user, access_level: :default }
+ expect(Organizations::OrganizationUser.exists?(record_attrs)).to be(true)
+ end
+
+ context 'when user already exists in the organization_users' do
+ context 'for an already existing default organization_user' do
+ let_it_be(:project) { create(:project, group: group, organization: group.organization) }
+
+ before do
+ member
+ end
+
+ it 'does not insert a new record in organization_users' do
+ expect do
+ create(:project_member, :owner, source: project, user: member.user)
+ end.not_to change { Organizations::OrganizationUser.count }
+
+ expect(
+ Organizations::OrganizationUser.exists?(
+ organization: project.organization, user: member.user, access_level: :default
+ )
+ ).to be(true)
+ end
+
+ it 'does not update timestamps' do
+ travel_to(1.day.from_now) do
+ expect do
+ create(:project_member, :owner, source: project, user: member.user)
+ end.not_to change { Organizations::OrganizationUser.last.updated_at }
+ end
+ end
+ end
+
+ context 'for an already existing owner organization_user' do
+ let_it_be(:user) { create(:user) }
+ let_it_be(:common_attrs) { { organization: group.organization, user: user } }
+
+ before_all do
+ create(:organization_user, :owner, common_attrs)
+ end
+
+ it 'does not insert a new record in organization_users nor update the access_level' do
+ expect do
+ create(:group_member, :owner, source: group, user: user)
+ end.not_to change { Organizations::OrganizationUser.count }
+
+ expect(
+ Organizations::OrganizationUser.exists?(common_attrs.merge(access_level: :default))
+ ).to be(false)
+ expect(
+ Organizations::OrganizationUser.exists?(common_attrs.merge(access_level: :owner))
+ ).to be(true)
+ end
+ end
+ end
+
+ context 'when updating the organization_users is not successful' do
+ it 'rolls back the member creation' do
+ allow(Organizations::OrganizationUser).to receive(:upsert).once.and_raise(ActiveRecord::StatementTimeout)
+
+ expect { member }.to raise_error(ActiveRecord::StatementTimeout)
+ expect(Organizations::OrganizationUser.exists?(organization: group.organization)).to be(false)
+ expect(group.group_members).to be_empty
+ end
+ end
+ end
+
+ shared_examples_for 'does not create an organization_user entry' do
+ specify do
+ expect { member }.not_to change { Organizations::OrganizationUser.count }
+ end
+ end
+
+ context 'when update_organization_users is disabled' do
+ let(:update_organization_users_enabled) { false }
+
+ it_behaves_like 'does not create an organization_user entry'
+ end
+
+ context 'when member is an invite' do
+ let(:member) { create(:group_member, :invited, source: group) }
+
+ it_behaves_like 'does not create an organization_user entry'
+ end
+
+ context 'when organization does not exist' do
+ let(:member) { create(:group_member) }
+
+ it_behaves_like 'does not create an organization_user entry'
+ end
+ end
+
context 'when after_commit :update_highest_role' do
let_it_be(:user) { create(:user) }
@@ -1071,6 +1233,132 @@ RSpec.describe Member, feature_category: :groups_and_projects do
end
end
+ context 'when after_update :post_update_hook' do
+ let_it_be(:member) { create(:group_member, :developer) }
+
+ context 'when access_level is changed' do
+ it 'calls NotificationService.update_member' do
+ expect(NotificationService).to receive_message_chain(:new, :updated_member_access_level).with(member)
+
+ member.update_attribute(:access_level, Member::MAINTAINER)
+ end
+
+ it 'does not send an email when the access level has not changed' do
+ expect(NotificationService).not_to receive(:new)
+
+ member.touch
+ end
+ end
+
+ context 'when expiration is changed' do
+ it 'calls the notification service when membership expiry has changed' do
+ expect(NotificationService).to receive_message_chain(:new, :updated_member_expiration).with(member)
+
+ member.update!(expires_at: 5.days.from_now)
+ end
+ end
+ end
+
+ context 'when after_create :post_create_hook' do
+ include NotificationHelpers
+
+ let_it_be(:source) { create(:group) }
+ let(:member) { create(:group_member, source: source) }
+
+ subject(:create_member) { member }
+
+ shared_examples_for 'invokes a notification' do
+ it 'enqueues an email to user' do
+ create_member
+
+ expect_delivery_jobs_count(1)
+ expect_enqueud_email(member.real_source_type, member.id, mail: 'member_access_granted_email')
+ end
+ end
+
+ shared_examples_for 'performs all the common hooks' do
+ it_behaves_like 'invokes a notification'
+
+ it 'creates an event' do
+ expect { create_member }.to change { Event.count }.by(1)
+ end
+ end
+
+ it 'calls the system hook service' do
+ expect_next_instance_of(SystemHooksService) do |instance|
+ expect(instance).to receive(:execute_hooks_for).with(an_instance_of(GroupMember), :create)
+ end
+
+ create_member
+ end
+
+ context 'when source is a group' do
+ it_behaves_like 'invokes a notification'
+
+ it 'does not create an event' do
+ expect { create_member }.not_to change { Event.count }
+ end
+ end
+
+ context 'when source is a project' do
+ context 'when source is a personal project' do
+ let_it_be(:namespace) { create(:namespace) }
+
+ context 'when member is the owner of the namespace' do
+ subject(:create_member) { create(:project, namespace: namespace) }
+
+ it 'does not enqueue an email' do
+ create_member
+
+ expect_delivery_jobs_count(0)
+ end
+
+ it 'does not create an event' do
+ expect { create_member }.not_to change { Event.count }
+ end
+ end
+
+ context 'when member is not the namespace owner' do
+ let_it_be(:project) { create(:project, namespace: namespace) }
+ let(:member) { create(:project_member, source: project) }
+
+ subject(:create_member) { member }
+
+ it_behaves_like 'performs all the common hooks'
+ end
+ end
+
+ context 'when source is not a personal project' do
+ let_it_be(:project) { create(:project, namespace: create(:group)) }
+ let(:member) { create(:project_member, source: project) }
+
+ subject(:create_member) { member }
+
+ it_behaves_like 'performs all the common hooks'
+ end
+ end
+ end
+
+ context 'when after_create :update_two_factor_requirement' do
+ it 'calls update_two_factor_requirement after creation' do
+ user = create(:user)
+
+ expect(user).to receive(:update_two_factor_requirement)
+
+ create(:group_member, user: user)
+ end
+ end
+
+ context 'when after_destroy :update_two_factor_requirement' do
+ it 'calls update_two_factor_requirement after deletion' do
+ group_member = create(:group_member)
+
+ expect(group_member.user).to receive(:update_two_factor_requirement)
+
+ group_member.destroy!
+ end
+ end
+
describe 'log_invitation_token_cleanup' do
let_it_be(:project) { create :project }
diff --git a/spec/models/members/group_member_spec.rb b/spec/models/members/group_member_spec.rb
index 7307e76272d..3b23f3661f8 100644
--- a/spec/models/members/group_member_spec.rb
+++ b/spec/models/members/group_member_spec.rb
@@ -33,10 +33,6 @@ RSpec.describe GroupMember, feature_category: :cell do
end
end
- describe 'delegations' do
- it { is_expected.to delegate_method(:update_two_factor_requirement).to(:user).allow_nil }
- end
-
describe '.access_level_roles' do
it 'returns Gitlab::Access.options_with_owner' do
expect(described_class.access_level_roles).to eq(Gitlab::Access.options_with_owner)
@@ -67,22 +63,6 @@ RSpec.describe GroupMember, feature_category: :cell do
it { is_expected.to eq 'Group' }
end
- describe '#update_two_factor_requirement' do
- it 'is called after creation and deletion' do
- user = create :user
- group = create :group
- group_member = build :group_member, user: user, group: group
-
- expect(user).to receive(:update_two_factor_requirement)
-
- group_member.save!
-
- expect(user).to receive(:update_two_factor_requirement)
-
- group_member.destroy!
- end
- end
-
describe '#destroy' do
context 'for an orphaned member' do
let!(:orphaned_group_member) do
@@ -95,21 +75,6 @@ RSpec.describe GroupMember, feature_category: :cell do
end
end
- describe '#after_accept_invite' do
- it 'calls #update_two_factor_requirement' do
- email = 'foo@email.com'
- user = build(:user, email: email)
- group = create(:group, require_two_factor_authentication: true)
- group_member = create(:group_member, group: group, invite_token: '1234', invite_email: email)
-
- expect(user).to receive(:require_two_factor_authentication_from_group).and_call_original
-
- group_member.accept_invite!(user)
-
- expect(user.require_two_factor_authentication_from_group).to be_truthy
- end
- end
-
describe '#last_owner_of_the_group?' do
let_it_be(:parent_group) { create(:group) }
let_it_be(:group) { create(:group, parent: parent_group) }
@@ -202,18 +167,6 @@ RSpec.describe GroupMember, feature_category: :cell do
end
end
- context 'when group member expiration date is updated' do
- let_it_be(:group_member) { create(:group_member) }
-
- it 'emails the user that their group membership expiry has changed' do
- expect_next_instance_of(NotificationService) do |notification|
- allow(notification).to receive(:updated_group_member_expiration).with(group_member)
- end
-
- group_member.update!(expires_at: 5.days.from_now)
- end
- end
-
describe 'refresh_member_authorized_projects' do
context 'when importing' do
it 'does not refresh' do
@@ -288,18 +241,4 @@ RSpec.describe GroupMember, feature_category: :cell do
it_behaves_like 'calls AuthorizedProjectsWorker inline to recalculate authorizations'
end
end
-
- context 'group member welcome email', :sidekiq_inline, :saas do
- let_it_be(:group) { create(:group) }
-
- let(:user) { create(:user) }
-
- it 'schedules plain welcome to the group email' do
- expect_next_instance_of(NotificationService) do |notification|
- expect(notification).to receive(:new_group_member)
- end
-
- group.add_developer(user)
- end
- end
end
diff --git a/spec/models/merge_request/metrics_spec.rb b/spec/models/merge_request/metrics_spec.rb
index e9e4956dc41..8d1d503b323 100644
--- a/spec/models/merge_request/metrics_spec.rb
+++ b/spec/models/merge_request/metrics_spec.rb
@@ -93,12 +93,4 @@ RSpec.describe MergeRequest::Metrics do
end
end
end
-
- it_behaves_like 'database events tracking', feature_category: :service_ping do
- let(:merge_request) { create(:merge_request) }
-
- let(:record) { merge_request.metrics }
- let(:namespace) { nil }
- let(:update_params) { { pipeline_id: 1, updated_at: Date.tomorrow } }
- end
end
diff --git a/spec/models/merge_request_diff_spec.rb b/spec/models/merge_request_diff_spec.rb
index 2e68cd9e74a..e31a4399646 100644
--- a/spec/models/merge_request_diff_spec.rb
+++ b/spec/models/merge_request_diff_spec.rb
@@ -53,6 +53,28 @@ RSpec.describe MergeRequestDiff, feature_category: :code_review_workflow do
it { expect(subject.start_commit_sha).to eq('0b4bc9a49b562e85de7cc9e834518ea6828729b9') }
it { expect(subject.patch_id_sha).to eq('1e05e04d4c2a6414d9d4ab38208511a3bbe715f2') }
+ it 'calls GraphqlTriggers.merge_request_diff_generated' do
+ merge_request = create(:merge_request, :skip_diff_creation)
+
+ expect(GraphqlTriggers).to receive(:merge_request_diff_generated).with(merge_request)
+
+ merge_request.create_merge_request_diff
+ end
+
+ context 'when merge_request_diff_generated_subscription flag is disabled' do
+ before do
+ stub_feature_flags(merge_request_diff_generated_subscription: false)
+ end
+
+ it 'does not call GraphqlTriggers.merge_request_diff_generated' do
+ merge_request = create(:merge_request, :skip_diff_creation)
+
+ expect(GraphqlTriggers).not_to receive(:merge_request_diff_generated)
+
+ merge_request.create_merge_request_diff
+ end
+ end
+
context 'when diff_type is merge_head' do
let_it_be(:merge_request) { create(:merge_request) }
@@ -488,6 +510,28 @@ RSpec.describe MergeRequestDiff, feature_category: :code_review_workflow do
end
end
+ shared_examples_for 'perform generated files check' do
+ context 'when collapse_generated option is given' do
+ let(:diff_options) do
+ super().merge(collapse_generated: true)
+ end
+
+ it 'checks generated files' do
+ diffs = diff_with_commits.diffs_in_batch(1, 10, diff_options: diff_options)
+
+ expect(diffs.diff_files.first.generated?).to be false
+ end
+ end
+
+ context 'when collapse_generated option is not given' do
+ it 'does not check generated files' do
+ diffs = diff_with_commits.diffs_in_batch(1, 10, diff_options: diff_options)
+
+ expect(diffs.diff_files.first.generated?).to be nil
+ end
+ end
+ end
+
context 'when no persisted files available' do
before do
diff_with_commits.clean!
@@ -501,6 +545,7 @@ RSpec.describe MergeRequestDiff, feature_category: :code_review_workflow do
end
it_behaves_like 'fetching full diffs'
+ it_behaves_like 'perform generated files check'
end
end
@@ -540,6 +585,8 @@ RSpec.describe MergeRequestDiff, feature_category: :code_review_workflow do
{ ignore_whitespace_change: true }
end
+ it_behaves_like 'perform generated files check'
+
it 'returns pagination data from MergeRequestDiffBatch' do
diffs = diff_with_commits.diffs_in_batch(1, 10, diff_options: diff_options)
file_count = diff_with_commits.merge_request_diff_files.count
@@ -561,11 +608,35 @@ RSpec.describe MergeRequestDiff, feature_category: :code_review_workflow do
end
describe '#paginated_diffs' do
+ shared_examples 'diffs with generated files check' do
+ context 'when collapse_generated_diff_files FF is enabled' do
+ it 'checks generated files' do
+ diffs = diff_with_commits.paginated_diffs(1, 10)
+
+ expect(diffs.diff_files.first.generated?).not_to be_nil
+ end
+ end
+
+ context 'when collapse_generated_diff_files FF is disabled' do
+ before do
+ stub_feature_flags(collapse_generated_diff_files: false)
+ end
+
+ it 'does not check generated files' do
+ diffs = diff_with_commits.paginated_diffs(1, 10)
+
+ expect(diffs.diff_files.first.generated?).to be_nil
+ end
+ end
+ end
+
context 'when no persisted files available' do
before do
diff_with_commits.clean!
end
+ it_behaves_like 'diffs with generated files check'
+
it 'returns a Gitlab::Diff::FileCollection::Compare' do
diffs = diff_with_commits.paginated_diffs(1, 10)
@@ -575,6 +646,8 @@ RSpec.describe MergeRequestDiff, feature_category: :code_review_workflow do
end
context 'when persisted files available' do
+ it_behaves_like 'diffs with generated files check'
+
it 'returns paginated diffs' do
diffs = diff_with_commits.paginated_diffs(1, 10)
@@ -911,11 +984,19 @@ RSpec.describe MergeRequestDiff, feature_category: :code_review_workflow do
end
context 'handling generated files' do
- let(:project) { create(:project, :repository) }
+ let(:project) do
+ create(:project, :custom_repo, files: {
+ '.gitattributes' => '*.txt gitlab-generated'
+ })
+ end
+
+ let(:generated_file_name_manual) { 'generated.txt' }
+ let(:generated_file_name_auto) { 'package-lock.json' }
+ let(:regular_file_name) { 'regular.rb' }
+
let(:target_branch) { project.default_branch }
let(:source_branch) { 'test-generated-diff-file' }
- let(:generated_file_name) { 'generated.txt' }
- let(:regular_file_name) { 'regular.rb' }
+
let(:merge_request) do
create(
:merge_request,
@@ -931,20 +1012,34 @@ RSpec.describe MergeRequestDiff, feature_category: :code_review_workflow do
end
before do
- project.repository.update_file(
+ project.repository.create_branch(source_branch, target_branch)
+
+ project.repository.create_file(
project.creator,
- '.gitattributes',
- '*.txt gitlab-generated',
- message: 'Update',
- branch_name: target_branch)
+ generated_file_name_manual,
+ 'updated generated content',
+ message: 'Update generated file',
+ branch_name: source_branch)
- create_file_in_repo(project, target_branch, source_branch, generated_file_name, "generated text\n")
- create_file_in_repo(project, source_branch, source_branch, regular_file_name, "something else\n")
+ project.repository.create_file(
+ project.creator,
+ generated_file_name_auto,
+ 'updated generated content',
+ message: 'Update generated file',
+ branch_name: source_branch)
+
+ project.repository.create_file(
+ project.creator,
+ regular_file_name,
+ 'updated regular content',
+ message: "Update regular file",
+ branch_name: source_branch)
end
context 'with collapse_generated_diff_files feature flag' do
it 'sets generated field correctly' do
- expect(diff_files.find_by(new_path: generated_file_name)).to be_generated
+ expect(diff_files.find_by(new_path: generated_file_name_manual)).to be_generated
+ expect(diff_files.find_by(new_path: generated_file_name_auto)).to be_generated
expect(diff_files.find_by(new_path: regular_file_name)).not_to be_generated
end
end
@@ -955,7 +1050,8 @@ RSpec.describe MergeRequestDiff, feature_category: :code_review_workflow do
end
it 'sets generated field correctly' do
- expect(diff_files.find_by(new_path: generated_file_name)).not_to be_generated
+ expect(diff_files.find_by(new_path: generated_file_name_auto)).not_to be_generated
+ expect(diff_files.find_by(new_path: generated_file_name_manual)).not_to be_generated
expect(diff_files.find_by(new_path: regular_file_name)).not_to be_generated
end
end
@@ -1206,7 +1302,7 @@ RSpec.describe MergeRequestDiff, feature_category: :code_review_workflow do
it 'returns false if passed commits do not exist' do
expect(subject.includes_any_commits?([])).to eq(false)
- expect(subject.includes_any_commits?([Gitlab::Git::BLANK_SHA])).to eq(false)
+ expect(subject.includes_any_commits?([Gitlab::Git::SHA1_BLANK_SHA])).to eq(false)
end
it 'returns true if passed commits exists' do
diff --git a/spec/models/merge_request_spec.rb b/spec/models/merge_request_spec.rb
index 737884425f7..797ab5be235 100644
--- a/spec/models/merge_request_spec.rb
+++ b/spec/models/merge_request_spec.rb
@@ -5044,10 +5044,36 @@ RSpec.describe MergeRequest, factory_default: :keep, feature_category: :code_rev
end
describe '#should_be_rebased?' do
- it 'returns false for the same source and target branches' do
- merge_request = build_stubbed(:merge_request, source_project: project, target_project: project)
+ let(:merge_request) { build_stubbed(:merge_request) }
+
+ subject { merge_request.should_be_rebased? }
+
+ context 'when the same source and target branches' do
+ let(:merge_request) { build_stubbed(:merge_request, source_project: project, target_project: project) }
+
+ it { is_expected.to be_falsey }
+ end
+
+ context 'when the project is using ff merge method' do
+ before do
+ allow(merge_request.target_project).to receive(:ff_merge_must_be_possible?).and_return(true)
+ end
+
+ context 'when the mr needs to be rebased to merge' do
+ before do
+ allow(merge_request).to receive(:ff_merge_possible?).and_return(false)
+ end
+
+ it { is_expected.to be_truthy }
+ end
- expect(merge_request.should_be_rebased?).to be_falsey
+ context 'when the MR can be merged without rebase' do
+ before do
+ allow(merge_request).to receive(:ff_merge_possible?).and_return(true)
+ end
+
+ it { is_expected.to be_falsey }
+ end
end
end
@@ -6128,6 +6154,34 @@ RSpec.describe MergeRequest, factory_default: :keep, feature_category: :code_rev
end
end
+ describe '#allows_multiple_assignees?' do
+ let(:merge_request) { build_stubbed(:merge_request) }
+
+ subject(:allows_multiple_assignees?) { merge_request.allows_multiple_assignees? }
+
+ before do
+ allow(merge_request.project)
+ .to receive(:allows_multiple_merge_request_assignees?)
+ .and_return(false)
+ end
+
+ it { is_expected.to eq(false) }
+ end
+
+ describe '#allows_multiple_reviewers?' do
+ let(:merge_request) { build_stubbed(:merge_request) }
+
+ subject(:allows_multiple_reviewers?) { merge_request.allows_multiple_reviewers? }
+
+ before do
+ allow(merge_request.project)
+ .to receive(:allows_multiple_merge_request_reviewers?)
+ .and_return(false)
+ end
+
+ it { is_expected.to eq(false) }
+ end
+
describe '#previous_diff' do
let(:merge_request) { create(:merge_request, :skip_diff_creation) }
diff --git a/spec/models/ml/candidate_metric_spec.rb b/spec/models/ml/candidate_metric_spec.rb
index 9f9a6e8e3ba..9ceaa83a6fa 100644
--- a/spec/models/ml/candidate_metric_spec.rb
+++ b/spec/models/ml/candidate_metric_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Ml::CandidateMetric do
+RSpec.describe Ml::CandidateMetric, feature_category: :mlops do
describe 'associations' do
it { is_expected.to belong_to(:candidate) }
end
diff --git a/spec/models/ml/candidate_param_spec.rb b/spec/models/ml/candidate_param_spec.rb
index ff38e471219..89232b10855 100644
--- a/spec/models/ml/candidate_param_spec.rb
+++ b/spec/models/ml/candidate_param_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Ml::CandidateParam do
+RSpec.describe Ml::CandidateParam, feature_category: :mlops do
describe 'associations' do
it { is_expected.to belong_to(:candidate) }
end
diff --git a/spec/models/ml/experiment_spec.rb b/spec/models/ml/experiment_spec.rb
index 36bdb611833..1864c04d2fd 100644
--- a/spec/models/ml/experiment_spec.rb
+++ b/spec/models/ml/experiment_spec.rb
@@ -37,6 +37,20 @@ RSpec.describe Ml::Experiment, feature_category: :mlops do
end
end
+ describe '.by_project' do
+ subject { described_class.by_project(exp.project) }
+
+ it { is_expected.to match_array([exp, exp2]) }
+ end
+
+ describe '.including_project' do
+ subject { described_class.including_project }
+
+ it 'loads latest version' do
+ expect(subject.first.association_cached?(:project)).to be(true)
+ end
+ end
+
describe '#by_project_id_and_iid' do
subject { described_class.by_project_id_and_iid(exp.project_id, iid) }
diff --git a/spec/models/ml/model_metadata_spec.rb b/spec/models/ml/model_metadata_spec.rb
index f06c7a2ce50..0afc7bb7a2e 100644
--- a/spec/models/ml/model_metadata_spec.rb
+++ b/spec/models/ml/model_metadata_spec.rb
@@ -5,6 +5,8 @@ require 'spec_helper'
RSpec.describe Ml::ModelMetadata, feature_category: :mlops do
describe 'associations' do
it { is_expected.to belong_to(:model).required }
+ it { is_expected.to validate_length_of(:name).is_at_most(255) }
+ it { is_expected.to validate_length_of(:value).is_at_most(5000) }
end
describe 'validations' do
diff --git a/spec/models/ml/model_version_metadata_spec.rb b/spec/models/ml/model_version_metadata_spec.rb
new file mode 100644
index 00000000000..7c8ffb9b0d7
--- /dev/null
+++ b/spec/models/ml/model_version_metadata_spec.rb
@@ -0,0 +1,29 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Ml::ModelVersionMetadata, feature_category: :mlops do
+ describe 'associations' do
+ it { is_expected.to belong_to(:model_version).required }
+ it { is_expected.to belong_to(:project).required }
+ it { is_expected.to validate_length_of(:name).is_at_most(255) }
+ it { is_expected.to validate_length_of(:value).is_at_most(5000) }
+ end
+
+ describe 'validations' do
+ let_it_be(:metadata) { create(:ml_model_version_metadata, name: 'some_metadata') }
+ let_it_be(:model_version) { metadata.model_version }
+
+ it 'is unique within the model version' do
+ expect do
+ model_version.metadata.create!(name: 'some_metadata', value: 'blah')
+ end.to raise_error.with_message(/Name 'some_metadata' already taken/)
+ end
+
+ it 'a model version is required' do
+ expect do
+ described_class.create!(name: 'some_metadata', value: 'blah')
+ end.to raise_error.with_message(/Model version must exist/)
+ end
+ end
+end
diff --git a/spec/models/ml/model_version_spec.rb b/spec/models/ml/model_version_spec.rb
index 95d4a545f52..9db9f7e34ab 100644
--- a/spec/models/ml/model_version_spec.rb
+++ b/spec/models/ml/model_version_spec.rb
@@ -19,6 +19,7 @@ RSpec.describe Ml::ModelVersion, feature_category: :mlops do
it { is_expected.to belong_to(:model) }
it { is_expected.to belong_to(:package).class_name('Packages::MlModel::Package') }
it { is_expected.to have_one(:candidate).class_name('Ml::Candidate') }
+ it { is_expected.to have_many(:metadata) }
end
describe 'validation' do
@@ -99,6 +100,34 @@ RSpec.describe Ml::ModelVersion, feature_category: :mlops do
end
end
+ describe '#add_metadata' do
+ it 'accepts an array of metadata and persists it to the model version' do
+ input = [
+ { project_id: base_project.id, key: 'tag1', value: 'value1' },
+ { project_id: base_project.id, key: 'tag2', value: 'value2' }
+ ]
+
+ expect { model_version1.add_metadata(input) }.to change { model_version1.metadata.count }.by(2)
+ end
+
+ it 'raises an error when duplicate key names are supplied' do
+ input = [
+ { project_id: base_project.id, key: 'tag1', value: 'value1' },
+ { project_id: base_project.id, key: 'tag1', value: 'value2' }
+ ]
+
+ expect { model_version1.add_metadata(input) }.to raise_error(ActiveRecord::RecordInvalid)
+ end
+
+ it 'raises an error when validation fails' do
+ input = [
+ { project_id: base_project.id, key: nil, value: 'value1' }
+ ]
+
+ expect { model_version1.add_metadata(input) }.to raise_error(ActiveRecord::RecordInvalid)
+ end
+ end
+
describe '#find_or_create!' do
let_it_be(:existing_model_version) { create(:ml_model_versions, model: model1, version: '1.0.0') }
diff --git a/spec/models/namespace/package_setting_spec.rb b/spec/models/namespace/package_setting_spec.rb
index f06490d7999..e326e8cace8 100644
--- a/spec/models/namespace/package_setting_spec.rb
+++ b/spec/models/namespace/package_setting_spec.rb
@@ -12,13 +12,21 @@ RSpec.describe Namespace::PackageSetting, feature_category: :package_registry do
describe '#maven_duplicates_allowed' do
it { is_expected.to validate_inclusion_of(:maven_duplicates_allowed).in_array([true, false]) }
- it { is_expected.to validate_inclusion_of(:generic_duplicates_allowed).in_array([true, false]) }
- it { is_expected.to validate_inclusion_of(:nuget_duplicates_allowed).in_array([true, false]) }
+ it { is_expected.to validate_length_of(:maven_duplicate_exception_regex).is_at_most(255) }
end
it { is_expected.to allow_value(true, false).for(:nuget_symbol_server_enabled) }
it { is_expected.not_to allow_value(nil).for(:nuget_symbol_server_enabled) }
+ it { is_expected.to validate_inclusion_of(:generic_duplicates_allowed).in_array([true, false]) }
+ it { is_expected.to validate_length_of(:generic_duplicate_exception_regex).is_at_most(255) }
+ it { is_expected.to validate_inclusion_of(:nuget_duplicates_allowed).in_array([true, false]) }
+ it { is_expected.to validate_length_of(:nuget_duplicate_exception_regex).is_at_most(255) }
+
+ it { is_expected.to allow_value(true, false).for(:terraform_module_duplicates_allowed) }
+ it { is_expected.not_to allow_value(nil).for(:terraform_module_duplicates_allowed) }
+ it { is_expected.to validate_length_of(:terraform_module_duplicate_exception_regex).is_at_most(255) }
+
describe 'regex values' do
let_it_be(:package_settings) { create(:namespace_package_setting) }
@@ -39,6 +47,50 @@ RSpec.describe Namespace::PackageSetting, feature_category: :package_registry do
end
end
+ describe 'scopes' do
+ describe '.namespace_id_in' do
+ let_it_be(:package_settings) { create(:namespace_package_setting) }
+ let_it_be(:other_package_settings) { create(:namespace_package_setting) }
+
+ subject { described_class.namespace_id_in([package_settings.namespace_id]) }
+
+ it { is_expected.to eq([package_settings]) }
+ end
+
+ describe '.with_terraform_module_duplicates_allowed_or_exception_regex' do
+ let_it_be(:package_settings) { create(:namespace_package_setting) }
+
+ subject { described_class.with_terraform_module_duplicates_allowed_or_exception_regex }
+
+ context 'when terraform_module_duplicates_allowed is true' do
+ before do
+ package_settings.update_column(:terraform_module_duplicates_allowed, true)
+ end
+
+ it { is_expected.to eq([package_settings]) }
+ end
+
+ context 'when terraform_module_duplicate_exception_regex is present' do
+ before do
+ package_settings.update_column(:terraform_module_duplicate_exception_regex, 'foo')
+ end
+
+ it { is_expected.to eq([package_settings]) }
+ end
+
+ context 'when terraform_module_duplicates_allowed is false and terraform_module_duplicate_exception_regex is empty' do
+ before do
+ package_settings.update_columns(
+ terraform_module_duplicates_allowed: false,
+ terraform_module_duplicate_exception_regex: ''
+ )
+ end
+
+ it { is_expected.to be_empty }
+ end
+ end
+ end
+
describe '#duplicates_allowed?' do
using RSpec::Parameterized::TableSyntax
@@ -46,9 +98,14 @@ RSpec.describe Namespace::PackageSetting, feature_category: :package_registry do
context 'package types with package_settings' do
# As more package types gain settings they will be added to this list
- %i[maven_package generic_package nuget_package].each do |format|
- context "with package_type:#{format}" do
- let_it_be(:package) { create(format, name: 'foo', version: '1.0.0-beta') }
+ [
+ { format: :maven_package, package_name: 'foo' },
+ { format: :generic_package, package_name: 'foo' },
+ { format: :nuget_package, package_name: 'foo' },
+ { format: :terraform_module_package, package_name: 'foo/bar' }
+ ].each do |type|
+ context "with package_type: #{type[:format]}" do
+ let_it_be(:package) { create(type[:format], name: type[:package_name], version: '1.0.0-beta') }
let_it_be(:package_type) { package.package_type }
let_it_be(:package_setting) { package.project.namespace.package_settings }
@@ -61,7 +118,7 @@ RSpec.describe Namespace::PackageSetting, feature_category: :package_registry do
end
with_them do
- context "for #{format}" do
+ context "for #{type[:format]}" do
before do
package_setting.update!(
"#{package_type}_duplicates_allowed" => duplicates_allowed,
diff --git a/spec/models/namespace_spec.rb b/spec/models/namespace_spec.rb
index 0e6513764f5..67b8931f0c5 100644
--- a/spec/models/namespace_spec.rb
+++ b/spec/models/namespace_spec.rb
@@ -39,6 +39,7 @@ RSpec.describe Namespace, feature_category: :groups_and_projects do
it { is_expected.to have_many(:namespace_commit_emails).class_name('Users::NamespaceCommitEmail') }
it { is_expected.to have_many(:cycle_analytics_stages) }
it { is_expected.to have_many(:value_streams) }
+ it { is_expected.to have_many(:non_archived_projects).class_name('Project') }
it do
is_expected.to have_one(:ci_cd_settings).class_name('NamespaceCiCdSetting').inverse_of(:namespace).autosave(true)
@@ -896,12 +897,14 @@ RSpec.describe Namespace, feature_category: :groups_and_projects do
it 'does not cause N+1 query in fetching registries' do
stub_container_registry_tags(repository: :any, tags: [])
- control_count = ActiveRecord::QueryRecorder.new { namespace.any_project_has_container_registry_tags? }.count
+ control = ActiveRecord::QueryRecorder.new { namespace.any_project_has_container_registry_tags? }
other_repositories = create_list(:container_repository, 2)
create(:project, namespace: namespace, container_repositories: other_repositories)
- expect { namespace.first_project_with_container_registry_tags }.not_to exceed_query_limit(control_count + 1)
+ expect do
+ namespace.first_project_with_container_registry_tags
+ end.not_to exceed_query_limit(control).with_threshold(1)
end
end
@@ -1130,6 +1133,28 @@ RSpec.describe Namespace, feature_category: :groups_and_projects do
end
end
+ describe '.gfm_autocomplete_search' do
+ let_it_be(:parent_group) { create(:group, path: 'parent', name: 'Parent') }
+ let_it_be(:group_1) { create(:group, parent: parent_group, path: 'somepath', name: 'Your Group') }
+ let_it_be(:group_2) { create(:group, path: 'noparent', name: 'My Group') }
+
+ it 'returns partial matches on full path' do
+ expect(described_class.gfm_autocomplete_search('parent/som')).to eq([group_1])
+ end
+
+ it 'returns matches on full name across multiple words' do
+ expect(described_class.gfm_autocomplete_search('yourgr')).to eq([group_1])
+ end
+
+ it 'prioritizes sorting of matches that start with the query' do
+ expect(described_class.gfm_autocomplete_search('pare')).to eq([parent_group, group_1, group_2])
+ end
+
+ it 'falls back to sorting by full path' do
+ expect(described_class.gfm_autocomplete_search('group')).to eq([group_2, group_1])
+ end
+ end
+
describe '.with_statistics' do
let_it_be(:namespace) { create(:namespace) }
diff --git a/spec/models/namespaces/descendants_spec.rb b/spec/models/namespaces/descendants_spec.rb
new file mode 100644
index 00000000000..6c153c3307b
--- /dev/null
+++ b/spec/models/namespaces/descendants_spec.rb
@@ -0,0 +1,68 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Namespaces::Descendants, feature_category: :database do
+ describe 'associations' do
+ it { is_expected.to belong_to(:namespace) }
+ end
+
+ describe 'validations' do
+ subject(:namespace_descendants) { create(:namespace_descendants) }
+
+ it { is_expected.to validate_uniqueness_of(:namespace_id) }
+ end
+
+ describe 'factory' do
+ let_it_be(:group) { create(:group) }
+ let_it_be(:subgroup) { create(:group, parent: group) }
+
+ let_it_be(:project1) { create(:project, group: subgroup) }
+ let_it_be(:project2) { create(:project, group: group) }
+
+ it 'up to date descendant record for a group' do
+ descendants = create(:namespace_descendants, namespace: group)
+
+ expect(descendants).to have_attributes(
+ self_and_descendant_group_ids: [group.id, subgroup.id],
+ all_project_ids: [project1.id, project2.id],
+ traversal_ids: [group.id]
+ )
+ end
+
+ it 'creates up-to-date descendant record for a subgroup' do
+ descendants = create(:namespace_descendants, namespace: subgroup)
+
+ expect(descendants).to have_attributes(
+ self_and_descendant_group_ids: [subgroup.id],
+ all_project_ids: [project1.id],
+ traversal_ids: [group.id, subgroup.id]
+ )
+ end
+ end
+
+ describe '.expire_for' do
+ it 'sets the outdated_at column for the given namespace ids' do
+ freeze_time do
+ expire_time = Time.current
+
+ group1 = create(:group).tap do |g|
+ create(:namespace_descendants, namespace: g).reload.update!(outdated_at: nil)
+ end
+ group2 = create(:group, parent: group1).tap { |g| create(:namespace_descendants, namespace: g) }
+ group3 = create(:group, parent: group1)
+
+ group4 = create(:group).tap do |g|
+ create(:namespace_descendants, namespace: g).reload.update!(outdated_at: nil)
+ end
+
+ described_class.expire_for([group1.id, group2.id, group3.id])
+
+ expect(group1.namespace_descendants.outdated_at).to eq(expire_time)
+ expect(group2.namespace_descendants.outdated_at).to eq(expire_time)
+ expect(group3.namespace_descendants).to be_nil
+ expect(group4.namespace_descendants.outdated_at).to be_nil
+ end
+ end
+ end
+end
diff --git a/spec/models/namespaces/traversal/cached_spec.rb b/spec/models/namespaces/traversal/cached_spec.rb
new file mode 100644
index 00000000000..8263e28bb98
--- /dev/null
+++ b/spec/models/namespaces/traversal/cached_spec.rb
@@ -0,0 +1,104 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Namespaces::Traversal::Cached, feature_category: :database do
+ let_it_be_with_refind(:old_parent) { create(:group) }
+ let_it_be_with_refind(:new_parent) { create(:group) }
+ let_it_be_with_refind(:group) { create(:group, parent: old_parent) }
+ let_it_be_with_refind(:subgroup) { create(:group, parent: group) }
+
+ context 'when the namespace_descendants_cache_expiration feature flag is off' do
+ let!(:cache) { create(:namespace_descendants, namespace: group) }
+
+ before do
+ stub_feature_flags(namespace_descendants_cache_expiration: false)
+ end
+
+ it 'does not invalidate the cache' do
+ expect { group.update!(parent: new_parent) }.not_to change { cache.reload.outdated_at }
+ end
+
+ context 'when the group is deleted' do
+ it 'does not invalidate the cache' do
+ expect { group.destroy! }.not_to change { cache.reload.outdated_at }
+ end
+ end
+ end
+
+ context 'when no cached records are present' do
+ it 'does nothing' do
+ group.parent = new_parent
+
+ expect { group.save! }.not_to change { Namespaces::Descendants.all.to_a }
+ end
+ end
+
+ context 'when the namespace record is UserNamespace' do
+ it 'does nothing' do
+ # we won't use the optimization for UserNamespace
+ namespace = create(:user_namespace)
+ cache = create(:namespace_descendants, namespace: namespace)
+
+ expect { namespace.destroy! }.not_to change { cache.reload.outdated_at }
+ end
+ end
+
+ context 'when cached record is present' do
+ let!(:cache) { create(:namespace_descendants, namespace: group) }
+
+ it 'invalidates the cache' do
+ expect { group.update!(parent: new_parent) }.to change { cache.reload.outdated_at }.from(nil)
+ end
+
+ it 'does not invalidate the cache of subgroups' do
+ subgroup_cache = create(:namespace_descendants, namespace: subgroup)
+
+ expect { group.update!(parent: new_parent) }.not_to change { subgroup_cache.reload.outdated_at }
+ end
+
+ context 'when a new subgroup is added' do
+ it 'invalidates the cache' do
+ expect { create(:group, parent: group) }.to change { cache.reload.outdated_at }
+ end
+ end
+
+ context 'when a new project is added' do
+ it 'invalidates the cache' do
+ expect { create(:project, group: group) }.to change { cache.reload.outdated_at }
+ end
+ end
+ end
+
+ context 'when parent group has cached record' do
+ it 'invalidates the parent cache' do
+ old_parent_cache = create(:namespace_descendants, namespace: old_parent)
+ new_parent_cache = create(:namespace_descendants, namespace: new_parent)
+
+ group.update!(parent: new_parent)
+
+ expect(old_parent_cache.reload.outdated_at).not_to be_nil
+ expect(new_parent_cache.reload.outdated_at).not_to be_nil
+ end
+ end
+
+ context 'when group is destroyed' do
+ it 'invalidates the cache' do
+ cache = create(:namespace_descendants, namespace: group)
+
+ expect { group.destroy! }.to change { cache.reload.outdated_at }.from(nil)
+ end
+
+ context 'when parent group has cached record' do
+ it 'invalidates the parent cache' do
+ old_parent_cache = create(:namespace_descendants, namespace: old_parent)
+ new_parent_cache = create(:namespace_descendants, namespace: new_parent)
+
+ group.destroy!
+
+ expect(old_parent_cache.reload.outdated_at).not_to be_nil
+ expect(new_parent_cache.reload.outdated_at).to be_nil # no change
+ end
+ end
+ end
+end
diff --git a/spec/models/note_diff_file_spec.rb b/spec/models/note_diff_file_spec.rb
index 1ece1dfea59..5d9381b5886 100644
--- a/spec/models/note_diff_file_spec.rb
+++ b/spec/models/note_diff_file_spec.rb
@@ -32,7 +32,7 @@ RSpec.describe NoteDiffFile do
end
it 'excludes note diff files with the wrong sha' do
- found = described_class.referencing_sha(Gitlab::Git::BLANK_SHA, project_id: project.id)
+ found = described_class.referencing_sha(Gitlab::Git::SHA1_BLANK_SHA, project_id: project.id)
expect(found).to be_empty
end
diff --git a/spec/models/note_spec.rb b/spec/models/note_spec.rb
index 5aa3ac3a2ea..59795059642 100644
--- a/spec/models/note_spec.rb
+++ b/spec/models/note_spec.rb
@@ -448,13 +448,13 @@ RSpec.describe Note, feature_category: :team_planning do
# Project authorization checks are cached, establish a baseline
retrieve_participants
- control_count = ActiveRecord::QueryRecorder.new do
+ control = ActiveRecord::QueryRecorder.new do
retrieve_participants
end
create(:note_on_commit, project: note.project, note: 'another note', noteable_id: commit.id)
- expect { retrieve_participants }.not_to exceed_query_limit(control_count)
+ expect { retrieve_participants }.not_to exceed_query_limit(control)
end
end
diff --git a/spec/models/onboarding/completion_spec.rb b/spec/models/onboarding/completion_spec.rb
index dd7648f7799..b9c9b994736 100644
--- a/spec/models/onboarding/completion_spec.rb
+++ b/spec/models/onboarding/completion_spec.rb
@@ -42,43 +42,19 @@ RSpec.describe Onboarding::Completion, feature_category: :onboarding do
describe '#completed?' do
subject(:completed?) { described_class.new(project).completed?(column) }
- context 'when code_added' do
- let(:column) { :code_added }
+ let(:column) { :code_added_at }
+ let(:completed_actions) { { code_added_at: code_added_at_timestamp } }
- context 'when commit_count > 1' do
- let(:project) { build(:project, :stubbed_commit_count, namespace: namespace) }
+ context 'when the action has been completed' do
+ let(:code_added_at_timestamp) { Time.current }
- it { is_expected.to eq(true) }
- end
-
- context 'when branch_count > 1' do
- let(:project) { build(:project, :stubbed_branch_count, namespace: namespace) }
-
- it { is_expected.to eq(true) }
- end
-
- context 'when empty repository' do
- let(:project) { build(:project, namespace: namespace) }
-
- it { is_expected.to eq(false) }
- end
+ it { is_expected.to eq(true) }
end
- context 'when secure_dast_run' do
- let(:column) { :secure_dast_run_at }
- let(:completed_actions) { { secure_dast_run_at: secure_dast_run_at } }
-
- context 'when is completed' do
- let(:secure_dast_run_at) { Time.current }
-
- it { is_expected.to eq(true) }
- end
-
- context 'when is not completed' do
- let(:secure_dast_run_at) { nil }
+ context 'when the action has not been completed' do
+ let(:code_added_at_timestamp) { nil }
- it { is_expected.to eq(false) }
- end
+ it { is_expected.to eq(false) }
end
end
end
diff --git a/spec/models/organizations/organization_detail_spec.rb b/spec/models/organizations/organization_detail_spec.rb
index 3f44a9cc637..dd49274e7dd 100644
--- a/spec/models/organizations/organization_detail_spec.rb
+++ b/spec/models/organizations/organization_detail_spec.rb
@@ -16,6 +16,15 @@ RSpec.describe Organizations::OrganizationDetail, type: :model, feature_category
let(:model) { create(:organization_detail) }
end
+ describe '#description_html' do
+ let_it_be(:model) { create(:organization_detail, description: '### Foo **Bar**') }
+ let(:expected_description) { ' Foo <strong>Bar</strong> ' }
+
+ subject { model.description_html }
+
+ it { is_expected.to eq_no_sourcepos(expected_description) }
+ end
+
context 'with uploads' do
it_behaves_like 'model with uploads', false do
let(:model_object) { create(:organization_detail) }
diff --git a/spec/models/organizations/organization_spec.rb b/spec/models/organizations/organization_spec.rb
index 756024b6437..7a3c743eddd 100644
--- a/spec/models/organizations/organization_spec.rb
+++ b/spec/models/organizations/organization_spec.rb
@@ -59,8 +59,10 @@ RSpec.describe Organizations::Organization, type: :model, feature_category: :cel
describe 'delegations' do
it { is_expected.to delegate_method(:description).to(:organization_detail) }
+ it { is_expected.to delegate_method(:description_html).to(:organization_detail) }
it { is_expected.to delegate_method(:avatar).to(:organization_detail) }
it { is_expected.to delegate_method(:avatar_url).to(:organization_detail) }
+ it { is_expected.to delegate_method(:remove_avatar!).to(:organization_detail) }
end
describe 'nested attributes' do
@@ -202,6 +204,32 @@ RSpec.describe Organizations::Organization, type: :model, feature_category: :cel
end
end
+ describe '#owner?' do
+ let_it_be(:user) { create(:user) }
+
+ subject { organization.owner?(user) }
+
+ context 'when user is an owner' do
+ before do
+ create(:organization_user, :owner, organization: organization, user: user)
+ end
+
+ it { is_expected.to eq true }
+ end
+
+ context 'when user is not an owner' do
+ before do
+ create(:organization_user, organization: organization, user: user)
+ end
+
+ it { is_expected.to eq false }
+ end
+
+ context 'when user is not an organization user' do
+ it { is_expected.to eq false }
+ end
+ end
+
describe '#web_url' do
it 'returns web url from `Gitlab::UrlBuilder`' do
web_url = 'http://127.0.0.1:3000/-/organizations/default'
diff --git a/spec/models/organizations/organization_user_spec.rb b/spec/models/organizations/organization_user_spec.rb
index 392ffa1b5be..c3416c93ec9 100644
--- a/spec/models/organizations/organization_user_spec.rb
+++ b/spec/models/organizations/organization_user_spec.rb
@@ -7,4 +7,41 @@ RSpec.describe Organizations::OrganizationUser, type: :model, feature_category:
it { is_expected.to belong_to(:organization).inverse_of(:organization_users).required }
it { is_expected.to belong_to(:user).inverse_of(:organization_users).required }
end
+
+ describe 'validations' do
+ subject { build(:organization_user) }
+
+ it { is_expected.to define_enum_for(:access_level).with_values(described_class.access_levels) }
+ it { is_expected.to validate_presence_of(:access_level) }
+ it { is_expected.to validate_uniqueness_of(:user).scoped_to(:organization_id) }
+
+ it 'does not allow invalid enum value' do
+ expect { build(:organization_user, access_level: '_invalid_') }.to raise_error(ArgumentError)
+ end
+ end
+
+ context 'with loose foreign key on organization_users.organization_id' do
+ it_behaves_like 'cleanup by a loose foreign key' do
+ let_it_be(:parent) { create(:organization) }
+ let_it_be(:model) { create(:organization_user, organization: parent) }
+ end
+ end
+
+ context 'with loose foreign key on organization_users.user_id' do
+ it_behaves_like 'cleanup by a loose foreign key' do
+ let_it_be(:parent) { create(:user) }
+ let_it_be(:model) { create(:organization_user, user: parent) }
+ end
+ end
+
+ describe '.owners' do
+ it 'returns the owners of the organization' do
+ organization_user = create(:organization_user, :owner)
+ create(:organization_user)
+
+ expect(described_class.owners).to match([organization_user])
+ end
+ end
+
+ it_behaves_like 'having unique enum values'
end
diff --git a/spec/models/packages/protection/rule_spec.rb b/spec/models/packages/protection/rule_spec.rb
index 3f0aefa945a..03d0440f0d9 100644
--- a/spec/models/packages/protection/rule_spec.rb
+++ b/spec/models/packages/protection/rule_spec.rb
@@ -35,30 +35,32 @@ RSpec.describe Packages::Protection::Rule, type: :model, feature_category: :pack
it { is_expected.to validate_uniqueness_of(:package_name_pattern).scoped_to(:project_id, :package_type) }
it { is_expected.to validate_length_of(:package_name_pattern).is_at_most(255) }
- [
- '@my-scope/my-package',
- '@my-scope/*my-package-with-wildcard-inbetween',
- '@my-scope/*my-package-with-wildcard-start',
- '@my-scope/my-*package-*with-wildcard-multiple-*',
- '@my-scope/my-package-with_____underscore',
- '@my-scope/my-package-with-regex-characters.+',
- '@my-scope/my-package-with-wildcard-end*'
- ].each do |package_name_pattern|
- it { is_expected.to allow_value(package_name_pattern).for(:package_name_pattern) }
+ where(:package_name_pattern, :allowed) do
+ '@my-scope/my-package' | true
+ '@my-scope/*my-package-with-wildcard-inbetween' | true
+ '@my-scope/*my-package-with-wildcard-start' | true
+ '@my-scope/my-*package-*with-wildcard-multiple-*' | true
+ '@my-scope/my-package-with_____underscore' | true
+ '@my-scope/my-package-with-regex-characters.+' | true
+ '@my-scope/my-package-with-wildcard-end*' | true
+
+ '@my-scope/my-package-with-percent-sign-%' | false
+ '*@my-scope/my-package-with-wildcard-start' | false
+ '@my-scope/my-package-with-backslash-\*' | false
end
- [
- '@my-scope/my-package-with-percent-sign-%',
- '*@my-scope/my-package-with-wildcard-start',
- '@my-scope/my-package-with-backslash-\*'
- ].each do |package_name_pattern|
- it {
- is_expected.not_to(
- allow_value(package_name_pattern)
- .for(:package_name_pattern)
- .with_message(_('should be a valid NPM package name with optional wildcard characters.'))
- )
- }
+ with_them do
+ if params[:allowed]
+ it { is_expected.to allow_value(package_name_pattern).for(:package_name_pattern) }
+ else
+ it {
+ is_expected.not_to(
+ allow_value(package_name_pattern)
+ .for(:package_name_pattern)
+ .with_message(_('should be a valid NPM package name with optional wildcard characters.'))
+ )
+ }
+ end
end
end
diff --git a/spec/models/preloaders/commit_status_preloader_spec.rb b/spec/models/preloaders/commit_status_preloader_spec.rb
index 85ea784335c..0453b6267ed 100644
--- a/spec/models/preloaders/commit_status_preloader_spec.rb
+++ b/spec/models/preloaders/commit_status_preloader_spec.rb
@@ -21,13 +21,13 @@ RSpec.describe Preloaders::CommitStatusPreloader do
it 'prevents N+1 for specified relations', :use_sql_query_cache do
execute
- control_count = ActiveRecord::QueryRecorder.new(skip_cached: false) do
+ control = ActiveRecord::QueryRecorder.new(skip_cached: false) do
call_each_relation(statuses.sample(3))
end
expect do
call_each_relation(statuses)
- end.to issue_same_number_of_queries_as(control_count)
+ end.to issue_same_number_of_queries_as(control)
end
private
diff --git a/spec/models/project_authorizations/changes_spec.rb b/spec/models/project_authorizations/changes_spec.rb
index d6ccfccbcbe..9c2686e82f6 100644
--- a/spec/models/project_authorizations/changes_spec.rb
+++ b/spec/models/project_authorizations/changes_spec.rb
@@ -28,48 +28,81 @@ RSpec.describe ProjectAuthorizations::Changes, feature_category: :groups_and_pro
end
shared_examples_for 'publishes AuthorizationsChangedEvent' do
- it 'publishes a AuthorizationsChangedEvent event with project id' do
- project_ids.each do |project_id|
- project_data = { project_id: project_id }
- project_event = instance_double('::ProjectAuthorizations::AuthorizationsChangedEvent', data: project_data)
+ it 'does not publish a AuthorizationsChangedEvent event' do
+ expect(::Gitlab::EventStore).not_to receive(:publish)
+ .with(an_instance_of(::ProjectAuthorizations::AuthorizationsChangedEvent))
- allow(::ProjectAuthorizations::AuthorizationsChangedEvent).to receive(:new)
- .with(data: project_data)
- .and_return(project_event)
+ apply_project_authorization_changes
+ end
- allow(::Gitlab::EventStore).to receive(:publish)
- expect(::Gitlab::EventStore).to receive(:publish).with(project_event)
+ context 'when feature flag "add_policy_approvers_to_rules" is disabled' do
+ before do
+ stub_feature_flags(add_policy_approvers_to_rules: false)
end
- apply_project_authorization_changes
+ it 'publishes a AuthorizationsChangedEvent event with project id' do
+ allow(::Gitlab::EventStore).to receive(:publish)
+ project_ids.each do |project_id|
+ project_data = { project_id: project_id }
+ project_event = instance_double('::ProjectAuthorizations::AuthorizationsChangedEvent', data: project_data)
+
+ allow(::ProjectAuthorizations::AuthorizationsChangedEvent).to receive(:new)
+ .with(data: project_data)
+ .and_return(project_event)
+
+ expect(::Gitlab::EventStore).to receive(:publish).with(project_event)
+ end
+
+ apply_project_authorization_changes
+ end
end
end
shared_examples_for 'publishes AuthorizationsRemovedEvent' do
it 'publishes a AuthorizationsRemovedEvent event with project id' do
- project_ids.each do |project_id|
+ allow(::Gitlab::EventStore).to receive(:publish_group)
+ project_events = project_ids.map do |project_id|
project_data = { project_id: project_id, user_ids: user_ids }
project_event = instance_double('::ProjectAuthorizations::AuthorizationsRemovedEvent', data: project_data)
allow(::ProjectAuthorizations::AuthorizationsRemovedEvent).to receive(:new)
.with(data: project_data)
.and_return(project_event)
+ project_event
+ end
+ expect(::Gitlab::EventStore).to receive(:publish_group).with(project_events)
- allow(::Gitlab::EventStore).to receive(:publish)
- expect(::Gitlab::EventStore).to receive(:publish).with(project_event)
+ apply_project_authorization_changes
+ end
+ end
+
+ shared_examples_for 'publishes AuthorizationsAddedEvent' do
+ it 'publishes a AuthorizationsAddedEvent event with project id' do
+ allow(::Gitlab::EventStore).to receive(:publish_group)
+ project_events = project_ids.map do |project_id|
+ project_data = { project_id: project_id, user_ids: user_ids }
+ project_event = instance_double('::ProjectAuthorizations::AuthorizationsAddedEvent', data: project_data)
+
+ allow(::ProjectAuthorizations::AuthorizationsAddedEvent).to receive(:new)
+ .with(data: project_data)
+ .and_return(project_event)
+ project_event
end
+ expect(::Gitlab::EventStore).to receive(:publish_group).with(project_events)
apply_project_authorization_changes
end
- context 'when feature flag "user_approval_rules_removal" is disabled' do
+ context 'when feature flag "add_policy_approvers_to_rules" is disabled' do
before do
- stub_feature_flags(user_approval_rules_removal: false)
+ stub_feature_flags(add_policy_approvers_to_rules: false)
end
- it 'does not publish a AuthorizationsRemovedEvent event' do
+ it 'does not publish a AuthorizationsAddedEvent event' do
expect(::Gitlab::EventStore).not_to(
- receive(:publish).with(an_instance_of(::ProjectAuthorizations::AuthorizationsRemovedEvent))
+ receive(:publish_group).with(array_including(
+ an_instance_of(::ProjectAuthorizations::AuthorizationsAddedEvent))
+ )
)
apply_project_authorization_changes
@@ -88,8 +121,23 @@ RSpec.describe ProjectAuthorizations::Changes, feature_category: :groups_and_pro
shared_examples_for 'does not publish AuthorizationsRemovedEvent' do
it 'does not publish a AuthorizationsRemovedEvent event' do
- expect(::Gitlab::EventStore).not_to receive(:publish)
- .with(an_instance_of(::ProjectAuthorizations::AuthorizationsRemovedEvent))
+ expect(::Gitlab::EventStore).not_to(
+ receive(:publish_group).with(
+ array_including(an_instance_of(::ProjectAuthorizations::AuthorizationsRemovedEvent))
+ )
+ )
+
+ apply_project_authorization_changes
+ end
+ end
+
+ shared_examples_for 'does not publish AuthorizationsAddedEvent' do
+ it 'does not publish a AuthorizationsAddedEvent event' do
+ expect(::Gitlab::EventStore).not_to(
+ receive(:publish_group).with(
+ array_including(an_instance_of(::ProjectAuthorizations::AuthorizationsAddedEvent))
+ )
+ )
apply_project_authorization_changes
end
@@ -101,6 +149,7 @@ RSpec.describe ProjectAuthorizations::Changes, feature_category: :groups_and_pro
let_it_be(:project_2) { create(:project) }
let_it_be(:project_3) { create(:project) }
let(:project_ids) { [project_1.id, project_2.id, project_3.id] }
+ let(:user_ids) { [user.id] }
let(:authorizations_to_add) do
[
@@ -155,6 +204,7 @@ RSpec.describe ProjectAuthorizations::Changes, feature_category: :groups_and_pro
it_behaves_like 'logs the detail', batch_size: 2
it_behaves_like 'publishes AuthorizationsChangedEvent'
+ it_behaves_like 'publishes AuthorizationsAddedEvent'
it_behaves_like 'does not publish AuthorizationsRemovedEvent'
context 'when the GitLab installation does not have a replica database configured' do
@@ -166,6 +216,7 @@ RSpec.describe ProjectAuthorizations::Changes, feature_category: :groups_and_pro
it_behaves_like 'inserts the rows in batches, as per the `per_batch` size, without a delay between batches'
it_behaves_like 'does not log any detail'
it_behaves_like 'publishes AuthorizationsChangedEvent'
+ it_behaves_like 'publishes AuthorizationsAddedEvent'
it_behaves_like 'does not publish AuthorizationsRemovedEvent'
end
end
@@ -178,6 +229,7 @@ RSpec.describe ProjectAuthorizations::Changes, feature_category: :groups_and_pro
it_behaves_like 'inserts the rows in batches, as per the `per_batch` size, without a delay between batches'
it_behaves_like 'does not log any detail'
it_behaves_like 'publishes AuthorizationsChangedEvent'
+ it_behaves_like 'publishes AuthorizationsAddedEvent'
it_behaves_like 'does not publish AuthorizationsRemovedEvent'
end
end
@@ -242,6 +294,7 @@ RSpec.describe ProjectAuthorizations::Changes, feature_category: :groups_and_pro
it_behaves_like 'logs the detail', batch_size: 2
it_behaves_like 'publishes AuthorizationsChangedEvent'
it_behaves_like 'publishes AuthorizationsRemovedEvent'
+ it_behaves_like 'does not publish AuthorizationsAddedEvent'
context 'when the GitLab installation does not have a replica database configured' do
before do
@@ -253,6 +306,7 @@ RSpec.describe ProjectAuthorizations::Changes, feature_category: :groups_and_pro
it_behaves_like 'does not log any detail'
it_behaves_like 'publishes AuthorizationsChangedEvent'
it_behaves_like 'publishes AuthorizationsRemovedEvent'
+ it_behaves_like 'does not publish AuthorizationsAddedEvent'
end
end
@@ -265,6 +319,7 @@ RSpec.describe ProjectAuthorizations::Changes, feature_category: :groups_and_pro
it_behaves_like 'does not log any detail'
it_behaves_like 'publishes AuthorizationsChangedEvent'
it_behaves_like 'publishes AuthorizationsRemovedEvent'
+ it_behaves_like 'does not publish AuthorizationsAddedEvent'
end
context 'when the user_ids list is empty' do
@@ -273,6 +328,7 @@ RSpec.describe ProjectAuthorizations::Changes, feature_category: :groups_and_pro
it_behaves_like 'does not removes project authorizations of the users in the current project'
it_behaves_like 'does not publish AuthorizationsChangedEvent'
it_behaves_like 'does not publish AuthorizationsRemovedEvent'
+ it_behaves_like 'does not publish AuthorizationsAddedEvent'
end
context 'when the user_ids list is nil' do
@@ -281,6 +337,7 @@ RSpec.describe ProjectAuthorizations::Changes, feature_category: :groups_and_pro
it_behaves_like 'does not removes project authorizations of the users in the current project'
it_behaves_like 'does not publish AuthorizationsChangedEvent'
it_behaves_like 'does not publish AuthorizationsRemovedEvent'
+ it_behaves_like 'does not publish AuthorizationsAddedEvent'
end
end
@@ -344,6 +401,7 @@ RSpec.describe ProjectAuthorizations::Changes, feature_category: :groups_and_pro
it_behaves_like 'logs the detail', batch_size: 2
it_behaves_like 'publishes AuthorizationsChangedEvent'
it_behaves_like 'publishes AuthorizationsRemovedEvent'
+ it_behaves_like 'does not publish AuthorizationsAddedEvent'
context 'when the GitLab installation does not have a replica database configured' do
before do
@@ -355,6 +413,7 @@ RSpec.describe ProjectAuthorizations::Changes, feature_category: :groups_and_pro
it_behaves_like 'does not log any detail'
it_behaves_like 'publishes AuthorizationsChangedEvent'
it_behaves_like 'publishes AuthorizationsRemovedEvent'
+ it_behaves_like 'does not publish AuthorizationsAddedEvent'
end
end
@@ -367,6 +426,7 @@ RSpec.describe ProjectAuthorizations::Changes, feature_category: :groups_and_pro
it_behaves_like 'does not log any detail'
it_behaves_like 'publishes AuthorizationsChangedEvent'
it_behaves_like 'publishes AuthorizationsRemovedEvent'
+ it_behaves_like 'does not publish AuthorizationsAddedEvent'
end
context 'when the project_ids list is empty' do
@@ -375,6 +435,7 @@ RSpec.describe ProjectAuthorizations::Changes, feature_category: :groups_and_pro
it_behaves_like 'does not removes any project authorizations from the current user'
it_behaves_like 'does not publish AuthorizationsChangedEvent'
it_behaves_like 'does not publish AuthorizationsRemovedEvent'
+ it_behaves_like 'does not publish AuthorizationsAddedEvent'
end
context 'when the user_ids list is nil' do
@@ -383,6 +444,7 @@ RSpec.describe ProjectAuthorizations::Changes, feature_category: :groups_and_pro
it_behaves_like 'does not removes any project authorizations from the current user'
it_behaves_like 'does not publish AuthorizationsChangedEvent'
it_behaves_like 'does not publish AuthorizationsRemovedEvent'
+ it_behaves_like 'does not publish AuthorizationsAddedEvent'
end
end
end
diff --git a/spec/models/project_spec.rb b/spec/models/project_spec.rb
index c256c4f10f8..1743c9bd89d 100644
--- a/spec/models/project_spec.rb
+++ b/spec/models/project_spec.rb
@@ -1121,6 +1121,8 @@ RSpec.describe Project, factory_default: :keep, feature_category: :groups_and_pr
end
it { is_expected.to delegate_method(:members).to(:team).with_prefix(true) }
+ it { is_expected.to delegate_method(:has_user?).to(:team) }
+ it { is_expected.to delegate_method(:member?).to(:team) }
it { is_expected.to delegate_method(:name).to(:owner).with_prefix(true).allow_nil }
it { is_expected.to delegate_method(:root_ancestor).to(:namespace).allow_nil }
it { is_expected.to delegate_method(:certificate_based_clusters_enabled?).to(:namespace).allow_nil }
@@ -2340,11 +2342,11 @@ RSpec.describe Project, factory_default: :keep, feature_category: :groups_and_pr
it 'avoids n + 1', :aggregate_failures do
create(:prometheus_integration)
run_test = -> { described_class.include_integration(:prometheus_integration).map(&:prometheus_integration) }
- control_count = ActiveRecord::QueryRecorder.new { run_test.call }
+ control = ActiveRecord::QueryRecorder.new { run_test.call }
create(:prometheus_integration)
expect(run_test.call.count).to eq(2)
- expect { run_test.call }.not_to exceed_query_limit(control_count)
+ expect { run_test.call }.not_to exceed_query_limit(control)
end
end
@@ -6591,17 +6593,17 @@ RSpec.describe Project, factory_default: :keep, feature_category: :groups_and_pr
let_it_be(:subject) { create(:project) }
it 'avoids N+1 database queries' do
- control_count = ActiveRecord::QueryRecorder.new { subject.find_or_initialize_integrations }.count
+ control = ActiveRecord::QueryRecorder.new { subject.find_or_initialize_integrations }
- expect(control_count).to be <= 4
+ expect(control.count).to be <= 4
end
it 'avoids N+1 database queries with more available integrations' do
allow(Integration).to receive(:available_integration_names).and_return(%w[pushover])
- control_count = ActiveRecord::QueryRecorder.new { subject.find_or_initialize_integrations }
+ control = ActiveRecord::QueryRecorder.new { subject.find_or_initialize_integrations }
allow(Integration).to receive(:available_integration_names).and_call_original
- expect { subject.find_or_initialize_integrations }.not_to exceed_query_limit(control_count)
+ expect { subject.find_or_initialize_integrations }.not_to exceed_query_limit(control)
end
context 'with disabled integrations' do
@@ -6648,11 +6650,11 @@ RSpec.describe Project, factory_default: :keep, feature_category: :groups_and_pr
it 'avoids N+1 database queries' do
allow(Integration).to receive(:available_integration_names).and_return(%w[prometheus pushover])
- control_count = ActiveRecord::QueryRecorder.new { subject.find_or_initialize_integration('prometheus') }.count
+ control = ActiveRecord::QueryRecorder.new { subject.find_or_initialize_integration('prometheus') }
allow(Integration).to receive(:available_integration_names).and_call_original
- expect { subject.find_or_initialize_integration('prometheus') }.not_to exceed_query_limit(control_count)
+ expect { subject.find_or_initialize_integration('prometheus') }.not_to exceed_query_limit(control)
end
it 'returns nil if integration is disabled' do
@@ -7978,6 +7980,24 @@ RSpec.describe Project, factory_default: :keep, feature_category: :groups_and_pr
expect(project.reload.topics.map(&:name)).to eq(%w[topic1 topic2 topic3])
end
+
+ it 'assigns slug value for new topics' do
+ topic = create(:topic, name: 'old topic', title: 'old topic', slug: nil)
+ project.topic_list = topic.name
+ project.save!
+
+ project.topic_list = 'old topic, new topic'
+ expect { expect(project.save).to be true }.to change { Projects::Topic.count }.by(1)
+
+ topics = project.reset.topics
+ expect(topics.map(&:name)).to match_array(['old topic', 'new topic'])
+
+ old_topic = topics.first
+ new_topic = topics.last
+
+ expect(old_topic.slug).to be_nil
+ expect(new_topic.slug).to eq('newtopic')
+ end
end
context 'public topics counter' do
@@ -8970,6 +8990,30 @@ RSpec.describe Project, factory_default: :keep, feature_category: :groups_and_pr
end
end
+ describe '#allows_multiple_merge_request_assignees?' do
+ let(:project) { build_stubbed(:project) }
+
+ subject(:allows_multiple_merge_request_assignees?) { project.allows_multiple_merge_request_assignees? }
+
+ it { is_expected.to eq(false) }
+ end
+
+ describe '#allows_multiple_merge_request_reviewers?' do
+ let(:project) { build_stubbed(:project) }
+
+ subject(:allows_multiple_merge_request_reviewers?) { project.allows_multiple_merge_request_reviewers? }
+
+ it { is_expected.to eq(false) }
+ end
+
+ describe '#on_demand_dast_available?' do
+ let_it_be(:project) { create(:project) }
+
+ subject(:on_demand_dast_available?) { project.on_demand_dast_available? }
+
+ it { is_expected.to be_falsy }
+ end
+
private
def finish_job(export_job)
diff --git a/spec/models/project_statistics_spec.rb b/spec/models/project_statistics_spec.rb
index 211ac257c53..d21d29aa469 100644
--- a/spec/models/project_statistics_spec.rb
+++ b/spec/models/project_statistics_spec.rb
@@ -647,4 +647,22 @@ RSpec.describe ProjectStatistics do
end
end
end
+
+ describe '#export_size' do
+ it 'does not include artifacts & packages size' do
+ statistics.update!(
+ repository_size: 3.gigabytes,
+ wiki_size: 3.gigabytes,
+ lfs_objects_size: 3.gigabytes,
+ build_artifacts_size: 3.gigabytes,
+ packages_size: 3.gigabytes,
+ snippets_size: 3.gigabytes,
+ uploads_size: 3.gigabytes
+ )
+
+ statistics.refresh_storage_size!
+
+ expect(statistics.reload.export_size).to eq(15.gigabytes)
+ end
+ end
end
diff --git a/spec/models/project_team_spec.rb b/spec/models/project_team_spec.rb
index 10a2e967b14..cd721b9f163 100644
--- a/spec/models/project_team_spec.rb
+++ b/spec/models/project_team_spec.rb
@@ -341,22 +341,60 @@ RSpec.describe ProjectTeam, feature_category: :groups_and_projects do
end
end
+ describe '#has_user?' do
+ let_it_be(:group) { create(:group) }
+ let_it_be(:project) { create(:project, namespace: group) }
+ let_it_be(:user) { create(:user) }
+ let_it_be(:user2) { create(:user) }
+ let_it_be(:invited_project_member) { create(:project_member, :owner, :invited, project: project) }
+
+ subject { project.team.has_user?(user) }
+
+ context 'when the user is a member' do
+ before_all do
+ project.add_developer(user)
+ end
+
+ it { is_expected.to be_truthy }
+ it { expect(group.has_user?(user2)).to be_falsey }
+ end
+
+ context 'when user is a member with minimal access' do
+ before_all do
+ project.add_member(user, GroupMember::MINIMAL_ACCESS)
+ end
+
+ it { is_expected.to be_falsey }
+ end
+
+ context 'when user is not a direct member of the project' do
+ before_all do
+ create(:group_member, :developer, user: user, source: group)
+ end
+
+ it { is_expected.to be_falsey }
+ end
+
+ context 'when the user is an invited member' do
+ it 'returns false when nil is passed' do
+ expect(invited_project_member.user).to eq(nil)
+ expect(project.team.has_user?(invited_project_member.user)).to be_falsey
+ end
+ end
+ end
+
describe "#human_max_access" do
- it 'returns Maintainer role' do
- user = create(:user)
- group = create(:group)
- project = create(:project, namespace: group)
+ let_it_be(:user) { create(:user) }
+ let_it_be(:group) { create(:group) }
+ let_it_be(:project) { create(:project, namespace: group) }
+ it 'returns Maintainer role' do
group.add_maintainer(user)
expect(project.team.human_max_access(user.id)).to eq 'Maintainer'
end
it 'returns Owner role' do
- user = create(:user)
- group = create(:group)
- project = create(:project, namespace: group)
-
group.add_owner(user)
expect(project.team.human_max_access(user.id)).to eq 'Owner'
diff --git a/spec/models/projects/project_topic_spec.rb b/spec/models/projects/project_topic_spec.rb
index c7a989040c7..634c391a25a 100644
--- a/spec/models/projects/project_topic_spec.rb
+++ b/spec/models/projects/project_topic_spec.rb
@@ -12,5 +12,6 @@ RSpec.describe Projects::ProjectTopic do
describe 'associations' do
it { is_expected.to belong_to(:project) }
it { is_expected.to belong_to(:topic) }
+ it { is_expected.to validate_uniqueness_of(:topic_id).scoped_to(:project_id) }
end
end
diff --git a/spec/models/projects/topic_spec.rb b/spec/models/projects/topic_spec.rb
index ebe53f3761d..e322fbbbcc3 100644
--- a/spec/models/projects/topic_spec.rb
+++ b/spec/models/projects/topic_spec.rb
@@ -32,6 +32,21 @@ RSpec.describe Projects::Topic do
it { is_expected.not_to allow_value("new\nline").for(:name).with_message(name_format_message) }
it { is_expected.not_to allow_value("new\rline").for(:name).with_message(name_format_message) }
it { is_expected.not_to allow_value("new\vline").for(:name).with_message(name_format_message) }
+
+ context 'for slug' do
+ let(:slug_format_message) { "can contain only letters, digits, '_', '-', '.'" }
+
+ it { is_expected.to validate_length_of(:slug).is_at_most(255) }
+ it { is_expected.to validate_uniqueness_of(:slug).case_insensitive }
+
+ it { is_expected.not_to allow_value("new\nline").for(:slug).with_message(slug_format_message) }
+ it { is_expected.not_to allow_value("space value").for(:slug).with_message(slug_format_message) }
+ it { is_expected.not_to allow_value("$special_symbol_value").for(:slug).with_message(slug_format_message) }
+
+ it { is_expected.to allow_value("underscored_value").for(:slug) }
+ it { is_expected.to allow_value("hyphenated-value").for(:slug) }
+ it { is_expected.to allow_value("dotted.value").for(:slug) }
+ end
end
describe 'scopes' do
diff --git a/spec/models/release_spec.rb b/spec/models/release_spec.rb
index bff9f73e44a..4a4cb1ae46a 100644
--- a/spec/models/release_spec.rb
+++ b/spec/models/release_spec.rb
@@ -156,6 +156,7 @@ RSpec.describe Release, feature_category: :release_orchestration do
describe 'latest releases' do
let_it_be(:yesterday) { Time.zone.now - 1.day }
+ let_it_be(:today) { Time.zone.now }
let_it_be(:tomorrow) { Time.zone.now + 1.day }
let_it_be(:project2) { create(:project) }
@@ -176,6 +177,14 @@ RSpec.describe Release, feature_category: :release_orchestration do
create(:release, project: project2, released_at: tomorrow, created_at: yesterday)
end
+ let_it_be(:project2_release3) do
+ create(:release, project: project2, released_at: today, created_at: yesterday)
+ end
+
+ let_it_be(:project2_release4) do
+ create(:release, project: project2, released_at: today, created_at: yesterday, release_published_at: today)
+ end
+
let(:args) { {} }
describe '.latest' do
@@ -240,6 +249,16 @@ RSpec.describe Release, feature_category: :release_orchestration do
end
end
end
+
+ describe '.waiting_for_publish_event' do
+ let(:releases) { [project2_release3] }
+
+ subject(:waiting) { described_class.waiting_for_publish_event }
+
+ it "finds today's releases not yet published" do
+ expect(waiting).to match_array(releases)
+ end
+ end
end
describe '#assets_count' do
diff --git a/spec/models/repository_spec.rb b/spec/models/repository_spec.rb
index eeb0bbb8e7d..ca2ee447b4c 100644
--- a/spec/models/repository_spec.rb
+++ b/spec/models/repository_spec.rb
@@ -661,7 +661,7 @@ RSpec.describe Repository, feature_category: :source_code_management do
describe '#blob_at' do
context 'blank sha' do
- subject { repository.blob_at(Gitlab::Git::BLANK_SHA, '.gitignore') }
+ subject { repository.blob_at(Gitlab::Git::SHA1_BLANK_SHA, '.gitignore') }
it { is_expected.to be_nil }
end
@@ -3226,8 +3226,8 @@ RSpec.describe Repository, feature_category: :source_code_management do
end
it 'returns false for invalid commit IDs' do
- expect(repository.ancestor?(commit.id, Gitlab::Git::BLANK_SHA)).to eq(false)
- expect(repository.ancestor?(Gitlab::Git::BLANK_SHA, commit.id)).to eq(false)
+ expect(repository.ancestor?(commit.id, Gitlab::Git::SHA1_BLANK_SHA)).to eq(false)
+ expect(repository.ancestor?(Gitlab::Git::SHA1_BLANK_SHA, commit.id)).to eq(false)
end
end
diff --git a/spec/models/resource_label_event_spec.rb b/spec/models/resource_label_event_spec.rb
index 8cc89578e0e..27a39672994 100644
--- a/spec/models/resource_label_event_spec.rb
+++ b/spec/models/resource_label_event_spec.rb
@@ -3,12 +3,13 @@
require 'spec_helper'
RSpec.describe ResourceLabelEvent, feature_category: :team_planning, type: :model do
- let_it_be(:project) { create(:project, :repository) }
+ let_it_be(:group) { create(:group) }
+ let_it_be(:project) { create(:project, :repository, group: group) }
let_it_be(:issue) { create(:issue, project: project) }
let_it_be(:merge_request) { create(:merge_request, source_project: project) }
let_it_be(:label) { create(:label, project: project) }
- subject { build(:resource_label_event, issue: issue, label: label) }
+ subject(:resource_label_event) { build(:resource_label_event, issue: issue, label: label) }
it_behaves_like 'having unique enum values'
@@ -95,6 +96,63 @@ RSpec.describe ResourceLabelEvent, feature_category: :team_planning, type: :mode
end
end
+ describe '#reference_html' do
+ subject { Nokogiri::HTML.fragment(label_event.reference_html).css('a').first.attr('href') }
+
+ before do
+ label_event.refresh_invalid_reference
+ end
+
+ context 'when resource event belongs to a group level issue' do
+ let(:group_label) { create(:group_label, group: group) }
+ let(:label_event) do
+ group_issue = create(:issue, :group_level, namespace: group)
+
+ create(:resource_label_event, issue: group_issue, label: group_label)
+ end
+
+ it { is_expected.to eq(Gitlab::Routing.url_helpers.group_work_items_path(group, label_name: group_label.title)) }
+ end
+
+ context 'when resource event belongs to a project level issue' do
+ let(:label_event) { resource_label_event }
+
+ it { is_expected.to eq(Gitlab::Routing.url_helpers.project_issues_path(project, label_name: label.title)) }
+ end
+
+ context 'when resource event belongs to a merge request' do
+ let(:label_event) { create(:resource_label_event, merge_request: merge_request, label: label) }
+
+ it do
+ is_expected.to eq(Gitlab::Routing.url_helpers.project_merge_requests_path(project, label_name: label.title))
+ end
+ end
+ end
+
+ describe '#group' do
+ subject { build_stubbed(:resource_label_event, **issuable_attributes).group }
+
+ context 'when issuable is a merge request' do
+ let(:issuable_attributes) { { merge_request: merge_request } }
+
+ it { is_expected.to be_nil }
+ end
+
+ context 'when issuable is an issue' do
+ context 'when issue exists at the project level' do
+ let(:issuable_attributes) { { issue: issue } }
+
+ it { is_expected.to be_nil }
+ end
+
+ context 'when issue exists at the group level' do
+ let(:issuable_attributes) { { issue: build_stubbed(:issue, :group_level, namespace: group) } }
+
+ it { is_expected.to eq(group) }
+ end
+ end
+ end
+
describe '.visible_to_user?' do
let_it_be(:user) { create(:user) }
let_it_be(:issue_project) { create(:project) }
diff --git a/spec/models/route_spec.rb b/spec/models/route_spec.rb
index 7cada013636..8a791a19dec 100644
--- a/spec/models/route_spec.rb
+++ b/spec/models/route_spec.rb
@@ -74,7 +74,7 @@ RSpec.describe Route do
let!(:another_group) { create(:group, path: 'gittlab', name: 'gitllab') }
let!(:another_group_nested) { create(:group, path: 'git_lab', name: 'git_lab', parent: another_group) }
- context 'path update' do
+ shared_examples_for 'path update' do
context 'when route name is set' do
before do
route.update!(path: 'bar')
@@ -116,7 +116,7 @@ RSpec.describe Route do
end
end
- context 'name update' do
+ shared_examples_for 'name update' do
it 'updates children routes with new path' do
route.update!(name: 'bar')
@@ -134,6 +134,18 @@ RSpec.describe Route do
.to change { route.name }.from(nil).to('bar')
end
end
+
+ it_behaves_like 'path update'
+ it_behaves_like 'name update'
+
+ context 'when the feature flag `batch_route_updates` is turned off' do
+ before do
+ stub_feature_flags(batch_route_updates: false)
+ end
+
+ it_behaves_like 'path update'
+ it_behaves_like 'name update'
+ end
end
describe '#create_redirect_for_old_path' do
diff --git a/spec/models/time_tracking/timelog_category_spec.rb b/spec/models/time_tracking/timelog_category_spec.rb
index ac2fb651134..d07ba29091c 100644
--- a/spec/models/time_tracking/timelog_category_spec.rb
+++ b/spec/models/time_tracking/timelog_category_spec.rb
@@ -2,9 +2,10 @@
require 'spec_helper'
-RSpec.describe TimeTracking::TimelogCategory, type: :model do
+RSpec.describe TimeTracking::TimelogCategory, feature_category: :team_planning do
describe 'associations' do
it { is_expected.to belong_to(:namespace).with_foreign_key('namespace_id') }
+ it { is_expected.to have_many(:timelogs) }
end
describe 'default values' do
diff --git a/spec/models/timelog_spec.rb b/spec/models/timelog_spec.rb
index aee2c4ded19..dc87aea0cb4 100644
--- a/spec/models/timelog_spec.rb
+++ b/spec/models/timelog_spec.rb
@@ -11,6 +11,7 @@ RSpec.describe Timelog, feature_category: :team_planning do
it { is_expected.to belong_to(:project) }
it { is_expected.to belong_to(:issue).touch(true) }
it { is_expected.to belong_to(:merge_request).touch(true) }
+ it { is_expected.to belong_to(:timelog_category).optional(true) }
it { is_expected.to be_valid }
diff --git a/spec/models/user_spec.rb b/spec/models/user_spec.rb
index cc0ea69401e..7014c9e685f 100644
--- a/spec/models/user_spec.rb
+++ b/spec/models/user_spec.rb
@@ -187,7 +187,6 @@ RSpec.describe User, feature_category: :user_profile do
it { is_expected.to have_many(:merge_request_assignees).inverse_of(:assignee) }
it { is_expected.to have_many(:merge_request_reviewers).inverse_of(:reviewer) }
it { is_expected.to have_many(:created_custom_emoji).inverse_of(:creator) }
- it { is_expected.to have_many(:in_product_marketing_emails) }
it { is_expected.to have_many(:timelogs) }
it { is_expected.to have_many(:callouts).class_name('Users::Callout') }
it { is_expected.to have_many(:group_callouts).class_name('Users::GroupCallout') }
@@ -223,6 +222,17 @@ RSpec.describe User, feature_category: :user_profile do
is_expected.to have_many(:alert_assignees).class_name('::AlertManagement::AlertAssignee').inverse_of(:assignee)
end
+ describe 'organizations association' do
+ it 'does not create a cross-database query' do
+ user = create(:user)
+ create(:organization_user, user: user)
+
+ with_cross_joins_prevented do
+ expect(user.organizations.count).to eq(1)
+ end
+ end
+ end
+
describe 'default values' do
let(:user) { described_class.new }
@@ -616,38 +626,33 @@ RSpec.describe User, feature_category: :user_profile do
end
end
- describe 'username' do
+ shared_examples 'username validations' do
it 'validates presence' do
expect(subject).to validate_presence_of(:username)
end
- it 'rejects denied names' do
- user = build(:user, username: 'dashboard')
-
- expect(user).not_to be_valid
- expect(user.errors.messages[:username]).to eq ['dashboard is a reserved name']
- end
-
- it 'allows child names' do
- user = build(:user, username: 'avatar')
+ context 'when username is reserved' do
+ let(:username) { 'dashboard' }
- expect(user).to be_valid
+ it 'rejects denied names' do
+ expect(user).not_to be_valid
+ expect(user.errors.messages[:username]).to eq ['dashboard is a reserved name']
+ end
end
- it 'allows wildcard names' do
- user = build(:user, username: 'blob')
+ context 'when username is a child' do
+ let(:username) { 'avatar' }
- expect(user).to be_valid
+ it 'allows child names' do
+ expect(user).to be_valid
+ end
end
- context 'when username is changed' do
- let(:user) { build_stubbed(:user, username: 'old_path', namespace: build_stubbed(:user_namespace)) }
+ context 'when username is a wildcard' do
+ let(:username) { 'blob' }
- it 'validates move_dir is allowed for the namespace' do
- expect(user.namespace).to receive(:any_project_has_container_registry_tags?).and_return(true)
- user.username = 'new_path'
- expect(user).to be_invalid
- expect(user.errors.messages[:username].first).to eq(_('cannot be changed if a personal project has container registry tags.'))
+ it 'allows wildcard names' do
+ expect(user).to be_valid
end
end
@@ -656,25 +661,59 @@ RSpec.describe User, feature_category: :user_profile do
let!(:other_user) { create(:user, username: username) }
it 'is invalid' do
- user = build(:user, username: username)
-
expect(user).not_to be_valid
expect(user.errors.full_messages).to eq(['Username has already been taken'])
end
end
- it 'validates format' do
- Mime::EXTENSION_LOOKUP.keys.each do |type|
- user = build(:user, username: "test.#{type}")
+ Mime::EXTENSION_LOOKUP.keys.each do |type|
+ context 'with extension format' do
+ let(:username) { "test.#{type}" }
- expect(user).not_to be_valid
- expect(user.errors.full_messages).to include('Username ending with a reserved file extension is not allowed.')
- expect(build(:user, username: "test#{type}")).to be_valid
+ it do
+ expect(user).not_to be_valid
+ expect(user.errors.full_messages).to include('Username ending with a reserved file extension is not allowed.')
+ end
+ end
+
+ context 'when suffixed by extension type' do
+ let(:username) { "test#{type}" }
+
+ it do
+ expect(user).to be_valid
+ end
end
end
+ end
+
+ context 'when creating user' do
+ let(:user) { build(:user, username: username) }
+
+ include_examples 'username validations'
+ end
+
+ context 'when updating user' do
+ let(:user) { create(:user) }
+
+ before do
+ user.username = username if defined?(username)
+ end
+
+ include_examples 'username validations'
+
+ context 'when personal project has container registry tags' do
+ let(:user) { build_stubbed(:user, username: 'old_path', namespace: build_stubbed(:user_namespace)) }
+
+ before do
+ expect(user.namespace).to receive(:any_project_has_container_registry_tags?).and_return(true)
+ end
- it 'validates format on updated record' do
- expect(create(:user).update(username: 'profile.html')).to be_falsey
+ it 'validates move_dir is allowed for the namespace' do
+ user.username = 'new_path'
+
+ expect(user).to be_invalid
+ expect(user.errors.messages[:username].first).to eq(_('cannot be changed if a personal project has container registry tags.'))
+ end
end
end
@@ -1434,6 +1473,20 @@ RSpec.describe User, feature_category: :user_profile do
end
end
+ describe '.ordered_by_id_desc' do
+ let_it_be(:first_user) { create(:user) }
+ let_it_be(:second_user) { create(:user) }
+
+ it 'generates the order SQL in descending order' do
+ expect(described_class.ordered_by_id_desc.to_sql).to include(
+ 'ORDER BY "users"."id" DESC')
+ end
+
+ it 'sorts users correctly' do
+ expect(described_class.ordered_by_id_desc).to eq([second_user, first_user])
+ end
+ end
+
describe '.trusted' do
let_it_be(:trusted_user1) { create(:user, :trusted) }
let_it_be(:trusted_user2) { create(:user, :trusted) }
@@ -3368,6 +3421,27 @@ RSpec.describe User, feature_category: :user_profile do
end
end
+ describe '.gfm_autocomplete_search' do
+ let_it_be(:user_1) { create(:user, username: 'someuser', name: 'John Doe') }
+ let_it_be(:user_2) { create(:user, username: 'userthomas', name: 'Thomas Person') }
+
+ it 'returns partial matches on username' do
+ expect(described_class.gfm_autocomplete_search('some')).to eq([user_1])
+ end
+
+ it 'returns matches on name across multiple words' do
+ expect(described_class.gfm_autocomplete_search('johnd')).to eq([user_1])
+ end
+
+ it 'prioritizes sorting of matches that start with the query' do
+ expect(described_class.gfm_autocomplete_search('user')).to eq([user_2, user_1])
+ end
+
+ it 'falls back to sorting by username' do
+ expect(described_class.gfm_autocomplete_search('ser')).to eq([user_1, user_2])
+ end
+ end
+
describe '.user_search_minimum_char_limit' do
it 'returns true' do
expect(described_class.user_search_minimum_char_limit).to be(true)
@@ -3624,6 +3698,66 @@ RSpec.describe User, feature_category: :user_profile do
end
end
+ describe '#can_create_project?' do
+ let(:user) { create(:user) }
+
+ context "when projects_limit_left is 0" do
+ before do
+ allow(user).to receive(:projects_limit_left).and_return(0)
+ end
+
+ it "returns false" do
+ expect(user.can_create_project?).to be_falsey
+ end
+ end
+
+ context "when projects_limit_left is > 0" do
+ before do
+ allow(user).to receive(:projects_limit_left).and_return(1)
+ end
+
+ context "with allow_project_creation_for_guest_and_below default value of true" do
+ it "returns true" do
+ expect(user.can_create_project?).to be_truthy
+ end
+ end
+
+ context "when Gitlab::CurrentSettings.allow_project_creation_for_guest_and_below is false" do
+ before do
+ stub_application_setting(allow_project_creation_for_guest_and_below: false)
+ end
+
+ [
+ Gitlab::Access::NO_ACCESS,
+ Gitlab::Access::MINIMAL_ACCESS,
+ Gitlab::Access::GUEST
+ ].each do |role|
+ context "when user's highest role is #{role}" do
+ it "returns false" do
+ allow(user).to receive(:highest_role).and_return(role)
+ expect(user.can_create_project?).to be_falsey
+ end
+ end
+ end
+
+ [
+ Gitlab::Access::REPORTER,
+ Gitlab::Access::DEVELOPER,
+ Gitlab::Access::MAINTAINER,
+ Gitlab::Access::OWNER,
+ Gitlab::Access::ADMIN
+ ].each do |role|
+ context "when user's highest role is #{role}" do
+ it "returns true" do
+ allow(user).to receive(:highest_role).and_return(role)
+ expect(user.can_create_project?).to be_truthy
+ end
+ end
+ end
+ end
+ end
+ end
+
describe '#all_emails' do
let(:user) { create(:user) }
let!(:unconfirmed_secondary_email) { create(:email, user: user) }
@@ -4468,13 +4602,13 @@ RSpec.describe User, feature_category: :user_profile do
it 'avoids N+1 queries' do
fresh_user = described_class.find(user.id)
- control_count = ActiveRecord::QueryRecorder.new do
+ control = ActiveRecord::QueryRecorder.new do
fresh_user.solo_owned_groups
- end.count
+ end
create(:group).add_owner(user)
- expect { solo_owned_groups }.not_to exceed_query_limit(control_count)
+ expect { solo_owned_groups }.not_to exceed_query_limit(control)
end
end
end
@@ -5693,27 +5827,6 @@ RSpec.describe User, feature_category: :user_profile do
expect(user.namespace).to be_nil
end
-
- context 'when create_personal_ns_outside_model feature flag is disabled' do
- before do
- stub_feature_flags(create_personal_ns_outside_model: false)
- end
-
- it 'creates the namespace' do
- expect(user.namespace).to be_nil
-
- user.save!
-
- expect(user.namespace).to be_present
- expect(user.namespace).to be_kind_of(Namespaces::UserNamespace)
- end
-
- it 'creates the namespace setting' do
- user.save!
-
- expect(user.namespace.namespace_settings).to be_persisted
- end
- end
end
context 'for an existing user' do
diff --git a/spec/models/users/in_product_marketing_email_spec.rb b/spec/models/users/in_product_marketing_email_spec.rb
deleted file mode 100644
index b1642383e42..00000000000
--- a/spec/models/users/in_product_marketing_email_spec.rb
+++ /dev/null
@@ -1,137 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Users::InProductMarketingEmail, type: :model, feature_category: :onboarding do
- let(:track) { :create }
- let(:series) { 0 }
-
- describe 'associations' do
- it { is_expected.to belong_to(:user) }
- end
-
- describe 'validations' do
- subject { build(:in_product_marketing_email) }
-
- it { is_expected.to validate_presence_of(:user) }
-
- context 'when track+series email' do
- it { is_expected.to validate_presence_of(:track) }
- it { is_expected.to validate_presence_of(:series) }
-
- it {
- is_expected.to validate_uniqueness_of(:user_id)
- .scoped_to([:track, :series]).with_message('track series email has already been sent')
- }
- end
- end
-
- describe '.without_track_and_series' do
- let_it_be(:user) { create(:user) }
-
- subject(:without_track_and_series) { User.merge(described_class.without_track_and_series(track, series)) }
-
- before do
- create(:in_product_marketing_email, track: :create, series: 0, user: user)
- create(:in_product_marketing_email, track: :create, series: 1, user: user)
- create(:in_product_marketing_email, track: :verify, series: 0, user: user)
- end
-
- context 'when given track and series already exists' do
- it { expect(without_track_and_series).to be_empty }
- end
-
- context 'when track does not exist' do
- let(:track) { :trial }
-
- it { expect(without_track_and_series).to eq [user] }
- end
-
- context 'when series does not exist' do
- let(:series) { 2 }
-
- it { expect(without_track_and_series).to eq [user] }
- end
-
- context 'when no track or series for a user exists' do
- let(:track) { :create }
- let(:series) { 0 }
- let(:other_user) { create(:user) }
-
- it { expect(without_track_and_series).to eq [other_user] }
- end
- end
-
- describe '.for_user_with_track_and_series' do
- let_it_be(:user) { create(:user) }
- let_it_be(:in_product_marketing_email) { create(:in_product_marketing_email, series: 0, track: 0, user: user) }
-
- subject(:for_user_with_track_and_series) do
- described_class.for_user_with_track_and_series(user, track, series).first
- end
-
- context 'when record for user with given track and series exists' do
- it { is_expected.to eq(in_product_marketing_email) }
- end
-
- context 'when user is different' do
- let(:user) { build_stubbed(:user) }
-
- it { is_expected.to be_nil }
- end
-
- context 'when track is different' do
- let(:track) { 1 }
-
- it { is_expected.to be_nil }
- end
-
- context 'when series is different' do
- let(:series) { 1 }
-
- it { is_expected.to be_nil }
- end
- end
-
- describe '.save_cta_click' do
- let(:user) { create(:user) }
-
- subject(:save_cta_click) { described_class.save_cta_click(user, track, series) }
-
- context 'when there is no record' do
- it 'does not error' do
- expect { save_cta_click }.not_to raise_error
- end
- end
-
- context 'when there is no record for the track and series' do
- it 'does not perform an update' do
- other_email = create(:in_product_marketing_email, user: user, track: :verify, series: 2, cta_clicked_at: nil)
-
- expect { save_cta_click }.not_to change { other_email.reload }
- end
- end
-
- context 'when there is a record for the track and series' do
- it 'saves the cta click date' do
- email = create(:in_product_marketing_email, user: user, track: track, series: series, cta_clicked_at: nil)
-
- freeze_time do
- expect { save_cta_click }.to change { email.reload.cta_clicked_at }.from(nil).to(Time.zone.now)
- end
- end
-
- context 'when cta_clicked_at is already set' do
- it 'does not update' do
- create(:in_product_marketing_email, user: user, track: track, series: series, cta_clicked_at: Time.zone.now)
-
- expect_next_found_instance_of(described_class) do |record|
- expect(record).not_to receive(:update)
- end
-
- save_cta_click
- end
- end
- end
- end
-end
diff --git a/spec/models/users/phone_number_validation_spec.rb b/spec/models/users/phone_number_validation_spec.rb
index 15bbb507dee..eb73fc31dac 100644
--- a/spec/models/users/phone_number_validation_spec.rb
+++ b/spec/models/users/phone_number_validation_spec.rb
@@ -3,6 +3,8 @@
require 'spec_helper'
RSpec.describe Users::PhoneNumberValidation, feature_category: :instance_resiliency do
+ using RSpec::Parameterized::TableSyntax
+
let_it_be(:user) { create(:user) }
let_it_be(:banned_user) { create(:user, :banned) }
@@ -14,12 +16,12 @@ RSpec.describe Users::PhoneNumberValidation, feature_category: :instance_resilie
it { is_expected.to validate_presence_of(:international_dial_code) }
- it {
+ it do
is_expected.to validate_numericality_of(:international_dial_code)
.only_integer
.is_greater_than_or_equal_to(1)
.is_less_than_or_equal_to(999)
- }
+ end
it { is_expected.to validate_presence_of(:phone_number) }
it { is_expected.to validate_length_of(:phone_number).is_at_most(12) }
@@ -30,6 +32,27 @@ RSpec.describe Users::PhoneNumberValidation, feature_category: :instance_resilie
it { is_expected.to validate_length_of(:telesign_reference_xid).is_at_most(255) }
+ describe '#similar_records' do
+ let_it_be(:phone_number_validation) { create(:phone_number_validation, :validated) }
+
+ let_it_be(:phone_number) do
+ phone_number_validation.attributes.with_indifferent_access.slice(
+ :international_dial_code, :phone_number
+ )
+ end
+
+ let_it_be(:match) { create(:phone_number_validation, :validated, phone_number) }
+ let_it_be(:unvalidated_match) { create(:phone_number_validation, phone_number) }
+
+ let_it_be(:non_match_1) { create(:phone_number_validation, phone_number.merge(international_dial_code: 81)) }
+ let_it_be(:non_match_2) { create(:phone_number_validation, phone_number.merge(phone_number: '5555555555')) }
+
+ it 'returns matches with the same international dialing code and phone number' do
+ expect(phone_number_validation.similar_records).to match_array([unvalidated_match, match,
+ phone_number_validation])
+ end
+ end
+
describe '.related_to_banned_user?' do
let_it_be(:international_dial_code) { 1 }
let_it_be(:phone_number) { '555' }
@@ -41,7 +64,12 @@ RSpec.describe Users::PhoneNumberValidation, feature_category: :instance_resilie
context 'when banned user has the same international dial code and phone number' do
context 'and the matching record has not been verified' do
before do
- create(:phone_number_validation, user: banned_user)
+ create(
+ :phone_number_validation,
+ user: banned_user,
+ international_dial_code: international_dial_code,
+ phone_number: phone_number
+ )
end
it { is_expected.to eq(false) }
@@ -49,7 +77,13 @@ RSpec.describe Users::PhoneNumberValidation, feature_category: :instance_resilie
context 'and the matching record has been verified' do
before do
- create(:phone_number_validation, :validated, user: banned_user)
+ create(
+ :phone_number_validation,
+ :validated,
+ user: banned_user,
+ international_dial_code: international_dial_code,
+ phone_number: phone_number
+ )
end
it { is_expected.to eq(true) }
@@ -58,7 +92,14 @@ RSpec.describe Users::PhoneNumberValidation, feature_category: :instance_resilie
context 'when banned user has the same international dial code and phone number, but different country code' do
before do
- create(:phone_number_validation, :validated, user: banned_user, country: 'CA')
+ create(
+ :phone_number_validation,
+ :validated,
+ user: banned_user,
+ international_dial_code: international_dial_code,
+ phone_number: phone_number,
+ country: 'CA'
+ )
end
it { is_expected.to eq(true) }
@@ -66,7 +107,13 @@ RSpec.describe Users::PhoneNumberValidation, feature_category: :instance_resilie
context 'when banned user does not have the same international dial code' do
before do
- create(:phone_number_validation, :validated, user: banned_user, international_dial_code: 61)
+ create(
+ :phone_number_validation,
+ :validated,
+ user: banned_user,
+ international_dial_code: 81,
+ phone_number: phone_number
+ )
end
it { is_expected.to eq(false) }
@@ -74,7 +121,13 @@ RSpec.describe Users::PhoneNumberValidation, feature_category: :instance_resilie
context 'when banned user does not have the same phone number' do
before do
- create(:phone_number_validation, :validated, user: banned_user, phone_number: '666')
+ create(
+ :phone_number_validation,
+ :validated,
+ user: banned_user,
+ international_dial_code: international_dial_code,
+ phone_number: '666'
+ )
end
it { is_expected.to eq(false) }
@@ -82,7 +135,13 @@ RSpec.describe Users::PhoneNumberValidation, feature_category: :instance_resilie
context 'when not-banned user has the same international dial code and phone number' do
before do
- create(:phone_number_validation, :validated, user: user)
+ create(
+ :phone_number_validation,
+ :validated,
+ user: user,
+ international_dial_code: international_dial_code,
+ phone_number: phone_number
+ )
end
it { is_expected.to eq(false) }
@@ -105,6 +164,57 @@ RSpec.describe Users::PhoneNumberValidation, feature_category: :instance_resilie
end
end
end
+
+ describe '.similar_to' do
+ subject(:similar_to) { described_class.similar_to(phone_number_validation) }
+
+ let_it_be(:international_dial_code) { 44 }
+ let_it_be(:phone_number) { '111' }
+
+ let_it_be(:phone_number_validation) do
+ create(:phone_number_validation,
+ :validated,
+ international_dial_code: international_dial_code,
+ phone_number: phone_number
+ )
+ end
+
+ let_it_be(:match) do
+ create(:phone_number_validation,
+ :validated,
+ international_dial_code: phone_number_validation.international_dial_code,
+ phone_number: phone_number_validation.phone_number
+ )
+ end
+
+ let_it_be(:non_match_1) do
+ create(:phone_number_validation,
+ :validated,
+ international_dial_code: phone_number_validation.international_dial_code,
+ phone_number: '222'
+ )
+ end
+
+ let_it_be(:non_match_2) do
+ create(:phone_number_validation,
+ :validated,
+ international_dial_code: 81,
+ phone_number: phone_number_validation.phone_number
+ )
+ end
+
+ let_it_be(:non_match_3) do
+ create(:phone_number_validation,
+ :validated,
+ international_dial_code: 82,
+ phone_number: '333'
+ )
+ end
+
+ it 'returns only records with the same international dialing code and phone number' do
+ expect(similar_to).to match_array([phone_number_validation, match])
+ end
+ end
end
describe '#validated?' do
@@ -142,4 +252,43 @@ RSpec.describe Users::PhoneNumberValidation, feature_category: :instance_resilie
it { is_expected.to be_nil }
end
end
+
+ describe '.sms_send_allowed_after' do
+ let_it_be(:record) { create(:phone_number_validation, sms_send_count: 0) }
+
+ subject(:result) { record.sms_send_allowed_after }
+
+ context 'when there are no attempts yet' do
+ it { is_expected.to be_nil }
+ end
+
+ context 'when sms_send_wait_time feature flag is disabled' do
+ let_it_be(:record) { create(:phone_number_validation, sms_send_count: 1) }
+
+ before do
+ stub_feature_flags(sms_send_wait_time: false)
+ end
+
+ it { is_expected.to be_nil }
+ end
+
+ where(:attempt_number, :expected_delay) do
+ 2 | 1.minute
+ 3 | 3.minutes
+ 4 | 5.minutes
+ 5 | 10.minutes
+ 6 | 10.minutes
+ end
+
+ with_them do
+ it 'returns the correct delayed timestamp value' do
+ freeze_time do
+ record.update!(sms_send_count: attempt_number - 1, sms_sent_at: Time.current)
+
+ expected_result = Time.current + expected_delay
+ expect(result).to eq expected_result
+ end
+ end
+ end
+ end
end
diff --git a/spec/models/work_items/hierarchy_restriction_spec.rb b/spec/models/work_items/hierarchy_restriction_spec.rb
index 2c4d5d32fb8..890c007b6cd 100644
--- a/spec/models/work_items/hierarchy_restriction_spec.rb
+++ b/spec/models/work_items/hierarchy_restriction_spec.rb
@@ -15,4 +15,24 @@ RSpec.describe WorkItems::HierarchyRestriction do
it { is_expected.to validate_presence_of(:child_type) }
it { is_expected.to validate_uniqueness_of(:child_type).scoped_to(:parent_type_id) }
end
+
+ describe '#clear_parent_type_cache!' do
+ subject(:hierarchy_restriction) { build(:hierarchy_restriction) }
+
+ context 'when a hierarchy restriction is saved' do
+ it 'calls #clear_reactive_cache! on parent type' do
+ expect(hierarchy_restriction.parent_type).to receive(:clear_reactive_cache!).once
+
+ hierarchy_restriction.save!
+ end
+ end
+
+ context 'when a hierarchy restriction is destroyed' do
+ it 'calls #clear_reactive_cache! on parent type' do
+ expect(hierarchy_restriction.parent_type).to receive(:clear_reactive_cache!).once
+
+ hierarchy_restriction.destroy!
+ end
+ end
+ end
end
diff --git a/spec/models/work_items/widget_definition_spec.rb b/spec/models/work_items/widget_definition_spec.rb
index da772eec39c..1540ee57ff4 100644
--- a/spec/models/work_items/widget_definition_spec.rb
+++ b/spec/models/work_items/widget_definition_spec.rb
@@ -26,7 +26,9 @@ RSpec.describe WorkItems::WidgetDefinition, feature_category: :team_planning do
::WorkItems::Widgets::HealthStatus,
::WorkItems::Widgets::Progress,
::WorkItems::Widgets::RequirementLegacy,
- ::WorkItems::Widgets::TestReports
+ ::WorkItems::Widgets::TestReports,
+ ::WorkItems::Widgets::Color,
+ ::WorkItems::Widgets::RolledupDates
]
end
diff --git a/spec/policies/global_policy_spec.rb b/spec/policies/global_policy_spec.rb
index 1d58b941d41..db8249252fa 100644
--- a/spec/policies/global_policy_spec.rb
+++ b/spec/policies/global_policy_spec.rb
@@ -90,6 +90,22 @@ RSpec.describe GlobalPolicy, feature_category: :shared do
let(:current_user) { create(:user, can_create_group: true) }
it { is_expected.to be_allowed(:create_group) }
+
+ context 'when can_create_group_and_projects returns true' do
+ before do
+ allow(current_user).to receive(:allow_user_to_create_group_and_project?).and_return(true)
+ end
+
+ it { is_expected.to be_allowed(:create_group) }
+ end
+
+ context 'when can_create_group_and_projects returns false' do
+ before do
+ allow(current_user).to receive(:allow_user_to_create_group_and_project?).and_return(false)
+ end
+
+ it { is_expected.to be_disallowed(:create_group) }
+ end
end
context 'when user does not have the ability to create group' do
diff --git a/spec/policies/organizations/organization_policy_spec.rb b/spec/policies/organizations/organization_policy_spec.rb
index 7eed497d644..9660ed578f7 100644
--- a/spec/policies/organizations/organization_policy_spec.rb
+++ b/spec/policies/organizations/organization_policy_spec.rb
@@ -20,6 +20,7 @@ RSpec.describe Organizations::OrganizationPolicy, feature_category: :cell do
context 'when admin mode is enabled', :enable_admin_mode do
it { is_expected.to be_allowed(:admin_organization) }
+ it { is_expected.to be_allowed(:create_group) }
it { is_expected.to be_allowed(:read_organization) }
it { is_expected.to be_allowed(:read_organization_user) }
end
@@ -31,17 +32,30 @@ RSpec.describe Organizations::OrganizationPolicy, feature_category: :cell do
end
context 'when the user is part of the organization' do
- before do
- create :organization_user, organization: organization, user: current_user
+ before_all do
+ create(:organization_user, organization: organization, user: current_user)
+ end
+
+ it { is_expected.to be_disallowed(:admin_organization) }
+ it { is_expected.to be_allowed(:create_group) }
+ it { is_expected.to be_allowed(:read_organization) }
+ it { is_expected.to be_allowed(:read_organization_user) }
+ end
+
+ context 'when the user is an owner of the organization' do
+ before_all do
+ create(:organization_user, :owner, organization: organization, user: current_user)
end
it { is_expected.to be_allowed(:admin_organization) }
+ it { is_expected.to be_allowed(:create_group) }
it { is_expected.to be_allowed(:read_organization) }
it { is_expected.to be_allowed(:read_organization_user) }
end
context 'when the user is not part of the organization' do
it { is_expected.to be_disallowed(:admin_organization) }
+ it { is_expected.to be_disallowed(:create_group) }
it { is_expected.to be_disallowed(:read_organization_user) }
# All organizations are currently public, and hence they are allowed to be read
# even if the user is not a part of the organization.
diff --git a/spec/policies/project_policy_spec.rb b/spec/policies/project_policy_spec.rb
index a9a4575d747..9f4bf4f6b36 100644
--- a/spec/policies/project_policy_spec.rb
+++ b/spec/policies/project_policy_spec.rb
@@ -472,12 +472,12 @@ RSpec.describe ProjectPolicy, feature_category: :system_access do
end
end
- context 'reading usage quotas' do
+ context 'reading usage quotas and viewing the edit page' do
%w[maintainer owner].each do |role|
context "with #{role}" do
let(:current_user) { send(role) }
- it { is_expected.to be_allowed(:read_usage_quotas) }
+ it { is_expected.to be_allowed(:read_usage_quotas, :view_edit_page) }
end
end
@@ -485,7 +485,7 @@ RSpec.describe ProjectPolicy, feature_category: :system_access do
context "with #{role}" do
let(:current_user) { send(role) }
- it { is_expected.to be_disallowed(:read_usage_quotas) }
+ it { is_expected.to be_disallowed(:read_usage_quotas, :view_edit_page) }
end
end
@@ -493,11 +493,11 @@ RSpec.describe ProjectPolicy, feature_category: :system_access do
let(:current_user) { admin }
context 'when admin mode is enabled', :enable_admin_mode do
- it { expect_allowed(:read_usage_quotas) }
+ it { expect_allowed(:read_usage_quotas, :view_edit_page) }
end
context 'when admin mode is disabled' do
- it { expect_disallowed(:read_usage_quotas) }
+ it { expect_disallowed(:read_usage_quotas, :view_edit_page) }
end
end
end
diff --git a/spec/presenters/blob_presenter_spec.rb b/spec/presenters/blob_presenter_spec.rb
index eed39c7a404..958bf222e2a 100644
--- a/spec/presenters/blob_presenter_spec.rb
+++ b/spec/presenters/blob_presenter_spec.rb
@@ -312,13 +312,13 @@ RSpec.describe BlobPresenter do
let(:git_blob) { blob.__getobj__ }
it 'returns highlighted content' do
- expect(Gitlab::Highlight).to receive(:highlight).with('files/ruby/regex.rb', git_blob.data, plain: nil, language: 'ruby')
+ expect(Gitlab::Highlight).to receive(:highlight).with('files/ruby/regex.rb', git_blob.data, plain: nil, language: 'ruby', used_on: :blob)
presenter.highlight
end
it 'returns plain content when :plain is true' do
- expect(Gitlab::Highlight).to receive(:highlight).with('files/ruby/regex.rb', git_blob.data, plain: true, language: 'ruby')
+ expect(Gitlab::Highlight).to receive(:highlight).with('files/ruby/regex.rb', git_blob.data, plain: true, language: 'ruby', used_on: :blob)
presenter.highlight(plain: true)
end
@@ -331,7 +331,7 @@ RSpec.describe BlobPresenter do
end
it 'returns limited highlighted content' do
- expect(Gitlab::Highlight).to receive(:highlight).with('files/ruby/regex.rb', "line one\n", plain: nil, language: 'ruby')
+ expect(Gitlab::Highlight).to receive(:highlight).with('files/ruby/regex.rb', "line one\n", plain: nil, language: 'ruby', used_on: :blob)
presenter.highlight(to: 1)
end
@@ -343,11 +343,19 @@ RSpec.describe BlobPresenter do
end
it 'passes language to inner call' do
- expect(Gitlab::Highlight).to receive(:highlight).with('files/ruby/regex.rb', git_blob.data, plain: nil, language: 'ruby')
+ expect(Gitlab::Highlight).to receive(:highlight).with('files/ruby/regex.rb', git_blob.data, plain: nil, language: 'ruby', used_on: :blob)
presenter.highlight
end
end
+
+ context 'when used_on param is present' do
+ it 'returns highlighted content' do
+ expect(Gitlab::Highlight).to receive(:highlight).with('files/ruby/regex.rb', git_blob.data, plain: nil, language: 'ruby', used_on: :diff)
+
+ presenter.highlight(used_on: :diff)
+ end
+ end
end
describe '#highlight_and_trim' do
diff --git a/spec/presenters/blobs/notebook_presenter_spec.rb b/spec/presenters/blobs/notebook_presenter_spec.rb
index 2f05dc98fb9..4caecccf538 100644
--- a/spec/presenters/blobs/notebook_presenter_spec.rb
+++ b/spec/presenters/blobs/notebook_presenter_spec.rb
@@ -14,7 +14,7 @@ RSpec.describe Blobs::NotebookPresenter do
subject(:presenter) { described_class.new(blob, current_user: user) }
it 'highlight receives markdown' do
- expect(Gitlab::Highlight).to receive(:highlight).with('files/ruby/regex.rb', git_blob.data, plain: nil, language: 'md')
+ expect(Gitlab::Highlight).to receive(:highlight).with('files/ruby/regex.rb', git_blob.data, plain: nil, language: 'md', used_on: :blob)
presenter.highlight
end
diff --git a/spec/presenters/projects/security/configuration_presenter_spec.rb b/spec/presenters/projects/security/configuration_presenter_spec.rb
index fcd170dfd66..9f6efb08fb1 100644
--- a/spec/presenters/projects/security/configuration_presenter_spec.rb
+++ b/spec/presenters/projects/security/configuration_presenter_spec.rb
@@ -88,6 +88,7 @@ RSpec.describe Projects::Security::ConfigurationPresenter, feature_category: :so
expect(feature['can_enable_by_merge_request']).to eq(true)
expect(feature['meta_info_path']).to be_nil
expect(feature['on_demand_available']).to eq(false)
+ expect(feature['security_features']).not_to be_empty
end
context 'when checking features configured status' do
diff --git a/spec/requests/api/ci/jobs_spec.rb b/spec/requests/api/ci/jobs_spec.rb
index 382aabd45a1..9e1203bc720 100644
--- a/spec/requests/api/ci/jobs_spec.rb
+++ b/spec/requests/api/ci/jobs_spec.rb
@@ -70,6 +70,7 @@ RSpec.describe API::Ci::Jobs, feature_category: :continuous_integration do
expect(json_response['artifacts']).to be_an Array
expect(json_response['artifacts']).to be_empty
expect(json_response['web_url']).to be_present
+ expect(json_response['archived']).to eq(jobx.archived?)
end
end
@@ -132,12 +133,12 @@ RSpec.describe API::Ci::Jobs, feature_category: :continuous_integration do
end
it 'avoids N+1 queries', :skip_before_request do
- control_count = ActiveRecord::QueryRecorder.new { perform_request }.count
+ control = ActiveRecord::QueryRecorder.new { perform_request }
running_job = create(:ci_build, :running, project: project, user: user, pipeline: pipeline, artifacts_expire_at: 1.day.since)
running_job.save!
- expect { perform_request }.not_to exceed_query_limit(control_count)
+ expect { perform_request }.not_to exceed_query_limit(control)
end
it_behaves_like 'returns common pipeline data' do
@@ -431,7 +432,7 @@ RSpec.describe API::Ci::Jobs, feature_category: :continuous_integration do
first_build.user = create(:user)
first_build.save!
- control_count = ActiveRecord::QueryRecorder.new { go }.count
+ control = ActiveRecord::QueryRecorder.new { go }
second_pipeline = create(:ci_empty_pipeline, project: project, sha: project.commit.id, ref: project.default_branch)
second_build = create(:ci_build, :trace_artifact, :artifacts, :test_reports, pipeline: second_pipeline)
@@ -439,7 +440,7 @@ RSpec.describe API::Ci::Jobs, feature_category: :continuous_integration do
second_build.user = create(:user)
second_build.save!
- expect { go }.not_to exceed_query_limit(control_count)
+ expect { go }.not_to exceed_query_limit(control)
end
context 'filter project with one scope element' do
diff --git a/spec/requests/api/ci/pipeline_schedules_spec.rb b/spec/requests/api/ci/pipeline_schedules_spec.rb
index f534b093b7c..588991096b5 100644
--- a/spec/requests/api/ci/pipeline_schedules_spec.rb
+++ b/spec/requests/api/ci/pipeline_schedules_spec.rb
@@ -42,15 +42,15 @@ RSpec.describe API::Ci::PipelineSchedules, feature_category: :continuous_integra
# We need at least two users to trigger a preload for that relation.
create_pipeline_schedules(1)
- control_count = ActiveRecord::QueryRecorder.new do
+ control = ActiveRecord::QueryRecorder.new do
get api("/projects/#{project.id}/pipeline_schedules", developer)
- end.count
+ end
create_pipeline_schedules(5)
expect do
get api("/projects/#{project.id}/pipeline_schedules", developer)
- end.not_to exceed_query_limit(control_count)
+ end.not_to exceed_query_limit(control)
end
%w[active inactive].each do |target|
diff --git a/spec/requests/api/ci/pipelines_spec.rb b/spec/requests/api/ci/pipelines_spec.rb
index eef125e1bc3..ef169dbe872 100644
--- a/spec/requests/api/ci/pipelines_spec.rb
+++ b/spec/requests/api/ci/pipelines_spec.rb
@@ -471,15 +471,15 @@ RSpec.describe API::Ci::Pipelines, feature_category: :continuous_integration do
end
it 'avoids N+1 queries' do
- control_count = ActiveRecord::QueryRecorder.new(skip_cached: false) do
+ control = ActiveRecord::QueryRecorder.new(skip_cached: false) do
get api("/projects/#{project.id}/pipelines/#{pipeline.id}/jobs", api_user), params: query
- end.count
+ end
create_list(:ci_build, 3, :trace_artifact, :artifacts, :test_reports, pipeline: pipeline)
expect do
get api("/projects/#{project.id}/pipelines/#{pipeline.id}/jobs", api_user), params: query
- end.not_to exceed_all_query_limit(control_count)
+ end.not_to exceed_all_query_limit(control)
end
context 'pipeline has retried jobs' do
@@ -671,15 +671,15 @@ RSpec.describe API::Ci::Pipelines, feature_category: :continuous_integration do
end
it 'avoids N+1 queries' do
- control_count = ActiveRecord::QueryRecorder.new(skip_cached: false) do
+ control = ActiveRecord::QueryRecorder.new(skip_cached: false) do
get api("/projects/#{project.id}/pipelines/#{pipeline.id}/bridges", api_user), params: query
- end.count
+ end
3.times { create_bridge(pipeline) }
expect do
get api("/projects/#{project.id}/pipelines/#{pipeline.id}/bridges", api_user), params: query
- end.not_to exceed_all_query_limit(control_count)
+ end.not_to exceed_all_query_limit(control)
end
end
diff --git a/spec/requests/api/ci/runner/jobs_request_post_spec.rb b/spec/requests/api/ci/runner/jobs_request_post_spec.rb
index 3d6d86335eb..e118ef9a384 100644
--- a/spec/requests/api/ci/runner/jobs_request_post_spec.rb
+++ b/spec/requests/api/ci/runner/jobs_request_post_spec.rb
@@ -932,7 +932,8 @@ RSpec.describe API::Ci::Runner, :clean_gitlab_redis_shared_state, feature_catego
name: 'ruby',
executor_opts: {
docker: {
- platform: 'amd64'
+ platform: 'amd64',
+ user: 'dave'
}
}
}
@@ -948,7 +949,8 @@ RSpec.describe API::Ci::Runner, :clean_gitlab_redis_shared_state, feature_catego
'image' => { 'name' => 'ruby',
'executor_opts' => {
'docker' => {
- 'platform' => 'amd64'
+ 'platform' => 'amd64',
+ 'user' => 'dave'
}
},
'pull_policy' => nil,
diff --git a/spec/requests/api/ci/runner/yamls/image-executor_opts-user.yml b/spec/requests/api/ci/runner/yamls/image-executor_opts-user.yml
new file mode 100644
index 00000000000..9fb56b941c9
--- /dev/null
+++ b/spec/requests/api/ci/runner/yamls/image-executor_opts-user.yml
@@ -0,0 +1,25 @@
+gitlab_ci:
+ rspec:
+ image:
+ name: alpine:latest
+ docker:
+ user: dave
+ script: echo Hello World
+
+request_response:
+ image:
+ name: alpine:latest
+ entrypoint: null
+ executor_opts:
+ docker:
+ user: dave
+ ports: []
+ pull_policy: null
+ steps:
+ - name: script
+ script: ["echo Hello World"]
+ timeout: 3600
+ when: on_success
+ allow_failure: false
+ services: []
+
diff --git a/spec/requests/api/ci/runner/yamls/service-executor_opts-user.yml b/spec/requests/api/ci/runner/yamls/service-executor_opts-user.yml
new file mode 100644
index 00000000000..ea824110e63
--- /dev/null
+++ b/spec/requests/api/ci/runner/yamls/service-executor_opts-user.yml
@@ -0,0 +1,27 @@
+gitlab_ci:
+ rspec:
+ services:
+ - name: docker:dind
+ docker:
+ user: john
+ script: echo Hello World
+
+request_response:
+ image: null
+ steps:
+ - name: script
+ script: ["echo Hello World"]
+ timeout: 3600
+ when: on_success
+ allow_failure: false
+ services:
+ - name: docker:dind
+ alias: null
+ command: null
+ entrypoint: null
+ executor_opts:
+ docker:
+ user: john
+ ports: []
+ pull_policy: null
+ variables: []
diff --git a/spec/requests/api/ci/runners_spec.rb b/spec/requests/api/ci/runners_spec.rb
index 187880e16a4..11d906249e4 100644
--- a/spec/requests/api/ci/runners_spec.rb
+++ b/spec/requests/api/ci/runners_spec.rb
@@ -836,166 +836,219 @@ RSpec.describe API::Ci::Runners, :aggregate_failures, feature_category: :fleet_v
end
describe 'GET /runners/:id/jobs' do
- let_it_be(:job_1) { create(:ci_build) }
- let_it_be(:job_2) { create(:ci_build, :running, runner: shared_runner, project: project) }
- let_it_be(:job_3) { create(:ci_build, :failed, runner: shared_runner, project: project) }
- let_it_be(:job_4) { create(:ci_build, :running, runner: project_runner, project: project) }
- let_it_be(:job_5) { create(:ci_build, :failed, runner: project_runner, project: project) }
- let(:path) { "/runners/#{project_runner.id}/jobs" }
+ subject(:request) { get api(path, user, **api_params) }
+
+ let_it_be(:shared_runner_manager1) { create(:ci_runner_machine, runner: shared_runner, system_xid: 'id2') }
+ let_it_be(:jobs) do
+ project_runner_manager1 = create(:ci_runner_machine, runner: project_runner, system_xid: 'id1')
+ project_runner_manager2 = create(:ci_runner_machine, runner: two_projects_runner, system_xid: 'id1')
+
+ [
+ create(:ci_build),
+ create(:ci_build, :running, runner_manager: shared_runner_manager1, project: project),
+ create(:ci_build, :failed, runner_manager: shared_runner_manager1, project: project),
+ create(:ci_build, :running, runner_manager: project_runner_manager1, project: project),
+ create(:ci_build, :failed, runner_manager: project_runner_manager1, project: project),
+ create(:ci_build, :running, runner_manager: project_runner_manager2, project: project),
+ create(:ci_build, :running, runner_manager: project_runner_manager2, project: project2)
+ ]
+ end
+
+ let(:api_params) { {} }
+ let(:runner_id) { project_runner.id }
+ let(:query_part) { query_params.merge(system_id_params).map { |param| param.join('=') }.join('&') }
+ let(:path) { "/runners/#{runner_id}/jobs?#{query_part}" }
+ let(:query_params) { {} }
+ let(:system_id_params) { {} }
it_behaves_like 'GET request permissions for admin mode'
context 'admin user' do
+ let(:user) { admin }
+ let(:api_params) { { admin_mode: true } }
+
context 'when runner exists' do
context 'when runner is shared' do
+ let(:runner_id) { shared_runner.id }
+ let(:system_id) { 'id2' }
+
it 'return jobs' do
- get api("/runners/#{shared_runner.id}/jobs", admin, admin_mode: true)
+ request
expect(response).to have_gitlab_http_status(:ok)
expect(response).to include_pagination_headers
- expect(json_response).to be_an(Array)
- expect(json_response.length).to eq(2)
+ expect(json_response).to match([
+ a_hash_including('id' => jobs[1].id),
+ a_hash_including('id' => jobs[2].id)
+ ])
+ end
+
+ it_behaves_like 'an endpoint with keyset pagination', invalid_order: nil do
+ let(:first_record) { jobs[2] }
+ let(:second_record) { jobs[1] }
+ let(:api_call) { api(path, user, **api_params) }
end
end
context 'when runner is a project runner' do
it 'return jobs' do
- get api(path, admin, admin_mode: true)
+ request
expect(response).to have_gitlab_http_status(:ok)
expect(response).to include_pagination_headers
- expect(json_response).to be_an(Array)
- expect(json_response.length).to eq(2)
+ expect(json_response).to match([
+ a_hash_including('id' => jobs[3].id),
+ a_hash_including('id' => jobs[4].id)
+ ])
end
context 'when user does not have authorization to see all jobs' do
- it 'shows only jobs it has permission to see' do
- create(:ci_build, :running, runner: two_projects_runner, project: project)
- create(:ci_build, :running, runner: two_projects_runner, project: project2)
+ let(:runner_id) { two_projects_runner.id }
+ let(:user) { user2 }
+ let(:api_params) { {} }
+ before_all do
project.add_guest(user2)
project2.add_maintainer(user2)
- get api("/runners/#{two_projects_runner.id}/jobs", user2)
+ end
+
+ it 'shows only jobs it has permission to see' do
+ request
expect(response).to have_gitlab_http_status(:ok)
expect(response).to include_pagination_headers
-
- expect(json_response).to be_an(Array)
- expect(json_response.length).to eq(1)
+ expect(json_response).to match([a_hash_including('id' => jobs[6].id)])
end
end
end
context 'when valid status is provided' do
+ let(:query_params) { { status: :failed } }
+
it 'return filtered jobs' do
- get api("/runners/#{project_runner.id}/jobs?status=failed", admin, admin_mode: true)
+ request
expect(response).to have_gitlab_http_status(:ok)
expect(response).to include_pagination_headers
- expect(json_response).to be_an(Array)
- expect(json_response.length).to eq(1)
- expect(json_response.first).to include('id' => job_5.id)
+ expect(json_response).to match([a_hash_including('id' => jobs[4].id)])
end
end
context 'when valid order_by is provided' do
+ let(:query_params) { { order_by: :id } }
+
context 'when sort order is not specified' do
it 'return jobs in descending order' do
- get api("/runners/#{project_runner.id}/jobs?order_by=id", admin, admin_mode: true)
+ request
expect(response).to have_gitlab_http_status(:ok)
expect(response).to include_pagination_headers
- expect(json_response).to be_an(Array)
- expect(json_response.length).to eq(2)
- expect(json_response.first).to include('id' => job_5.id)
+ expect(json_response).to match([
+ a_hash_including('id' => jobs[4].id),
+ a_hash_including('id' => jobs[3].id)
+ ])
end
end
context 'when sort order is specified as asc' do
+ let(:query_params) { { order_by: :id, sort: :asc } }
+
it 'return jobs sorted in ascending order' do
- get api("/runners/#{project_runner.id}/jobs?order_by=id&sort=asc", admin, admin_mode: true)
+ request
expect(response).to have_gitlab_http_status(:ok)
expect(response).to include_pagination_headers
- expect(json_response).to be_an(Array)
- expect(json_response.length).to eq(2)
- expect(json_response.first).to include('id' => job_4.id)
+ expect(json_response).to match([
+ a_hash_including('id' => jobs[3].id),
+ a_hash_including('id' => jobs[4].id)
+ ])
end
end
end
context 'when invalid status is provided' do
+ let(:query_params) { { status: 'non-existing' } }
+
it 'return 400' do
- get api("/runners/#{project_runner.id}/jobs?status=non-existing", admin, admin_mode: true)
+ request
expect(response).to have_gitlab_http_status(:bad_request)
end
end
context 'when invalid order_by is provided' do
+ let(:query_params) { { order_by: 'non-existing' } }
+
it 'return 400' do
- get api("/runners/#{project_runner.id}/jobs?order_by=non-existing", admin, admin_mode: true)
+ request
expect(response).to have_gitlab_http_status(:bad_request)
end
end
context 'when invalid sort is provided' do
+ let(:query_params) { { sort: 'non-existing' } }
+
it 'return 400' do
- get api("/runners/#{project_runner.id}/jobs?sort=non-existing", admin, admin_mode: true)
+ request
expect(response).to have_gitlab_http_status(:bad_request)
end
end
end
- it 'avoids N+1 DB queries' do
- get api("/runners/#{shared_runner.id}/jobs", admin, admin_mode: true)
+ describe 'eager loading' do
+ let(:runner_id) { shared_runner.id }
- control = ActiveRecord::QueryRecorder.new do
- get api("/runners/#{shared_runner.id}/jobs", admin, admin_mode: true)
- end
+ it 'avoids N+1 DB queries' do
+ get api(path, user, **api_params)
- create(:ci_build, :failed, runner: shared_runner, project: project)
+ control = ActiveRecord::QueryRecorder.new do
+ get api(path, user, **api_params)
+ end
- expect do
- get api("/runners/#{shared_runner.id}/jobs", admin, admin_mode: true)
- end.not_to exceed_query_limit(control.count)
- end
+ create(:ci_build, :failed, runner: shared_runner, project: project)
- it 'batches loading of commits' do
- shared_runner = create(:ci_runner, :instance, description: 'Shared runner')
+ expect do
+ get api(path, user, **api_params)
+ end.not_to exceed_query_limit(control.count)
+ end
- project_with_repo = create(:project, :repository)
+ it 'batches loading of commits' do
+ project_with_repo = create(:project, :repository)
+ shared_runner_manager1 = create(:ci_runner_machine, runner: shared_runner, system_xid: 'id1')
- pipeline = create(:ci_pipeline, project: project_with_repo, sha: 'ddd0f15ae83993f5cb66a927a28673882e99100b')
- create(:ci_build, :running, runner: shared_runner, project: project_with_repo, pipeline: pipeline)
+ pipeline = create(:ci_pipeline, project: project_with_repo, sha: 'ddd0f15ae83993f5cb66a927a28673882e99100b')
+ create(:ci_build, :running, runner_manager: shared_runner_manager1, project: project_with_repo, pipeline: pipeline)
- pipeline = create(:ci_pipeline, project: project_with_repo, sha: 'c1c67abbaf91f624347bb3ae96eabe3a1b742478')
- create(:ci_build, :failed, runner: shared_runner, project: project_with_repo, pipeline: pipeline)
+ pipeline = create(:ci_pipeline, project: project_with_repo, sha: 'c1c67abbaf91f624347bb3ae96eabe3a1b742478')
+ create(:ci_build, :failed, runner_manager: shared_runner_manager1, project: project_with_repo, pipeline: pipeline)
- pipeline = create(:ci_pipeline, project: project_with_repo, sha: '1a0b36b3cdad1d2ee32457c102a8c0b7056fa863')
- create(:ci_build, :failed, runner: shared_runner, project: project_with_repo, pipeline: pipeline)
+ pipeline = create(:ci_pipeline, project: project_with_repo, sha: '1a0b36b3cdad1d2ee32457c102a8c0b7056fa863')
+ create(:ci_build, :failed, runner_manager: shared_runner_manager1, project: project_with_repo, pipeline: pipeline)
- expect_next_instance_of(Repository) do |repo|
- expect(repo).to receive(:commits_by).with(oids:
- %w[
- 1a0b36b3cdad1d2ee32457c102a8c0b7056fa863
- c1c67abbaf91f624347bb3ae96eabe3a1b742478
- ]).once.and_call_original
- end
+ expect_next_instance_of(Repository) do |repo|
+ expect(repo).to receive(:commits_by).with(oids:
+ %w[
+ 1a0b36b3cdad1d2ee32457c102a8c0b7056fa863
+ c1c67abbaf91f624347bb3ae96eabe3a1b742478
+ ]).once.and_call_original
+ end
- get api("/runners/#{shared_runner.id}/jobs", admin, admin_mode: true), params: { per_page: 2, order_by: 'id', sort: 'desc' }
+ get api(path, admin, admin_mode: true), params: { per_page: 2, order_by: 'id', sort: 'desc' }
+ end
end
context "when runner doesn't exist" do
+ let(:runner_id) { non_existing_record_id }
+
it 'returns 404' do
- get api('/runners/0/jobs', admin, admin_mode: true)
+ request
expect(response).to have_gitlab_http_status(:not_found)
end
@@ -1004,70 +1057,118 @@ RSpec.describe API::Ci::Runners, :aggregate_failures, feature_category: :fleet_v
context "runner project's administrative user" do
context 'when runner exists' do
+ let(:runner_id) { shared_runner.id }
+
context 'when runner is shared' do
it 'returns 403' do
- get api("/runners/#{shared_runner.id}/jobs", user)
+ request
expect(response).to have_gitlab_http_status(:forbidden)
end
end
context 'when runner is a project runner' do
+ let(:runner_id) { project_runner.id }
+
it 'return jobs' do
- get api(path, user)
+ request
expect(response).to have_gitlab_http_status(:ok)
expect(response).to include_pagination_headers
- expect(json_response).to be_an(Array)
- expect(json_response.length).to eq(2)
+ expect(json_response).to match([
+ a_hash_including('id' => jobs[3].id),
+ a_hash_including('id' => jobs[4].id)
+ ])
end
- end
- context 'when valid status is provided' do
- it 'return filtered jobs' do
- get api("/runners/#{project_runner.id}/jobs?status=failed", user)
+ context 'when valid status is provided' do
+ let(:query_params) { { status: :failed } }
- expect(response).to have_gitlab_http_status(:ok)
- expect(response).to include_pagination_headers
+ it 'return filtered jobs' do
+ request
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(response).to include_pagination_headers
- expect(json_response).to be_an(Array)
- expect(json_response.length).to eq(1)
- expect(json_response.first).to include('id' => job_5.id)
+ expect(json_response).to match([
+ a_hash_including('id' => jobs[4].id)
+ ])
+ end
end
- end
- context 'when invalid status is provided' do
- it 'return 400' do
- get api("/runners/#{project_runner.id}/jobs?status=non-existing", user)
+ context 'when invalid status is provided' do
+ let(:query_params) { { status: 'non-existing' } }
- expect(response).to have_gitlab_http_status(:bad_request)
+ it 'return 400' do
+ request
+
+ expect(response).to have_gitlab_http_status(:bad_request)
+ end
end
end
end
context "when runner doesn't exist" do
+ let(:runner_id) { non_existing_record_id }
+
it 'returns 404' do
- get api('/runners/0/jobs', user)
+ request
expect(response).to have_gitlab_http_status(:not_found)
end
end
- end
- context 'other authorized user' do
- it 'does not return jobs' do
- get api(path, user2)
+ context 'other authorized user' do
+ let(:user) { user2 }
- expect(response).to have_gitlab_http_status(:forbidden)
+ it 'does not return jobs' do
+ request
+
+ expect(response).to have_gitlab_http_status(:forbidden)
+ end
+ end
+
+ context 'unauthorized user' do
+ let(:user) { nil }
+
+ it 'does not return jobs' do
+ request
+
+ expect(response).to have_gitlab_http_status(:unauthorized)
+ end
end
end
- context 'unauthorized user' do
- it 'does not return jobs' do
- get api(path)
+ context 'with system_id param' do
+ let(:system_id_params) { { system_id: system_id } }
+ let(:system_id) { 'id1' }
+ let(:user) { admin }
+ let(:api_params) { { admin_mode: true } }
- expect(response).to have_gitlab_http_status(:unauthorized)
+ it 'returns jobs from the runner manager' do
+ request
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(response).to include_limited_pagination_headers
+ expect(response.headers).not_to include('X-Total', 'X-Total-Pages')
+
+ expect(json_response).to match([
+ a_hash_including('id' => jobs[3].id),
+ a_hash_including('id' => jobs[4].id)
+ ])
+ end
+
+ context 'when system_id does not match runner' do
+ let(:runner_id) { shared_runner.id }
+
+ it 'does not return jobs' do
+ request
+
+ expect(response).to have_gitlab_http_status(:ok)
+
+ expect(json_response).to be_empty
+ end
end
end
end
diff --git a/spec/requests/api/commits_spec.rb b/spec/requests/api/commits_spec.rb
index 4ec5d195ff8..06ef68f190f 100644
--- a/spec/requests/api/commits_spec.rb
+++ b/spec/requests/api/commits_spec.rb
@@ -1842,11 +1842,11 @@ RSpec.describe API::Commits, feature_category: :source_code_management do
it 'are returned without N + 1' do
get api(route, current_user) # warm up the cache
- control_count = ActiveRecord::QueryRecorder.new { get api(route, current_user) }.count
+ control = ActiveRecord::QueryRecorder.new { get api(route, current_user) }
create(:diff_note_on_commit, project: project, author: create(:user))
- expect { get api(route, current_user) }.not_to exceed_query_limit(control_count)
+ expect { get api(route, current_user) }.not_to exceed_query_limit(control)
end
end
end
@@ -2386,11 +2386,11 @@ RSpec.describe API::Commits, feature_category: :source_code_management do
it 'returns multiple merge requests without N + 1' do
perform_request(user)
- control_count = ActiveRecord::QueryRecorder.new { perform_request(user) }.count
+ control = ActiveRecord::QueryRecorder.new { perform_request(user) }
create(:merge_request, :closed, source_project: project, source_branch: 'master', target_branch: 'feature')
- expect { perform_request(user) }.not_to exceed_query_limit(control_count)
+ expect { perform_request(user) }.not_to exceed_query_limit(control)
end
end
@@ -2457,6 +2457,8 @@ RSpec.describe API::Commits, feature_category: :source_code_management do
end
context 'with ssh signed commit' do
+ let_it_be(:project) { create(:project, :repository, :public, :in_group) }
+
let(:commit_id) { '7b5160f9bb23a3d58a0accdbe89da13b96b1ece9' }
let!(:commit) { project.commit(commit_id) }
diff --git a/spec/requests/api/deploy_keys_spec.rb b/spec/requests/api/deploy_keys_spec.rb
index 30c345ef458..ca19a97ae49 100644
--- a/spec/requests/api/deploy_keys_spec.rb
+++ b/spec/requests/api/deploy_keys_spec.rb
@@ -135,11 +135,11 @@ RSpec.describe API::DeployKeys, :aggregate_failures, feature_category: :continuo
it 'returns multiple deploy keys without N + 1' do
perform_request
- control_count = ActiveRecord::QueryRecorder.new { perform_request }.count
+ control = ActiveRecord::QueryRecorder.new { perform_request }
create(:deploy_key, public: true, projects: [project], user: maintainer)
- expect { perform_request }.not_to exceed_query_limit(control_count)
+ expect { perform_request }.not_to exceed_query_limit(control)
end
end
diff --git a/spec/requests/api/deployments_spec.rb b/spec/requests/api/deployments_spec.rb
index 5a8e1649e75..f68307df779 100644
--- a/spec/requests/api/deployments_spec.rb
+++ b/spec/requests/api/deployments_spec.rb
@@ -143,11 +143,11 @@ RSpec.describe API::Deployments, feature_category: :continuous_delivery do
it 'returns multiple deployments without N + 1' do
perform_request # warm up the cache
- control_count = ActiveRecord::QueryRecorder.new { perform_request }.count
+ control = ActiveRecord::QueryRecorder.new { perform_request }
create(:deployment, :success, project: project, deployable: build, iid: 21, ref: 'master')
- expect { perform_request }.not_to exceed_query_limit(control_count)
+ expect { perform_request }.not_to exceed_query_limit(control)
end
end
diff --git a/spec/requests/api/draft_notes_spec.rb b/spec/requests/api/draft_notes_spec.rb
index f15ed6e2d5f..fc465fa7c42 100644
--- a/spec/requests/api/draft_notes_spec.rb
+++ b/spec/requests/api/draft_notes_spec.rb
@@ -87,6 +87,10 @@ RSpec.describe API::DraftNotes, feature_category: :code_review_workflow do
let!(:deleted_draft_note_id) { draft_note_by_current_user.id }
before do
+ allow_next_instance_of(DraftNotes::DestroyService) do |service|
+ allow(service).to receive(:unfolded_drafts?).and_return(true)
+ end
+
delete api(
"#{base_url}/#{draft_note_by_current_user.id}",
user
diff --git a/spec/requests/api/feature_flags_spec.rb b/spec/requests/api/feature_flags_spec.rb
index 4fb0dfbb070..2e513194627 100644
--- a/spec/requests/api/feature_flags_spec.rb
+++ b/spec/requests/api/feature_flags_spec.rb
@@ -67,12 +67,12 @@ RSpec.describe API::FeatureFlags, feature_category: :feature_flags do
end
it 'does not have N+1 problem' do
- control_count = ActiveRecord::QueryRecorder.new { subject }
+ control = ActiveRecord::QueryRecorder.new { subject }
create_list(:operations_feature_flag, 3, project: project)
expect { get api("/projects/#{project.id}/feature_flags", user) }
- .not_to exceed_query_limit(control_count)
+ .not_to exceed_query_limit(control)
end
it_behaves_like 'check user permission'
diff --git a/spec/requests/api/graphql/achievements/user_achievements_query_spec.rb b/spec/requests/api/graphql/achievements/user_achievements_query_spec.rb
index 32048ea1432..94678bd18da 100644
--- a/spec/requests/api/graphql/achievements/user_achievements_query_spec.rb
+++ b/spec/requests/api/graphql/achievements/user_achievements_query_spec.rb
@@ -89,14 +89,14 @@ RSpec.describe 'UserAchievements', feature_category: :user_profile do
end
it 'can lookahead to eliminate N+1 queries', :use_clean_rails_memory_store_caching do
- control_count = ActiveRecord::QueryRecorder.new(skip_cached: false) do
+ control = ActiveRecord::QueryRecorder.new(skip_cached: false) do
post_graphql(query, current_user: user)
- end.count
+ end
user2 = create(:user)
create(:user_achievement, achievement: achievement, user: user2)
- expect { post_graphql(query, current_user: user) }.not_to exceed_all_query_limit(control_count)
+ expect { post_graphql(query, current_user: user) }.not_to exceed_all_query_limit(control)
end
context 'when the achievements feature flag is disabled' do
diff --git a/spec/requests/api/graphql/boards/board_list_issues_query_spec.rb b/spec/requests/api/graphql/boards/board_list_issues_query_spec.rb
index 86e2b288890..312700b1dcf 100644
--- a/spec/requests/api/graphql/boards/board_list_issues_query_spec.rb
+++ b/spec/requests/api/graphql/boards/board_list_issues_query_spec.rb
@@ -115,8 +115,8 @@ RSpec.describe 'get board lists', feature_category: :team_planning do
let(:issue_params) { { filters: { or: { assignee_usernames: [user.username, another_user.username] } } } }
it 'returns correctly filtered issues' do
- issue1.assignee_ids = user.id
- issue2.assignee_ids = another_user.id
+ IssueAssignee.create!(issue_id: issue1.id, user_id: user.id)
+ IssueAssignee.create!(issue_id: issue2.id, user_id: another_user.id)
subject
diff --git a/spec/requests/api/graphql/ci/catalog/resource_spec.rb b/spec/requests/api/graphql/ci/catalog/resource_spec.rb
index 9fe73e7ba45..836e52197a3 100644
--- a/spec/requests/api/graphql/ci/catalog/resource_spec.rb
+++ b/spec/requests/api/graphql/ci/catalog/resource_spec.rb
@@ -22,7 +22,7 @@ RSpec.describe 'Query.ciCatalogResource', feature_category: :pipeline_compositio
)
end
- let_it_be(:resource) { create(:ci_catalog_resource, :published, project: project) }
+ let_it_be_with_reload(:resource) { create(:ci_catalog_resource, :published, project: project) }
let(:query) do
<<~GQL
@@ -81,7 +81,7 @@ RSpec.describe 'Query.ciCatalogResource', feature_category: :pipeline_compositio
nodes {
id
name
- path
+ includePath
inputs {
name
default
@@ -126,7 +126,7 @@ RSpec.describe 'Query.ciCatalogResource', feature_category: :pipeline_compositio
a_graphql_entity_for(
components.first,
name: components.first.name,
- path: components.first.path,
+ include_path: components.first.path,
inputs: [
a_graphql_entity_for(
name: 'tags',
@@ -148,48 +148,68 @@ RSpec.describe 'Query.ciCatalogResource', feature_category: :pipeline_compositio
a_graphql_entity_for(
components.last,
name: components.last.name,
- path: components.last.path
+ include_path: components.last.path
)
)
end
end
end
- describe 'versions' do
- let(:query) do
- <<~GQL
- query {
- ciCatalogResource(id: "#{resource.to_global_id}") {
- id
- versions {
- nodes {
- id
- tagName
- tagPath
- releasedAt
- author {
+ describe 'version fields' do
+ before_all do
+ # To test the readme_html field, we need to create versions with real commit shas
+ project.repository.create_branch('branch_v2', project.default_branch)
+ project.repository.update_file(
+ user, 'README.md', 'Readme v2', message: 'Update readme', branch_name: 'branch_v2')
+
+ project.repository.add_tag(user, 'v1', project.default_branch)
+ project.repository.add_tag(user, 'v2', 'branch_v2')
+ end
+
+ let_it_be(:author) { create(:user, name: 'author') }
+
+ let_it_be(:version1) do
+ create(:release, :with_catalog_resource_version,
+ project: project,
+ tag: 'v1',
+ sha: project.commit('v1').sha,
+ released_at: '2023-01-01T00:00:00Z',
+ author: author
+ ).catalog_resource_version
+ end
+
+ let_it_be(:version2) do
+ create(:release, :with_catalog_resource_version,
+ project: project,
+ tag: 'v2',
+ sha: project.commit('v2').sha,
+ released_at: '2023-02-01T00:00:00Z',
+ author: author
+ ).catalog_resource_version
+ end
+
+ describe 'versions' do
+ let(:query) do
+ <<~GQL
+ query {
+ ciCatalogResource(id: "#{resource.to_global_id}") {
+ id
+ versions {
+ nodes {
id
name
- webUrl
+ path
+ releasedAt
+ author {
+ id
+ name
+ webUrl
+ }
}
}
}
}
- }
- GQL
- end
-
- context 'when the resource has versions' do
- let_it_be(:author) { create(:user, name: 'author') }
-
- let_it_be(:version1) do
- create(:release, :with_catalog_resource_version, project: project, released_at: '2023-01-01T00:00:00Z',
- author: author).catalog_resource_version
- end
-
- let_it_be(:version2) do
- create(:release, :with_catalog_resource_version, project: project, released_at: '2023-02-01T00:00:00Z',
- author: author).catalog_resource_version
+ GQL
end
it 'returns the resource with the versions data' do
@@ -202,67 +222,124 @@ RSpec.describe 'Query.ciCatalogResource', feature_category: :pipeline_compositio
expect(graphql_data_at(:ciCatalogResource, :versions, :nodes)).to contain_exactly(
a_graphql_entity_for(
version1,
- tagName: version1.name,
- tagPath: project_tag_path(project, version1.name),
+ name: version1.name,
+ path: project_tag_path(project, version1.name),
releasedAt: version1.released_at,
author: a_graphql_entity_for(author, :name)
),
a_graphql_entity_for(
version2,
- tagName: version2.name,
- tagPath: project_tag_path(project, version2.name),
+ name: version2.name,
+ path: project_tag_path(project, version2.name),
releasedAt: version2.released_at,
author: a_graphql_entity_for(author, :name)
)
)
end
- end
- context 'when the resource does not have a version' do
- it 'returns versions as an empty array' do
- post_query
+ context 'when the readmeHtml field is requested on more than one version' do
+ let(:query) do
+ <<~GQL
+ query {
+ ciCatalogResource(fullPath: "#{resource.project.full_path}") {
+ versions {
+ nodes {
+ readmeHtml
+ }
+ }
+ }
+ }
+ GQL
+ end
- expect(graphql_data_at(:ciCatalogResource)).to match(
- a_graphql_entity_for(resource, versions: { 'nodes' => [] })
- )
+ it 'limits the request to 1 version at a time' do
+ post_query
+
+ expect_graphql_errors_to_include \
+ [/"readmeHtml" field can be requested only for 1 CiCatalogResourceVersion\(s\) at a time./]
+ end
+ end
+
+ context 'when the name argument is provided' do
+ let(:name) { 'v1' }
+
+ let(:query) do
+ <<~GQL
+ query {
+ ciCatalogResource(fullPath: "#{resource.project.full_path}") {
+ versions(name: "#{name}") {
+ nodes {
+ id
+ name
+ path
+ releasedAt
+ readmeHtml
+ }
+ }
+ }
+ }
+ GQL
+ end
+
+ it 'returns the version that matches the name' do
+ post_query
+
+ expect(graphql_data_at(:ciCatalogResource, :versions, :nodes)).to contain_exactly(
+ a_graphql_entity_for(
+ version1,
+ name: version1.name,
+ path: project_tag_path(project, version1.name),
+ releasedAt: version1.released_at,
+ readmeHtml: a_string_including(
+ "#{project.full_path}/-/blob/#{project.default_branch}/README.md"
+ )
+ )
+ )
+ end
+
+ context 'when no version matches the name' do
+ let(:name) { 'does_not_exist' }
+
+ it 'returns an empty array' do
+ post_query
+
+ expect(graphql_data_at(:ciCatalogResource, :versions, :nodes)).to eq([])
+ end
+ end
+ end
+
+ context 'when the resource does not have a version' do
+ it 'returns an empty array' do
+ resource.versions.delete_all(:delete_all)
+
+ post_query
+
+ expect(graphql_data_at(:ciCatalogResource, :versions, :nodes)).to eq([])
+ end
end
end
- end
- describe 'latestVersion' do
- let(:query) do
- <<~GQL
- query {
- ciCatalogResource(id: "#{resource.to_global_id}") {
- id
- latestVersion {
+ describe 'latestVersion' do
+ let(:query) do
+ <<~GQL
+ query {
+ ciCatalogResource(id: "#{resource.to_global_id}") {
id
- tagName
- tagPath
- releasedAt
- author {
+ latestVersion {
id
name
- webUrl
+ path
+ releasedAt
+ readmeHtml
+ author {
+ id
+ name
+ webUrl
+ }
}
}
}
- }
- GQL
- end
-
- context 'when the resource has versions' do
- let_it_be(:author) { create(:user, name: 'author') }
-
- let_it_be(:latest_version) do
- create(:release, :with_catalog_resource_version, project: project, released_at: '2023-02-01T00:00:00Z',
- author: author).catalog_resource_version
- end
-
- before_all do
- # Previous version of the catalog resource
- create(:release, :with_catalog_resource_version, project: project, released_at: '2023-01-01T00:00:00Z',
- author: author)
+ GQL
end
it 'returns the resource with the latest version data' do
@@ -272,24 +349,27 @@ RSpec.describe 'Query.ciCatalogResource', feature_category: :pipeline_compositio
a_graphql_entity_for(
resource,
latestVersion: a_graphql_entity_for(
- latest_version,
- tagName: latest_version.name,
- tagPath: project_tag_path(project, latest_version.name),
- releasedAt: latest_version.released_at,
+ version2,
+ name: version2.name,
+ path: project_tag_path(project, version2.name),
+ releasedAt: version2.released_at,
+ readmeHtml: a_string_including('Readme v2'),
author: a_graphql_entity_for(author, :name)
)
)
)
end
- end
- context 'when the resource does not have a version' do
- it 'returns nil' do
- post_query
+ context 'when the resource does not have a version' do
+ it 'returns nil' do
+ resource.versions.delete_all(:delete_all)
- expect(graphql_data_at(:ciCatalogResource)).to match(
- a_graphql_entity_for(resource, latestVersion: nil)
- )
+ post_query
+
+ expect(graphql_data_at(:ciCatalogResource)).to match(
+ a_graphql_entity_for(resource, latestVersion: nil)
+ )
+ end
end
end
end
diff --git a/spec/requests/api/graphql/ci/catalog/resources_spec.rb b/spec/requests/api/graphql/ci/catalog/resources_spec.rb
index 49a3f3be1d7..150507fc442 100644
--- a/spec/requests/api/graphql/ci/catalog/resources_spec.rb
+++ b/spec/requests/api/graphql/ci/catalog/resources_spec.rb
@@ -106,7 +106,7 @@ RSpec.describe 'Query.ciCatalogResources', feature_category: :pipeline_compositi
versions {
nodes {
id
- tagName
+ name
releasedAt
author {
id
@@ -153,7 +153,7 @@ RSpec.describe 'Query.ciCatalogResources', feature_category: :pipeline_compositi
id
latestVersion {
id
- tagName
+ name
releasedAt
author {
id
@@ -185,7 +185,7 @@ RSpec.describe 'Query.ciCatalogResources', feature_category: :pipeline_compositi
resource1,
latestVersion: a_graphql_entity_for(
latest_version1,
- tagName: latest_version1.name,
+ name: latest_version1.name,
releasedAt: latest_version1.released_at,
author: a_graphql_entity_for(author1, :name)
)
@@ -194,7 +194,7 @@ RSpec.describe 'Query.ciCatalogResources', feature_category: :pipeline_compositi
public_resource,
latestVersion: a_graphql_entity_for(
latest_version2,
- tagName: latest_version2.name,
+ name: latest_version2.name,
releasedAt: latest_version2.released_at,
author: a_graphql_entity_for(author2, :name)
)
diff --git a/spec/requests/api/graphql/ci/instance_variables_spec.rb b/spec/requests/api/graphql/ci/instance_variables_spec.rb
index a612b4c91b6..6731631a075 100644
--- a/spec/requests/api/graphql/ci/instance_variables_spec.rb
+++ b/spec/requests/api/graphql/ci/instance_variables_spec.rb
@@ -12,6 +12,7 @@ RSpec.describe 'Query.ciVariables', feature_category: :secrets_management do
nodes {
id
key
+ description
value
variableType
protected
@@ -36,6 +37,7 @@ RSpec.describe 'Query.ciVariables', feature_category: :secrets_management do
expect(graphql_data.dig('ciVariables', 'nodes')).to contain_exactly({
'id' => variable.to_global_id.to_s,
'key' => 'TEST_VAR',
+ 'description' => nil,
'value' => 'test',
'variableType' => 'ENV_VAR',
'masked' => false,
diff --git a/spec/requests/api/graphql/ci/runner_spec.rb b/spec/requests/api/graphql/ci/runner_spec.rb
index 8262640b283..1b6948d0380 100644
--- a/spec/requests/api/graphql/ci/runner_spec.rb
+++ b/spec/requests/api/graphql/ci/runner_spec.rb
@@ -876,107 +876,95 @@ RSpec.describe 'Query.runner(id)', :freeze_time, feature_category: :fleet_visibi
end
describe 'Query limits' do
- def runner_query(runner)
- <<~SINGLE
- runner(id: "#{runner.to_global_id}") {
- #{all_graphql_fields_for('CiRunner', excluded: excluded_fields)}
- createdBy {
- id
- username
- webPath
- webUrl
- }
- groups {
- nodes {
- id
- path
- fullPath
- webUrl
- }
- }
- projects {
- nodes {
- id
- path
- fullPath
- webUrl
- }
- }
- ownerProject {
- id
- path
- fullPath
- webUrl
- }
+ let_it_be(:user2) { another_admin }
+ let_it_be(:user3) { create(:user) }
+ let_it_be(:tag_list) { %w[n_plus_1_test some_tag] }
+ let_it_be(:args) do
+ { current_user: user, token: { personal_access_token: create(:personal_access_token, user: user) } }
+ end
+
+ let_it_be(:runner1) { create(:ci_runner, tag_list: tag_list, creator: user) }
+ let_it_be(:runner2) do
+ create(:ci_runner, :group, groups: [group], tag_list: tag_list, creator: user)
+ end
+
+ let_it_be(:runner3) do
+ create(:ci_runner, :project, projects: [project1], tag_list: tag_list, creator: user)
+ end
+
+ let(:single_discrete_runners_query) do
+ multiple_discrete_runners_query([])
+ end
+
+ let(:runner_fragment) do
+ <<~QUERY
+ #{all_graphql_fields_for('CiRunner', excluded: excluded_fields)}
+ createdBy {
+ id
+ username
+ webPath
+ webUrl
}
- SINGLE
+ QUERY
end
- let(:active_project_runner2) { create(:ci_runner, :project) }
- let(:active_group_runner2) { create(:ci_runner, :group) }
+ # Exclude fields that are already hardcoded above (or tested separately),
+ # and also some fields from deeper objects which are problematic:
+ # - createdBy: Known N+1 issues, but only on exotic fields which we don't normally use
+ # - ownerProject.pipeline: Needs arguments (iid or sha)
+ # - project.productAnalyticsState: Can be requested only for 1 Project(s) at a time.
+ let(:excluded_fields) { %w[createdBy jobs pipeline productAnalyticsState] }
+
+ it 'avoids N+1 queries', :use_sql_query_cache do
+ discrete_runners_control = ActiveRecord::QueryRecorder.new(skip_cached: false) do
+ post_graphql(single_discrete_runners_query, **args)
+ end
+
+ additional_runners = setup_additional_records
+
+ expect do
+ post_graphql(multiple_discrete_runners_query(additional_runners), **args)
- # Exclude fields that are already hardcoded above
- let(:excluded_fields) { %w[createdBy jobs groups projects ownerProject] }
+ raise StandardError, flattened_errors if graphql_errors # Ensure any error in query causes test to fail
+ end.not_to exceed_query_limit(discrete_runners_control)
+ end
- let(:single_query) do
+ def runner_query(runner, nr)
<<~QUERY
- {
- instance_runner1: #{runner_query(active_instance_runner)}
- group_runner1: #{runner_query(active_group_runner)}
- project_runner1: #{runner_query(active_project_runner)}
+ runner#{nr}: runner(id: "#{runner.to_global_id}") {
+ #{runner_fragment}
}
QUERY
end
- let(:double_query) do
+ def multiple_discrete_runners_query(additional_runners)
<<~QUERY
{
- instance_runner1: #{runner_query(active_instance_runner)}
- instance_runner2: #{runner_query(inactive_instance_runner)}
- group_runner1: #{runner_query(active_group_runner)}
- group_runner2: #{runner_query(active_group_runner2)}
- project_runner1: #{runner_query(active_project_runner)}
- project_runner2: #{runner_query(active_project_runner2)}
+ #{runner_query(runner1, 1)}
+ #{runner_query(runner2, 2)}
+ #{runner_query(runner3, 3)}
+ #{additional_runners.each_with_index.map { |r, i| runner_query(r, 4 + i) }.join("\n")}
}
QUERY
end
- it 'does not execute more queries per runner', :aggregate_failures, quarantine: "https://gitlab.com/gitlab-org/gitlab/-/issues/391442" do
- # warm-up license cache and so on:
- personal_access_token = create(:personal_access_token, user: user)
- args = { current_user: user, token: { personal_access_token: personal_access_token } }
- post_graphql(double_query, **args)
-
- control = ActiveRecord::QueryRecorder.new { post_graphql(single_query, **args) }
-
- personal_access_token = create(:personal_access_token, user: another_admin)
- args = { current_user: another_admin, token: { personal_access_token: personal_access_token } }
- expect { post_graphql(double_query, **args) }.not_to exceed_query_limit(control)
-
- expect(graphql_data.count).to eq 6
- expect(graphql_data).to match(
- a_hash_including(
- 'instance_runner1' => a_graphql_entity_for(active_instance_runner),
- 'instance_runner2' => a_graphql_entity_for(inactive_instance_runner),
- 'group_runner1' => a_graphql_entity_for(
- active_group_runner,
- groups: { 'nodes' => contain_exactly(a_graphql_entity_for(group)) }
- ),
- 'group_runner2' => a_graphql_entity_for(
- active_group_runner2,
- groups: { 'nodes' => active_group_runner2.groups.map { |g| a_graphql_entity_for(g) } }
- ),
- 'project_runner1' => a_graphql_entity_for(
- active_project_runner,
- projects: { 'nodes' => active_project_runner.projects.map { |p| a_graphql_entity_for(p) } },
- owner_project: a_graphql_entity_for(active_project_runner.projects[0])
- ),
- 'project_runner2' => a_graphql_entity_for(
- active_project_runner2,
- projects: { 'nodes' => active_project_runner2.projects.map { |p| a_graphql_entity_for(p) } },
- owner_project: a_graphql_entity_for(active_project_runner2.projects[0])
- )
- ))
+ def setup_additional_records
+ # Add more runners (including owned by other users)
+ runner4 = create(:ci_runner, tag_list: tag_list + %w[tag1 tag2], creator: user2)
+ runner5 = create(:ci_runner, :group, groups: [create(:group)], tag_list: tag_list + %w[tag2 tag3], creator: user3)
+ # Add one more project to runner
+ runner3.assign_to(create(:project))
+
+ # Add more runner managers (including to existing runners)
+ runner_manager1 = create(:ci_runner_machine, runner: runner1)
+ create(:ci_runner_machine, runner: runner1)
+ create(:ci_runner_machine, runner: runner2, system_xid: runner_manager1.system_xid)
+ create(:ci_runner_machine, runner: runner3)
+ create(:ci_runner_machine, runner: runner4, version: '16.4.1')
+ create(:ci_runner_machine, runner: runner5, version: '16.4.0', system_xid: runner_manager1.system_xid)
+ create(:ci_runner_machine, runner: runner3)
+
+ [runner4, runner5]
end
end
diff --git a/spec/requests/api/graphql/ci/runners_spec.rb b/spec/requests/api/graphql/ci/runners_spec.rb
index 0fe14bef778..bfe5282cbaa 100644
--- a/spec/requests/api/graphql/ci/runners_spec.rb
+++ b/spec/requests/api/graphql/ci/runners_spec.rb
@@ -18,22 +18,34 @@ RSpec.describe 'Query.runners', feature_category: :fleet_visibility do
let(:fields) do
<<~QUERY
nodes {
- #{all_graphql_fields_for('CiRunner', excluded: %w[createdBy ownerProject])}
- createdBy {
- username
- webPath
- webUrl
- }
- ownerProject {
- id
- path
- fullPath
- webUrl
- }
+ #{all_graphql_fields_for('CiRunner', excluded: excluded_fields)}
}
QUERY
end
+ let(:query) do
+ %(
+ query {
+ runners {
+ #{fields}
+ }
+ }
+ )
+ end
+
+ # Exclude fields from deeper objects which are problematic:
+ # - ownerProject.pipeline: Needs arguments (iid or sha)
+ # - project.productAnalyticsState: Can be requested only for 1 Project(s) at a time.
+ let(:excluded_fields) { %w[pipeline productAnalyticsState] }
+
+ it 'returns expected runners' do
+ post_graphql(query, current_user: current_user)
+
+ expect(runners_graphql_data['nodes']).to contain_exactly(
+ *Ci::Runner.all.map { |expected_runner| a_graphql_entity_for(expected_runner) }
+ )
+ end
+
context 'with filters' do
shared_examples 'a working graphql query returning expected runners' do
it_behaves_like 'a working graphql query' do
@@ -49,31 +61,6 @@ RSpec.describe 'Query.runners', feature_category: :fleet_visibility do
*Array(expected_runners).map { |expected_runner| a_graphql_entity_for(expected_runner) }
)
end
-
- it 'does not execute more queries per runner', :aggregate_failures do
- # warm-up license cache and so on:
- personal_access_token = create(:personal_access_token, user: current_user)
- args = { current_user: current_user, token: { personal_access_token: personal_access_token } }
- post_graphql(query, **args)
- expect(graphql_data_at(:runners, :nodes)).not_to be_empty
-
- admin2 = create(:admin)
- personal_access_token = create(:personal_access_token, user: admin2)
- args = { current_user: admin2, token: { personal_access_token: personal_access_token } }
- control = ActiveRecord::QueryRecorder.new { post_graphql(query, **args) }
-
- runner2 = create(:ci_runner, :instance, version: '14.0.0', tag_list: %w[tag5 tag6], creator: admin2)
- runner3 = create(:ci_runner, :project, version: '14.0.1', projects: [project], tag_list: %w[tag3 tag8],
- creator: current_user)
-
- create(:ci_build, :failed, runner: runner2)
- create(:ci_runner_machine, runner: runner2, version: '16.4.1')
-
- create(:ci_build, :failed, runner: runner3)
- create(:ci_runner_machine, runner: runner3, version: '16.4.0')
-
- expect { post_graphql(query, **args) }.not_to exceed_query_limit(control)
- end
end
context 'when filtered on type and status' do
@@ -178,56 +165,129 @@ RSpec.describe 'Query.runners', feature_category: :fleet_visibility do
end
end
end
- end
- context 'without filters' do
- context 'with managers requested for multiple runners' do
- let(:fields) do
- <<~QUERY
- nodes {
- managers {
- nodes {
- #{all_graphql_fields_for('CiRunnerManager', max_depth: 1)}
- }
- }
- }
- QUERY
- end
+ context 'when filtered by creator' do
+ let_it_be(:user) { create(:user) }
+ let_it_be(:runner_created_by_user) { create(:ci_runner, :project, creator: user) }
let(:query) do
%(
query {
- runners {
+ runners(creatorId: "#{creator.to_global_id}") {
#{fields}
}
}
)
end
- it 'does not execute more queries per runner', :aggregate_failures do
- # warm-up license cache and so on:
- personal_access_token = create(:personal_access_token, user: current_user)
- args = { current_user: current_user, token: { personal_access_token: personal_access_token } }
- post_graphql(query, **args)
- expect(graphql_data_at(:runners, :nodes)).not_to be_empty
-
- admin2 = create(:admin)
- personal_access_token = create(:personal_access_token, user: admin2)
- args = { current_user: admin2, token: { personal_access_token: personal_access_token } }
- control = ActiveRecord::QueryRecorder.new { post_graphql(query, **args) }
-
- create(:ci_runner, :instance, :with_runner_manager, version: '14.0.0', tag_list: %w[tag5 tag6],
- creator: admin2)
- create(:ci_runner, :project, :with_runner_manager, version: '14.0.1', projects: [project],
- tag_list: %w[tag3 tag8],
- creator: current_user)
-
- expect { post_graphql(query, **args) }.not_to exceed_query_limit(control)
+ context 'when existing user id given' do
+ let(:creator) { user }
+
+ before do
+ create(:ci_runner, :project, creator: create(:user)) # Should not be returned
+ end
+
+ it_behaves_like 'a working graphql query returning expected runners' do
+ let(:expected_runners) { runner_created_by_user }
+ end
+ end
+
+ context 'when non existent user id given' do
+ let(:creator) { User.new(id: non_existing_record_id) }
+
+ it 'does not return any runners' do
+ post_graphql(query, current_user: current_user)
+
+ expect(graphql_data_at(:runners, :nodes)).to be_empty
+ end
end
end
end
end
+ describe 'Runner query limits' do
+ let_it_be(:user) { create(:user, :admin) }
+ let_it_be(:user2) { create(:user) }
+ let_it_be(:user3) { create(:user) }
+ let_it_be(:group) { create(:group) }
+ let_it_be(:project) { create(:project) }
+ let_it_be(:tag_list) { %w[n_plus_1_test some_tag] }
+ let_it_be(:args) do
+ { current_user: user, token: { personal_access_token: create(:personal_access_token, user: user) } }
+ end
+
+ let_it_be(:runner1) { create(:ci_runner, tag_list: tag_list, creator: user) }
+ let_it_be(:runner2) do
+ create(:ci_runner, :group, groups: [group], tag_list: tag_list, creator: user)
+ end
+
+ let_it_be(:runner3) do
+ create(:ci_runner, :project, projects: [project], tag_list: tag_list, creator: user)
+ end
+
+ let(:runner_fragment) do
+ <<~QUERY
+ #{all_graphql_fields_for('CiRunner', excluded: excluded_fields)}
+ createdBy {
+ id
+ username
+ webPath
+ webUrl
+ }
+ QUERY
+ end
+
+ # Exclude fields that are already hardcoded above (or tested separately),
+ # and also some fields from deeper objects which are problematic:
+ # - createdBy: Known N+1 issues, but only on exotic fields which we don't normally use
+ # - ownerProject.pipeline: Needs arguments (iid or sha)
+ # - project.productAnalyticsState: Can be requested only for 1 Project(s) at a time.
+ let(:excluded_fields) { %w[createdBy jobs pipeline productAnalyticsState] }
+
+ let(:runners_query) do
+ <<~QUERY
+ {
+ runners {
+ nodes { #{runner_fragment} }
+ }
+ }
+ QUERY
+ end
+
+ it 'avoids N+1 queries', :use_sql_query_cache do
+ personal_access_token = create(:personal_access_token, user: user)
+ args = { current_user: user, token: { personal_access_token: personal_access_token } }
+
+ runners_control = ActiveRecord::QueryRecorder.new(skip_cached: false) { post_graphql(runners_query, **args) }
+
+ setup_additional_records
+
+ expect { post_graphql(runners_query, **args) }.not_to exceed_query_limit(runners_control)
+ end
+
+ def setup_additional_records
+ # Add more runners (including owned by other users)
+ runner4 = create(:ci_runner, tag_list: tag_list + %w[tag1 tag2], creator: user2)
+ runner5 = create(:ci_runner, :group, groups: [create(:group)], tag_list: tag_list + %w[tag2 tag3], creator: user3)
+ # Add one more project to runner
+ runner3.assign_to(create(:project))
+
+ # Add more runner managers (including to existing runners)
+ runner_manager1 = create(:ci_runner_machine, runner: runner1)
+ create(:ci_runner_machine, runner: runner1)
+ create(:ci_runner_machine, runner: runner2, system_xid: runner_manager1.system_xid)
+ create(:ci_runner_machine, runner: runner3)
+ create(:ci_runner_machine, runner: runner4, version: '16.4.1')
+ create(:ci_runner_machine, runner: runner5, version: '16.4.0', system_xid: runner_manager1.system_xid)
+ create(:ci_runner_machine, runner: runner3)
+
+ create(:ci_build, :failed, runner: runner4)
+ create(:ci_build, :failed, runner: runner5)
+
+ [runner4, runner5]
+ end
+ end
+
describe 'pagination' do
let(:data_path) { [:runners] }
diff --git a/spec/requests/api/graphql/container_repository/container_repository_details_spec.rb b/spec/requests/api/graphql/container_repository/container_repository_details_spec.rb
index 2acdd509355..46563aba992 100644
--- a/spec/requests/api/graphql/container_repository/container_repository_details_spec.rb
+++ b/spec/requests/api/graphql/container_repository/container_repository_details_spec.rb
@@ -430,6 +430,85 @@ RSpec.describe 'container repository details', feature_category: :container_regi
it_behaves_like 'returning an invalid value error'
end
+
+ context 'with referrers' do
+ let(:tags_response) { container_repository_details_response.dig('tags', 'edges') }
+ let(:raw_tags_response) do
+ [
+ {
+ name: 'latest',
+ digest: 'sha256:1234567892',
+ config_digest: 'sha256:3332132331',
+ media_type: 'application/vnd.oci.image.manifest.v1+json',
+ size_bytes: 1234509876,
+ created_at: 10.minutes.ago,
+ updated_at: 10.minutes.ago,
+ referrers: [
+ {
+ artifactType: 'application/vnd.example+type',
+ digest: 'sha256:57d3be92c2f857566ecc7f9306a80021c0a7fa631e0ef5146957235aea859961'
+ },
+ {
+ artifactType: 'application/vnd.example+type+2',
+ digest: 'sha256:01db72e42d61b8d2183d53475814cce2bfb9c8a254e97539a852441979cd5c90'
+ }
+ ]
+ },
+ {
+ name: 'latest',
+ digest: 'sha256:1234567893',
+ config_digest: 'sha256:3332132331',
+ media_type: 'application/vnd.oci.image.manifest.v1+json',
+ size_bytes: 1234509877,
+ created_at: 9.minutes.ago,
+ updated_at: 9.minutes.ago
+ }
+ ]
+ end
+
+ let(:query) do
+ <<~GQL
+ query($id: ContainerRepositoryID!, $n: String) {
+ containerRepository(id: $id) {
+ tags(name: $n, referrers: true) {
+ edges {
+ node {
+ #{all_graphql_fields_for('ContainerRepositoryTag')}
+ }
+ }
+ }
+ }
+ }
+ GQL
+ end
+
+ let(:url) { URI('/gitlab/v1/repositories/group1/proj1/tags/list/?before=tag1&referrers=true') }
+
+ let(:response_body) do
+ {
+ pagination: { previous: { uri: url }, next: { uri: url } },
+ response_body: ::Gitlab::Json.parse(raw_tags_response.to_json)
+ }
+ end
+
+ it 'includes referrers in response' do
+ subject
+
+ refs = tags_response.map { |tag| tag.dig('node', 'referrers') }
+
+ expect(refs.first.size).to eq(2)
+ expect(refs.first.first).to include({
+ 'artifactType' => 'application/vnd.example+type',
+ 'digest' => 'sha256:57d3be92c2f857566ecc7f9306a80021c0a7fa631e0ef5146957235aea859961'
+ })
+ expect(refs.first.second).to include({
+ 'artifactType' => 'application/vnd.example+type+2',
+ 'digest' => 'sha256:01db72e42d61b8d2183d53475814cce2bfb9c8a254e97539a852441979cd5c90'
+ })
+
+ expect(refs.second).to be_empty
+ end
+ end
end
it_behaves_like 'handling graphql network errors with the container registry'
diff --git a/spec/requests/api/graphql/mutations/branch_rules/create_spec.rb b/spec/requests/api/graphql/mutations/branch_rules/create_spec.rb
new file mode 100644
index 00000000000..85ba3d58ee5
--- /dev/null
+++ b/spec/requests/api/graphql/mutations/branch_rules/create_spec.rb
@@ -0,0 +1,68 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe 'BranchRuleCreate', feature_category: :source_code_management do
+ include GraphqlHelpers
+ let_it_be(:project) { create(:project, :public) }
+ let_it_be(:current_user, reload: true) { create(:user) }
+
+ let(:params) do
+ {
+ project_path: project.full_path,
+ name: branch_name
+ }
+ end
+
+ let(:branch_name) { 'branch_name/*' }
+ let(:mutation) { graphql_mutation(:branch_rule_create, params) }
+ let(:mutation_response) { graphql_mutation_response(:branch_rule_create) }
+ let(:mutation_errors) { mutation_response['errors'] }
+
+ subject(:post_mutation) { post_graphql_mutation(mutation, current_user: current_user) }
+
+ context 'when the user does not have permission' do
+ before_all do
+ project.add_developer(current_user)
+ end
+
+ it_behaves_like 'a mutation that returns a top-level access error'
+
+ it 'does not create the board' do
+ expect { post_mutation }.not_to change { ProtectedBranch.count }
+ end
+ end
+
+ context 'when the user can create a branch rules' do
+ before_all do
+ project.add_maintainer(current_user)
+ end
+
+ it 'creates the protected branch' do
+ expect { post_mutation }.to change { ProtectedBranch.count }.by(1)
+ end
+
+ it 'returns the created branch rule' do
+ post_mutation
+
+ expect(mutation_response).to have_key('branchRule')
+ expect(mutation_response['branchRule']['name']).to eq(branch_name)
+ expect(mutation_errors).to be_empty
+ end
+
+ context 'when the branch rule already exist' do
+ let!(:existing_rule) { create :protected_branch, name: branch_name, project: project }
+
+ it 'does not create the protected branch' do
+ expect { post_mutation }.not_to change { ProtectedBranch.count }
+ end
+
+ it 'return an error message' do
+ post_mutation
+
+ expect(mutation_errors).to include 'Name has already been taken'
+ expect(mutation_response['branchRule']).to be_nil
+ end
+ end
+ end
+end
diff --git a/spec/requests/api/graphql/mutations/ml/models/create_spec.rb b/spec/requests/api/graphql/mutations/ml/models/create_spec.rb
new file mode 100644
index 00000000000..0daabeab0d1
--- /dev/null
+++ b/spec/requests/api/graphql/mutations/ml/models/create_spec.rb
@@ -0,0 +1,48 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe 'Creation of a machine learning model', feature_category: :mlops do
+ include GraphqlHelpers
+
+ let_it_be(:model) { create(:ml_models) }
+ let_it_be(:project) { model.project }
+ let_it_be(:current_user) { project.owner }
+
+ let(:input) { { project_path: project.full_path, name: name, description: description } }
+ let(:name) { 'some_name' }
+ let(:description) { 'A description' }
+
+ let(:mutation) { graphql_mutation(:ml_model_create, input) }
+ let(:mutation_response) { graphql_mutation_response(:ml_model_create) }
+
+ context 'when user is not allowed to write changes' do
+ before do
+ allow(Ability).to receive(:allowed?).and_call_original
+ allow(Ability).to receive(:allowed?)
+ .with(current_user, :write_model_registry, project)
+ .and_return(false)
+ end
+
+ it_behaves_like 'a mutation that returns a top-level access error'
+ end
+
+ context 'when user is allowed write changes' do
+ it 'creates a model' do
+ post_graphql_mutation(mutation, current_user: current_user)
+
+ expect(response).to have_gitlab_http_status(:success)
+ expect(mutation_response['model']).to include(
+ 'name' => name,
+ 'description' => description
+ )
+ end
+
+ context 'when name already exists' do
+ err_msg = "Name has already been taken"
+ let(:name) { model.name }
+
+ it_behaves_like 'a mutation that returns errors in the response', errors: [err_msg]
+ end
+ end
+end
diff --git a/spec/requests/api/graphql/mutations/namespace/package_settings/update_spec.rb b/spec/requests/api/graphql/mutations/namespace/package_settings/update_spec.rb
index 05c1a2d96d9..7c5d86b9f5c 100644
--- a/spec/requests/api/graphql/mutations/namespace/package_settings/update_spec.rb
+++ b/spec/requests/api/graphql/mutations/namespace/package_settings/update_spec.rb
@@ -23,7 +23,9 @@ RSpec.describe 'Updating the package settings', feature_category: :package_regis
lock_npm_package_requests_forwarding: true,
pypi_package_requests_forwarding: true,
lock_pypi_package_requests_forwarding: true,
- nuget_symbol_server_enabled: true
+ nuget_symbol_server_enabled: true,
+ terraform_module_duplicates_allowed: true,
+ terraform_module_duplicate_exception_regex: 'foo-.*'
}
end
@@ -44,6 +46,8 @@ RSpec.describe 'Updating the package settings', feature_category: :package_regis
pypiPackageRequestsForwarding
lockPypiPackageRequestsForwarding
nugetSymbolServerEnabled
+ terraformModuleDuplicatesAllowed
+ terraformModuleDuplicateExceptionRegex
}
errors
QL
@@ -73,6 +77,8 @@ RSpec.describe 'Updating the package settings', feature_category: :package_regis
expect(package_settings_response['npmPackageRequestsForwarding']).to eq(params[:npm_package_requests_forwarding])
expect(package_settings_response['lockNpmPackageRequestsForwarding']).to eq(params[:lock_npm_package_requests_forwarding])
expect(package_settings_response['nugetSymbolServerEnabled']).to eq(params[:nuget_symbol_server_enabled])
+ expect(package_settings_response['terraformModuleDuplicatesAllowed']).to eq(params[:terraform_module_duplicates_allowed])
+ expect(package_settings_response['terraformModuleDuplicateExceptionRegex']).to eq(params[:terraform_module_duplicate_exception_regex])
end
end
@@ -115,7 +121,9 @@ RSpec.describe 'Updating the package settings', feature_category: :package_regis
lock_npm_package_requests_forwarding: false,
pypi_package_requests_forwarding: nil,
lock_pypi_package_requests_forwarding: false,
- nuget_symbol_server_enabled: false
+ nuget_symbol_server_enabled: false,
+ terraform_module_duplicates_allowed: false,
+ terraform_module_duplicate_exception_regex: 'foo'
}, to: {
maven_duplicates_allowed: false,
maven_duplicate_exception_regex: 'foo-.*',
@@ -129,7 +137,9 @@ RSpec.describe 'Updating the package settings', feature_category: :package_regis
lock_npm_package_requests_forwarding: true,
pypi_package_requests_forwarding: true,
lock_pypi_package_requests_forwarding: true,
- nuget_symbol_server_enabled: true
+ nuget_symbol_server_enabled: true,
+ terraform_module_duplicates_allowed: true,
+ terraform_module_duplicate_exception_regex: 'foo-.*'
}
it_behaves_like 'returning a success'
diff --git a/spec/requests/api/graphql/mutations/organizations/update_spec.rb b/spec/requests/api/graphql/mutations/organizations/update_spec.rb
index 4e819c280d0..33890ae4592 100644
--- a/spec/requests/api/graphql/mutations/organizations/update_spec.rb
+++ b/spec/requests/api/graphql/mutations/organizations/update_spec.rb
@@ -8,7 +8,7 @@ RSpec.describe Mutations::Organizations::Update, feature_category: :cell do
let_it_be(:user) { create(:user) }
let_it_be_with_reload(:organization) do
- create(:organization) { |org| create(:organization_user, organization: org, user: user) }
+ create(:organization) { |org| create(:organization_user, :owner, organization: org, user: user) }
end
let(:mutation) { graphql_mutation(:organization_update, params) }
diff --git a/spec/requests/api/graphql/mutations/work_items/create_spec.rb b/spec/requests/api/graphql/mutations/work_items/create_spec.rb
index 78b93c3210b..2c2cd5f2acc 100644
--- a/spec/requests/api/graphql/mutations/work_items/create_spec.rb
+++ b/spec/requests/api/graphql/mutations/work_items/create_spec.rb
@@ -281,6 +281,18 @@ RSpec.describe 'Create a work item', feature_category: :team_planning do
it_behaves_like 'creates work item'
+ # This is a temporary measure just to ensure the internal id migration doesn't get conflicts
+ # More info in https://gitlab.com/gitlab-org/gitlab/-/merge_requests/139367
+ context 'when making the request in a production environment' do
+ before do
+ stub_rails_env('production')
+ end
+
+ it_behaves_like 'a mutation that returns top-level errors', errors: [
+ 'Group level work items are disabled. Only project paths allowed in `namespacePath`.'
+ ]
+ end
+
context 'when the namespace_level_work_items feature flag is disabled' do
before do
stub_feature_flags(namespace_level_work_items: false)
diff --git a/spec/requests/api/graphql/namespace/projects_spec.rb b/spec/requests/api/graphql/namespace/projects_spec.rb
index a4bc94798be..107fcd8dcdd 100644
--- a/spec/requests/api/graphql/namespace/projects_spec.rb
+++ b/spec/requests/api/graphql/namespace/projects_spec.rb
@@ -20,6 +20,7 @@ RSpec.describe 'getting projects', feature_category: :groups_and_projects do
'namespace',
{ 'fullPath' => subject.full_path },
<<~QUERY
+ id
projects(includeSubgroups: #{include_subgroups}) {
edges {
node {
@@ -53,24 +54,30 @@ RSpec.describe 'getting projects', feature_category: :groups_and_projects do
expect(graphql_data['namespace']['projects']['edges'].size).to eq(count)
end
+ end
- context 'with no user' do
- it 'finds only public projects' do
- post_graphql(query, current_user: nil)
+ it_behaves_like 'a graphql namespace'
- expect(graphql_data['namespace']).to be_nil
- end
+ context 'when no user is given' do
+ it 'finds only public projects' do
+ post_graphql(query, current_user: nil)
+
+ expect(graphql_data_at(:namespace, :projects, :edges).size).to eq(1)
end
end
- it_behaves_like 'a graphql namespace'
-
context 'when the namespace is a user' do
subject { user.namespace }
let(:include_subgroups) { false }
it_behaves_like 'a graphql namespace'
+
+ it 'does not show namespace entity for anonymous user' do
+ post_graphql(query, current_user: nil)
+
+ expect(graphql_data['namespace']).to be_nil
+ end
end
context 'when not including subgroups' do
diff --git a/spec/requests/api/graphql/namespace/root_storage_statistics_spec.rb b/spec/requests/api/graphql/namespace/root_storage_statistics_spec.rb
index c8819f1e38f..273b6b8c25b 100644
--- a/spec/requests/api/graphql/namespace/root_storage_statistics_spec.rb
+++ b/spec/requests/api/graphql/namespace/root_storage_statistics_spec.rb
@@ -63,7 +63,7 @@ RSpec.describe 'rendering namespace statistics', feature_category: :metrics do
it 'hides statistics for unauthenticated requests' do
post_graphql(query, current_user: nil)
- expect(graphql_data['namespace']).to be_blank
+ expect(graphql_data_at(:namespace, :root_storage_statistics)).to be_blank
end
end
end
diff --git a/spec/requests/api/graphql/namespace_query_spec.rb b/spec/requests/api/graphql/namespace_query_spec.rb
index c0c7c5fee2b..86808915564 100644
--- a/spec/requests/api/graphql/namespace_query_spec.rb
+++ b/spec/requests/api/graphql/namespace_query_spec.rb
@@ -8,7 +8,8 @@ RSpec.describe 'Query', feature_category: :groups_and_projects do
let_it_be(:user) { create(:user) }
let_it_be(:other_user) { create(:user) }
- let_it_be(:group_namespace) { create(:group) }
+ let_it_be(:group_namespace) { create(:group, :private) }
+ let_it_be(:public_group_namespace) { create(:group, :public) }
let_it_be(:user_namespace) { create(:user_namespace, owner: user) }
let_it_be(:project_namespace) { create(:project_namespace, parent: group_namespace) }
@@ -60,6 +61,51 @@ RSpec.describe 'Query', feature_category: :groups_and_projects do
end
end
+ context 'when used with a public group' do
+ let(:target_namespace) { public_group_namespace }
+
+ before do
+ subject
+ end
+
+ it_behaves_like 'a working graphql query'
+
+ context 'when user is a member' do
+ before do
+ public_group_namespace.add_developer(user)
+ end
+
+ it 'fetches the expected data' do
+ expect(query_result).to include(
+ 'fullPath' => target_namespace.full_path,
+ 'name' => target_namespace.name
+ )
+ end
+ end
+
+ context 'when user is anonymous' do
+ let(:current_user) { nil }
+
+ it 'fetches the expected data' do
+ expect(query_result).to include(
+ 'fullPath' => target_namespace.full_path,
+ 'name' => target_namespace.name
+ )
+ end
+ end
+
+ context 'when user is not a member' do
+ let(:current_user) { other_user }
+
+ it 'fetches the expected data' do
+ expect(query_result).to include(
+ 'fullPath' => target_namespace.full_path,
+ 'name' => target_namespace.name
+ )
+ end
+ end
+ end
+
it_behaves_like 'retrieving a namespace' do
let(:target_namespace) { group_namespace }
diff --git a/spec/requests/api/graphql/organizations/organization_query_spec.rb b/spec/requests/api/graphql/organizations/organization_query_spec.rb
index c485e3b170d..14becd52e93 100644
--- a/spec/requests/api/graphql/organizations/organization_query_spec.rb
+++ b/spec/requests/api/graphql/organizations/organization_query_spec.rb
@@ -7,7 +7,6 @@ RSpec.describe 'getting organization information', feature_category: :cell do
let(:query) { graphql_query_for(:organization, { id: organization.to_global_id }, organization_fields) }
let(:current_user) { user }
- let(:groups) { graphql_data_at(:organization, :groups, :nodes) }
let(:organization_fields) do
<<~FIELDS
id
@@ -23,24 +22,9 @@ RSpec.describe 'getting organization information', feature_category: :cell do
let_it_be(:organization_user) { create(:organization_user) }
let_it_be(:organization) { organization_user.organization }
let_it_be(:user) { organization_user.user }
- let_it_be(:parent_group) { create(:group, name: 'parent-group', organization: organization) }
- let_it_be(:public_group) { create(:group, name: 'public-group', parent: parent_group, organization: organization) }
- let_it_be(:other_group) { create(:group, name: 'other-group', organization: organization) }
- let_it_be(:outside_organization_group) { create(:group) }
-
- let_it_be(:private_group) do
- create(:group, :private, name: 'private-group', organization: organization)
- end
-
- let_it_be(:no_access_group_in_org) do
- create(:group, :private, name: 'no-access', organization: organization)
- end
-
- before_all do
- private_group.add_developer(user)
- public_group.add_developer(user)
- other_group.add_developer(user)
- outside_organization_group.add_developer(user)
+ let_it_be(:project) { create(:project, organization: organization) { |p| p.add_developer(user) } }
+ let_it_be(:other_group) do
+ create(:group, name: 'other-group', organization: organization) { |g| g.add_developer(user) }
end
subject(:request_organization) { post_graphql(query, current_user: current_user) }
@@ -62,25 +46,6 @@ RSpec.describe 'getting organization information', feature_category: :cell do
end
end
- context 'when resolve_organization_groups feature flag is disabled' do
- before do
- stub_feature_flags(resolve_organization_groups: false)
- end
-
- it 'returns no groups' do
- request_organization
-
- expect(graphql_data_at(:organization)).not_to be_nil
- expect(graphql_data_at(:organization, :groups, :nodes)).to be_empty
- end
- end
-
- it 'does not return ancestors of authorized groups' do
- request_organization
-
- expect(groups.pluck('id')).not_to include(parent_group.to_global_id.to_s)
- end
-
context 'when requesting organization user' do
let(:organization_fields) do
<<~FIELDS
@@ -102,13 +67,13 @@ RSpec.describe 'getting organization information', feature_category: :cell do
it 'returns correct organization user fields' do
request_organization
- organization_user_node = graphql_data_at(:organization, :organizationUsers, :nodes).first
+ organization_user_nodes = graphql_data_at(:organization, :organizationUsers, :nodes)
expected_attributes = {
"badges" => [{ "text" => "It's you!", "variant" => 'muted' }],
"id" => organization_user.to_global_id.to_s,
"user" => { "id" => user.to_global_id.to_s }
}
- expect(organization_user_node).to match(expected_attributes)
+ expect(organization_user_nodes).to include(expected_attributes)
end
it 'avoids N+1 queries for all the fields' do
@@ -116,6 +81,8 @@ RSpec.describe 'getting organization information', feature_category: :cell do
organization_user_2 = create(:organization_user, organization: organization)
other_group.add_developer(organization_user_2.user)
+ organization_user_from_project = create(:organization_user, organization: organization)
+ project.add_developer(organization_user_from_project.user)
expect { run_query }.not_to exceed_query_limit(base_query_count)
end
@@ -127,62 +94,144 @@ RSpec.describe 'getting organization information', feature_category: :cell do
end
end
- context 'with `search` argument' do
- let(:search) { 'oth' }
- let(:organization_fields) do
- <<~FIELDS
- id
- path
- groups(search: "#{search}") {
- nodes {
- id
- name
- }
- }
- FIELDS
+ context 'when requesting groups' do
+ let(:groups) { graphql_data_at(:organization, :groups, :nodes) }
+ let_it_be(:parent_group) { create(:group, name: 'parent-group', organization: organization) }
+ let_it_be(:public_group) do
+ create(:group, name: 'public-group', parent: parent_group, organization: organization)
end
- it 'filters groups by name' do
- request_organization
+ let_it_be(:private_group) do
+ create(:group, :private, name: 'private-group', organization: organization)
+ end
- expect(groups).to contain_exactly(a_graphql_entity_for(other_group))
+ before_all do
+ create(:group, :private, name: 'no-access', organization: organization)
+ private_group.add_developer(user)
+ public_group.add_developer(user)
+ create(:group) { |g| g.add_developer(user) } # outside organization
end
- end
- context 'with `sort` argument' do
- using RSpec::Parameterized::TableSyntax
+ context 'when resolve_organization_groups feature flag is disabled' do
+ before do
+ stub_feature_flags(resolve_organization_groups: false)
+ end
+
+ it 'returns no groups' do
+ request_organization
+
+ expect(graphql_data_at(:organization)).not_to be_nil
+ expect(graphql_data_at(:organization, :groups, :nodes)).to be_empty
+ end
+ end
- let(:authorized_groups) { [public_group, private_group, other_group] }
+ it 'does not return ancestors of authorized groups' do
+ request_organization
- where(:field, :direction, :sorted_groups) do
- 'id' | 'asc' | lazy { authorized_groups.sort_by(&:id) }
- 'id' | 'desc' | lazy { authorized_groups.sort_by(&:id).reverse }
- 'name' | 'asc' | lazy { authorized_groups.sort_by(&:name) }
- 'name' | 'desc' | lazy { authorized_groups.sort_by(&:name).reverse }
- 'path' | 'asc' | lazy { authorized_groups.sort_by(&:path) }
- 'path' | 'desc' | lazy { authorized_groups.sort_by(&:path).reverse }
+ expect(groups.pluck('id')).not_to include(parent_group.to_global_id.to_s)
end
- with_them do
- let(:sort) { "#{field}_#{direction}".upcase }
+ context 'with `search` argument' do
+ let(:search) { 'oth' }
let(:organization_fields) do
<<~FIELDS
id
path
- groups(sort: #{sort}) {
+ groups(search: "#{search}") {
nodes {
id
+ name
}
}
FIELDS
end
- it 'sorts the groups' do
+ it 'filters groups by name' do
request_organization
- expect(groups.pluck('id')).to eq(sorted_groups.map(&:to_global_id).map(&:to_s))
+ expect(groups).to contain_exactly(a_graphql_entity_for(other_group))
end
end
+
+ context 'with `sort` argument' do
+ using RSpec::Parameterized::TableSyntax
+
+ let(:authorized_groups) { [public_group, private_group, other_group] }
+
+ where(:field, :direction, :sorted_groups) do
+ 'id' | 'asc' | lazy { authorized_groups.sort_by(&:id) }
+ 'id' | 'desc' | lazy { authorized_groups.sort_by(&:id).reverse }
+ 'name' | 'asc' | lazy { authorized_groups.sort_by(&:name) }
+ 'name' | 'desc' | lazy { authorized_groups.sort_by(&:name).reverse }
+ 'path' | 'asc' | lazy { authorized_groups.sort_by(&:path) }
+ 'path' | 'desc' | lazy { authorized_groups.sort_by(&:path).reverse }
+ end
+
+ with_them do
+ let(:sort) { "#{field}_#{direction}".upcase }
+ let(:organization_fields) do
+ <<~FIELDS
+ id
+ path
+ groups(sort: #{sort}) {
+ nodes {
+ id
+ }
+ }
+ FIELDS
+ end
+
+ it 'sorts the groups' do
+ request_organization
+
+ expect(groups.pluck('id')).to eq(sorted_groups.map(&:to_global_id).map(&:to_s))
+ end
+ end
+ end
+ end
+
+ context 'when requesting projects' do
+ let(:projects) { graphql_data_at(:organization, :projects, :nodes) }
+ let(:organization_fields) do
+ <<~FIELDS
+ projects {
+ nodes {
+ id
+ }
+ }
+ FIELDS
+ end
+
+ before_all do
+ create(:project) { |p| p.add_developer(user) } # some other project that shouldn't show up in our results
+ end
+
+ before do
+ request_organization
+ end
+
+ it_behaves_like 'a working graphql query'
+
+ it 'returns projects' do
+ expect(projects).to contain_exactly(a_graphql_entity_for(project))
+ end
+
+ it_behaves_like 'sorted paginated query' do
+ include_context 'no sort argument'
+
+ let_it_be(:another_project) { create(:project, organization: organization) { |p| p.add_developer(user) } }
+ let_it_be(:another_project2) { create(:project, organization: organization) { |p| p.add_developer(user) } }
+ let(:first_param) { 2 }
+ let(:data_path) { [:organization, :projects] }
+ let(:all_records) { [another_project2, another_project, project].map { |p| global_id_of(p).to_s } }
+ end
+
+ def pagination_query(params)
+ graphql_query_for(
+ :organization, { id: organization.to_global_id },
+ query_nodes(:projects, :id, include_pagination_info: true, args: params)
+ )
+ end
end
end
end
diff --git a/spec/requests/api/graphql/organizations/organizations_query_spec.rb b/spec/requests/api/graphql/organizations/organizations_query_spec.rb
new file mode 100644
index 00000000000..12d81ed7412
--- /dev/null
+++ b/spec/requests/api/graphql/organizations/organizations_query_spec.rb
@@ -0,0 +1,56 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe 'getting organizations information', feature_category: :cell do
+ include GraphqlHelpers
+
+ let_it_be(:user) { create(:user) }
+
+ let(:query) { graphql_query_for(:organizations, organizations_fields) }
+ let(:organizations) { graphql_data_at(:organizations, :nodes) }
+ let(:organizations_fields) do
+ <<~FIELDS
+ nodes {
+ id
+ path
+ }
+ FIELDS
+ end
+
+ before_all { create_list(:organization, 3) }
+
+ subject(:request_organization) { post_graphql(query, current_user: current_user) }
+
+ context 'without authenticated user' do
+ let(:current_user) { nil }
+
+ it_behaves_like 'a working graphql query' do
+ before do
+ request_organization
+ end
+ end
+ end
+
+ context 'with authenticated user' do
+ let(:current_user) { user }
+
+ it_behaves_like 'a working graphql query' do
+ before do
+ request_organization
+ end
+ end
+
+ it_behaves_like 'sorted paginated query' do
+ include_context 'no sort argument'
+
+ let(:first_param) { 2 }
+ let(:data_path) { [:organizations] }
+ let(:all_records) { Organizations::Organization.order(id: :desc).map { |o| global_id_of(o).to_s } }
+ end
+
+ def pagination_query(params)
+ graphql_query_for(:organizations, params, "#{page_info} nodes { id }")
+ end
+ end
+end
diff --git a/spec/requests/api/graphql/project/container_repositories_spec.rb b/spec/requests/api/graphql/project/container_repositories_spec.rb
index c86d3bdd14c..2307409c383 100644
--- a/spec/requests/api/graphql/project/container_repositories_spec.rb
+++ b/spec/requests/api/graphql/project/container_repositories_spec.rb
@@ -12,7 +12,7 @@ RSpec.describe 'getting container repositories in a project', feature_category:
let_it_be(:container_repositories) { [container_repository, container_repositories_delete_scheduled, container_repositories_delete_failed].flatten }
let_it_be(:container_expiration_policy) { project.container_expiration_policy }
- let(:excluded_fields) { %w[pipeline jobs productAnalyticsState] }
+ let(:excluded_fields) { %w[pipeline jobs productAnalyticsState mlModels] }
let(:container_repositories_fields) do
<<~GQL
edges {
@@ -155,7 +155,7 @@ RSpec.describe 'getting container repositories in a project', feature_category:
it_behaves_like 'handling graphql network errors with the container registry'
it_behaves_like 'not hitting graphql network errors with the container registry' do
- let(:excluded_fields) { %w[pipeline jobs tags tagsCount productAnalyticsState] }
+ let(:excluded_fields) { %w[pipeline jobs tags tagsCount productAnalyticsState mlModels] }
end
it 'returns the total count of container repositories' do
diff --git a/spec/requests/api/graphql/project/merge_request_spec.rb b/spec/requests/api/graphql/project/merge_request_spec.rb
index 23be9fa5286..96933505838 100644
--- a/spec/requests/api/graphql/project/merge_request_spec.rb
+++ b/spec/requests/api/graphql/project/merge_request_spec.rb
@@ -24,7 +24,7 @@ RSpec.describe 'getting merge request information nested in a project', feature_
# we exclude Project.pipeline because it needs arguments,
# codequalityReportsComparer because it is behind a feature flag
# and runners because the user is not an admin and therefore has no access
- let(:excluded) { %w[jobs pipeline runners codequalityReportsComparer] }
+ let(:excluded) { %w[jobs pipeline runners codequalityReportsComparer mlModels] }
let(:mr_fields) { all_graphql_fields_for('MergeRequest', excluded: excluded) }
before do
diff --git a/spec/requests/api/graphql/project/tree/tree_spec.rb b/spec/requests/api/graphql/project/tree/tree_spec.rb
index 77b72bf39a1..d71908d6458 100644
--- a/spec/requests/api/graphql/project/tree/tree_spec.rb
+++ b/spec/requests/api/graphql/project/tree/tree_spec.rb
@@ -167,6 +167,8 @@ RSpec.describe 'getting a tree in a project', feature_category: :source_code_man
end
context 'when the ref points to a SSH-signed commit' do
+ let_it_be(:project) { create(:project, :repository, :in_group) }
+
let_it_be(:ref) { 'ssh-signed-commit' }
let_it_be(:commit) { project.commit(ref) }
let_it_be(:current_user) { create(:user, email: commit.committer_email).tap { |user| project.add_owner(user) } }
diff --git a/spec/requests/api/graphql/projects/projects_spec.rb b/spec/requests/api/graphql/projects/projects_spec.rb
index 84b8c2285f0..dfebcb7c42c 100644
--- a/spec/requests/api/graphql/projects/projects_spec.rb
+++ b/spec/requests/api/graphql/projects/projects_spec.rb
@@ -45,14 +45,14 @@ RSpec.describe 'getting a collection of projects', feature_category: :source_cod
it 'avoids N+1 queries', :use_sql_query_cache, :clean_gitlab_redis_cache do
post_graphql(single_project_query, current_user: current_user)
- query_count = ActiveRecord::QueryRecorder.new do
+ control = ActiveRecord::QueryRecorder.new do
post_graphql(single_project_query, current_user: current_user)
- end.count
+ end
# There is an N+1 query for max_member_access_for_user_ids
expect do
post_graphql(query, current_user: current_user)
- end.not_to exceed_all_query_limit(query_count + 5)
+ end.not_to exceed_all_query_limit(control).with_threshold(5)
end
it 'returns the expected projects' do
diff --git a/spec/requests/api/graphql/user/user_achievements_query_spec.rb b/spec/requests/api/graphql/user/user_achievements_query_spec.rb
index 2e6c3dcba61..ccff5bdf919 100644
--- a/spec/requests/api/graphql/user/user_achievements_query_spec.rb
+++ b/spec/requests/api/graphql/user/user_achievements_query_spec.rb
@@ -60,14 +60,14 @@ RSpec.describe 'UserAchievements', feature_category: :user_profile do
end
it 'can lookahead to eliminate N+1 queries', :use_clean_rails_memory_store_caching do
- control_count = ActiveRecord::QueryRecorder.new(skip_cached: false) do
+ control = ActiveRecord::QueryRecorder.new(skip_cached: false) do
post_graphql(query, current_user: user)
- end.count
+ end
achievement2 = create(:achievement, namespace: group)
create_list(:user_achievement, 2, achievement: achievement2, user: user)
- expect { post_graphql(query, current_user: user) }.not_to exceed_all_query_limit(control_count)
+ expect { post_graphql(query, current_user: user) }.not_to exceed_all_query_limit(control)
end
context 'when the achievements feature flag is disabled for a namespace' do
diff --git a/spec/requests/api/graphql/work_item_spec.rb b/spec/requests/api/graphql/work_item_spec.rb
index fe77b7ae736..c6d44b057a7 100644
--- a/spec/requests/api/graphql/work_item_spec.rb
+++ b/spec/requests/api/graphql/work_item_spec.rb
@@ -199,7 +199,7 @@ RSpec.describe 'Query.work_item(id)', feature_category: :team_planning do
it 'avoids N+1 queries' do
post_graphql(query, current_user: current_user) # warm up
- control_count = ActiveRecord::QueryRecorder.new(skip_cached: false) do
+ control = ActiveRecord::QueryRecorder.new(skip_cached: false) do
post_graphql(query, current_user: current_user)
end
@@ -207,7 +207,7 @@ RSpec.describe 'Query.work_item(id)', feature_category: :team_planning do
expect do
post_graphql(query, current_user: current_user)
- end.not_to exceed_all_query_limit(control_count)
+ end.not_to exceed_all_query_limit(control)
end
context 'when user is guest' do
diff --git a/spec/requests/api/group_milestones_spec.rb b/spec/requests/api/group_milestones_spec.rb
index 82a4311f7d0..7b4075b3aeb 100644
--- a/spec/requests/api/group_milestones_spec.rb
+++ b/spec/requests/api/group_milestones_spec.rb
@@ -141,11 +141,11 @@ RSpec.describe API::GroupMilestones, feature_category: :team_planning do
it 'returns multiple issues without performing N + 1' do
perform_request
- control_count = ActiveRecord::QueryRecorder.new { perform_request }.count
+ control = ActiveRecord::QueryRecorder.new { perform_request }
create(:issue, project: project, milestone: milestone)
- expect { perform_request }.not_to exceed_query_limit(control_count)
+ expect { perform_request }.not_to exceed_query_limit(control)
end
end
diff --git a/spec/requests/api/groups_spec.rb b/spec/requests/api/groups_spec.rb
index 327dfd0a76b..6b949962e53 100644
--- a/spec/requests/api/groups_spec.rb
+++ b/spec/requests/api/groups_spec.rb
@@ -660,24 +660,24 @@ RSpec.describe API::Groups, feature_category: :groups_and_projects do
get api("/groups/#{group1.id}", user1)
expect(response).to have_gitlab_http_status(:ok)
- control_count = ActiveRecord::QueryRecorder.new do
+ control = ActiveRecord::QueryRecorder.new do
get api("/groups/#{group1.id}", user1)
- end.count
+ end
create(:project, namespace: group1)
expect do
get api("/groups/#{group1.id}", user1)
- end.not_to exceed_query_limit(control_count)
+ end.not_to exceed_query_limit(control)
end
it 'avoids N+1 queries with shared group links' do
# setup at least 1 shared group, so that we record the queries that preload the nested associations too.
create(:group_group_link, shared_group: group1, shared_with_group: create(:group))
- control_count = ActiveRecord::QueryRecorder.new do
+ control = ActiveRecord::QueryRecorder.new do
get api("/groups/#{group1.id}", user1)
- end.count
+ end
# setup "n" more shared groups
create(:group_group_link, shared_group: group1, shared_with_group: create(:group))
@@ -686,7 +686,7 @@ RSpec.describe API::Groups, feature_category: :groups_and_projects do
# test that no of queries for 1 shared group is same as for n shared groups
expect do
get api("/groups/#{group1.id}", user1)
- end.not_to exceed_query_limit(control_count)
+ end.not_to exceed_query_limit(control)
end
end
@@ -1364,15 +1364,15 @@ RSpec.describe API::Groups, feature_category: :groups_and_projects do
get api("/groups/#{group1.id}/projects", user1)
expect(response).to have_gitlab_http_status(:ok)
- control_count = ActiveRecord::QueryRecorder.new do
+ control = ActiveRecord::QueryRecorder.new do
get api("/groups/#{group1.id}/projects", user1)
- end.count
+ end
create(:project, namespace: group1)
expect do
get api("/groups/#{group1.id}/projects", user1)
- end.not_to exceed_query_limit(control_count)
+ end.not_to exceed_query_limit(control)
end
end
@@ -1563,15 +1563,15 @@ RSpec.describe API::Groups, feature_category: :groups_and_projects do
subject
expect(response).to have_gitlab_http_status(:ok)
- control_count = ActiveRecord::QueryRecorder.new do
+ control = ActiveRecord::QueryRecorder.new do
subject
- end.count
+ end
create(:project_group_link, project: create(:project), group: group1)
expect do
subject
- end.not_to exceed_query_limit(control_count)
+ end.not_to exceed_query_limit(control)
end
end
@@ -1937,6 +1937,59 @@ RSpec.describe API::Groups, feature_category: :groups_and_projects do
end
end
+ context 'when group is within a provided organization' do
+ let_it_be(:organization) { create(:organization) }
+
+ context 'when user is an organization user' do
+ before_all do
+ create(:organization_user, user: user3, organization: organization)
+ end
+
+ it 'creates group within organization' do
+ post api('/groups', user3), params: attributes_for_group_api(organization_id: organization.id)
+
+ expect(response).to have_gitlab_http_status(:created)
+ expect(json_response['organization_id']).to eq(organization.id)
+ end
+
+ context 'when parent_group is not part of the organization' do
+ it 'does not create the group with not_found' do
+ post(
+ api('/groups', user3),
+ params: attributes_for_group_api(parent_id: group2.id, organization_id: organization.id)
+ )
+
+ expect(response).to have_gitlab_http_status(:not_found)
+ end
+ end
+ end
+
+ context 'when organization does not exist' do
+ it 'does not create the group with not_found' do
+ post api('/groups', user3), params: attributes_for_group_api(organization_id: non_existing_record_id)
+
+ expect(response).to have_gitlab_http_status(:not_found)
+ end
+ end
+
+ context 'when user is not an organization user' do
+ it 'does not create the group' do
+ post api('/groups', user3), params: attributes_for_group_api(organization_id: organization.id)
+
+ expect(response).to have_gitlab_http_status(:forbidden)
+ end
+ end
+
+ context 'when user is an admin' do
+ it 'creates group within organization' do
+ post api('/groups', admin, admin_mode: true), params: attributes_for_group_api(organization_id: organization.id)
+
+ expect(response).to have_gitlab_http_status(:created)
+ expect(json_response['organization_id']).to eq(organization.id)
+ end
+ end
+ end
+
context "when authenticated as user with group permissions" do
it "creates group", :aggregate_failures do
group = attributes_for_group_api request_access_enabled: false
diff --git a/spec/requests/api/import_bitbucket_server_spec.rb b/spec/requests/api/import_bitbucket_server_spec.rb
index 9a9ccc867a3..4f838be1c81 100644
--- a/spec/requests/api/import_bitbucket_server_spec.rb
+++ b/spec/requests/api/import_bitbucket_server_spec.rb
@@ -128,7 +128,7 @@ RSpec.describe API::ImportBitbucketServer, feature_category: :importers do
.to receive(:new).with(project_key, repo_slug, anything, project.name, user.namespace, user, anything, timeout_strategy)
.and_return(double(execute: project))
- allow(Gitlab::UrlBlocker)
+ allow(Gitlab::HTTP_V2::UrlBlocker)
.to receive(:blocked_url?)
.and_return(true)
post api("/import/bitbucket_server", user), params: {
diff --git a/spec/requests/api/import_github_spec.rb b/spec/requests/api/import_github_spec.rb
index f555f39ff74..532492c9c2c 100644
--- a/spec/requests/api/import_github_spec.rb
+++ b/spec/requests/api/import_github_spec.rb
@@ -20,11 +20,18 @@ RSpec.describe API::ImportGithub, feature_category: :importers do
}
end
+ let(:headers) do
+ {
+ 'x-oauth-scopes' => 'read:org'
+ }
+ end
+
let(:client) { double('client', user: provider_user, repository: provider_repo) }
before do
Grape::Endpoint.before_each do |endpoint|
allow(endpoint).to receive(:client).and_return(client)
+ allow(client).to receive_message_chain(:octokit, :last_response, :headers).and_return(headers)
end
end
diff --git a/spec/requests/api/internal/base_spec.rb b/spec/requests/api/internal/base_spec.rb
index e59633b6d35..87f3ee640f3 100644
--- a/spec/requests/api/internal/base_spec.rb
+++ b/spec/requests/api/internal/base_spec.rb
@@ -1437,7 +1437,7 @@ RSpec.describe API::Internal::Base, feature_category: :system_access do
end
let(:changes) do
- "#{Gitlab::Git::BLANK_SHA} 570e7b2abdd848b95f2f578043fc23bd6f6fd24d refs/heads/#{branch_name}"
+ "#{Gitlab::Git::SHA1_BLANK_SHA} 570e7b2abdd848b95f2f578043fc23bd6f6fd24d refs/heads/#{branch_name}"
end
subject { post api('/internal/post_receive'), params: valid_params, headers: gitlab_shell_internal_api_request_header }
diff --git a/spec/requests/api/internal/kubernetes_spec.rb b/spec/requests/api/internal/kubernetes_spec.rb
index 5ef041881b9..7934fa4a358 100644
--- a/spec/requests/api/internal/kubernetes_spec.rb
+++ b/spec/requests/api/internal/kubernetes_spec.rb
@@ -80,7 +80,7 @@ RSpec.describe API::Internal::Kubernetes, feature_category: :deployment_manageme
it 'returns no_content for valid events' do
counters = { gitops_sync: 10, k8s_api_proxy_request: 5 }
- unique_counters = { agent_users_using_ci_tunnel: [10] }
+ unique_counters = { k8s_api_proxy_requests_unique_users_via_ci_access: [10] }
send_request(params: { counters: counters, unique_counters: unique_counters })
@@ -89,7 +89,7 @@ RSpec.describe API::Internal::Kubernetes, feature_category: :deployment_manageme
it 'returns no_content for counts of zero' do
counters = { gitops_sync: 0, k8s_api_proxy_request: 0 }
- unique_counters = { agent_users_using_ci_tunnel: [] }
+ unique_counters = { k8s_api_proxy_requests_unique_users_via_ci_access: [] }
send_request(params: { counters: counters, unique_counters: unique_counters })
@@ -105,7 +105,7 @@ RSpec.describe API::Internal::Kubernetes, feature_category: :deployment_manageme
end
it 'returns 400 for non unique_counter set' do
- unique_counters = { agent_users_using_ci_tunnel: 1 }
+ unique_counters = { k8s_api_proxy_requests_unique_users_via_ci_access: 1 }
send_request(params: { unique_counters: unique_counters })
@@ -125,7 +125,6 @@ RSpec.describe API::Internal::Kubernetes, feature_category: :deployment_manageme
users = create_list(:user, 3)
user_ids = users.map(&:id) << users[0].id
unique_counters = {
- agent_users_using_ci_tunnel: user_ids,
k8s_api_proxy_requests_unique_users_via_ci_access: user_ids,
k8s_api_proxy_requests_unique_agents_via_ci_access: user_ids,
k8s_api_proxy_requests_unique_users_via_user_access: user_ids,
@@ -191,6 +190,7 @@ RSpec.describe API::Internal::Kubernetes, feature_category: :deployment_manageme
end
it 'tracks events and returns no_content', :aggregate_failures do
+ events[:agent_users_using_ci_tunnel] = events.values.flatten
events.each do |event_name, event_list|
event_list.each do |event|
expect(Gitlab::InternalEvents).to receive(:track_event)
diff --git a/spec/requests/api/invitations_spec.rb b/spec/requests/api/invitations_spec.rb
index dc02e830027..60f3c4780eb 100644
--- a/spec/requests/api/invitations_spec.rb
+++ b/spec/requests/api/invitations_spec.rb
@@ -412,7 +412,7 @@ RSpec.describe API::Invitations, feature_category: :user_profile do
expect do
post invitations_url(project, maintainer), params: { email: emails, access_level: Member::DEVELOPER }
- end.not_to exceed_all_query_limit(control.count).with_threshold(unresolved_n_plus_ones)
+ end.not_to exceed_all_query_limit(control).with_threshold(unresolved_n_plus_ones)
end
it 'does not exceed expected queries count for user_ids', :request_store, :use_sql_query_cache do
@@ -430,7 +430,7 @@ RSpec.describe API::Invitations, feature_category: :user_profile do
expect do
post invitations_url(project, maintainer), params: { user_id: users.map(&:id).join(','), access_level: Member::DEVELOPER }
- end.not_to exceed_all_query_limit(control.count).with_threshold(unresolved_n_plus_ones)
+ end.not_to exceed_all_query_limit(control).with_threshold(unresolved_n_plus_ones)
end
it 'does not exceed expected queries count with secondary emails', :request_store, :use_sql_query_cache do
@@ -453,7 +453,7 @@ RSpec.describe API::Invitations, feature_category: :user_profile do
expect do
post invitations_url(project, maintainer), params: { email: emails, access_level: Member::DEVELOPER }
- end.not_to exceed_all_query_limit(control.count).with_threshold(unresolved_n_plus_ones)
+ end.not_to exceed_all_query_limit(control).with_threshold(unresolved_n_plus_ones)
end
end
@@ -491,7 +491,7 @@ RSpec.describe API::Invitations, feature_category: :user_profile do
expect do
post invitations_url(group, maintainer), params: { email: emails, access_level: Member::DEVELOPER }
- end.not_to exceed_all_query_limit(control.count).with_threshold(unresolved_n_plus_ones)
+ end.not_to exceed_all_query_limit(control).with_threshold(unresolved_n_plus_ones)
end
it 'does not exceed expected queries count for secondary emails', :request_store, :use_sql_query_cache do
@@ -514,7 +514,7 @@ RSpec.describe API::Invitations, feature_category: :user_profile do
expect do
post invitations_url(group, maintainer), params: { email: emails, access_level: Member::DEVELOPER }
- end.not_to exceed_all_query_limit(control.count).with_threshold(unresolved_n_plus_ones)
+ end.not_to exceed_all_query_limit(control).with_threshold(unresolved_n_plus_ones)
end
end
diff --git a/spec/requests/api/issue_links_spec.rb b/spec/requests/api/issue_links_spec.rb
index fcb199a91a4..a4a9eca92b9 100644
--- a/spec/requests/api/issue_links_spec.rb
+++ b/spec/requests/api/issue_links_spec.rb
@@ -40,11 +40,11 @@ RSpec.describe API::IssueLinks, feature_category: :team_planning do
it 'returns multiple links without N + 1' do
perform_request(user)
- control_count = ActiveRecord::QueryRecorder.new { perform_request(user) }.count
+ control = ActiveRecord::QueryRecorder.new { perform_request(user) }
create(:issue_link, source: issue, target: create(:issue, project: project))
- expect { perform_request(user) }.not_to exceed_query_limit(control_count)
+ expect { perform_request(user) }.not_to exceed_query_limit(control)
end
end
end
diff --git a/spec/requests/api/issues/get_project_issues_spec.rb b/spec/requests/api/issues/get_project_issues_spec.rb
index 9e54ec08486..6719297f54f 100644
--- a/spec/requests/api/issues/get_project_issues_spec.rb
+++ b/spec/requests/api/issues/get_project_issues_spec.rb
@@ -233,9 +233,9 @@ RSpec.describe API::Issues, feature_category: :team_planning do
issues = create_list(:issue, 3, project: project, closed_by: user)
- control_count = ActiveRecord::QueryRecorder.new(skip_cached: false) do
+ control = ActiveRecord::QueryRecorder.new(skip_cached: false) do
get api("/projects/#{project.id}/issues", user)
- end.count
+ end
milestone = create(:milestone, project: project)
create(:issue, project: project, milestone: milestone, closed_by: create(:user))
@@ -245,7 +245,7 @@ RSpec.describe API::Issues, feature_category: :team_planning do
expect do
get api("/projects/#{project.id}/issues", user)
- end.not_to exceed_all_query_limit(control_count)
+ end.not_to exceed_all_query_limit(control)
end
it 'returns 404 when project does not exist' do
@@ -361,9 +361,9 @@ RSpec.describe API::Issues, feature_category: :team_planning do
let(:label_c) { create(:label, title: 'bar', project: project) }
it 'avoids N+1 queries' do
- control_count = ActiveRecord::QueryRecorder.new(skip_cached: false) do
+ control = ActiveRecord::QueryRecorder.new(skip_cached: false) do
get api("/projects/#{project.id}/issues?with_labels_details=true", user)
- end.count
+ end
new_issue = create(:issue, project: project)
create(:label_link, label: label, target: new_issue)
@@ -372,7 +372,7 @@ RSpec.describe API::Issues, feature_category: :team_planning do
expect do
get api("/projects/#{project.id}/issues?with_labels_details=true", user)
- end.not_to exceed_all_query_limit(control_count)
+ end.not_to exceed_all_query_limit(control)
end
end
diff --git a/spec/requests/api/maven_packages_spec.rb b/spec/requests/api/maven_packages_spec.rb
index 2110e4a077d..5e432cfca74 100644
--- a/spec/requests/api/maven_packages_spec.rb
+++ b/spec/requests/api/maven_packages_spec.rb
@@ -1054,10 +1054,10 @@ RSpec.describe API::MavenPackages, feature_category: :package_registry do
context 'FIPS mode', :fips_mode do
it_behaves_like 'package workhorse uploads'
- it 'rejects the request for md5 file' do
+ it 'returns 200 for the request for md5 file' do
upload_file_with_token(params: params, file_extension: 'jar.md5')
- expect(response).to have_gitlab_http_status(:unprocessable_entity)
+ expect(response).to have_gitlab_http_status(:ok)
end
end
@@ -1276,10 +1276,13 @@ RSpec.describe API::MavenPackages, feature_category: :package_registry do
end
context 'with FIPS mode enabled', :fips_mode do
- it 'rejects the request' do
+ it 'returns an empty body' do
+ expect_use_primary
+
subject
- expect(response).to have_gitlab_http_status(:unprocessable_entity)
+ expect(response.body).to eq('')
+ expect(response).to have_gitlab_http_status(:ok)
end
end
end
diff --git a/spec/requests/api/members_spec.rb b/spec/requests/api/members_spec.rb
index feb24a4e73f..7fc58140fb6 100644
--- a/spec/requests/api/members_spec.rb
+++ b/spec/requests/api/members_spec.rb
@@ -717,7 +717,7 @@ RSpec.describe API::Members, feature_category: :groups_and_projects do
end.to change { source.members.count }.by(-1)
end
- it_behaves_like 'rate limited endpoint', rate_limit_key: :member_delete do
+ it_behaves_like 'rate limited endpoint', rate_limit_key: :members_delete do
let(:current_user) { maintainer }
let(:another_member) { create(:user) }
diff --git a/spec/requests/api/merge_request_approvals_spec.rb b/spec/requests/api/merge_request_approvals_spec.rb
index 2de59750273..886fc70edf2 100644
--- a/spec/requests/api/merge_request_approvals_spec.rb
+++ b/spec/requests/api/merge_request_approvals_spec.rb
@@ -117,6 +117,18 @@ RSpec.describe API::MergeRequestApprovals, feature_category: :source_code_manage
end
context 'for a bot user' do
+ context 'when the MR is merged' do
+ let(:merge_request) { create(:merge_request, :merged, :simple, author: user, source_project: project) }
+
+ it 'returns 401' do
+ put api("/projects/#{project.id}/merge_requests/#{merge_request.iid}/reset_approvals", bot)
+
+ merge_request.reload
+ expect(response).to have_gitlab_http_status(:unauthorized)
+ expect(merge_request.approvals.pluck(:user_id)).to contain_exactly(user2.id)
+ end
+ end
+
it 'clears approvals of the merge_request' do
put api("/projects/#{project.id}/merge_requests/#{merge_request.iid}/reset_approvals", bot)
diff --git a/spec/requests/api/merge_requests_spec.rb b/spec/requests/api/merge_requests_spec.rb
index 6000fa29dc4..6ba51080bf0 100644
--- a/spec/requests/api/merge_requests_spec.rb
+++ b/spec/requests/api/merge_requests_spec.rb
@@ -193,7 +193,7 @@ RSpec.describe API::MergeRequests, :aggregate_failures, feature_category: :sourc
control = ActiveRecord::QueryRecorder.new do
get api(path, user)
- end.count
+ end
mr = create(:merge_request)
create(:label_link, label: label, target: mr)
@@ -1232,7 +1232,7 @@ RSpec.describe API::MergeRequests, :aggregate_failures, feature_category: :sourc
it 'avoids N+1 queries', quarantine: 'https://gitlab.com/gitlab-org/gitlab/-/issues/330335' do
control = ActiveRecord::QueryRecorder.new do
get api("/projects/#{project.id}/merge_requests", user)
- end.count
+ end
create(:merge_request, author: user, assignees: [user], source_project: project, target_project: project, created_at: base_time)
diff --git a/spec/requests/api/ml/mlflow/experiments_spec.rb b/spec/requests/api/ml/mlflow/experiments_spec.rb
index 409b4529699..ac2d5539408 100644
--- a/spec/requests/api/ml/mlflow/experiments_spec.rb
+++ b/spec/requests/api/ml/mlflow/experiments_spec.rb
@@ -207,4 +207,81 @@ RSpec.describe API::Ml::Mlflow::Experiments, feature_category: :mlops do
it_behaves_like 'MLflow|Bad Request on missing required', [:key, :value]
end
end
+
+ describe 'GET /projects/:id/ml/mlflow/api/2.0/mlflow/experiments/search' do
+ let_it_be(:experiment_b) do
+ create(:ml_experiments, project: project, name: "#{experiment.name}_2")
+ end
+
+ let_it_be(:experiment_c) do
+ create(:ml_experiments, project: project, name: "#{experiment.name}_1")
+ end
+
+ let(:order_by) { nil }
+ let(:default_params) do
+ {
+ 'max_results' => 2,
+ 'order_by' => order_by
+ }
+ end
+
+ let(:route) { "/projects/#{project_id}/ml/mlflow/api/2.0/mlflow/experiments/search" }
+ let(:request) { post api(route), params: default_params.merge(**params), headers: headers }
+
+ it 'returns all the models', :aggregate_failures do
+ is_expected.to have_gitlab_http_status(:ok)
+ is_expected.to match_response_schema('ml/search_experiments')
+ expect(json_response["experiments"].count).to be(2)
+ end
+
+ describe 'pagination and ordering' do
+ RSpec.shared_examples 'a paginated search experiments request with order' do
+ it 'paginates respecting the provided order by' do
+ first_page_experiments = json_response['experiments']
+ expect(first_page_experiments.size).to eq(2)
+
+ expect(first_page_experiments[0]['experiment_id'].to_i).to eq(expected_order[0].iid)
+ expect(first_page_experiments[1]['experiment_id'].to_i).to eq(expected_order[1].iid)
+
+ params = default_params.merge(page_token: json_response['next_page_token'])
+
+ post api(route), params: params, headers: headers
+
+ second_page_response = Gitlab::Json.parse(response.body)
+ second_page_experiments = second_page_response['experiments']
+
+ expect(second_page_response['next_page_token']).to be_nil
+ expect(second_page_experiments.size).to eq(1)
+ expect(second_page_experiments[0]['experiment_id'].to_i).to eq(expected_order[2].iid)
+ end
+ end
+
+ let(:default_order) { [experiment_c, experiment_b, experiment] }
+
+ context 'when ordering is not provided' do
+ let(:expected_order) { default_order }
+
+ it_behaves_like 'a paginated search experiments request with order'
+ end
+
+ context 'when order by column is provided', 'and column exists' do
+ let(:order_by) { 'name ASC' }
+ let(:expected_order) { [experiment, experiment_c, experiment_b] }
+
+ it_behaves_like 'a paginated search experiments request with order'
+ end
+
+ context 'when order by column is provided', 'and column does not exist' do
+ let(:order_by) { 'something DESC' }
+ let(:expected_order) { default_order }
+
+ it_behaves_like 'a paginated search experiments request with order'
+ end
+ end
+
+ describe 'Error States' do
+ it_behaves_like 'MLflow|shared error cases'
+ it_behaves_like 'MLflow|Requires api scope and write permission'
+ end
+ end
end
diff --git a/spec/requests/api/ml/mlflow/model_versions_spec.rb b/spec/requests/api/ml/mlflow/model_versions_spec.rb
index e62bccf1507..812044651af 100644
--- a/spec/requests/api/ml/mlflow/model_versions_spec.rb
+++ b/spec/requests/api/ml/mlflow/model_versions_spec.rb
@@ -157,6 +157,23 @@ RSpec.describe API::Ml::Mlflow::ModelVersions, feature_category: :mlops do
expect(json_response["model_version"]["version"]).to eq('2.0.0')
end
+ describe 'user assigned version' do
+ let(:params) do
+ {
+ 'name' => model_name,
+ 'description' => 'description-text',
+ 'tags' => [{ 'key' => 'gitlab.version', 'value' => '1.2.3' }]
+ }
+ end
+
+ it 'assigns the supplied version string via the gitlab tag' do
+ is_expected.to have_gitlab_http_status(:ok)
+ expect(json_response["model_version"]["version"]).to eq('1.2.3')
+ expect(json_response["model_version"]["tags"]).to match_array([{ "key" => 'gitlab.version',
+ "value" => '1.2.3' }])
+ end
+ end
+
describe 'Error States' do
context 'when has access' do
context 'and model does not exist' do
@@ -164,6 +181,30 @@ RSpec.describe API::Ml::Mlflow::ModelVersions, feature_category: :mlops do
it_behaves_like 'MLflow|Not Found - Resource Does Not Exist'
end
+
+ # TODO: Ensure consistent error responses https://gitlab.com/gitlab-org/gitlab/-/issues/429731
+ context 'when a duplicate tag name is supplied' do
+ let(:params) do
+ { name: model_name, tags: [{ key: 'key1', value: 'value1' }, { key: 'key1', value: 'value2' }] }
+ end
+
+ it "returns a validation error", :aggregate_failures do
+ expect(json_response).to include({ 'error_code' => 'INVALID_PARAMETER_VALUE' })
+ expect(model.metadata.count).to be 0
+ end
+ end
+
+ # TODO: Ensure consistent error responses https://gitlab.com/gitlab-org/gitlab/-/issues/429731
+ context 'when an empty tag name is supplied' do
+ let(:params) do
+ { name: model_name, tags: [{ key: '', value: 'value1' }, { key: 'key1', value: 'value2' }] }
+ end
+
+ it "returns a validation error", :aggregate_failures do
+ expect(json_response).to include({ 'error_code' => 'INVALID_PARAMETER_VALUE' })
+ expect(model.metadata.count).to be 0
+ end
+ end
end
it_behaves_like 'MLflow|an authenticated resource'
diff --git a/spec/requests/api/namespaces_spec.rb b/spec/requests/api/namespaces_spec.rb
index 5fd41013b25..2320b3be0c1 100644
--- a/spec/requests/api/namespaces_spec.rb
+++ b/spec/requests/api/namespaces_spec.rb
@@ -109,6 +109,19 @@ RSpec.describe API::Namespaces, :aggregate_failures, feature_category: :groups_a
expect(json_response.map { |resource| resource['id'] }).to match_array([user.namespace_id, group2.id])
end
end
+
+ context 'with top_level_only param' do
+ it 'returns only top level groups' do
+ group1.add_owner(user)
+ group2.add_owner(user)
+
+ get api("/namespaces?top_level_only=true", user)
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(response).to include_pagination_headers
+ expect(json_response.map { |resource| resource['id'] }).to match_array([user.namespace_id, group1.id])
+ end
+ end
end
end
diff --git a/spec/requests/api/pages_domains_spec.rb b/spec/requests/api/pages_domains_spec.rb
index 42d83ff8139..27d69f1aa03 100644
--- a/spec/requests/api/pages_domains_spec.rb
+++ b/spec/requests/api/pages_domains_spec.rb
@@ -31,7 +31,7 @@ RSpec.describe API::PagesDomains, feature_category: :pages do
let(:route_letsencrypt_domain) { "/projects/#{project.id}/pages/domains/#{pages_domain_with_letsencrypt.domain}" }
before do
- allow(Gitlab.config.pages).to receive(:enabled).and_return(true)
+ stub_pages_setting(enabled: true)
end
describe 'GET /pages/domains' do
diff --git a/spec/requests/api/pages_spec.rb b/spec/requests/api/pages_spec.rb
new file mode 100644
index 00000000000..23ffeb143cb
--- /dev/null
+++ b/spec/requests/api/pages_spec.rb
@@ -0,0 +1,91 @@
+# frozen_string_literal: true
+
+require "spec_helper"
+
+RSpec.describe API::Pages, feature_category: :pages do
+ let_it_be(:project) { create(:project) }
+ let_it_be(:admin) { create(:admin) }
+
+ let(:user) { create(:user) }
+
+ before do
+ stub_pages_setting(enabled: true)
+
+ create(
+ :project_setting,
+ project: project,
+ pages_unique_domain_enabled: true,
+ pages_unique_domain: 'unique-domain')
+ end
+
+ context "when get pages setting endpoint" do
+ let(:user) { create(:user) }
+
+ it "returns the :ok for project maintainers (and above)" do
+ project.add_maintainer(user)
+
+ get api("/projects/#{project.id}/pages", user)
+
+ expect(response).to have_gitlab_http_status(:ok)
+ end
+
+ it "returns the :forbidden for project developers (and below)" do
+ project.add_developer(user)
+
+ get api("/projects/#{project.id}/pages", user)
+
+ expect(response).to have_gitlab_http_status(:forbidden)
+ end
+
+ context "when the pages feature is disabled" do
+ it "returns the :not_found when user is not in the project" do
+ project.project_feature.update!(pages_access_level: 0)
+
+ get api("/projects/#{project.id}/pages", user)
+
+ expect(response).to have_gitlab_http_status(:not_found)
+ end
+ end
+
+ context "when the project has pages deployments", :time_freeze, :aggregate_failures do
+ let_it_be(:created_at) { Time.now.utc }
+
+ before_all do
+ create(:pages_deployment, path_prefix: '/foo', project: project, created_at: created_at)
+ create(:pages_deployment, project: project, created_at: created_at)
+
+ # this one is here to ensure the endpoint doesn't return "inactive" deployments
+ create(
+ :pages_deployment,
+ path_prefix: '/bar',
+ project: project,
+ created_at: created_at,
+ deleted_at: 5.minutes.from_now)
+ end
+
+ it "return the right data" do
+ project.add_owner(user)
+
+ get api("/projects/#{project.id}/pages", user)
+
+ expect(json_response["force_https"]).to eq(false)
+ expect(json_response["is_unique_domain_enabled"]).to eq(true)
+ expect(json_response["url"]).to eq("http://unique-domain.example.com")
+ expect(json_response["deployments"]).to match_array([
+ {
+ "created_at" => created_at.strftime('%Y-%m-%dT%H:%M:%S.%3LZ'),
+ "path_prefix" => "/foo",
+ "root_directory" => "public",
+ "url" => "http://unique-domain.example.com/foo"
+ },
+ {
+ "created_at" => created_at.strftime('%Y-%m-%dT%H:%M:%S.%3LZ'),
+ "path_prefix" => nil,
+ "root_directory" => "public",
+ "url" => "http://unique-domain.example.com/"
+ }
+ ])
+ end
+ end
+ end
+end
diff --git a/spec/requests/api/project_container_repositories_spec.rb b/spec/requests/api/project_container_repositories_spec.rb
index 7797e8e9402..c9bba26524c 100644
--- a/spec/requests/api/project_container_repositories_spec.rb
+++ b/spec/requests/api/project_container_repositories_spec.rb
@@ -65,7 +65,6 @@ RSpec.describe API::ProjectContainerRepositories, feature_category: :container_r
shared_context 'using job token' do
before do
stub_exclusive_lease
- stub_feature_flags(ci_job_token_scope: true)
end
subject { public_send(method, api(url), params: params.merge({ job_token: job.token })) }
@@ -74,29 +73,15 @@ RSpec.describe API::ProjectContainerRepositories, feature_category: :container_r
shared_context 'using job token from another project' do
before do
stub_exclusive_lease
- stub_feature_flags(ci_job_token_scope: true)
end
subject { public_send(method, api(url), params: { job_token: job2.token }) }
end
- shared_context 'using job token while ci_job_token_scope feature flag is disabled' do
- before do
- stub_exclusive_lease
- stub_feature_flags(ci_job_token_scope: false)
- end
-
- subject { public_send(method, api(url), params: params.merge({ job_token: job.token })) }
- end
-
shared_examples 'rejected job token scopes' do
include_context 'using job token from another project' do
it_behaves_like 'rejected container repository access', :maintainer, :forbidden
end
-
- include_context 'using job token while ci_job_token_scope feature flag is disabled' do
- it_behaves_like 'rejected container repository access', :maintainer, :forbidden
- end
end
describe 'GET /projects/:id/registry/repositories' do
diff --git a/spec/requests/api/project_import_spec.rb b/spec/requests/api/project_import_spec.rb
index 49471b98eba..a73f3366dcb 100644
--- a/spec/requests/api/project_import_spec.rb
+++ b/spec/requests/api/project_import_spec.rb
@@ -62,9 +62,9 @@ RSpec.describe API::ProjectImport, :aggregate_failures, feature_category: :impor
it_behaves_like 'requires import source to be enabled'
it 'executes a limited number of queries', :use_clean_rails_redis_caching do
- control_count = ActiveRecord::QueryRecorder.new { subject }.count
+ control = ActiveRecord::QueryRecorder.new { subject }
- expect(control_count).to be <= 111
+ expect(control.count).to be <= 111
end
it 'schedules an import using a namespace' do
diff --git a/spec/requests/api/projects_spec.rb b/spec/requests/api/projects_spec.rb
index b8e029385e3..cf6152a9b67 100644
--- a/spec/requests/api/projects_spec.rb
+++ b/spec/requests/api/projects_spec.rb
@@ -1152,7 +1152,7 @@ RSpec.describe API::Projects, :aggregate_failures, feature_category: :groups_and
expect do
request
- end.not_to exceed_all_query_limit(control.count)
+ end.not_to exceed_all_query_limit(control)
end
end
@@ -3799,7 +3799,7 @@ RSpec.describe API::Projects, :aggregate_failures, feature_category: :groups_and
expect do
post api("/projects/#{project.id}/import_project_members/#{measure_project.id}", user)
- end.not_to exceed_all_query_limit(control.count).with_threshold(unresolved_n_plus_ones)
+ end.not_to exceed_all_query_limit(control).with_threshold(unresolved_n_plus_ones)
end
it 'returns 200 when it successfully imports members from another project' do
diff --git a/spec/requests/api/releases_spec.rb b/spec/requests/api/releases_spec.rb
index 493dc4e72c6..0c811a21fb0 100644
--- a/spec/requests/api/releases_spec.rb
+++ b/spec/requests/api/releases_spec.rb
@@ -156,9 +156,9 @@ RSpec.describe API::Releases, :aggregate_failures, feature_category: :release_or
create(:release, :with_evidence, project: project, tag: 'v0.1', author: maintainer)
create(:release_link, release: project.releases.first)
- control_count = ActiveRecord::QueryRecorder.new(skip_cached: false) do
+ control = ActiveRecord::QueryRecorder.new(skip_cached: false) do
get api("/projects/#{project.id}/releases", maintainer)
- end.count
+ end
create_list(:release, 2, :with_evidence, project: project, author: maintainer)
create_list(:release, 2, project: project)
@@ -167,7 +167,7 @@ RSpec.describe API::Releases, :aggregate_failures, feature_category: :release_or
expect do
get api("/projects/#{project.id}/releases", maintainer)
- end.not_to exceed_all_query_limit(control_count)
+ end.not_to exceed_all_query_limit(control)
end
it 'serializes releases for the first time and read cached data from the second time' do
@@ -1715,9 +1715,9 @@ RSpec.describe API::Releases, :aggregate_failures, feature_category: :release_or
subject
expect(response).to have_gitlab_http_status(:ok)
- control_count = ActiveRecord::QueryRecorder.new(skip_cached: false) do
+ control = ActiveRecord::QueryRecorder.new(skip_cached: false) do
subject
- end.count
+ end
subgroups = create_list(:group, 10, parent: group1)
projects = create_list(:project, 10, namespace: subgroups[0])
@@ -1725,7 +1725,7 @@ RSpec.describe API::Releases, :aggregate_failures, feature_category: :release_or
expect do
subject
- end.not_to exceed_all_query_limit(control_count)
+ end.not_to exceed_all_query_limit(control)
end
end
end
diff --git a/spec/requests/api/terraform/modules/v1/packages_spec.rb b/spec/requests/api/terraform/modules/v1/namespace_packages_spec.rb
index 949acdb17e1..d655085a30f 100644
--- a/spec/requests/api/terraform/modules/v1/packages_spec.rb
+++ b/spec/requests/api/terraform/modules/v1/namespace_packages_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe API::Terraform::Modules::V1::Packages, feature_category: :package_registry do
+RSpec.describe API::Terraform::Modules::V1::NamespacePackages, feature_category: :package_registry do
include_context 'for terraform modules api setup'
using RSpec::Parameterized::TableSyntax
@@ -10,17 +10,19 @@ RSpec.describe API::Terraform::Modules::V1::Packages, feature_category: :package
let(:url) { api("/packages/terraform/modules/v1/#{group.path}/#{package.name}/versions") }
let(:headers) { { 'Authorization' => "Bearer #{tokens[:job_token]}" } }
- subject { get(url, headers: headers) }
+ subject(:get_versions) { get(url, headers: headers) }
context 'with a conflicting package name' do
- let!(:conflicting_package) { create(:terraform_module_package, project: project, name: "conflict-#{package.name}", version: '2.0.0') }
+ let!(:conflicting_package) do
+ create(:terraform_module_package, project: project, name: "conflict-#{package.name}", version: '2.0.0')
+ end
before do
group.add_developer(user)
end
it 'returns only one version' do
- subject
+ get_versions
expect(json_response['modules'][0]['versions'].size).to eq(1)
expect(json_response['modules'][0]['versions'][0]['version']).to eq('1.0.0')
@@ -77,14 +79,14 @@ RSpec.describe API::Terraform::Modules::V1::Packages, feature_category: :package
end
describe 'GET /api/v4/packages/terraform/modules/v1/:module_namespace/:module_name/:module_system/download' do
- context 'empty registry' do
+ context 'with empty registry' do
let(:url) { api("/packages/terraform/modules/v1/#{group.path}/module-2/system/download") }
let(:headers) { {} }
- subject { get(url, headers: headers) }
+ subject(:get_download) { get(url, headers: headers) }
it 'returns not found when there is no module' do
- subject
+ get_download
expect(response).to have_gitlab_http_status(:not_found)
end
@@ -150,14 +152,14 @@ RSpec.describe API::Terraform::Modules::V1::Packages, feature_category: :package
end
describe 'GET /api/v4/packages/terraform/modules/v1/:module_namespace/:module_name/:module_system' do
- context 'empty registry' do
+ context 'with empty registry' do
let(:url) { api("/packages/terraform/modules/v1/#{group.path}/non-existent/system") }
let(:headers) { { 'Authorization' => "Bearer #{tokens[:personal_access_token]}" } }
- subject { get(url, headers: headers) }
+ subject(:get_module) { get(url, headers: headers) }
it 'returns not found when there is no module' do
- subject
+ get_module
expect(response).to have_gitlab_http_status(:not_found)
end
@@ -221,16 +223,16 @@ RSpec.describe API::Terraform::Modules::V1::Packages, feature_category: :package
let(:url) { api("/packages/terraform/modules/v1/#{group.path}/#{package.name}/#{package.version}") }
let(:headers) { {} }
- subject { get(url, headers: headers) }
+ subject(:get_module_version) { get(url, headers: headers) }
- context 'not found' do
+ context 'when not found' do
let(:url) { api("/packages/terraform/modules/v1/#{group.path}/#{package.name}/2.0.0") }
let(:headers) { { 'Authorization' => "Bearer #{tokens[:job_token]}" } }
subject { get(url, headers: headers) }
it 'returns not found when the specified version is not present in the registry' do
- subject
+ get_module_version
expect(response).to have_gitlab_http_status(:not_found)
end
@@ -343,7 +345,10 @@ RSpec.describe API::Terraform::Modules::V1::Packages, feature_category: :package
end
describe 'GET /api/v4/packages/terraform/modules/v1/:module_namespace/:module_name/:module_system/:module_version/file' do
- let(:url) { api("/packages/terraform/modules/v1/#{group.path}/#{package.name}/#{package.version}/file?token=#{token}") }
+ let(:url) do
+ api("/packages/terraform/modules/v1/#{group.path}/#{package.name}/#{package.version}/file?token=#{token}")
+ end
+
let(:tokens) do
{
personal_access_token: ::Gitlab::JWTToken.new.tap { |jwt| jwt['token'] = personal_access_token.id }.encoded,
@@ -352,7 +357,7 @@ RSpec.describe API::Terraform::Modules::V1::Packages, feature_category: :package
}
end
- subject { get(url, headers: headers) }
+ subject(:get_file) { get(url, headers: headers) }
context 'with valid namespace' do
where(:visibility, :user_role, :member, :token_type, :shared_examples_name, :expected_status) do
@@ -414,8 +419,15 @@ RSpec.describe API::Terraform::Modules::V1::Packages, feature_category: :package
end
context 'with package file pending destruction' do
- let_it_be(:package) { create(:package, package_type: :terraform_module, project: project, name: "module-555/pending-destruction", version: '1.0.0') }
- let_it_be(:package_file_pending_destruction) { create(:package_file, :pending_destruction, :xml, package: package) }
+ let_it_be(:package) do
+ create(:package, package_type: :terraform_module, project: project, name: "module-555/pending-destruction",
+ version: '1.0.0')
+ end
+
+ let_it_be(:package_file_pending_destruction) do
+ create(:package_file, :pending_destruction, :xml, package: package)
+ end
+
let_it_be(:package_file) { create(:package_file, :terraform_module, package: package) }
let(:token) { tokens[:personal_access_token] }
@@ -426,7 +438,7 @@ RSpec.describe API::Terraform::Modules::V1::Packages, feature_category: :package
end
it 'does not return them' do
- subject
+ get_file
expect(response).to have_gitlab_http_status(:ok)
expect(response.body).not_to eq(package_file_pending_destruction.file.file.read)
diff --git a/spec/requests/api/terraform/modules/v1/project_packages_spec.rb b/spec/requests/api/terraform/modules/v1/project_packages_spec.rb
index 1f3b2283d59..70f7ec64d40 100644
--- a/spec/requests/api/terraform/modules/v1/project_packages_spec.rb
+++ b/spec/requests/api/terraform/modules/v1/project_packages_spec.rb
@@ -6,6 +6,18 @@ RSpec.describe API::Terraform::Modules::V1::ProjectPackages, feature_category: :
include_context 'for terraform modules api setup'
using RSpec::Parameterized::TableSyntax
+ describe 'GET /api/v4/projects/:project_id/packages/terraform/modules/:module_name/:module_system' do
+ it_behaves_like 'handling project level terraform module download requests' do
+ let(:module_version) { nil }
+ end
+ end
+
+ describe 'GET /api/v4/projects/:project_id/packages/terraform/modules/:module_name/:module_system/:module_version' do
+ it_behaves_like 'handling project level terraform module download requests' do
+ let(:module_version) { package.version }
+ end
+ end
+
describe 'PUT /api/v4/projects/:project_id/packages/terraform/modules/:module_name/:module_system/:module_version/file/authorize' do
include_context 'workhorse headers'
@@ -91,7 +103,28 @@ RSpec.describe API::Terraform::Modules::V1::ProjectPackages, feature_category: :
)
end
+ shared_examples 'creating a package' do
+ it 'creates a package' do
+ expect { api_request }
+ .to change { project.packages.count }.by(1)
+ .and change { Packages::PackageFile.count }.by(1)
+ expect(response).to have_gitlab_http_status(:created)
+ end
+ end
+
+ shared_examples 'not creating a package' do |expected_status|
+ it 'does not create a package' do
+ expect { api_request }
+ .to change { project.packages.count }.by(0)
+ .and change { Packages::PackageFile.count }.by(0)
+ expect(response).to have_gitlab_http_status(expected_status)
+ end
+ end
+
context 'with valid project' do
+ let(:user_headers) { { 'PRIVATE-TOKEN' => personal_access_token.token } }
+ let(:headers) { user_headers.merge(workhorse_headers) }
+
where(:visibility, :user_role, :member, :token_header, :token_type, :shared_examples_name, :expected_status) do
:public | :developer | true | 'PRIVATE-TOKEN' | :personal_access_token | 'process terraform module upload' | :created
:public | :guest | true | 'PRIVATE-TOKEN' | :personal_access_token | 'rejects terraform module packages access' | :forbidden
@@ -135,7 +168,6 @@ RSpec.describe API::Terraform::Modules::V1::ProjectPackages, feature_category: :
with_them do
let(:user_headers) { user_role == :anonymous ? {} : { token_header => token } }
- let(:headers) { user_headers.merge(workhorse_headers) }
let(:snowplow_gitlab_standard_context) do
{ project: project, namespace: project.namespace, user: snowplow_user,
property: 'i_package_terraform_module_user' }
@@ -160,43 +192,73 @@ RSpec.describe API::Terraform::Modules::V1::ProjectPackages, feature_category: :
end
context 'when failed package file save' do
- let(:user_headers) { { 'PRIVATE-TOKEN' => personal_access_token.token } }
- let(:headers) { user_headers.merge(workhorse_headers) }
+ before do
+ project.add_developer(user)
+ allow(Packages::CreatePackageFileService).to receive(:new).and_raise(StandardError)
+ end
+
+ it_behaves_like 'not creating a package', :error
+ end
+
+ context 'with an existing package in the same project' do
+ let_it_be_with_reload(:existing_package) do
+ create(:terraform_module_package, name: 'mymodule/mysystem', version: '1.0.0', project: project)
+ end
before do
project.add_developer(user)
end
- it 'does not create package record', :aggregate_failures do
- allow(Packages::CreatePackageFileService).to receive(:new).and_raise(StandardError)
+ it_behaves_like 'not creating a package', :forbidden
+
+ context 'when marked as pending_destruction' do
+ before do
+ existing_package.pending_destruction!
+ end
- expect { api_request }
- .to change { project.packages.count }.by(0)
- .and change { Packages::PackageFile.count }.by(0)
- expect(response).to have_gitlab_http_status(:error)
+ it_behaves_like 'creating a package'
end
+ end
+
+ context 'with existing package in another project' do
+ let_it_be(:package_settings) { create(:namespace_package_setting, namespace: group) }
+ let_it_be(:project2) { create(:project, namespace: group) }
+ let!(:existing_package) { create(:terraform_module_package, name: 'mymodule/mysystem', project: project2) }
- context 'with an existing package' do
- let_it_be_with_reload(:existing_package) do
- create(:terraform_module_package, name: 'mymodule/mysystem', version: '1.0.0', project: project)
+ before do
+ project.add_developer(user)
+ end
+
+ context 'when duplicates not allowed' do
+ it_behaves_like 'not creating a package', :forbidden
+ end
+
+ context 'when duplicates allowed' do
+ before do
+ package_settings.update_column(:terraform_module_duplicates_allowed, true)
end
- it 'does not create a new package' do
- expect { api_request }
- .to change { project.packages.count }.by(0)
- .and change { Packages::PackageFile.count }.by(0)
- expect(response).to have_gitlab_http_status(:forbidden)
+ it_behaves_like 'creating a package'
+ end
+
+ context 'with duplicate regex exception' do
+ before do
+ package_settings.update_columns(
+ terraform_module_duplicates_allowed: false,
+ terraform_module_duplicate_exception_regex: regex
+ )
+ end
+
+ context 'when regex matches' do
+ let(:regex) { ".*#{existing_package.name.last(3)}.*" }
+
+ it_behaves_like 'creating a package'
end
- context 'when marked as pending_destruction' do
- it 'does create a new package' do
- existing_package.pending_destruction!
+ context 'when regex does not match' do
+ let(:regex) { '.*non-matching-regex.*' }
- expect { api_request }
- .to change { project.packages.count }.by(1)
- .and change { Packages::PackageFile.count }.by(1)
- expect(response).to have_gitlab_http_status(:created)
- end
+ it_behaves_like 'not creating a package', :forbidden
end
end
end
diff --git a/spec/requests/api/users_spec.rb b/spec/requests/api/users_spec.rb
index 86c4e04ef71..de3460208b7 100644
--- a/spec/requests/api/users_spec.rb
+++ b/spec/requests/api/users_spec.rb
@@ -265,9 +265,9 @@ RSpec.describe API::Users, :aggregate_failures, feature_category: :user_profile
end
it 'avoids N+1 queries when requested by admin' do
- control_count = ActiveRecord::QueryRecorder.new(skip_cached: false) do
+ control = ActiveRecord::QueryRecorder.new(skip_cached: false) do
get api(path, admin)
- end.count
+ end
create_list(:user, 3)
@@ -277,19 +277,19 @@ RSpec.describe API::Users, :aggregate_failures, feature_category: :user_profile
expect do
get api(path, admin)
- end.not_to exceed_all_query_limit(control_count + 3)
+ end.not_to exceed_all_query_limit(control).with_threshold(3)
end
it 'avoids N+1 queries when requested by a regular user' do
- control_count = ActiveRecord::QueryRecorder.new(skip_cached: false) do
+ control = ActiveRecord::QueryRecorder.new(skip_cached: false) do
get api(path, user)
- end.count
+ end
create_list(:user, 3)
expect do
get api(path, user)
- end.not_to exceed_all_query_limit(control_count)
+ end.not_to exceed_all_query_limit(control)
end
end
@@ -2272,16 +2272,16 @@ RSpec.describe API::Users, :aggregate_failures, feature_category: :user_profile
it 'avoids N+1 queries' do
second_project.add_maintainer(user)
- control_count = ActiveRecord::QueryRecorder.new do
+ control = ActiveRecord::QueryRecorder.new do
get api(path, user)
- end.count
+ end
deploy_key = create(:deploy_key, user: second_user)
create(:deploy_keys_project, project: second_project, deploy_key_id: deploy_key.id)
expect do
get api(path, user)
- end.not_to exceed_query_limit(control_count)
+ end.not_to exceed_query_limit(control)
end
end
end
@@ -2328,15 +2328,15 @@ RSpec.describe API::Users, :aggregate_failures, feature_category: :user_profile
end
it 'avoids N+1 queries', :request_store do
- control_count = ActiveRecord::QueryRecorder.new(skip_cached: false) do
+ control = ActiveRecord::QueryRecorder.new(skip_cached: false) do
request
- end.count
+ end
create_list(:key, 2, user: user)
expect do
request
- end.not_to exceed_all_query_limit(control_count)
+ end.not_to exceed_all_query_limit(control)
end
end
end
@@ -3044,15 +3044,15 @@ RSpec.describe API::Users, :aggregate_failures, feature_category: :user_profile
end
it 'avoids N+1 queries', :request_store do
- control_count = ActiveRecord::QueryRecorder.new(skip_cached: false) do
+ control = ActiveRecord::QueryRecorder.new(skip_cached: false) do
request
- end.count
+ end
create_list(:key, 2, user: user)
expect do
request
- end.not_to exceed_all_query_limit(control_count)
+ end.not_to exceed_all_query_limit(control)
end
end
diff --git a/spec/requests/application_controller_spec.rb b/spec/requests/application_controller_spec.rb
index 52fdf6bc69e..969fdbe9838 100644
--- a/spec/requests/application_controller_spec.rb
+++ b/spec/requests/application_controller_spec.rb
@@ -5,11 +5,33 @@ require 'spec_helper'
RSpec.describe ApplicationController, type: :request, feature_category: :shared do
let_it_be(:user) { create(:user) }
- before do
- sign_in(user)
- end
-
it_behaves_like 'Base action controller' do
+ before do
+ sign_in(user)
+ end
+
subject(:request) { get root_path }
end
+
+ describe 'session expiration' do
+ context 'when user is authenticated' do
+ it 'does not set the expire_after option' do
+ sign_in(user)
+
+ get root_path
+
+ expect(request.env['rack.session.options'][:expire_after]).to be_nil
+ end
+ end
+
+ context 'when user is unauthenticated' do
+ it 'sets the expire_after option' do
+ get root_path
+
+ expect(request.env['rack.session.options'][:expire_after]).to eq(
+ Settings.gitlab['unauthenticated_session_expire_delay']
+ )
+ end
+ end
+ end
end
diff --git a/spec/requests/explore/catalog_controller_spec.rb b/spec/requests/explore/catalog_controller_spec.rb
index e75b0bba5a6..59a5dcffd3c 100644
--- a/spec/requests/explore/catalog_controller_spec.rb
+++ b/spec/requests/explore/catalog_controller_spec.rb
@@ -13,10 +13,6 @@ RSpec.describe Explore::CatalogController, feature_category: :pipeline_compositi
catalog_resource.project.add_reporter(user)
end
- before do
- sign_in(user)
- end
-
shared_examples 'basic get requests' do |action|
let(:path) do
if action == :index
@@ -34,6 +30,10 @@ RSpec.describe Explore::CatalogController, feature_category: :pipeline_compositi
end
describe 'GET #show' do
+ before do
+ sign_in(user)
+ end
+
it_behaves_like 'basic get requests', :show
context 'when rendering a draft catalog resource' do
@@ -56,14 +56,30 @@ RSpec.describe Explore::CatalogController, feature_category: :pipeline_compositi
end
describe 'GET #index' do
- let(:subject) { get explore_catalog_index_path }
+ subject(:visit_explore_catalog) { get explore_catalog_index_path }
+
+ context 'with an authenticated user' do
+ before do
+ sign_in(user)
+ end
- it_behaves_like 'basic get requests', :index
+ it_behaves_like 'basic get requests', :index
- it_behaves_like 'internal event tracking' do
- let(:namespace) { user.namespace }
- let(:project) { nil }
- let(:event) { 'unique_users_visiting_ci_catalog' }
+ it_behaves_like 'internal event tracking' do
+ let(:namespace) { user.namespace }
+ let(:project) { nil }
+ let(:event) { 'unique_users_visiting_ci_catalog' }
+ end
+ end
+
+ context 'with an anonymous user' do
+ it_behaves_like 'basic get requests', :index
+
+ it 'does not track the event' do
+ expect(Gitlab::InternalEvents).not_to receive(:track_event)
+
+ visit_explore_catalog
+ end
end
end
end
diff --git a/spec/requests/groups/milestones_controller_spec.rb b/spec/requests/groups/milestones_controller_spec.rb
index 54a25333c02..ed24ad6489f 100644
--- a/spec/requests/groups/milestones_controller_spec.rb
+++ b/spec/requests/groups/milestones_controller_spec.rb
@@ -18,14 +18,16 @@ RSpec.describe Groups::MilestonesController, feature_category: :team_planning do
public_project = create(:project, :public, :merge_requests_enabled, :issues_enabled, group: public_group)
create(:milestone, project: public_project)
- control_count = ActiveRecord::QueryRecorder.new(skip_cached: false) { get group_milestones_path(public_group, format: :json) }.count
+ control = ActiveRecord::QueryRecorder.new(skip_cached: false) do
+ get group_milestones_path(public_group, format: :json)
+ end
projects = create_list(:project, 2, :public, :merge_requests_enabled, :issues_enabled, group: public_group)
projects.each do |project|
create(:milestone, project: project)
end
- expect { get group_milestones_path(public_group, format: :json) }.not_to exceed_all_query_limit(control_count)
+ expect { get group_milestones_path(public_group, format: :json) }.not_to exceed_all_query_limit(control)
expect(response).to have_gitlab_http_status(:ok)
milestones = json_response
@@ -66,11 +68,11 @@ RSpec.describe Groups::MilestonesController, feature_category: :team_planning do
it 'avoids N+1 database queries' do
perform_request # warm up the cache
- control_count = ActiveRecord::QueryRecorder.new { perform_request }.count
+ control = ActiveRecord::QueryRecorder.new { perform_request }
create(:merge_request, milestone: milestone, source_project: project, source_branch: 'fix')
- expect { perform_request }.not_to exceed_query_limit(control_count)
+ expect { perform_request }.not_to exceed_query_limit(control)
end
end
end
diff --git a/spec/requests/groups/registry/repositories_controller_spec.rb b/spec/requests/groups/registry/repositories_controller_spec.rb
index f54acf118bb..e4818676f81 100644
--- a/spec/requests/groups/registry/repositories_controller_spec.rb
+++ b/spec/requests/groups/registry/repositories_controller_spec.rb
@@ -20,13 +20,13 @@ RSpec.describe Groups::Registry::RepositoriesController, feature_category: :cont
create(:container_repository, project: project)
endpoint = group_container_registries_path(group, format: :json)
- control_count = ActiveRecord::QueryRecorder.new(skip_cached: false) { get(endpoint) }.count
+ control = ActiveRecord::QueryRecorder.new(skip_cached: false) { get(endpoint) }
create_list(:project, 2, group: group).each do |project|
create_list(:container_repository, 2, project: project)
end
- expect { get(endpoint) }.not_to exceed_all_query_limit(control_count)
+ expect { get(endpoint) }.not_to exceed_all_query_limit(control)
# sanity check that response is 200
expect(response).to have_gitlab_http_status(:ok)
diff --git a/spec/requests/import/gitlab_projects_controller_spec.rb b/spec/requests/import/gitlab_projects_controller_spec.rb
index 732851c7828..21f1f6598d7 100644
--- a/spec/requests/import/gitlab_projects_controller_spec.rb
+++ b/spec/requests/import/gitlab_projects_controller_spec.rb
@@ -28,6 +28,8 @@ RSpec.describe Import::GitlabProjectsController, feature_category: :importers do
end
context 'with a valid path' do
+ let(:experiment) { instance_double(ApplicationExperiment) }
+
it 'schedules an import and redirects to the new project path' do
stub_import(namespace)
@@ -36,6 +38,38 @@ RSpec.describe Import::GitlabProjectsController, feature_category: :importers do
expect(flash[:notice]).to include('is being imported')
expect(response).to have_gitlab_http_status(:found)
end
+
+ it 'tracks default_to_import_tab experiment' do
+ stub_import(namespace)
+
+ allow_next_instance_of(described_class) do |controller|
+ allow(controller)
+ .to receive(:experiment)
+ .with(:default_to_import_tab, actor: user)
+ .and_return(experiment)
+ end
+
+ expect(experiment).to receive(:track).with(:successfully_imported, property: 'gitlab_export')
+
+ subject
+ end
+
+ it 'does not track default_to_import_tab experiment when project was not imported' do
+ allow_next_instance_of(::Projects::GitlabProjectsImportService) do |service|
+ allow(service).to receive(:execute).and_return(build(:project))
+ end
+
+ allow_next_instance_of(described_class) do |controller|
+ allow(controller)
+ .to receive(:experiment)
+ .with(:default_to_import_tab, actor: user)
+ .and_return(experiment)
+ end
+
+ expect(experiment).not_to receive(:track)
+
+ subject
+ end
end
context 'with an invalid path' do
diff --git a/spec/requests/jwks_controller_spec.rb b/spec/requests/jwks_controller_spec.rb
index 3dc3ed68311..0cc30ff41fb 100644
--- a/spec/requests/jwks_controller_spec.rb
+++ b/spec/requests/jwks_controller_spec.rb
@@ -62,19 +62,5 @@ RSpec.describe JwksController, feature_category: :system_access do
expect(response).to have_gitlab_http_status(:ok)
expect(response.headers['Cache-Control']).to include('max-age=86400', 'public', 'must-revalidate', 'no-transform')
end
-
- context 'when cache_control_headers_for_openid_jwks feature flag is disabled' do
- before do
- stub_feature_flags(cache_control_headers_for_openid_jwks: false)
- end
-
- it 'does not have cache control header' do
- get jwks_url
-
- expect(response).to have_gitlab_http_status(:ok)
- expect(response.headers['Cache-Control']).not_to include('max-age=86400', 'public',
- 'no-transform')
- end
- end
end
end
diff --git a/spec/requests/oauth/authorizations_controller_spec.rb b/spec/requests/oauth/authorizations_controller_spec.rb
index 7887bf52542..6ef8970a142 100644
--- a/spec/requests/oauth/authorizations_controller_spec.rb
+++ b/spec/requests/oauth/authorizations_controller_spec.rb
@@ -5,16 +5,19 @@ require 'spec_helper'
RSpec.describe Oauth::AuthorizationsController, feature_category: :system_access do
let_it_be(:user) { create(:user) }
let_it_be(:application) { create(:oauth_application, redirect_uri: 'custom://test') }
- let_it_be(:oauth_authorization_path) do
- Gitlab::Routing.url_helpers.oauth_authorization_url(
+
+ let(:params) do
+ {
client_id: application.uid,
response_type: 'code',
scope: application.scopes,
redirect_uri: application.redirect_uri,
state: SecureRandom.hex
- )
+ }
end
+ let(:oauth_authorization_path) { Gitlab::Routing.url_helpers.oauth_authorization_url(params) }
+
before do
sign_in(user)
end
@@ -76,5 +79,23 @@ RSpec.describe Oauth::AuthorizationsController, feature_category: :system_access
end
end
end
+
+ context 'when the user is not signed in' do
+ before do
+ sign_out(user)
+ end
+
+ it 'sets a lower session expiry and redirects to the sign in page' do
+ get oauth_authorization_path
+
+ expect(request.env['rack.session.options'][:expire_after]).to eq(
+ Settings.gitlab['unauthenticated_session_expire_delay']
+ )
+
+ expect(request.session['user_return_to']).to eq("/oauth/authorize?#{params.to_query}")
+ expect(response).to have_gitlab_http_status(:found)
+ expect(response).to redirect_to(new_user_session_path)
+ end
+ end
end
end
diff --git a/spec/requests/openid_connect_spec.rb b/spec/requests/openid_connect_spec.rb
index 6573fe570db..168008b0eb9 100644
--- a/spec/requests/openid_connect_spec.rb
+++ b/spec/requests/openid_connect_spec.rb
@@ -273,7 +273,7 @@ RSpec.describe 'OpenID Connect requests', feature_category: :system_access do
let(:expected_scopes) do
%w[
admin_mode api read_user read_api read_repository write_repository sudo openid profile email
- read_observability write_observability create_runner k8s_proxy ai_features
+ read_observability write_observability create_runner k8s_proxy ai_features read_service_ping
]
end
diff --git a/spec/requests/organizations/organizations_controller_spec.rb b/spec/requests/organizations/organizations_controller_spec.rb
index bfd0603eb3d..33eafaf8ebd 100644
--- a/spec/requests/organizations/organizations_controller_spec.rb
+++ b/spec/requests/organizations/organizations_controller_spec.rb
@@ -119,4 +119,29 @@ RSpec.describe Organizations::OrganizationsController, feature_category: :cell d
it_behaves_like 'controller action that requires authentication by any user'
end
+
+ describe 'POST #preview_markdown' do
+ subject(:gitlab_request) { post preview_markdown_organizations_path, params: { text: '### Foo \n **bar**' } }
+
+ it_behaves_like 'controller action that requires authentication by any user'
+
+ context 'when the user is signed in' do
+ let_it_be(:user) { create(:user) }
+
+ before do
+ sign_in(user)
+ end
+
+ it 'returns html from markdown' do
+ stub_commonmark_sourcepos_disabled
+ sign_in(user)
+ gitlab_request
+
+ body = Gitlab::Json.parse(response.body)['body']
+
+ expect(body).not_to include('Foo</h3>')
+ expect(body).to include('<strong>bar</strong>')
+ end
+ end
+ end
end
diff --git a/spec/requests/organizations/settings_controller_spec.rb b/spec/requests/organizations/settings_controller_spec.rb
index 1d98e598159..0177187e3a3 100644
--- a/spec/requests/organizations/settings_controller_spec.rb
+++ b/spec/requests/organizations/settings_controller_spec.rb
@@ -21,13 +21,13 @@ RSpec.describe Organizations::SettingsController, feature_category: :cell do
end
context 'when the user is signed in' do
+ let_it_be(:user) { create(:user) }
+
before do
sign_in(user)
end
context 'with no association to an organization' do
- let_it_be(:user) { create(:user) }
-
it_behaves_like 'organization - not found response'
it_behaves_like 'organization - action disabled by `ui_for_organizations` feature flag'
end
@@ -39,11 +39,18 @@ RSpec.describe Organizations::SettingsController, feature_category: :cell do
it_behaves_like 'organization - action disabled by `ui_for_organizations` feature flag'
end
- context 'as an organization user' do
- let_it_be(:user) { create :user }
+ context 'as a default organization user' do
+ before_all do
+ create(:organization_user, organization: organization, user: user)
+ end
- before do
- create :organization_user, organization: organization, user: user
+ it_behaves_like 'organization - not found response'
+ it_behaves_like 'organization - action disabled by `ui_for_organizations` feature flag'
+ end
+
+ context 'as an owner of an organization' do
+ before_all do
+ create(:organization_user, :owner, organization: organization, user: user)
end
it_behaves_like 'organization - successful response'
diff --git a/spec/requests/projects/gcp/artifact_registry/docker_images_controller_spec.rb b/spec/requests/projects/gcp/artifact_registry/docker_images_controller_spec.rb
index d571999feb0..f1c87243516 100644
--- a/spec/requests/projects/gcp/artifact_registry/docker_images_controller_spec.rb
+++ b/spec/requests/projects/gcp/artifact_registry/docker_images_controller_spec.rb
@@ -14,7 +14,7 @@ RSpec.describe Projects::Gcp::ArtifactRegistry::DockerImagesController, feature_
describe '#index' do
let(:service_response) { ServiceResponse.success(payload: dummy_client_payload) }
let(:service_double) do
- instance_double('Integrations::GoogleCloudPlatform::ArtifactRegistry::ListDockerImagesService')
+ instance_double('GoogleCloudPlatform::ArtifactRegistry::ListDockerImagesService')
end
subject(:get_index_page) do
@@ -30,7 +30,7 @@ RSpec.describe Projects::Gcp::ArtifactRegistry::DockerImagesController, feature_
end
before do
- allow_next_instance_of(Integrations::GoogleCloudPlatform::ArtifactRegistry::ListDockerImagesService) do |service|
+ allow_next_instance_of(GoogleCloudPlatform::ArtifactRegistry::ListDockerImagesService) do |service|
allow(service).to receive(:execute).and_return(service_response)
end
end
@@ -58,6 +58,7 @@ RSpec.describe Projects::Gcp::ArtifactRegistry::DockerImagesController, feature_
expect(response.body).to include('tag2')
expect(response.body).to include('Prev')
expect(response.body).to include('Next')
+ expect(response.body).to include('https://location.pkg.dev/project/repo/image@sha256:6a')
end
context 'when the service returns an error response' do
diff --git a/spec/requests/projects/google_cloud/databases_controller_spec.rb b/spec/requests/projects/google_cloud/databases_controller_spec.rb
index fa978a3921f..46f6d27644c 100644
--- a/spec/requests/projects/google_cloud/databases_controller_spec.rb
+++ b/spec/requests/projects/google_cloud/databases_controller_spec.rb
@@ -96,7 +96,7 @@ RSpec.describe Projects::GoogleCloud::DatabasesController, :snowplow, feature_ca
context 'when EnableCloudsqlService fails' do
before do
- allow_next_instance_of(::GoogleCloud::EnableCloudsqlService) do |service|
+ allow_next_instance_of(::CloudSeed::GoogleCloud::EnableCloudsqlService) do |service|
allow(service).to receive(:execute)
.and_return({ status: :error, message: 'error' })
end
@@ -125,7 +125,7 @@ RSpec.describe Projects::GoogleCloud::DatabasesController, :snowplow, feature_ca
context 'when EnableCloudsqlService is successful' do
before do
- allow_next_instance_of(::GoogleCloud::EnableCloudsqlService) do |service|
+ allow_next_instance_of(::CloudSeed::GoogleCloud::EnableCloudsqlService) do |service|
allow(service).to receive(:execute)
.and_return({ status: :success, message: 'success' })
end
@@ -133,7 +133,7 @@ RSpec.describe Projects::GoogleCloud::DatabasesController, :snowplow, feature_ca
context 'when CreateCloudsqlInstanceService fails' do
before do
- allow_next_instance_of(::GoogleCloud::CreateCloudsqlInstanceService) do |service|
+ allow_next_instance_of(::CloudSeed::GoogleCloud::CreateCloudsqlInstanceService) do |service|
allow(service).to receive(:execute)
.and_return({ status: :error, message: 'error' })
end
@@ -162,7 +162,7 @@ RSpec.describe Projects::GoogleCloud::DatabasesController, :snowplow, feature_ca
context 'when CreateCloudsqlInstanceService is successful' do
before do
- allow_next_instance_of(::GoogleCloud::CreateCloudsqlInstanceService) do |service|
+ allow_next_instance_of(::CloudSeed::GoogleCloud::CreateCloudsqlInstanceService) do |service|
allow(service).to receive(:execute)
.and_return({ status: :success, message: 'success' })
end
diff --git a/spec/requests/projects/google_cloud/deployments_controller_spec.rb b/spec/requests/projects/google_cloud/deployments_controller_spec.rb
index e9eac1e7ecd..1a6482477ef 100644
--- a/spec/requests/projects/google_cloud/deployments_controller_spec.rb
+++ b/spec/requests/projects/google_cloud/deployments_controller_spec.rb
@@ -110,7 +110,7 @@ RSpec.describe Projects::GoogleCloud::DeploymentsController, feature_category: :
context 'when enable service fails' do
before do
- allow_next_instance_of(GoogleCloud::EnableCloudRunService) do |service|
+ allow_next_instance_of(CloudSeed::GoogleCloud::EnableCloudRunService) do |service|
allow(service)
.to receive(:execute)
.and_return(
@@ -146,7 +146,7 @@ RSpec.describe Projects::GoogleCloud::DeploymentsController, feature_category: :
before do
mock_gcp_error = Google::Apis::ClientError.new('some_error')
- allow_next_instance_of(GoogleCloud::EnableCloudRunService) do |service|
+ allow_next_instance_of(CloudSeed::GoogleCloud::EnableCloudRunService) do |service|
allow(service).to receive(:execute).and_raise(mock_gcp_error)
end
end
@@ -173,14 +173,14 @@ RSpec.describe Projects::GoogleCloud::DeploymentsController, feature_category: :
context 'GCP_PROJECT_IDs are defined' do
before do
- allow_next_instance_of(GoogleCloud::EnableCloudRunService) do |enable_cloud_run_service|
+ allow_next_instance_of(CloudSeed::GoogleCloud::EnableCloudRunService) do |enable_cloud_run_service|
allow(enable_cloud_run_service).to receive(:execute).and_return({ status: :success })
end
end
context 'when generate pipeline service fails' do
before do
- allow_next_instance_of(GoogleCloud::GeneratePipelineService) do |generate_pipeline_service|
+ allow_next_instance_of(CloudSeed::GoogleCloud::GeneratePipelineService) do |generate_pipeline_service|
allow(generate_pipeline_service).to receive(:execute).and_return({ status: :error })
end
end
@@ -206,7 +206,7 @@ RSpec.describe Projects::GoogleCloud::DeploymentsController, feature_category: :
end
it 'redirects to create merge request form' do
- allow_next_instance_of(GoogleCloud::GeneratePipelineService) do |service|
+ allow_next_instance_of(CloudSeed::GoogleCloud::GeneratePipelineService) do |service|
allow(service).to receive(:execute).and_return({ status: :success })
end
diff --git a/spec/requests/projects/ml/models_controller_spec.rb b/spec/requests/projects/ml/models_controller_spec.rb
index cda3f777a72..e469ee837bc 100644
--- a/spec/requests/projects/ml/models_controller_spec.rb
+++ b/spec/requests/projects/ml/models_controller_spec.rb
@@ -152,6 +152,27 @@ RSpec.describe Projects::Ml::ModelsController, feature_category: :mlops do
end
end
+ describe 'GET new' do
+ subject(:create_model_request) do
+ new_model
+ response
+ end
+
+ before do
+ create_model_request
+ end
+
+ it 'renders the template' do
+ is_expected.to render_template('projects/ml/models/new')
+ end
+
+ context 'when user does not have access' do
+ let(:write_model_registry) { false }
+
+ it { is_expected.to have_gitlab_http_status(:not_found) }
+ end
+ end
+
describe 'destroy' do
let(:model_for_deletion) do
create(:ml_models, project: project)
@@ -198,4 +219,8 @@ RSpec.describe Projects::Ml::ModelsController, feature_category: :mlops do
def delete_model
delete project_ml_model_path(project, model_id)
end
+
+ def new_model
+ get new_project_ml_model_path(project)
+ end
end
diff --git a/spec/requests/projects/pipelines_controller_spec.rb b/spec/requests/projects/pipelines_controller_spec.rb
index aa3fefdef14..8be4fecea04 100644
--- a/spec/requests/projects/pipelines_controller_spec.rb
+++ b/spec/requests/projects/pipelines_controller_spec.rb
@@ -25,14 +25,14 @@ RSpec.describe Projects::PipelinesController, feature_category: :continuous_inte
create_pipelines
- control_count = ActiveRecord::QueryRecorder.new(skip_cached: false) do
+ control = ActiveRecord::QueryRecorder.new(skip_cached: false) do
get_pipelines_index
- end.count
+ end
create_pipelines
# There appears to be one extra query for Pipelines#has_warnings? for some reason
- expect { get_pipelines_index }.not_to exceed_all_query_limit(control_count + 1)
+ expect { get_pipelines_index }.not_to exceed_all_query_limit(control).with_threshold(1)
expect(response).to have_gitlab_http_status(:ok)
expect(json_response['pipelines'].count).to eq(11)
end
@@ -56,9 +56,9 @@ RSpec.describe Projects::PipelinesController, feature_category: :continuous_inte
it 'does not execute N+1 queries' do
request_build_stage
- control_count = ActiveRecord::QueryRecorder.new(skip_cached: false) do
+ control = ActiveRecord::QueryRecorder.new(skip_cached: false) do
request_build_stage
- end.count
+ end
create(:ci_build, pipeline: pipeline, stage: 'build')
@@ -70,7 +70,7 @@ RSpec.describe Projects::PipelinesController, feature_category: :continuous_inte
status: :failed)
end
- expect { request_build_stage }.not_to exceed_all_query_limit(control_count)
+ expect { request_build_stage }.not_to exceed_all_query_limit(control)
expect(response).to have_gitlab_http_status(:ok)
end
@@ -134,14 +134,14 @@ RSpec.describe Projects::PipelinesController, feature_category: :continuous_inte
request_build_stage(retried: true)
- control_count = ActiveRecord::QueryRecorder.new(skip_cached: false) do
+ control = ActiveRecord::QueryRecorder.new(skip_cached: false) do
request_build_stage(retried: true)
- end.count
+ end
create(:ci_build, :retried, :failed, pipeline: pipeline, stage: 'build')
create(:ci_build, :failed, pipeline: pipeline, stage: 'build')
- expect { request_build_stage(retried: true) }.not_to exceed_all_query_limit(control_count)
+ expect { request_build_stage(retried: true) }.not_to exceed_all_query_limit(control)
expect(response).to have_gitlab_http_status(:ok)
end
diff --git a/spec/requests/projects/settings/packages_and_registries_controller_spec.rb b/spec/requests/projects/settings/packages_and_registries_controller_spec.rb
index 2806beadd4e..c660be0f3bf 100644
--- a/spec/requests/projects/settings/packages_and_registries_controller_spec.rb
+++ b/spec/requests/projects/settings/packages_and_registries_controller_spec.rb
@@ -16,6 +16,36 @@ RSpec.describe Projects::Settings::PackagesAndRegistriesController, feature_cate
stub_container_registry_config(enabled: container_registry_enabled)
end
+ describe 'GET #show' do
+ context 'when user is authorized' do
+ let(:user) { project.creator }
+
+ subject { get namespace_project_settings_packages_and_registries_path(user.namespace, project) }
+
+ before do
+ sign_in(user)
+ end
+
+ it 'pushes the feature flag "packages_protected_packages" to the view' do
+ subject
+
+ expect(response.body).to have_pushed_frontend_feature_flags(packagesProtectedPackages: true)
+ end
+
+ context 'when feature flag "packages_protected_packages" is disabled' do
+ before do
+ stub_feature_flags(packages_protected_packages: false)
+ end
+
+ it 'does not push the feature flag "packages_protected_packages" to the view' do
+ subject
+
+ expect(response.body).not_to have_pushed_frontend_feature_flags(packagesProtectedPackages: true)
+ end
+ end
+ end
+ end
+
describe 'GET #cleanup_tags' do
subject { get cleanup_image_tags_namespace_project_settings_packages_and_registries_path(user.namespace, project) }
diff --git a/spec/requests/search_controller_spec.rb b/spec/requests/search_controller_spec.rb
index 365b20ad4aa..eaf11653256 100644
--- a/spec/requests/search_controller_spec.rb
+++ b/spec/requests/search_controller_spec.rb
@@ -112,7 +112,7 @@ RSpec.describe SearchController, type: :request, feature_category: :global_searc
control = ActiveRecord::QueryRecorder.new { send_search_request(params_for_one) }
expect(response.body).to include('search-results') # Confirm search results to prevent false positives
- expect { send_search_request(params_for_many) }.not_to exceed_query_limit(control.count)
+ expect { send_search_request(params_for_many) }.not_to exceed_query_limit(control)
expect(response.body).to include('search-results') # Confirm search results to prevent false positives
end
end
@@ -125,7 +125,7 @@ RSpec.describe SearchController, type: :request, feature_category: :global_searc
control = ActiveRecord::QueryRecorder.new { send_search_request(params_for_one) }
expect(response.body).to include('search-results') # Confirm search results to prevent false positives
- expect { send_search_request(params_for_many) }.not_to exceed_query_limit(control.count)
+ expect { send_search_request(params_for_many) }.not_to exceed_query_limit(control)
expect(response.body).to include('search-results') # Confirm search results to prevent false positives
end
end
diff --git a/spec/requests/users_controller_spec.rb b/spec/requests/users_controller_spec.rb
index da111831c15..3b2d3a6f7a5 100644
--- a/spec/requests/users_controller_spec.rb
+++ b/spec/requests/users_controller_spec.rb
@@ -799,6 +799,46 @@ RSpec.describe UsersController, feature_category: :user_management do
end
end
+ describe 'GET #groups' do
+ before do
+ sign_in(user)
+ end
+
+ context 'format html' do
+ it 'renders groups page' do
+ get user_groups_url user.username
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(response).to render_template('show')
+ end
+ end
+
+ context 'format json' do
+ it 'response with groups data' do
+ get user_groups_url user.username, format: :json
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(json_response).to have_key('html')
+ end
+
+ context 'pagination' do
+ let!(:per_page_limit) { 3 }
+
+ before do
+ allow(Kaminari.config).to receive(:default_per_page).and_return(per_page_limit)
+ create_list(:group, per_page_limit + 2).each { |group| group.add_owner(user) }
+ end
+
+ it 'paginates without count' do
+ get user_groups_url user.username, format: :json
+
+ expect(assigns(:groups).size).to eq(per_page_limit)
+ expect(assigns(:groups)).to be_a(Kaminari::PaginatableWithoutCount)
+ end
+ end
+ end
+ end
+
describe '#ensure_canonical_path' do
before do
sign_in(user)
diff --git a/spec/routing/organizations/organizations_controller_routing_spec.rb b/spec/routing/organizations/organizations_controller_routing_spec.rb
index f105bb31ccf..c5c7a0ae377 100644
--- a/spec/routing/organizations/organizations_controller_routing_spec.rb
+++ b/spec/routing/organizations/organizations_controller_routing_spec.rb
@@ -29,4 +29,9 @@ RSpec.describe Organizations::OrganizationsController, :routing, feature_categor
expect(get("/-/organizations/#{organization.path}/users"))
.to route_to('organizations/organizations#users', organization_path: organization.path)
end
+
+ it 'routes to #preview_markdown' do
+ expect(post("/-/organizations/preview_markdown"))
+ .to route_to('organizations/organizations#preview_markdown')
+ end
end
diff --git a/spec/routing/uploads_routing_spec.rb b/spec/routing/uploads_routing_spec.rb
index 63840b4c30b..c63ca7608c5 100644
--- a/spec/routing/uploads_routing_spec.rb
+++ b/spec/routing/uploads_routing_spec.rb
@@ -79,6 +79,19 @@ RSpec.describe 'Uploads', 'routing' do
end
end
+ context 'for organizations' do
+ it 'allows fetching organization avatars' do
+ expect(get('/uploads/-/system/organizations/organization_detail/avatar/1/test.jpg')).to route_to(
+ controller: 'uploads',
+ action: 'show',
+ model: 'organizations/organization_detail',
+ id: '1',
+ filename: 'test.jpg',
+ mounted_as: 'avatar'
+ )
+ end
+ end
+
it 'does not allow creating uploads for other models' do
unroutable_models = UploadsController::MODEL_CLASSES.keys.compact - %w[personal_snippet user abuse_report]
diff --git a/spec/rubocop/cop/gitlab/license_available_usage_spec.rb b/spec/rubocop/cop/gitlab/license_available_usage_spec.rb
new file mode 100644
index 00000000000..230224e8e53
--- /dev/null
+++ b/spec/rubocop/cop/gitlab/license_available_usage_spec.rb
@@ -0,0 +1,34 @@
+# frozen_string_literal: true
+
+require 'rubocop_spec_helper'
+require 'rspec-parameterized'
+require_relative '../../../../rubocop/cop/gitlab/license_available_usage'
+
+RSpec.describe RuboCop::Cop::Gitlab::LicenseAvailableUsage, feature_category: :shared do
+ let(:msg) { described_class::MSG }
+
+ describe 'uses license check' do
+ it 'registers an offense' do
+ expect_offense(<<~SOURCE)
+ License.feature_available?(:elastic_search) && super
+ ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ Avoid License.feature_available? usage in ApplicationSetting due to possible cyclical dependency issue. For more information see: https://gitlab.com/gitlab-org/gitlab/-/issues/423237
+ SOURCE
+ end
+ end
+
+ describe 'no license check' do
+ let(:source) do
+ <<~RUBY
+ class C
+ def check_without_license_usage?
+ test?(:feature)
+ end
+ end
+ RUBY
+ end
+
+ it 'does not register an offense' do
+ expect_no_offenses(source)
+ end
+ end
+end
diff --git a/spec/rubocop/cop/rails/avoid_time_comparison_spec.rb b/spec/rubocop/cop/rails/avoid_time_comparison_spec.rb
new file mode 100644
index 00000000000..8ab430072b1
--- /dev/null
+++ b/spec/rubocop/cop/rails/avoid_time_comparison_spec.rb
@@ -0,0 +1,57 @@
+# frozen_string_literal: true
+
+require 'rubocop_spec_helper'
+require_relative '../../../../rubocop/cop/rails/avoid_time_comparison'
+
+RSpec.describe RuboCop::Cop::Rails::AvoidTimeComparison, feature_category: :shared do
+ shared_examples 'using time comparison' do
+ let(:violation_string_length) { "datetime > #{time}".length }
+
+ it 'flags violation' do
+ expect_offense(<<~RUBY)
+ datetime > #{time}
+ #{'^' * violation_string_length} Avoid time comparison, use `.past?` or `.future?` instead.
+ RUBY
+
+ expect_offense(<<~RUBY)
+ datetime < #{time}
+ #{'^' * violation_string_length} Avoid time comparison, use `.past?` or `.future?` instead.
+ RUBY
+
+ expect_offense(<<~RUBY)
+ #{time} < datetime
+ #{'^' * violation_string_length} Avoid time comparison, use `.past?` or `.future?` instead.
+ RUBY
+ end
+ end
+
+ context 'when comparing with Time.now', :aggregate_failures do
+ let(:time) { 'Time.now' }
+
+ it_behaves_like 'using time comparison'
+ end
+
+ context 'when comparing with ::Time.now', :aggregate_failures do
+ let(:time) { '::Time.now' }
+
+ it_behaves_like 'using time comparison'
+ end
+
+ context 'when comparing with Time.zone.now', :aggregate_failures do
+ let(:time) { 'Time.zone.now' }
+
+ it_behaves_like 'using time comparison'
+ end
+
+ context 'when comparing with Time.current', :aggregate_failures do
+ let(:time) { 'Time.current' }
+
+ it_behaves_like 'using time comparison'
+ end
+
+ it 'does not flag assigning time methods to variables' do
+ expect_no_offenses(<<~RUBY)
+ datetime = Time.now
+ RUBY
+ end
+end
diff --git a/spec/rubocop/cop/scalability/file_uploads_spec.rb b/spec/rubocop/cop/scalability/file_uploads_spec.rb
index 43ac9457ed6..50049d76f0e 100644
--- a/spec/rubocop/cop/scalability/file_uploads_spec.rb
+++ b/spec/rubocop/cop/scalability/file_uploads_spec.rb
@@ -4,7 +4,7 @@ require 'rubocop_spec_helper'
require_relative '../../../../rubocop/cop/scalability/file_uploads'
RSpec.describe RuboCop::Cop::Scalability::FileUploads, feature_category: :scalability do
- let(:message) { 'Do not upload files without workhorse acceleration. Please refer to https://docs.gitlab.com/ee/development/uploads.html' }
+ let(:message) { 'Do not upload files without workhorse acceleration. Please refer to https://docs.gitlab.com/ee/development/uploads/' }
context 'with required params' do
it 'detects File in types array' do
diff --git a/spec/scripts/download_downstream_artifact_spec.rb b/spec/scripts/download_downstream_artifact_spec.rb
new file mode 100644
index 00000000000..05d1dc9933f
--- /dev/null
+++ b/spec/scripts/download_downstream_artifact_spec.rb
@@ -0,0 +1,180 @@
+# frozen_string_literal: true
+
+require 'fast_spec_helper'
+require 'gitlab/rspec/all'
+require_relative '../../scripts/download-downstream-artifact'
+
+# rubocop:disable RSpec/VerifiedDoubles -- doubles are simple mocks of a few methods from external code
+
+RSpec.describe DownloadDownstreamArtifact, feature_category: :tooling do
+ include StubENV
+
+ subject(:execute) { described_class.new(options).execute }
+
+ before do
+ stub_env('PROJECT_TOKEN_FOR_CI_SCRIPTS_API_USAGE', nil)
+ stub_env('CI_PROJECT_ID', nil)
+ stub_env('CI_PIPELINE_ID', nil)
+ stub_env('CI_API_V4_URL', nil)
+ stub_env('DOWNSTREAM_PROJECT', nil)
+ stub_env('DOWNSTREAM_JOB_NAME', nil)
+ stub_env('TRIGGER_JOB_NAME', nil)
+ stub_env('DOWNSTREAM_JOB_ARTIFACT_PATH', nil)
+ stub_env('OUTPUT_ARTIFACT_PATH', nil)
+ end
+
+ describe '#execute' do
+ let(:options) do
+ {
+ api_token: 'asdf1234',
+ endpoint: 'https://gitlab.com/api/v4',
+ upstream_project: 'upstream/project',
+ upstream_pipeline_id: 123,
+ downstream_project: 'downstream/project',
+ downstream_job_name: 'test-job',
+ trigger_job_name: 'trigger-job',
+ downstream_artifact_path: 'scores-DOWNSTREAM_JOB_ID.csv',
+ output_artifact_path: 'scores.csv'
+ }
+ end
+
+ let(:client) { double('Gitlab::Client') }
+ let(:artifact_response) { double('io', read: 'artifact content') }
+
+ let(:job) do
+ Struct.new(:id, :name, :web_url).new(789, 'test-job', 'https://example.com/jobs/789')
+ end
+
+ let(:downstream_pipeline) do
+ Struct.new(:id, :web_url).new(111, 'https://example.com/pipelines/111')
+ end
+
+ let(:pipeline_bridges) do
+ double('pipeline_bridges', auto_paginate: [double(name: 'trigger-job', downstream_pipeline: downstream_pipeline)])
+ end
+
+ let(:expected_output) do
+ <<~OUTPUT
+ Fetching scores artifact from downstream pipeline triggered via trigger-job...
+ Downstream pipeline is https://example.com/pipelines/111.
+ Downstream job "test-job": https://example.com/jobs/789.
+ Fetching artifact "scores-789.csv" from test-job...
+ Artifact saved as scores.csv ...
+ OUTPUT
+ end
+
+ before do
+ allow(Gitlab).to receive(:client)
+ .with(endpoint: options[:endpoint], private_token: options[:api_token])
+ .and_return(client)
+
+ allow(client).to receive(:pipeline_bridges).and_return(pipeline_bridges)
+ allow(client).to receive(:pipeline).and_return(downstream_pipeline)
+ allow(client).to receive(:pipeline_jobs).and_return([job])
+ allow(client).to receive(:download_job_artifact_file).and_return(artifact_response)
+ allow(File).to receive(:write)
+ end
+
+ it 'downloads artifact from downstream pipeline' do
+ expect(client).to receive(:download_job_artifact_file).with('downstream/project', 789, 'scores-789.csv')
+
+ expect { execute }.to output(expected_output).to_stdout
+ end
+
+ it 'saves artifact to output path' do
+ expect(File).to receive(:write).with('scores.csv', 'artifact content')
+
+ expect { execute }.to output(expected_output).to_stdout
+ end
+
+ context 'when options come from environment variables' do
+ before do
+ stub_env('PROJECT_TOKEN_FOR_CI_SCRIPTS_API_USAGE', 'asdf1234')
+ stub_env('CI_PROJECT_ID', 'upstream/project')
+ stub_env('CI_PIPELINE_ID', '123')
+ stub_env('CI_API_V4_URL', 'https://gitlab.com/api/v4')
+ stub_env('DOWNSTREAM_PROJECT', 'downstream/project')
+ stub_env('DOWNSTREAM_JOB_NAME', 'test-job')
+ stub_env('TRIGGER_JOB_NAME', 'trigger-job')
+ stub_env('DOWNSTREAM_JOB_ARTIFACT_PATH', 'scores-DOWNSTREAM_JOB_ID.csv')
+ stub_env('OUTPUT_ARTIFACT_PATH', 'scores.csv')
+
+ stub_const('API::DEFAULT_OPTIONS', {
+ project: ENV['CI_PROJECT_ID'],
+ pipeline_id: ENV['CI_PIPELINE_ID'],
+ api_token: ENV['PROJECT_TOKEN_FOR_CI_SCRIPTS_API_USAGE'],
+ endpoint: ENV['CI_API_V4_URL']
+ })
+ end
+
+ it 'uses the environment variable values' do
+ options = described_class.options_from_env
+
+ expect(File).to receive(:write)
+ expect { described_class.new(options).execute }.to output(expected_output).to_stdout
+ end
+ end
+
+ context 'when the downstream pipeline cannot be found' do
+ let(:pipeline_bridges) do
+ double('pipeline_bridges', auto_paginate: [double(name: 'trigger-job', downstream_pipeline: nil)])
+ end
+
+ it 'aborts' do
+ expect(File).not_to receive(:write)
+ expect { described_class.new(options).execute }
+ .to output(
+ %r{Could not find downstream pipeline triggered via trigger-job in project downstream/project}
+ ).to_stderr
+ .and raise_error(SystemExit)
+ end
+ end
+
+ context 'when the downstream job cannot be found' do
+ let(:job) { double('job', name: 'foo') }
+
+ it 'aborts' do
+ expect(File).not_to receive(:write)
+ expect { described_class.new(options).execute }
+ .to output(
+ %r{Could not find job with name 'test-job' in https://example.com/pipelines/111}
+ ).to_stderr
+ .and raise_error(SystemExit)
+ end
+ end
+
+ context 'when the downstream artifact cannot be found' do
+ let(:artifact_response) { 'error' }
+
+ it 'aborts' do
+ expect(File).not_to receive(:write)
+ expect { described_class.new(options).execute }
+ .to output(
+ %r{Could not download artifact. Request returned: error}
+ ).to_stderr
+ .and raise_error(SystemExit)
+ end
+ end
+ end
+
+ context 'when called without an API token' do
+ let(:options) do
+ {
+ endpoint: 'https://gitlab.com/api/v4',
+ upstream_project: 'upstream/project',
+ upstream_pipeline_id: 123,
+ downstream_project: 'downstream/project',
+ downstream_job_name: 'test-job',
+ trigger_job_name: 'trigger-job',
+ downstream_artifact_path: 'scores-DOWNSTREAM_JOB_ID.csv',
+ output_artifact_path: 'scores.csv'
+ }
+ end
+
+ it 'raises an error' do
+ expect { described_class.new(options) }.to raise_error(ArgumentError)
+ end
+ end
+end
+
+# rubocop:enable RSpec/VerifiedDoubles
diff --git a/spec/scripts/lib/glfm/update_example_snapshots_spec.rb b/spec/scripts/lib/glfm/update_example_snapshots_spec.rb
index 717a584f931..4897dbea09c 100644
--- a/spec/scripts/lib/glfm/update_example_snapshots_spec.rb
+++ b/spec/scripts/lib/glfm/update_example_snapshots_spec.rb
@@ -629,21 +629,21 @@ RSpec.describe Glfm::UpdateExampleSnapshots, '#process', feature_category: :team
canonical: |
<p><strong>bold</strong></p>
static: |-
- <p data-sourcepos="1:1-1:8" dir="auto"><strong>bold</strong></p>
+ <p data-sourcepos="1:1-1:8" dir="auto"><strong data-sourcepos="1:1-1:8">bold</strong></p>
wysiwyg: |-
<p dir="auto"><strong>bold</strong></p>
02_01_00__inlines__strong__002:
canonical: |
<p><strong>bold with more text</strong></p>
static: |-
- <p data-sourcepos="1:1-1:23" dir="auto"><strong>bold with more text</strong></p>
+ <p data-sourcepos="1:1-1:23" dir="auto"><strong data-sourcepos="1:1-1:23">bold with more text</strong></p>
wysiwyg: |-
<p dir="auto"><strong>bold with more text</strong></p>
02_03_00__inlines__strikethrough_extension__001:
canonical: |
<p><del>Hi</del> Hello, world!</p>
static: |-
- <p data-sourcepos="1:1-1:20" dir="auto"><del>Hi</del> Hello, world!</p>
+ <p data-sourcepos="1:1-1:20" dir="auto"><del data-sourcepos="1:1-1:6">Hi</del> Hello, world!</p>
wysiwyg: |-
<p dir="auto"><s>Hi</s> Hello, world!</p>
03_01_00__first_gitlab_specific_section_with_examples__strong_but_with_two_asterisks__001:
@@ -678,14 +678,14 @@ RSpec.describe Glfm::UpdateExampleSnapshots, '#process', feature_category: :team
canonical: |
<p><a href="groups-test-file">groups-test-file</a></p>
static: |-
- <p data-sourcepos="1:1-1:45" dir="auto"><a href="/groups/glfm_group/-/uploads/groups-test-file" data-canonical-src="/uploads/groups-test-file" data-link="true" class="gfm">groups-test-file</a></p>
+ <p data-sourcepos="1:1-1:45" dir="auto"><a data-sourcepos="1:1-1:45" href="/groups/glfm_group/-/uploads/groups-test-file" data-canonical-src="/uploads/groups-test-file" data-link="true" class="gfm">groups-test-file</a></p>
wysiwyg: |-
<p dir="auto"><a target="_blank" rel="noopener noreferrer nofollow" href="/uploads/groups-test-file">groups-test-file</a></p>
06_02_00__api_request_overrides__project_repo_link__001:
canonical: |
<p><a href="projects-test-file">projects-test-file</a></p>
static: |-
- <p data-sourcepos="1:1-1:40" dir="auto"><a href="/glfm_group/glfm_project/-/blob/master/projects-test-file" class="gfm">projects-test-file</a></p>
+ <p data-sourcepos="1:1-1:40" dir="auto"><a data-sourcepos="1:1-1:40" href="/glfm_group/glfm_project/-/blob/master/projects-test-file" class="gfm">projects-test-file</a></p>
wysiwyg: |-
<p dir="auto"><a target="_blank" rel="noopener noreferrer nofollow" href="projects-test-file">projects-test-file</a></p>
06_03_00__api_request_overrides__project_snippet_ref__001:
@@ -706,7 +706,7 @@ RSpec.describe Glfm::UpdateExampleSnapshots, '#process', feature_category: :team
canonical: |
<p><a href="project-wikis-test-file">project-wikis-test-file</a></p>
static: |-
- <p data-sourcepos="1:1-1:50" dir="auto"><a href="/glfm_group/glfm_project/-/wikis/project-wikis-test-file" data-canonical-src="project-wikis-test-file">project-wikis-test-file</a></p>
+ <p data-sourcepos="1:1-1:50" dir="auto"><a data-sourcepos="1:1-1:50" href="/glfm_group/glfm_project/-/wikis/project-wikis-test-file" data-canonical-src="project-wikis-test-file">project-wikis-test-file</a></p>
wysiwyg: |-
<p dir="auto"><a target="_blank" rel="noopener noreferrer nofollow" href="project-wikis-test-file">project-wikis-test-file</a></p>
YAML
diff --git a/spec/scripts/setup/generate_as_if_foss_env_spec.rb b/spec/scripts/setup/generate_as_if_foss_env_spec.rb
new file mode 100644
index 00000000000..e437ce24e79
--- /dev/null
+++ b/spec/scripts/setup/generate_as_if_foss_env_spec.rb
@@ -0,0 +1,152 @@
+# frozen_string_literal: true
+
+require 'fast_spec_helper'
+require 'gitlab/rspec/stub_env'
+
+# NOTE: Under the context of fast_spec_helper, when we `require 'gitlab'`
+# we do not load the Gitlab client, but our own Gitlab module.
+# Keep this in mind and just stub anything which might touch it!
+require_relative '../../../scripts/setup/generate-as-if-foss-env'
+
+RSpec.describe GenerateAsIfFossEnv, feature_category: :tooling do
+ include StubENV
+
+ subject(:generate) { described_class.new }
+
+ before do
+ stub_env(RUBY_VERSION: '3.1')
+ end
+
+ shared_context 'when there are all jobs' do
+ let(:jobs) do
+ [
+ 'rspec fast_spec_helper',
+ 'rspec unit pg14',
+ 'rspec integration pg14',
+ 'rspec system pg14',
+ 'rspec migration pg14',
+ 'rspec background-migration pg14',
+ 'rspec-all frontend_fixture',
+ 'build-assets-image',
+ 'build-qa-image',
+ 'compile-production-assets',
+ 'compile-storybook',
+ 'compile-test-assets',
+ 'eslint',
+ 'generate-apollo-graphql-schema',
+ 'graphql-schema-dump',
+ 'jest',
+ 'jest-integration',
+ 'qa:internal',
+ 'qa:selectors',
+ 'static-analysis'
+ ]
+ end
+
+ before do
+ messages = receive_message_chain(:client, :pipeline_jobs, :auto_paginate)
+
+ yield_jobs = jobs.inject(messages) do |stub, job|
+ stub.and_yield(double(name: job)) # rubocop:disable RSpec/VerifiedDoubles -- As explained at the top of this file, we do not load the Gitlab client
+ end
+
+ allow(Gitlab).to yield_jobs
+ end
+ end
+
+ describe '#variables' do
+ include_context 'when there are all jobs'
+
+ it 'returns correct variables' do
+ expect(generate.variables).to eq({
+ START_AS_IF_FOSS: 'true',
+ RUBY_VERSION: ENV['RUBY_VERSION'],
+ ENABLE_RSPEC: 'true',
+ ENABLE_RSPEC_FAST_SPEC_HELPER: 'true',
+ ENABLE_RSPEC_UNIT: 'true',
+ ENABLE_RSPEC_INTEGRATION: 'true',
+ ENABLE_RSPEC_SYSTEM: 'true',
+ ENABLE_RSPEC_MIGRATION: 'true',
+ ENABLE_RSPEC_BACKGROUND_MIGRATION: 'true',
+ ENABLE_RSPEC_FRONTEND_FIXTURE: 'true',
+ ENABLE_BUILD_ASSETS_IMAGE: 'true',
+ ENABLE_BUILD_QA_IMAGE: 'true',
+ ENABLE_COMPILE_PRODUCTION_ASSETS: 'true',
+ ENABLE_COMPILE_STORYBOOK: 'true',
+ ENABLE_COMPILE_TEST_ASSETS: 'true',
+ ENABLE_ESLINT: 'true',
+ ENABLE_GENERATE_APOLLO_GRAPHQL_SCHEMA: 'true',
+ ENABLE_GRAPHQL_SCHEMA_DUMP: 'true',
+ ENABLE_JEST: 'true',
+ ENABLE_JEST_INTEGRATION: 'true',
+ ENABLE_QA_INTERNAL: 'true',
+ ENABLE_QA_SELECTORS: 'true',
+ ENABLE_STATIC_ANALYSIS: 'true'
+ })
+ end
+ end
+
+ describe '#display' do
+ include_context 'when there are all jobs'
+
+ it 'puts correct variables' do
+ expect { generate.display }.to output(<<~ENV).to_stdout
+ START_AS_IF_FOSS=true
+ RUBY_VERSION=#{ENV['RUBY_VERSION']}
+ ENABLE_RSPEC=true
+ ENABLE_RSPEC_FAST_SPEC_HELPER=true
+ ENABLE_RSPEC_UNIT=true
+ ENABLE_RSPEC_INTEGRATION=true
+ ENABLE_RSPEC_SYSTEM=true
+ ENABLE_RSPEC_MIGRATION=true
+ ENABLE_RSPEC_BACKGROUND_MIGRATION=true
+ ENABLE_RSPEC_FRONTEND_FIXTURE=true
+ ENABLE_BUILD_ASSETS_IMAGE=true
+ ENABLE_BUILD_QA_IMAGE=true
+ ENABLE_COMPILE_PRODUCTION_ASSETS=true
+ ENABLE_COMPILE_STORYBOOK=true
+ ENABLE_COMPILE_TEST_ASSETS=true
+ ENABLE_ESLINT=true
+ ENABLE_GENERATE_APOLLO_GRAPHQL_SCHEMA=true
+ ENABLE_GRAPHQL_SCHEMA_DUMP=true
+ ENABLE_JEST=true
+ ENABLE_JEST_INTEGRATION=true
+ ENABLE_QA_INTERNAL=true
+ ENABLE_QA_SELECTORS=true
+ ENABLE_STATIC_ANALYSIS=true
+ ENV
+ end
+ end
+
+ describe '.gitlab/ci/rules.gitlab-ci.yml' do
+ include_context 'when there are all jobs'
+
+ let(:rules_yaml) do
+ File.read(File.expand_path('../../../.gitlab/ci/rules.gitlab-ci.yml', __dir__))
+ end
+
+ it 'uses all the ENABLE variables' do
+ generate.variables.each_key do |variable|
+ next unless variable.start_with?('ENABLE_')
+
+ expect(rules_yaml).to include("- if: '$#{variable} == \"true\"'")
+ end
+ end
+ end
+
+ describe '.gitlab/ci/as-if-foss.gitlab-ci.yml' do
+ include_context 'when there are all jobs'
+
+ let(:ci_yaml) do
+ File.read(File.expand_path('../../../.gitlab/ci/as-if-foss.gitlab-ci.yml', __dir__))
+ end
+
+ it 'uses all the ENABLE variables' do
+ generate.variables.each_key do |variable|
+ next unless variable.start_with?('ENABLE_')
+
+ expect(ci_yaml).to include("#{variable}: $#{variable}")
+ end
+ end
+ end
+end
diff --git a/spec/serializers/activity_pub/activity_serializer_spec.rb b/spec/serializers/activity_pub/activity_serializer_spec.rb
new file mode 100644
index 00000000000..93b52614490
--- /dev/null
+++ b/spec/serializers/activity_pub/activity_serializer_spec.rb
@@ -0,0 +1,138 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe ActivityPub::ActivitySerializer, feature_category: :integrations do
+ let(:implementer_class) do
+ Class.new(described_class)
+ end
+
+ let(:serializer) { implementer_class.new.represent(resource) }
+
+ let(:resource) { build_stubbed(:release) }
+
+ let(:transitive_entity_class) do
+ Class.new(Grape::Entity) do
+ expose :id do |*|
+ 'https://example.com/unique/url'
+ end
+
+ expose :type do |*|
+ 'Follow'
+ end
+
+ expose :actor do |*|
+ 'https://example.com/actor/alice'
+ end
+
+ expose :object do |*|
+ 'https://example.com/actor/bob'
+ end
+ end
+ end
+
+ let(:intransitive_entity_class) do
+ Class.new(Grape::Entity) do
+ expose :id do |*|
+ 'https://example.com/unique/url'
+ end
+
+ expose :type do |*|
+ 'Question'
+ end
+
+ expose :actor do |*|
+ 'https://example.com/actor/alice'
+ end
+
+ expose :content do |*|
+ "What's up?"
+ end
+ end
+ end
+
+ let(:entity_class) { transitive_entity_class }
+
+ shared_examples_for 'activity document' do
+ it 'belongs to the ActivityStreams namespace' do
+ expect(serializer['@context']).to eq 'https://www.w3.org/ns/activitystreams'
+ end
+
+ it 'has a unique identifier' do
+ expect(serializer).to have_key 'id'
+ end
+
+ it 'has a type' do
+ expect(serializer).to have_key 'type'
+ end
+
+ it 'has an actor' do
+ expect(serializer['actor']).to eq 'https://example.com/actor/alice'
+ end
+ end
+
+ before do
+ implementer_class.entity entity_class
+ end
+
+ context 'with a valid represented entity' do
+ it_behaves_like 'activity document'
+ end
+
+ context 'when the represented entity provides no identifier' do
+ before do
+ allow(entity_class).to receive(:represent).and_return({ type: 'Person', actor: 'http://something/' })
+ end
+
+ it 'raises an exception' do
+ expect { serializer }.to raise_error(ActivityPub::ActivitySerializer::MissingIdentifierError)
+ end
+ end
+
+ context 'when the represented entity provides no type' do
+ before do
+ allow(entity_class).to receive(:represent).and_return({
+ id: 'http://something/',
+ actor: 'http://something-else/'
+ })
+ end
+
+ it 'raises an exception' do
+ expect { serializer }.to raise_error(ActivityPub::ActivitySerializer::MissingTypeError)
+ end
+ end
+
+ context 'when the represented entity provides no actor' do
+ before do
+ allow(entity_class).to receive(:represent).and_return({ id: 'http://something/', type: 'Person' })
+ end
+
+ it 'raises an exception' do
+ expect { serializer }.to raise_error(ActivityPub::ActivitySerializer::MissingActorError)
+ end
+ end
+
+ context 'when the represented entity provides no object' do
+ let(:entity_class) { intransitive_entity_class }
+
+ context 'when the caller provides the :intransitive option' do
+ let(:serializer) { implementer_class.new.represent(resource, intransitive: true) }
+
+ it_behaves_like 'activity document'
+ end
+
+ context 'when the caller does not provide the :intransitive option' do
+ it 'raises an exception' do
+ expect { serializer }.to raise_error(ActivityPub::ActivitySerializer::MissingObjectError)
+ end
+ end
+ end
+
+ context 'when the caller does provide the :intransitive option and an object' do
+ let(:serializer) { implementer_class.new.represent(resource, intransitive: true) }
+
+ it 'raises an exception' do
+ expect { serializer }.to raise_error(ActivityPub::ActivitySerializer::IntransitiveWithObjectError)
+ end
+ end
+end
diff --git a/spec/serializers/activity_pub/activity_streams_serializer_spec.rb b/spec/serializers/activity_pub/activity_streams_serializer_spec.rb
deleted file mode 100644
index c74beba7a81..00000000000
--- a/spec/serializers/activity_pub/activity_streams_serializer_spec.rb
+++ /dev/null
@@ -1,157 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe ActivityPub::ActivityStreamsSerializer, feature_category: :integrations do
- let(:implementer_class) do
- Class.new(described_class) do
- include WithPagination
- end
- end
-
- let(:entity_class) do
- Class.new(Grape::Entity) do
- expose :id do |*|
- 'https://example.com/unique/url'
- end
-
- expose :type do |*|
- 'Person'
- end
-
- expose :name do |*|
- 'Alice'
- end
- end
- end
-
- shared_examples_for 'ActivityStreams document' do
- it 'belongs to the ActivityStreams namespace' do
- expect(subject['@context']).to eq 'https://www.w3.org/ns/activitystreams'
- end
-
- it 'has a unique identifier' do
- expect(subject).to have_key 'id'
- end
-
- it 'has a type' do
- expect(subject).to have_key 'type'
- end
- end
-
- before do
- implementer_class.entity entity_class
- end
-
- context 'when the serializer is not paginated' do
- let(:resource) { build_stubbed(:release) }
- let(:outbox_url) { 'https://example.com/unique/url/outbox' }
-
- context 'with a valid represented entity' do
- subject { implementer_class.new.represent(resource, outbox: outbox_url) }
-
- it_behaves_like 'ActivityStreams document'
-
- it 'exposes an outbox' do
- expect(subject['outbox']).to eq 'https://example.com/unique/url/outbox'
- end
-
- it 'includes serialized data' do
- expect(subject['name']).to eq 'Alice'
- end
- end
-
- context 'when the represented entity provides no identifier' do
- subject { implementer_class.new.represent(resource, outbox: outbox_url) }
-
- before do
- allow(entity_class).to receive(:represent).and_return({ type: 'Person' })
- end
-
- it 'raises an exception' do
- expect { subject }.to raise_error(ActivityPub::ActivityStreamsSerializer::MissingIdentifierError)
- end
- end
-
- context 'when the represented entity provides no type' do
- subject { implementer_class.new.represent(resource, outbox: outbox_url) }
-
- before do
- allow(entity_class).to receive(:represent).and_return({ id: 'https://example.com/unique/url' })
- end
-
- it 'raises an exception' do
- expect { subject }.to raise_error(ActivityPub::ActivityStreamsSerializer::MissingTypeError)
- end
- end
-
- context 'when the caller provides no outbox parameter' do
- subject { implementer_class.new.represent(resource) }
-
- it 'raises an exception' do
- expect { subject }.to raise_error(ActivityPub::ActivityStreamsSerializer::MissingOutboxError)
- end
- end
- end
-
- context 'when the serializer is paginated' do
- let(:resources) { build_stubbed_list(:release, 3) }
- let(:request) { ActionDispatch::Request.new(request_data) }
- let(:response) { ActionDispatch::Response.new }
- let(:url) { 'https://example.com/resource/url' }
- let(:decorated) { implementer_class.new.with_pagination(request, response) }
-
- before do
- allow(resources).to receive(:page).and_return(resources)
- allow(resources).to receive(:per).and_return(resources)
- allow(resources).to receive(:current_page).and_return(2)
- allow(resources).to receive(:total_pages).and_return(3)
- allow(resources).to receive(:total_count).and_return(10)
- allow(decorated.paginator).to receive(:paginate).and_return(resources)
- end
-
- context 'when no page parameter is provided' do
- subject { decorated.represent(resources) }
-
- let(:request_data) do
- { "rack.url_scheme" => "https", "HTTP_HOST" => "example.com", "PATH_INFO" => '/resource/url' }
- end
-
- it_behaves_like 'ActivityStreams document'
-
- it 'is an index document for the pagination' do
- expect(subject['type']).to eq 'OrderedCollection'
- end
-
- it 'contains the total amount of items' do
- expect(subject['totalItems']).to eq 10
- end
-
- it 'contains links to first and last page' do
- expect(subject['first']).to eq "#{url}?page=1"
- expect(subject['last']).to eq "#{url}?page=3"
- end
- end
-
- context 'when a page parameter is provided' do
- subject { decorated.represent(resources) }
-
- let(:request_data) do
- { 'rack.url_scheme' => 'https', 'HTTP_HOST' => 'example.com', 'PATH_INFO' => '/resource/url',
- 'QUERY_STRING' => 'page=2&per_page=1' }
- end
-
- it_behaves_like 'ActivityStreams document'
-
- it 'is a page document' do
- expect(subject['type']).to eq 'OrderedCollectionPage'
- end
-
- it 'contains navigation links' do
- expect(subject['prev']).to be_present
- expect(subject['next']).to be_present
- expect(subject['partOf']).to be_present
- end
- end
- end
-end
diff --git a/spec/serializers/activity_pub/publish_release_activity_serializer_spec.rb b/spec/serializers/activity_pub/publish_release_activity_serializer_spec.rb
new file mode 100644
index 00000000000..287b806bb35
--- /dev/null
+++ b/spec/serializers/activity_pub/publish_release_activity_serializer_spec.rb
@@ -0,0 +1,13 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe ActivityPub::PublishReleaseActivitySerializer, feature_category: :release_orchestration do
+ let(:release) { build_stubbed(:release) }
+
+ let(:serializer) { described_class.new.represent(release) }
+
+ it 'serializes the activity attributes' do
+ expect(serializer).to include(:id, :type, :actor, :object)
+ end
+end
diff --git a/spec/serializers/activity_pub/releases_actor_serializer_spec.rb b/spec/serializers/activity_pub/releases_actor_serializer_spec.rb
index bc754eabe5c..47a170a04f5 100644
--- a/spec/serializers/activity_pub/releases_actor_serializer_spec.rb
+++ b/spec/serializers/activity_pub/releases_actor_serializer_spec.rb
@@ -7,7 +7,7 @@ RSpec.describe ActivityPub::ReleasesActorSerializer, feature_category: :groups_a
let(:releases) { build_stubbed_list(:release, 3, project: project) }
context 'when there is a single object provided' do
- subject { described_class.new.represent(project, outbox: '/outbox') }
+ subject { described_class.new.represent(project, outbox: '/outbox', inbox: '/inbox') }
it 'serializes the actor attributes' do
expect(subject).to include(:id, :type, :preferredUsername, :name, :content, :context)
diff --git a/spec/serializers/admin/abuse_report_details_entity_spec.rb b/spec/serializers/admin/abuse_report_details_entity_spec.rb
index 67f6bdfee85..8b18c64ac88 100644
--- a/spec/serializers/admin/abuse_report_details_entity_spec.rb
+++ b/spec/serializers/admin/abuse_report_details_entity_spec.rb
@@ -5,7 +5,7 @@ require 'spec_helper'
RSpec.describe Admin::AbuseReportDetailsEntity, feature_category: :insider_threat do
include Gitlab::Routing
- let_it_be(:report) { build_stubbed(:abuse_report) }
+ let_it_be(:report) { create(:abuse_report) }
let_it_be(:user) { report.user }
let_it_be(:reporter) { report.reporter }
let_it_be(:past_report) { create_default(:abuse_report, :closed, user: user) }
@@ -29,7 +29,7 @@ RSpec.describe Admin::AbuseReportDetailsEntity, feature_category: :insider_threa
it 'correctly exposes `user`', :aggregate_failures do
user_hash = entity_hash[:user]
- expect(user_hash.keys).to match_array([
+ expect(user_hash.keys).to include(
:name,
:username,
:avatar_url,
@@ -38,7 +38,6 @@ RSpec.describe Admin::AbuseReportDetailsEntity, feature_category: :insider_threa
:last_activity_on,
:path,
:admin_path,
- :plan,
:verification_state,
:past_closed_reports,
:similar_open_reports,
@@ -47,7 +46,7 @@ RSpec.describe Admin::AbuseReportDetailsEntity, feature_category: :insider_threa
:snippets_count,
:groups_count,
:notes_count
- ])
+ )
expect(user_hash[:verification_state].keys).to match_array([
:email,
@@ -127,55 +126,5 @@ RSpec.describe Admin::AbuseReportDetailsEntity, feature_category: :insider_threa
:path
])
end
-
- describe 'users plan' do
- it 'does not include the plan' do
- expect(entity_hash[:user][:plan]).to be_nil
- end
-
- context 'when on .com', :saas, if: Gitlab.ee? do
- before do
- stub_ee_application_setting(should_check_namespace_plan: true)
- create(:namespace_with_plan, plan: :bronze_plan, owner: user) # rubocop:disable RSpec/FactoryBot/AvoidCreate
- end
-
- it 'includes the plan' do
- expect(entity_hash[:user][:plan]).to eq('Bronze')
- end
- end
- end
-
- describe 'users credit card' do
- let(:credit_card_hash) { entity_hash[:user][:credit_card] }
-
- context 'when the user has no verified credit card' do
- it 'does not expose the credit card' do
- expect(credit_card_hash).to be_nil
- end
- end
-
- context 'when the user does have a verified credit card' do
- let!(:credit_card) { build_stubbed(:credit_card_validation, user: user) }
-
- it 'exposes the credit card' do
- expect(credit_card_hash.keys).to match_array([
- :similar_records_count,
- :card_matches_link
- ])
- end
-
- context 'when not on ee', unless: Gitlab.ee? do
- it 'does not include the path to the admin card matches page' do
- expect(credit_card_hash[:card_matches_link]).to be_nil
- end
- end
-
- context 'when on ee', if: Gitlab.ee? do
- it 'includes the path to the admin card matches page' do
- expect(credit_card_hash[:card_matches_link]).not_to be_nil
- end
- end
- end
- end
end
end
diff --git a/spec/serializers/ci/group_variable_entity_spec.rb b/spec/serializers/ci/group_variable_entity_spec.rb
index 42c4e940421..c4f8543408f 100644
--- a/spec/serializers/ci/group_variable_entity_spec.rb
+++ b/spec/serializers/ci/group_variable_entity_spec.rb
@@ -11,7 +11,7 @@ RSpec.describe Ci::GroupVariableEntity do
it 'contains required fields' do
expect(subject.keys).to contain_exactly(
- :id, :key, :value, :protected, :variable_type, :environment_scope, :raw, :masked
+ :id, :key, :description, :value, :protected, :variable_type, :environment_scope, :raw, :masked
)
end
end
diff --git a/spec/serializers/ci/variable_entity_spec.rb b/spec/serializers/ci/variable_entity_spec.rb
index 96111604028..184fc100a03 100644
--- a/spec/serializers/ci/variable_entity_spec.rb
+++ b/spec/serializers/ci/variable_entity_spec.rb
@@ -11,7 +11,7 @@ RSpec.describe Ci::VariableEntity do
it 'contains required fields' do
expect(subject.keys).to contain_exactly(
- :id, :key, :value, :protected, :environment_scope, :variable_type, :raw, :masked
+ :id, :key, :description, :value, :protected, :environment_scope, :variable_type, :raw, :masked
)
end
end
diff --git a/spec/serializers/integrations/field_entity_spec.rb b/spec/serializers/integrations/field_entity_spec.rb
index aa503bdfcc8..273128e0bf1 100644
--- a/spec/serializers/integrations/field_entity_spec.rb
+++ b/spec/serializers/integrations/field_entity_spec.rb
@@ -123,7 +123,7 @@ RSpec.describe Integrations::FieldEntity, feature_category: :integrations do
name: 'webhook',
title: nil,
placeholder: nil,
- help: 'http://mattermost.example.com/hooks/',
+ help: 'http://mattermost.example.com/hooks/...',
required: true,
choices: nil,
value: '************',
diff --git a/spec/serializers/profile/event_entity_spec.rb b/spec/serializers/profile/event_entity_spec.rb
index 3dade4210b3..2c880500e05 100644
--- a/spec/serializers/profile/event_entity_spec.rb
+++ b/spec/serializers/profile/event_entity_spec.rb
@@ -29,7 +29,7 @@ RSpec.describe Profile::EventEntity, feature_category: :user_profile do
end
context 'for push events' do
- let_it_be(:commit_from) { Gitlab::Git::BLANK_SHA }
+ let_it_be(:commit_from) { Gitlab::Git::SHA1_BLANK_SHA }
let_it_be(:commit_title) { 'My commit' }
let(:event) { build(:push_event, project: project, author: target_user) }
diff --git a/spec/services/bulk_imports/file_download_service_spec.rb b/spec/services/bulk_imports/file_download_service_spec.rb
index 0c3eef69fa5..1a178ce5d60 100644
--- a/spec/services/bulk_imports/file_download_service_spec.rb
+++ b/spec/services/bulk_imports/file_download_service_spec.rb
@@ -12,11 +12,9 @@ RSpec.describe BulkImports::FileDownloadService, feature_category: :importers do
let_it_be(:filename) { 'file_download_service_spec' }
let_it_be(:tmpdir) { Dir.mktmpdir }
let_it_be(:filepath) { File.join(tmpdir, filename) }
- let_it_be(:content_length) { 1000 }
let(:headers) do
{
- 'content-length' => content_length,
'content-type' => content_type,
'content-disposition' => content_disposition
}
@@ -102,51 +100,27 @@ RSpec.describe BulkImports::FileDownloadService, feature_category: :importers do
end
end
- context 'when content-length is not valid' do
- context 'when content-length exceeds limit' do
+ context 'when file size is not valid' do
+ context 'when size exceeds limit' do
let(:file_size_limit) { 1 }
it 'raises an error' do
expect { subject.execute }.to raise_error(
described_class::ServiceError,
- 'File size 1000 B exceeds limit of 1 B'
- )
- end
- end
-
- context 'when content-length is missing' do
- let(:content_length) { nil }
-
- it 'raises an error' do
- expect { subject.execute }.to raise_error(
- described_class::ServiceError,
- 'Missing content-length header'
+ 'File size 100 B exceeds limit of 1 B'
)
end
end
end
- context 'when content-length is equals the file size limit' do
- let(:content_length) { 150 }
- let(:file_size_limit) { 150 }
+ context 'when size is equals the file size limit' do
+ let(:file_size_limit) { 100 }
it 'does not raise an error' do
expect { subject.execute }.not_to raise_error
end
end
- context 'when partially downloaded file exceeds limit' do
- let(:content_length) { 151 }
- let(:file_size_limit) { 150 }
-
- it 'raises an error' do
- expect { subject.execute }.to raise_error(
- described_class::ServiceError,
- 'File size 151 B exceeds limit of 150 B'
- )
- end
- end
-
context 'when chunk code is not 200' do
let(:chunk_code) { 404 }
@@ -203,25 +177,23 @@ RSpec.describe BulkImports::FileDownloadService, feature_category: :importers do
context 'on redirect chunk' do
let(:chunk_code) { 303 }
- it 'does not run content type & length validations' do
+ it 'does not run content type & validation' do
expect(service).not_to receive(:validate_content_type)
- expect(service).not_to receive(:validate_content_length)
service.execute
end
end
context 'when there is one data chunk' do
- it 'validates content type & length' do
+ it 'validates content type' do
expect(service).to receive(:validate_content_type)
- expect(service).to receive(:validate_content_length)
service.execute
end
end
context 'when there are multiple data chunks' do
- it 'validates content type & length only once' do
+ it 'validates content type only once' do
data_chunk = double(
'data chunk',
size: 1000,
@@ -237,7 +209,6 @@ RSpec.describe BulkImports::FileDownloadService, feature_category: :importers do
end
expect(service).to receive(:validate_content_type).once
- expect(service).to receive(:validate_content_length).once
service.execute
end
diff --git a/spec/services/ci/abort_pipelines_service_spec.rb b/spec/services/ci/abort_pipelines_service_spec.rb
index 60f3ee11442..af6a70989c9 100644
--- a/spec/services/ci/abort_pipelines_service_spec.rb
+++ b/spec/services/ci/abort_pipelines_service_spec.rb
@@ -70,12 +70,12 @@ RSpec.describe Ci::AbortPipelinesService, feature_category: :continuous_integrat
end
it 'avoids N+1 queries' do
- control_count = ActiveRecord::QueryRecorder.new { abort_project_pipelines }.count
+ control = ActiveRecord::QueryRecorder.new { abort_project_pipelines }
pipelines = create_list(:ci_pipeline, 5, :running, project: project)
create_list(:ci_build, 5, :running, pipeline: pipelines.first)
- expect { abort_project_pipelines }.not_to exceed_query_limit(control_count)
+ expect { abort_project_pipelines }.not_to exceed_query_limit(control)
end
context 'with live build logs' do
diff --git a/spec/services/ci/cancel_pipeline_service_spec.rb b/spec/services/ci/cancel_pipeline_service_spec.rb
index 256d2db1ed2..6051485c4df 100644
--- a/spec/services/ci/cancel_pipeline_service_spec.rb
+++ b/spec/services/ci/cancel_pipeline_service_spec.rb
@@ -13,12 +13,14 @@ RSpec.describe Ci::CancelPipelineService, :aggregate_failures, feature_category:
current_user: current_user,
cascade_to_children: cascade_to_children,
auto_canceled_by_pipeline: auto_canceled_by_pipeline,
- execute_async: execute_async)
+ execute_async: execute_async,
+ safe_cancellation: safe_cancellation)
end
let(:cascade_to_children) { true }
let(:auto_canceled_by_pipeline) { nil }
let(:execute_async) { true }
+ let(:safe_cancellation) { false }
shared_examples 'force_execute' do
context 'when pipeline is not cancelable' do
@@ -30,9 +32,14 @@ RSpec.describe Ci::CancelPipelineService, :aggregate_failures, feature_category:
context 'when pipeline is cancelable' do
before do
- create(:ci_build, :running, pipeline: pipeline)
- create(:ci_build, :created, pipeline: pipeline)
- create(:ci_build, :success, pipeline: pipeline)
+ create(:ci_build, :running, pipeline: pipeline, name: 'build1')
+ create(:ci_build, :created, pipeline: pipeline, name: 'build2')
+ create(:ci_build, :success, pipeline: pipeline, name: 'build3')
+ create(:ci_build, :pending, :interruptible, pipeline: pipeline, name: 'build4')
+
+ create(:ci_bridge, :running, pipeline: pipeline, name: 'bridge1')
+ create(:ci_bridge, :running, :interruptible, pipeline: pipeline, name: 'bridge2')
+ create(:ci_bridge, :success, :interruptible, pipeline: pipeline, name: 'bridge3')
end
it 'logs the event' do
@@ -55,7 +62,15 @@ RSpec.describe Ci::CancelPipelineService, :aggregate_failures, feature_category:
it 'cancels all cancelable jobs' do
expect(response).to be_success
- expect(pipeline.all_jobs.pluck(:status)).to match_array(%w[canceled canceled success])
+ expect(pipeline.all_jobs.pluck(:name, :status)).to match_array([
+ %w[build1 canceled],
+ %w[build2 canceled],
+ %w[build3 success],
+ %w[build4 canceled],
+ %w[bridge1 canceled],
+ %w[bridge2 canceled],
+ %w[bridge3 success]
+ ])
end
context 'when auto_canceled_by_pipeline is provided' do
@@ -74,6 +89,28 @@ RSpec.describe Ci::CancelPipelineService, :aggregate_failures, feature_category:
end
end
+ context 'when cascade_to_children: false and safe_cancellation: true' do
+ # We are testing the `safe_cancellation: true`` case with only `cascade_to_children: false`
+ # because `safe_cancellation` is passed as `true` only when `cascade_to_children` is `false`
+ # from `CancelRedundantPipelinesService`.
+
+ let(:cascade_to_children) { false }
+ let(:safe_cancellation) { true }
+
+ it 'cancels only interruptible jobs' do
+ expect(response).to be_success
+ expect(pipeline.all_jobs.pluck(:name, :status)).to match_array([
+ %w[build1 running],
+ %w[build2 created],
+ %w[build3 success],
+ %w[build4 canceled],
+ %w[bridge1 running],
+ %w[bridge2 canceled],
+ %w[bridge3 success]
+ ])
+ end
+ end
+
context 'when pipeline has child pipelines' do
let(:child_pipeline) { create(:ci_pipeline, child_of: pipeline) }
let!(:child_job) { create(:ci_build, :running, pipeline: child_pipeline) }
@@ -81,8 +118,8 @@ RSpec.describe Ci::CancelPipelineService, :aggregate_failures, feature_category:
let!(:grandchild_job) { create(:ci_build, :running, pipeline: grandchild_pipeline) }
before do
- child_pipeline.source_bridge.update!(status: :running)
- grandchild_pipeline.source_bridge.update!(status: :running)
+ child_pipeline.source_bridge.update!(name: 'child_pipeline_bridge', status: :running)
+ grandchild_pipeline.source_bridge.update!(name: 'grandchild_pipeline_bridge', status: :running)
end
context 'when execute_async: false' do
@@ -91,8 +128,15 @@ RSpec.describe Ci::CancelPipelineService, :aggregate_failures, feature_category:
it 'cancels the bridge jobs and child jobs' do
expect(response).to be_success
- expect(pipeline.bridges.pluck(:status)).to be_all('canceled')
- expect(child_pipeline.bridges.pluck(:status)).to be_all('canceled')
+ expect(pipeline.bridges.pluck(:name, :status)).to match_array([
+ %w[bridge1 canceled],
+ %w[bridge2 canceled],
+ %w[bridge3 success],
+ %w[child_pipeline_bridge canceled]
+ ])
+ expect(child_pipeline.bridges.pluck(:name, :status)).to match_array([
+ %w[grandchild_pipeline_bridge canceled]
+ ])
expect(child_job.reload).to be_canceled
expect(grandchild_job.reload).to be_canceled
end
@@ -110,7 +154,12 @@ RSpec.describe Ci::CancelPipelineService, :aggregate_failures, feature_category:
expect(response).to be_success
- expect(pipeline.bridges.pluck(:status)).to be_all('canceled')
+ expect(pipeline.bridges.pluck(:name, :status)).to match_array([
+ %w[bridge1 canceled],
+ %w[bridge2 canceled],
+ %w[bridge3 success],
+ %w[child_pipeline_bridge canceled]
+ ])
end
end
@@ -124,7 +173,12 @@ RSpec.describe Ci::CancelPipelineService, :aggregate_failures, feature_category:
expect(response).to be_success
- expect(pipeline.bridges.pluck(:status)).to be_all('canceled')
+ expect(pipeline.bridges.pluck(:name, :status)).to match_array([
+ %w[bridge1 canceled],
+ %w[bridge2 canceled],
+ %w[bridge3 success],
+ %w[child_pipeline_bridge canceled]
+ ])
expect(child_job.reload).to be_running
end
end
diff --git a/spec/services/ci/catalog/resources/create_service_spec.rb b/spec/services/ci/catalog/resources/create_service_spec.rb
index 202c76acaec..5839b9ac2fe 100644
--- a/spec/services/ci/catalog/resources/create_service_spec.rb
+++ b/spec/services/ci/catalog/resources/create_service_spec.rb
@@ -8,10 +8,6 @@ RSpec.describe Ci::Catalog::Resources::CreateService, feature_category: :pipelin
let(:service) { described_class.new(project, user) }
- before do
- stub_licensed_features(ci_namespace_catalog: true)
- end
-
describe '#execute' do
context 'with an unauthorized user' do
it 'raises an AccessDeniedError' do
diff --git a/spec/services/ci/catalog/resources/destroy_service_spec.rb b/spec/services/ci/catalog/resources/destroy_service_spec.rb
index da5ba7ad0bc..4783506416d 100644
--- a/spec/services/ci/catalog/resources/destroy_service_spec.rb
+++ b/spec/services/ci/catalog/resources/destroy_service_spec.rb
@@ -9,10 +9,6 @@ RSpec.describe Ci::Catalog::Resources::DestroyService, feature_category: :pipeli
let(:service) { described_class.new(project, user) }
- before do
- stub_licensed_features(ci_namespace_catalog: true)
- end
-
describe '#execute' do
context 'with an unauthorized user' do
it 'raises an AccessDeniedError' do
diff --git a/spec/services/ci/catalog/resources/versions/create_service_spec.rb b/spec/services/ci/catalog/resources/versions/create_service_spec.rb
index e614a74a4a1..b57525fc8e1 100644
--- a/spec/services/ci/catalog/resources/versions/create_service_spec.rb
+++ b/spec/services/ci/catalog/resources/versions/create_service_spec.rb
@@ -115,6 +115,7 @@ RSpec.describe Ci::Catalog::Resources::Versions::CreateService, feature_category
expect(response).to be_success
version = Ci::Catalog::Resources::Version.last
+ base_path = "#{Settings.gitlab.host}/#{project.full_path}"
expect(project.ci_components.count).to eq(4)
expect(project.ci_components.first.name).to eq('blank-yaml')
@@ -122,25 +123,25 @@ RSpec.describe Ci::Catalog::Resources::Versions::CreateService, feature_category
expect(project.ci_components.first.inputs).to eq({})
expect(project.ci_components.first.catalog_resource).to eq(version.catalog_resource)
expect(project.ci_components.first.version).to eq(version)
- expect(project.ci_components.first.path).to eq('templates/blank-yaml.yml')
+ expect(project.ci_components.first.path).to eq("#{base_path}/blank-yaml@#{version.name}")
expect(project.ci_components.second.name).to eq('dast')
expect(project.ci_components.second.project).to eq(version.project)
expect(project.ci_components.second.inputs).to eq({})
expect(project.ci_components.second.catalog_resource).to eq(version.catalog_resource)
expect(project.ci_components.second.version).to eq(version)
- expect(project.ci_components.second.path).to eq('templates/dast/template.yml')
+ expect(project.ci_components.second.path).to eq("#{base_path}/dast@#{version.name}")
expect(project.ci_components.third.name).to eq('secret-detection')
expect(project.ci_components.third.project).to eq(version.project)
expect(project.ci_components.third.inputs).to eq({ "website" => nil })
expect(project.ci_components.third.catalog_resource).to eq(version.catalog_resource)
expect(project.ci_components.third.version).to eq(version)
- expect(project.ci_components.third.path).to eq('templates/secret-detection.yml')
+ expect(project.ci_components.third.path).to eq("#{base_path}/secret-detection@#{version.name}")
expect(project.ci_components.fourth.name).to eq('template')
expect(project.ci_components.fourth.project).to eq(version.project)
expect(project.ci_components.fourth.inputs).to eq({ "environment" => nil })
expect(project.ci_components.fourth.catalog_resource).to eq(version.catalog_resource)
expect(project.ci_components.fourth.version).to eq(version)
- expect(project.ci_components.fourth.path).to eq('templates/template.yml')
+ expect(project.ci_components.fourth.path).to eq("#{base_path}/template@#{version.name}")
end
end
end
diff --git a/spec/services/ci/create_pipeline_service/partitioning_spec.rb b/spec/services/ci/create_pipeline_service/partitioning_spec.rb
index 70c4eb49698..574bc05827a 100644
--- a/spec/services/ci/create_pipeline_service/partitioning_spec.rb
+++ b/spec/services/ci/create_pipeline_service/partitioning_spec.rb
@@ -37,7 +37,7 @@ RSpec.describe Ci::CreatePipelineService, :yaml_processor_feature_flag_corectnes
end
let(:pipeline) { service.execute(:push).payload }
- let(:current_partition_id) { ci_testing_partition_id }
+ let(:current_partition_id) { ci_testing_partition_id_for_check_constraints }
before do
stub_ci_pipeline_yaml_file(config)
diff --git a/spec/services/ci/create_pipeline_service/workflow_auto_cancel_spec.rb b/spec/services/ci/create_pipeline_service/workflow_auto_cancel_spec.rb
index 851c6f8fbea..7e8a3ef3d7b 100644
--- a/spec/services/ci/create_pipeline_service/workflow_auto_cancel_spec.rb
+++ b/spec/services/ci/create_pipeline_service/workflow_auto_cancel_spec.rb
@@ -57,7 +57,49 @@ RSpec.describe Ci::CreatePipelineService, :yaml_processor_feature_flag_corectnes
it 'creates a pipeline with errors' do
expect(pipeline).to be_persisted
expect(pipeline.errors.full_messages).to include(
- 'workflow:auto_cancel on new commit must be one of: conservative, interruptible, disabled')
+ 'workflow:auto_cancel on new commit must be one of: conservative, interruptible, none')
+ end
+ end
+
+ context 'when using with workflow:rules' do
+ let(:config) do
+ <<~YAML
+ workflow:
+ auto_cancel:
+ on_new_commit: interruptible
+ rules:
+ - if: $VAR123 == "valid value"
+ auto_cancel:
+ on_new_commit: none
+ - when: always
+
+ test1:
+ script: exit 0
+ YAML
+ end
+
+ before do
+ stub_ci_pipeline_yaml_file(config)
+ end
+
+ context 'when the rule matches' do
+ before do
+ create(:ci_variable, project: project, key: 'VAR123', value: 'valid value')
+ end
+
+ it 'creates a pipeline with on_new_commit' do
+ expect(pipeline).to be_persisted
+ expect(pipeline.errors).to be_empty
+ expect(pipeline.pipeline_metadata.auto_cancel_on_new_commit).to eq('none')
+ end
+ end
+
+ context 'when the rule does not match' do
+ it 'creates a pipeline with on_new_commit' do
+ expect(pipeline).to be_persisted
+ expect(pipeline.errors).to be_empty
+ expect(pipeline.pipeline_metadata.auto_cancel_on_new_commit).to eq('interruptible')
+ end
end
end
end
@@ -165,5 +207,47 @@ RSpec.describe Ci::CreatePipelineService, :yaml_processor_feature_flag_corectnes
'workflow:auto_cancel on job failure must be one of: none, all')
end
end
+
+ context 'when using with workflow:rules' do
+ let(:config) do
+ <<~YAML
+ workflow:
+ auto_cancel:
+ on_job_failure: none
+ rules:
+ - if: $VAR123 == "valid value"
+ auto_cancel:
+ on_job_failure: all
+ - when: always
+
+ test1:
+ script: exit 0
+ YAML
+ end
+
+ before do
+ stub_ci_pipeline_yaml_file(config)
+ end
+
+ context 'when the rule matches' do
+ before do
+ create(:ci_variable, project: project, key: 'VAR123', value: 'valid value')
+ end
+
+ it 'creates a pipeline with on_job_failure' do
+ expect(pipeline).to be_persisted
+ expect(pipeline.errors).to be_empty
+ expect(pipeline.pipeline_metadata.auto_cancel_on_job_failure).to eq('all')
+ end
+ end
+
+ context 'when the rule does not match' do
+ it 'creates a pipeline with on_job_failure' do
+ expect(pipeline).to be_persisted
+ expect(pipeline.errors).to be_empty
+ expect(pipeline.pipeline_metadata.auto_cancel_on_job_failure).to eq('none')
+ end
+ end
+ end
end
end
diff --git a/spec/services/ci/expire_pipeline_cache_service_spec.rb b/spec/services/ci/expire_pipeline_cache_service_spec.rb
index 3d0ce456aa5..a74b820de09 100644
--- a/spec/services/ci/expire_pipeline_cache_service_spec.rb
+++ b/spec/services/ci/expire_pipeline_cache_service_spec.rb
@@ -106,7 +106,7 @@ RSpec.describe Ci::ExpirePipelineCacheService, feature_category: :continuous_int
create(:ci_sources_pipeline, pipeline: pipeline)
create(:ci_sources_pipeline, source_job: create(:ci_build, pipeline: pipeline))
- expect { subject.execute(pipeline) }.not_to exceed_query_limit(control.count)
+ expect { subject.execute(pipeline) }.not_to exceed_query_limit(control)
end
end
diff --git a/spec/services/ci/job_artifacts/destroy_all_expired_service_spec.rb b/spec/services/ci/job_artifacts/destroy_all_expired_service_spec.rb
index c060c72ffb2..bdb4ed182dc 100644
--- a/spec/services/ci/job_artifacts/destroy_all_expired_service_spec.rb
+++ b/spec/services/ci/job_artifacts/destroy_all_expired_service_spec.rb
@@ -44,7 +44,7 @@ RSpec.describe Ci::JobArtifacts::DestroyAllExpiredService, :clean_gitlab_redis_s
more_artifacts
- expect { subject }.not_to exceed_query_limit(control.count)
+ expect { subject }.not_to exceed_query_limit(control)
end
end
diff --git a/spec/services/ci/pipeline_creation/cancel_redundant_pipelines_service_spec.rb b/spec/services/ci/pipeline_creation/cancel_redundant_pipelines_service_spec.rb
index 0d83187f9e4..7b5eef92f53 100644
--- a/spec/services/ci/pipeline_creation/cancel_redundant_pipelines_service_spec.rb
+++ b/spec/services/ci/pipeline_creation/cancel_redundant_pipelines_service_spec.rb
@@ -53,7 +53,7 @@ RSpec.describe Ci::PipelineCreation::CancelRedundantPipelinesService, feature_ca
project.update!(auto_cancel_pending_pipelines: 'enabled')
end
- it 'cancels only previous interruptible builds' do
+ it 'cancels only previous non started builds' do
execute
expect(build_statuses(prev_pipeline)).to contain_exactly('canceled', 'success', 'canceled')
@@ -153,6 +153,36 @@ RSpec.describe Ci::PipelineCreation::CancelRedundantPipelinesService, feature_ca
expect(build_statuses(child_pipeline)).to contain_exactly('running', 'success')
end
+
+ context 'when the child pipeline auto_cancel_on_new_commit is `interruptible`' do
+ before do
+ child_pipeline.create_pipeline_metadata!(
+ project: child_pipeline.project, auto_cancel_on_new_commit: 'interruptible'
+ )
+ end
+
+ it 'cancels interruptible child pipeline builds' do
+ expect(build_statuses(child_pipeline)).to contain_exactly('running', 'success')
+
+ execute
+
+ expect(build_statuses(child_pipeline)).to contain_exactly('canceled', 'success')
+ end
+
+ context 'when the FF ci_workflow_auto_cancel_on_new_commit is disabled' do
+ before do
+ stub_feature_flags(ci_workflow_auto_cancel_on_new_commit: false)
+ end
+
+ it 'does not cancel any child pipeline builds' do
+ expect(build_statuses(child_pipeline)).to contain_exactly('running', 'success')
+
+ execute
+
+ expect(build_statuses(child_pipeline)).to contain_exactly('running', 'success')
+ end
+ end
+ end
end
context 'when the child pipeline has non-interruptible non-started job' do
@@ -227,6 +257,37 @@ RSpec.describe Ci::PipelineCreation::CancelRedundantPipelinesService, feature_ca
end
end
+ context 'when there are non-interruptible completed jobs in the pipeline' do
+ before do
+ create(:ci_build, :failed, pipeline: prev_pipeline)
+ create(:ci_build, :success, pipeline: prev_pipeline)
+ end
+
+ it 'does not cancel any job' do
+ execute
+
+ expect(job_statuses(prev_pipeline)).to contain_exactly(
+ 'running', 'success', 'created', 'failed', 'success'
+ )
+ expect(job_statuses(pipeline)).to contain_exactly('pending')
+ end
+
+ context 'when the FF ci_workflow_auto_cancel_on_new_commit is disabled' do
+ before do
+ stub_feature_flags(ci_workflow_auto_cancel_on_new_commit: false)
+ end
+
+ it 'does not cancel any job' do
+ execute
+
+ expect(job_statuses(prev_pipeline)).to contain_exactly(
+ 'running', 'success', 'created', 'failed', 'success'
+ )
+ expect(job_statuses(pipeline)).to contain_exactly('pending')
+ end
+ end
+ end
+
context 'when there are trigger jobs' do
before do
create(:ci_bridge, :created, pipeline: prev_pipeline)
@@ -246,6 +307,152 @@ RSpec.describe Ci::PipelineCreation::CancelRedundantPipelinesService, feature_ca
end
end
+ context 'when auto_cancel_on_new_commit is `interruptible`' do
+ before do
+ prev_pipeline.create_pipeline_metadata!(
+ project: prev_pipeline.project, auto_cancel_on_new_commit: 'interruptible'
+ )
+ end
+
+ it 'cancels only interruptible jobs' do
+ execute
+
+ expect(job_statuses(prev_pipeline)).to contain_exactly('canceled', 'success', 'created')
+ expect(job_statuses(pipeline)).to contain_exactly('pending')
+ end
+
+ context 'when the FF ci_workflow_auto_cancel_on_new_commit is disabled' do
+ before do
+ stub_feature_flags(ci_workflow_auto_cancel_on_new_commit: false)
+ end
+
+ it 'cancels non started builds' do
+ execute
+
+ expect(build_statuses(prev_pipeline)).to contain_exactly('canceled', 'success', 'canceled')
+ expect(build_statuses(pipeline)).to contain_exactly('pending')
+ end
+ end
+
+ context 'when there are non-interruptible completed jobs in the pipeline' do
+ before do
+ create(:ci_build, :failed, pipeline: prev_pipeline)
+ create(:ci_build, :success, pipeline: prev_pipeline)
+ end
+
+ it 'still cancels only interruptible jobs' do
+ execute
+
+ expect(job_statuses(prev_pipeline)).to contain_exactly(
+ 'canceled', 'success', 'created', 'failed', 'success'
+ )
+ expect(job_statuses(pipeline)).to contain_exactly('pending')
+ end
+
+ context 'when the FF ci_workflow_auto_cancel_on_new_commit is disabled' do
+ before do
+ stub_feature_flags(ci_workflow_auto_cancel_on_new_commit: false)
+ end
+
+ it 'does not cancel any job' do
+ execute
+
+ expect(build_statuses(prev_pipeline)).to contain_exactly(
+ 'created', 'success', 'running', 'failed', 'success'
+ )
+ expect(build_statuses(pipeline)).to contain_exactly('pending')
+ end
+ end
+ end
+ end
+
+ context 'when auto_cancel_on_new_commit is `none`' do
+ before do
+ prev_pipeline.create_pipeline_metadata!(
+ project: prev_pipeline.project, auto_cancel_on_new_commit: 'none'
+ )
+ end
+
+ it 'does not cancel any job' do
+ execute
+
+ expect(job_statuses(prev_pipeline)).to contain_exactly('running', 'success', 'created')
+ expect(job_statuses(pipeline)).to contain_exactly('pending')
+ end
+ end
+
+ context 'when auto_cancel_on_new_commit is `conservative`' do
+ before do
+ prev_pipeline.create_pipeline_metadata!(
+ project: prev_pipeline.project, auto_cancel_on_new_commit: 'conservative'
+ )
+ end
+
+ it 'cancels only previous non started builds' do
+ execute
+
+ expect(build_statuses(prev_pipeline)).to contain_exactly('canceled', 'success', 'canceled')
+ expect(build_statuses(pipeline)).to contain_exactly('pending')
+ end
+
+ context 'when the FF ci_workflow_auto_cancel_on_new_commit is disabled' do
+ before do
+ stub_feature_flags(ci_workflow_auto_cancel_on_new_commit: false)
+ end
+
+ it 'cancels only previous non started builds' do
+ execute
+
+ expect(build_statuses(prev_pipeline)).to contain_exactly('canceled', 'success', 'canceled')
+ expect(build_statuses(pipeline)).to contain_exactly('pending')
+ end
+ end
+
+ context 'when there are non-interruptible completed jobs in the pipeline' do
+ before do
+ create(:ci_build, :failed, pipeline: prev_pipeline)
+ create(:ci_build, :success, pipeline: prev_pipeline)
+ end
+
+ it 'does not cancel any job' do
+ execute
+
+ expect(job_statuses(prev_pipeline)).to contain_exactly(
+ 'running', 'success', 'created', 'failed', 'success'
+ )
+ expect(job_statuses(pipeline)).to contain_exactly('pending')
+ end
+
+ context 'when the FF ci_workflow_auto_cancel_on_new_commit is disabled' do
+ before do
+ stub_feature_flags(ci_workflow_auto_cancel_on_new_commit: false)
+ end
+
+ it 'does not cancel any job' do
+ execute
+
+ expect(job_statuses(prev_pipeline)).to contain_exactly(
+ 'running', 'success', 'created', 'failed', 'success'
+ )
+ expect(job_statuses(pipeline)).to contain_exactly('pending')
+ end
+ end
+ end
+ end
+
+ context 'when auto_cancel_on_new_commit is an invalid value' do
+ before do
+ allow(prev_pipeline).to receive(:auto_cancel_on_new_commit).and_return('invalid')
+ relation = Ci::Pipeline.id_in(prev_pipeline.id)
+ allow(relation).to receive(:each).and_yield(prev_pipeline)
+ allow(Ci::Pipeline).to receive(:id_in).and_return(relation)
+ end
+
+ it 'raises an error' do
+ expect { execute }.to raise_error(ArgumentError, 'Unknown auto_cancel_on_new_commit value: invalid')
+ end
+ end
+
it 'does not cancel future pipelines' do
expect(prev_pipeline.id).to be < pipeline.id
expect(build_statuses(pipeline)).to contain_exactly('pending')
diff --git a/spec/services/ci/retry_job_service_spec.rb b/spec/services/ci/retry_job_service_spec.rb
index 1646afde21d..1708f475e6b 100644
--- a/spec/services/ci/retry_job_service_spec.rb
+++ b/spec/services/ci/retry_job_service_spec.rb
@@ -403,11 +403,11 @@ RSpec.describe Ci::RetryJobService, feature_category: :continuous_integration do
end
it 'does not cause an N+1 when updating the job ownership' do
- control_count = ActiveRecord::QueryRecorder.new(skip_cached: false) { service.execute(job) }.count
+ control = ActiveRecord::QueryRecorder.new(skip_cached: false) { service.execute(job) }
create_list(:ci_build, 2, :skipped, pipeline: pipeline, ci_stage: deploy_stage)
- expect { service.execute(job) }.not_to exceed_all_query_limit(control_count)
+ expect { service.execute(job) }.not_to exceed_all_query_limit(control)
end
end
diff --git a/spec/services/ci/runners/unregister_runner_manager_service_spec.rb b/spec/services/ci/runners/unregister_runner_manager_service_spec.rb
index 590df18469d..0fa2afdcdfc 100644
--- a/spec/services/ci/runners/unregister_runner_manager_service_spec.rb
+++ b/spec/services/ci/runners/unregister_runner_manager_service_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe ::Ci::Runners::UnregisterRunnerManagerService, '#execute', feature_category: :fleet_visibility do
+RSpec.describe ::Ci::Runners::UnregisterRunnerManagerService, '#execute', :freeze_time, feature_category: :fleet_visibility do
subject(:execute) { described_class.new(runner, 'some_token', system_id: system_id).execute }
context 'with runner registered with registration token' do
@@ -21,7 +21,7 @@ RSpec.describe ::Ci::Runners::UnregisterRunnerManagerService, '#execute', featur
context 'with runner created in UI' do
let!(:runner_manager1) { create(:ci_runner_machine, runner: runner, system_xid: 'system_id_1') }
let!(:runner_manager2) { create(:ci_runner_machine, runner: runner, system_xid: 'system_id_2') }
- let!(:runner) { create(:ci_runner, registration_type: :authenticated_user) }
+ let!(:runner) { create(:ci_runner, registration_type: :authenticated_user, contacted_at: Time.current) }
context 'with system_id specified' do
let(:system_id) { runner_manager1.system_xid }
@@ -34,6 +34,24 @@ RSpec.describe ::Ci::Runners::UnregisterRunnerManagerService, '#execute', featur
expect(runner[:errors]).to be_nil
expect(runner.runner_managers).to contain_exactly(runner_manager2)
end
+
+ it 'does not clear runner heartbeat' do
+ expect(runner).not_to receive(:clear_heartbeat)
+
+ expect(execute).to be_success
+ end
+
+ context "when there are no runner managers left after deletion" do
+ let!(:runner_manager2) { nil }
+
+ it 'clears the heartbeat attributes' do
+ expect(runner).to receive(:clear_heartbeat).and_call_original
+
+ expect do
+ expect(execute).to be_success
+ end.to change { runner.reload.read_attribute(:contacted_at) }.from(Time.current).to(nil)
+ end
+ end
end
context 'with unknown system_id' do
diff --git a/spec/services/ci/unlock_pipeline_service_spec.rb b/spec/services/ci/unlock_pipeline_service_spec.rb
index 1a1150dca9e..16537ce5eaa 100644
--- a/spec/services/ci/unlock_pipeline_service_spec.rb
+++ b/spec/services/ci/unlock_pipeline_service_spec.rb
@@ -39,6 +39,28 @@ RSpec.describe Ci::UnlockPipelineService, :unlock_pipelines, :clean_gitlab_redis
)
end
+ context 'when disable_ci_partition_pruning is disabled' do
+ before do
+ stub_feature_flags(disable_ci_partition_pruning: false)
+ end
+
+ it 'unlocks the pipeline and all its artifacts' do
+ expect { execute }
+ .to change { pipeline.reload.locked }.from('artifacts_locked').to('unlocked')
+ .and change { pipeline.reload.job_artifacts.all?(&:artifact_unlocked?) }.to(true)
+ .and change { pipeline.reload.pipeline_artifacts.all?(&:artifact_unlocked?) }.to(true)
+
+ expect(execute).to eq(
+ status: :success,
+ skipped_already_leased: false,
+ skipped_already_unlocked: false,
+ exec_timeout: false,
+ unlocked_job_artifacts: pipeline.job_artifacts.count,
+ unlocked_pipeline_artifacts: pipeline.pipeline_artifacts.count
+ )
+ end
+ end
+
context 'and pipeline is already unlocked' do
before do
described_class.new(pipeline).execute
diff --git a/spec/services/ci/update_build_queue_service_spec.rb b/spec/services/ci/update_build_queue_service_spec.rb
index 4fd4492278d..c5959127f34 100644
--- a/spec/services/ci/update_build_queue_service_spec.rb
+++ b/spec/services/ci/update_build_queue_service_spec.rb
@@ -331,11 +331,11 @@ RSpec.describe Ci::UpdateBuildQueueService, feature_category: :continuous_integr
let!(:project_runner) { create(:ci_runner, :project, :online, projects: [project], tag_list: %w[a b c]) }
it 'does execute the same amount of queries regardless of number of runners' do
- control_count = ActiveRecord::QueryRecorder.new { subject.tick(build) }.count
+ control = ActiveRecord::QueryRecorder.new { subject.tick(build) }
create_list(:ci_runner, 10, :project, :online, projects: [project], tag_list: %w[b c d])
- expect { subject.tick(build) }.not_to exceed_all_query_limit(control_count)
+ expect { subject.tick(build) }.not_to exceed_all_query_limit(control)
end
end
end
diff --git a/spec/services/click_house/sync_strategies/base_sync_strategy_spec.rb b/spec/services/click_house/sync_strategies/base_sync_strategy_spec.rb
new file mode 100644
index 00000000000..eb9324fd24b
--- /dev/null
+++ b/spec/services/click_house/sync_strategies/base_sync_strategy_spec.rb
@@ -0,0 +1,169 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe ClickHouse::SyncStrategies::BaseSyncStrategy, feature_category: :value_stream_management do
+ let(:strategy) { described_class.new }
+
+ describe '#execute' do
+ subject(:execute) { strategy.execute }
+
+ context 'when the clickhouse database is configured and available', :click_house do
+ before do
+ allow(strategy).to receive(:model_class).and_return(::Event)
+ allow(strategy).to receive(:projections).and_return([:id])
+ allow(strategy).to receive(:csv_mapping).and_return({ id: :id })
+ allow(strategy).to receive(:insert_query).and_return("INSERT INTO events (id) SETTINGS async_insert=1,
+ wait_for_async_insert=1 FORMAT CSV")
+ end
+
+ context 'when there is nothing to sync' do
+ it 'adds metadata for the worker' do
+ expect(execute).to eq({ status: :processed, records_inserted: 0, reached_end_of_table: true })
+
+ events = ClickHouse::Client.select('SELECT * FROM events', :main)
+ expect(events).to be_empty
+ end
+ end
+
+ context 'when syncing records' do
+ let_it_be(:group) { create(:group) }
+ let_it_be(:project) { create(:project, group: group) }
+ let_it_be(:issue) { create(:issue, project: project) }
+ let_it_be(:project_event2) { create(:event, :closed, project: project, target: issue) }
+ let_it_be(:event_without_parent) { create(:event, :joined, project: nil, group: nil) }
+ let_it_be(:group_event) { create(:event, :created, group: group, project: nil) }
+ let_it_be(:project_event1) { create(:event, :created, project: project, target: issue) }
+
+ it 'inserts all records' do
+ expect(execute).to eq({ status: :processed, records_inserted: 4, reached_end_of_table: true })
+
+ expected_records = [
+ hash_including('id' => project_event2.id),
+ hash_including('id' => event_without_parent.id),
+ hash_including('id' => group_event.id),
+ hash_including('id' => project_event1.id)
+ ]
+
+ events = ClickHouse::Client.select('SELECT * FROM events ORDER BY id', :main)
+
+ expect(events).to match(expected_records)
+
+ last_processed_id = ClickHouse::SyncCursor.cursor_for(:events)
+ expect(last_processed_id).to eq(project_event1.id)
+ end
+
+ context 'when multiple batches are needed' do
+ before do
+ stub_const("#{described_class}::BATCH_SIZE", 1)
+ stub_const("#{described_class}::INSERT_BATCH_SIZE", 1)
+ end
+
+ it 'inserts all records' do
+ expect(execute).to eq({ status: :processed, records_inserted: 4, reached_end_of_table: true })
+
+ events = ClickHouse::Client.select('SELECT * FROM events', :main)
+ expect(events.size).to eq(4)
+ end
+
+ context 'when new records are inserted while processing' do
+ it 'does not process new records created during the iteration' do
+ # Simulating the case when there is an insert during the iteration
+ call_count = 0
+ allow(strategy).to receive(:next_batch).and_wrap_original do |method|
+ call_count += 1
+ create(:event) if call_count == 3
+ method.call
+ end
+
+ expect(execute).to eq({ status: :processed, records_inserted: 4, reached_end_of_table: true })
+ end
+ end
+ end
+
+ context 'when time limit is reached' do
+ before do
+ stub_const("#{described_class}::BATCH_SIZE", 1)
+ end
+
+ it 'stops the processing' do
+ allow_next_instance_of(Analytics::CycleAnalytics::RuntimeLimiter) do |runtime_limiter|
+ allow(runtime_limiter).to receive(:over_time?).and_return(false, true)
+ end
+
+ expect(execute).to eq({ status: :processed, records_inserted: 2, reached_end_of_table: false })
+
+ last_processed_id = ClickHouse::SyncCursor.cursor_for(:events)
+ expect(last_processed_id).to eq(event_without_parent.id)
+ end
+ end
+
+ context 'when syncing from a certain point' do
+ before do
+ ClickHouse::SyncCursor.update_cursor_for(:events, project_event2.id)
+ end
+
+ it 'syncs records after the cursor' do
+ expect(execute).to eq({ status: :processed, records_inserted: 3, reached_end_of_table: true })
+
+ events = ClickHouse::Client.select('SELECT id FROM events ORDER BY id', :main)
+
+ expect(events).to eq([{ 'id' => event_without_parent.id }, { 'id' => group_event.id },
+ { 'id' => project_event1.id }])
+ end
+
+ context 'when there is nothing to sync' do
+ it 'does nothing' do
+ ClickHouse::SyncCursor.update_cursor_for(:events, project_event1.id)
+
+ expect(execute).to eq({ status: :processed, records_inserted: 0, reached_end_of_table: true })
+
+ events = ClickHouse::Client.select('SELECT id FROM events ORDER BY id', :main)
+ expect(events).to be_empty
+ end
+ end
+ end
+ end
+ end
+
+ context 'when clickhouse is not configured' do
+ before do
+ allow(ClickHouse::Client.configuration).to receive(:databases).and_return({})
+ end
+
+ it 'skips execution' do
+ expect(execute).to eq({ status: :disabled })
+ end
+ end
+
+ context 'when exclusive lease error happens' do
+ it 'skips execution' do
+ allow(ClickHouse::Client.configuration).to receive(:databases).and_return({ main: :some_db })
+
+ expect(strategy).to receive(:in_lock).and_raise(Gitlab::ExclusiveLeaseHelpers::FailedToObtainLockError)
+ expect(execute).to eq({ status: :skipped })
+ end
+ end
+ end
+
+ describe '#projections' do
+ it 'raises a NotImplementedError' do
+ expect { strategy.send(:projections) }.to raise_error(NotImplementedError,
+ "Subclasses must implement `projections`")
+ end
+ end
+
+ describe '#csv_mapping' do
+ it 'raises a NotImplementedError' do
+ expect { strategy.send(:csv_mapping) }.to raise_error(NotImplementedError,
+ "Subclasses must implement `csv_mapping`")
+ end
+ end
+
+ describe '#insert_query' do
+ it 'raises a NotImplementedError' do
+ expect { strategy.send(:insert_query) }.to raise_error(NotImplementedError,
+ "Subclasses must implement `insert_query`")
+ end
+ end
+end
diff --git a/spec/services/click_house/sync_strategies/event_sync_strategy_spec.rb b/spec/services/click_house/sync_strategies/event_sync_strategy_spec.rb
new file mode 100644
index 00000000000..05fcf6ddeb3
--- /dev/null
+++ b/spec/services/click_house/sync_strategies/event_sync_strategy_spec.rb
@@ -0,0 +1,128 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe ClickHouse::SyncStrategies::EventSyncStrategy, feature_category: :value_stream_management do
+ let(:strategy) { described_class.new }
+
+ describe '#execute' do
+ subject(:execute) { strategy.execute }
+
+ context 'when syncing records', :click_house do
+ let_it_be(:group) { create(:group) }
+ let_it_be(:project) { create(:project, group: group) }
+ let_it_be(:issue) { create(:issue, project: project) }
+ let_it_be(:project_event2) { create(:event, :closed, project: project, target: issue) }
+ let_it_be(:event_without_parent) { create(:event, :joined, project: nil, group: nil) }
+ let_it_be(:group_event) { create(:event, :created, group: group, project: nil) }
+ let_it_be(:project_event1) { create(:event, :created, project: project, target: issue) }
+ # looks invalid but we have some records like this on PRD
+
+ it 'correctly inserts all records' do
+ expect(execute).to eq({ status: :processed, records_inserted: 4, reached_end_of_table: true })
+
+ expected_records = [
+ hash_including('id' => project_event2.id, 'path' => "#{group.id}/#{project.project_namespace.id}/",
+ 'target_type' => 'Issue'),
+ hash_including('id' => event_without_parent.id, 'path' => '', 'target_type' => ''),
+ hash_including('id' => group_event.id, 'path' => "#{group.id}/", 'target_type' => ''),
+ hash_including('id' => project_event1.id, 'path' => "#{group.id}/#{project.project_namespace.id}/",
+ 'target_type' => 'Issue')
+ ]
+
+ events = ClickHouse::Client.select('SELECT * FROM events ORDER BY id', :main)
+
+ expect(events).to match(expected_records)
+
+ last_processed_id = ClickHouse::SyncCursor.cursor_for(:events)
+ expect(last_processed_id).to eq(project_event1.id)
+ end
+ end
+ end
+
+ describe '#projections' do
+ it 'returns correct projections' do
+ expect(strategy.send(:projections)).to match_array([
+ :id,
+ described_class::PATH_COLUMN,
+ :author_id,
+ :target_id,
+ :target_type,
+ 'action AS raw_action',
+ 'EXTRACT(epoch FROM created_at) AS casted_created_at',
+ 'EXTRACT(epoch FROM updated_at) AS casted_updated_at'
+ ])
+ end
+ end
+
+ describe '#csv_mapping' do
+ it 'returns correct csv mapping' do
+ expect(strategy.send(:csv_mapping)).to eq({
+ id: :id,
+ path: :path,
+ author_id: :author_id,
+ target_id: :target_id,
+ target_type: :target_type,
+ action: :raw_action,
+ created_at: :casted_created_at,
+ updated_at: :casted_updated_at
+ })
+ end
+ end
+
+ describe '#insert_query' do
+ let(:expected_query) do
+ <<~SQL.squish
+ INSERT INTO events (id, path, author_id,
+ target_id, target_type,
+ action, created_at, updated_at)
+ SETTINGS async_insert=1,
+ wait_for_async_insert=1 FORMAT CSV
+ SQL
+ end
+
+ it 'returns correct insert query' do
+ expect(strategy.send(:insert_query)).to eq(expected_query)
+ end
+ end
+
+ describe '#model_class' do
+ it 'returns the correct model class' do
+ expect(strategy.send(:model_class)).to eq(::Event)
+ end
+ end
+
+ describe '#enabled?' do
+ context 'when the clickhouse database is configured and the feature flag is enabled' do
+ before do
+ allow(ClickHouse::Client.configuration).to receive(:databases).and_return({ main: :some_db })
+ stub_feature_flags(event_sync_worker_for_click_house: true)
+ end
+
+ it 'returns true' do
+ expect(strategy.send(:enabled?)).to be_truthy
+ end
+ end
+
+ context 'when the clickhouse database is not configured' do
+ before do
+ allow(ClickHouse::Client.configuration).to receive(:databases).and_return({})
+ end
+
+ it 'returns false' do
+ expect(strategy.send(:enabled?)).to be_falsey
+ end
+ end
+
+ context 'when the feature flag is disabled' do
+ before do
+ allow(ClickHouse::Client.configuration).to receive(:databases).and_return({ main: :some_db })
+ stub_feature_flags(event_sync_worker_for_click_house: false)
+ end
+
+ it 'returns false' do
+ expect(strategy.send(:enabled?)).to be_falsey
+ end
+ end
+ end
+end
diff --git a/spec/services/google_cloud/create_cloudsql_instance_service_spec.rb b/spec/services/cloud_seed/google_cloud/create_cloudsql_instance_service_spec.rb
index c31e76170d5..f6f1206e753 100644
--- a/spec/services/google_cloud/create_cloudsql_instance_service_spec.rb
+++ b/spec/services/cloud_seed/google_cloud/create_cloudsql_instance_service_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe GoogleCloud::CreateCloudsqlInstanceService, feature_category: :deployment_management do
+RSpec.describe CloudSeed::GoogleCloud::CreateCloudsqlInstanceService, feature_category: :deployment_management do
let(:project) { create(:project) }
let(:user) { create(:user) }
let(:gcp_project_id) { 'gcp_project_120' }
diff --git a/spec/services/google_cloud/create_service_accounts_service_spec.rb b/spec/services/cloud_seed/google_cloud/create_service_accounts_service_spec.rb
index 3b57f2a9e5f..da30037963b 100644
--- a/spec/services/google_cloud/create_service_accounts_service_spec.rb
+++ b/spec/services/cloud_seed/google_cloud/create_service_accounts_service_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe GoogleCloud::CreateServiceAccountsService, feature_category: :deployment_management do
+RSpec.describe CloudSeed::GoogleCloud::CreateServiceAccountsService, feature_category: :deployment_management do
describe '#execute' do
before do
mock_google_oauth2_creds = Struct.new(:app_id, :app_secret)
diff --git a/spec/services/google_cloud/enable_cloud_run_service_spec.rb b/spec/services/cloud_seed/google_cloud/enable_cloud_run_service_spec.rb
index 3de9e7fcd5c..09f1b3460cc 100644
--- a/spec/services/google_cloud/enable_cloud_run_service_spec.rb
+++ b/spec/services/cloud_seed/google_cloud/enable_cloud_run_service_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe GoogleCloud::EnableCloudRunService, feature_category: :deployment_management do
+RSpec.describe CloudSeed::GoogleCloud::EnableCloudRunService, feature_category: :deployment_management do
describe 'when a project does not have any gcp projects' do
let_it_be(:project) { create(:project) }
diff --git a/spec/services/google_cloud/enable_cloudsql_service_spec.rb b/spec/services/cloud_seed/google_cloud/enable_cloudsql_service_spec.rb
index b14b827e8b8..137393e4544 100644
--- a/spec/services/google_cloud/enable_cloudsql_service_spec.rb
+++ b/spec/services/cloud_seed/google_cloud/enable_cloudsql_service_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe GoogleCloud::EnableCloudsqlService, feature_category: :deployment_management do
+RSpec.describe CloudSeed::GoogleCloud::EnableCloudsqlService, feature_category: :deployment_management do
let_it_be(:project) { create(:project) }
let_it_be(:user) { create(:user) }
let_it_be(:params) do
diff --git a/spec/services/google_cloud/enable_vision_ai_service_spec.rb b/spec/services/cloud_seed/google_cloud/enable_vision_ai_service_spec.rb
index 5adafcffe69..c37b5681a4b 100644
--- a/spec/services/google_cloud/enable_vision_ai_service_spec.rb
+++ b/spec/services/cloud_seed/google_cloud/enable_vision_ai_service_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe GoogleCloud::EnableVisionAiService, feature_category: :deployment_management do
+RSpec.describe CloudSeed::GoogleCloud::EnableVisionAiService, feature_category: :deployment_management do
describe 'when a project does not have any gcp projects' do
let_it_be(:project) { create(:project) }
diff --git a/spec/services/google_cloud/fetch_google_ip_list_service_spec.rb b/spec/services/cloud_seed/google_cloud/fetch_google_ip_list_service_spec.rb
index f8d5ba99bf6..c4a0be78213 100644
--- a/spec/services/google_cloud/fetch_google_ip_list_service_spec.rb
+++ b/spec/services/cloud_seed/google_cloud/fetch_google_ip_list_service_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe GoogleCloud::FetchGoogleIpListService, :use_clean_rails_memory_store_caching,
+RSpec.describe CloudSeed::GoogleCloud::FetchGoogleIpListService, :use_clean_rails_memory_store_caching,
:clean_gitlab_redis_rate_limiting, feature_category: :build_artifacts do
include StubRequests
diff --git a/spec/services/google_cloud/gcp_region_add_or_replace_service_spec.rb b/spec/services/cloud_seed/google_cloud/gcp_region_add_or_replace_service_spec.rb
index a748fed7134..2af03291484 100644
--- a/spec/services/google_cloud/gcp_region_add_or_replace_service_spec.rb
+++ b/spec/services/cloud_seed/google_cloud/gcp_region_add_or_replace_service_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe GoogleCloud::GcpRegionAddOrReplaceService, feature_category: :deployment_management do
+RSpec.describe CloudSeed::GoogleCloud::GcpRegionAddOrReplaceService, feature_category: :deployment_management do
it 'adds and replaces GCP region vars' do
project = create(:project, :public)
service = described_class.new(project)
diff --git a/spec/services/google_cloud/generate_pipeline_service_spec.rb b/spec/services/cloud_seed/google_cloud/generate_pipeline_service_spec.rb
index 8f49e1af901..14c1e6bae7f 100644
--- a/spec/services/google_cloud/generate_pipeline_service_spec.rb
+++ b/spec/services/cloud_seed/google_cloud/generate_pipeline_service_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe GoogleCloud::GeneratePipelineService, feature_category: :deployment_management do
+RSpec.describe CloudSeed::GoogleCloud::GeneratePipelineService, feature_category: :deployment_management do
describe 'for cloud-run' do
describe 'when there is no existing pipeline' do
let_it_be(:project) { create(:project, :repository) }
@@ -64,7 +64,10 @@ RSpec.describe GoogleCloud::GeneratePipelineService, feature_category: :deployme
describe 'when there is an existing pipeline without `deploy` stage' do
let_it_be(:project) { create(:project, :repository) }
let_it_be(:maintainer) { create(:user) }
- let_it_be(:service_params) { { action: GoogleCloud::GeneratePipelineService::ACTION_DEPLOY_TO_CLOUD_RUN } }
+ let_it_be(:service_params) do
+ { action: CloudSeed::GoogleCloud::GeneratePipelineService::ACTION_DEPLOY_TO_CLOUD_RUN }
+ end
+
let_it_be(:service) { described_class.new(project, maintainer, service_params) }
before_all do
@@ -119,7 +122,10 @@ EOF
describe 'when there is an existing pipeline with `deploy` stage' do
let_it_be(:project) { create(:project, :repository) }
let_it_be(:maintainer) { create(:user) }
- let_it_be(:service_params) { { action: GoogleCloud::GeneratePipelineService::ACTION_DEPLOY_TO_CLOUD_RUN } }
+ let_it_be(:service_params) do
+ { action: CloudSeed::GoogleCloud::GeneratePipelineService::ACTION_DEPLOY_TO_CLOUD_RUN }
+ end
+
let_it_be(:service) { described_class.new(project, maintainer, service_params) }
before do
@@ -166,7 +172,10 @@ EOF
describe 'when there is an existing pipeline with `includes`' do
let_it_be(:project) { create(:project, :repository) }
let_it_be(:maintainer) { create(:user) }
- let_it_be(:service_params) { { action: GoogleCloud::GeneratePipelineService::ACTION_DEPLOY_TO_CLOUD_RUN } }
+ let_it_be(:service_params) do
+ { action: CloudSeed::GoogleCloud::GeneratePipelineService::ACTION_DEPLOY_TO_CLOUD_RUN }
+ end
+
let_it_be(:service) { described_class.new(project, maintainer, service_params) }
before do
@@ -210,7 +219,10 @@ EOF
describe 'when there is no existing pipeline' do
let_it_be(:project) { create(:project, :repository) }
let_it_be(:maintainer) { create(:user) }
- let_it_be(:service_params) { { action: GoogleCloud::GeneratePipelineService::ACTION_DEPLOY_TO_CLOUD_STORAGE } }
+ let_it_be(:service_params) do
+ { action: CloudSeed::GoogleCloud::GeneratePipelineService::ACTION_DEPLOY_TO_CLOUD_STORAGE }
+ end
+
let_it_be(:service) { described_class.new(project, maintainer, service_params) }
before do
diff --git a/spec/services/google_cloud/get_cloudsql_instances_service_spec.rb b/spec/services/cloud_seed/google_cloud/get_cloudsql_instances_service_spec.rb
index cd2ad00ac3f..fb17d578af7 100644
--- a/spec/services/google_cloud/get_cloudsql_instances_service_spec.rb
+++ b/spec/services/cloud_seed/google_cloud/get_cloudsql_instances_service_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe GoogleCloud::GetCloudsqlInstancesService, feature_category: :deployment_management do
+RSpec.describe CloudSeed::GoogleCloud::GetCloudsqlInstancesService, feature_category: :deployment_management do
let(:service) { described_class.new(project) }
let(:project) { create(:project) }
diff --git a/spec/services/google_cloud/service_accounts_service_spec.rb b/spec/services/cloud_seed/google_cloud/service_accounts_service_spec.rb
index c900bf7d300..62d58b3198a 100644
--- a/spec/services/google_cloud/service_accounts_service_spec.rb
+++ b/spec/services/cloud_seed/google_cloud/service_accounts_service_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe GoogleCloud::ServiceAccountsService, feature_category: :deployment_management do
+RSpec.describe CloudSeed::GoogleCloud::ServiceAccountsService, feature_category: :deployment_management do
let(:service) { described_class.new(project) }
describe 'find_for_project' do
diff --git a/spec/services/google_cloud/setup_cloudsql_instance_service_spec.rb b/spec/services/cloud_seed/google_cloud/setup_cloudsql_instance_service_spec.rb
index 5095277f61a..ce02672e3fa 100644
--- a/spec/services/google_cloud/setup_cloudsql_instance_service_spec.rb
+++ b/spec/services/cloud_seed/google_cloud/setup_cloudsql_instance_service_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe GoogleCloud::SetupCloudsqlInstanceService, feature_category: :deployment_management do
+RSpec.describe CloudSeed::GoogleCloud::SetupCloudsqlInstanceService, feature_category: :deployment_management do
let(:random_user) { create(:user) }
let(:project) { create(:project) }
let(:list_databases_empty) { Google::Apis::SqladminV1beta4::ListDatabasesResponse.new(items: []) }
diff --git a/spec/services/design_management/save_designs_service_spec.rb b/spec/services/design_management/save_designs_service_spec.rb
index 4ab0080d8a2..5ad3fa1bca6 100644
--- a/spec/services/design_management/save_designs_service_spec.rb
+++ b/spec/services/design_management/save_designs_service_spec.rb
@@ -30,7 +30,7 @@ RSpec.describe DesignManagement::SaveDesignsService, feature_category: :design_m
before do
if issue.design_collection.repository.exists?
issue.design_collection.repository.expire_all_method_caches
- issue.design_collection.repository.raw.delete_all_refs_except([Gitlab::Git::BLANK_SHA])
+ issue.design_collection.repository.raw.delete_all_refs_except([Gitlab::Git::SHA1_BLANK_SHA])
end
allow(DesignManagement::NewVersionWorker)
diff --git a/spec/services/event_create_service_spec.rb b/spec/services/event_create_service_spec.rb
index 6a4769d77d5..f7041fb818e 100644
--- a/spec/services/event_create_service_spec.rb
+++ b/spec/services/event_create_service_spec.rb
@@ -364,19 +364,37 @@ RSpec.describe EventCreateService, :clean_gitlab_redis_cache, :clean_gitlab_redi
end
end
- describe 'Project' do
- describe '#join_project' do
- subject { service.join_project(project, user) }
+ describe '#join_source' do
+ let(:source) { project }
+ subject(:join_source) { service.join_source(source, user) }
+
+ context 'when source is a group' do
+ let_it_be(:source) { create(:group) }
+
+ it { is_expected.to be_falsey }
+
+ specify do
+ expect { join_source }.not_to change { Event.count }
+ end
+ end
+
+ context 'when source is a project' do
it { is_expected.to be_truthy }
- it { expect { subject }.to change { Event.count }.from(0).to(1) }
+
+ specify do
+ expect { join_source }.to change { Event.count }.from(0).to(1)
+ end
end
+ end
- describe '#expired_leave_project' do
- subject { service.expired_leave_project(project, user) }
+ describe '#expired_leave_project' do
+ subject(:expired_leave_project) { service.expired_leave_project(project, user) }
- it { is_expected.to be_truthy }
- it { expect { subject }.to change { Event.count }.from(0).to(1) }
+ it { is_expected.to be_truthy }
+
+ specify do
+ expect { expired_leave_project }.to change { Event.count }.from(0).to(1)
end
end
diff --git a/spec/services/git/base_hooks_service_spec.rb b/spec/services/git/base_hooks_service_spec.rb
index e083c8d7316..2a041d9b3e2 100644
--- a/spec/services/git/base_hooks_service_spec.rb
+++ b/spec/services/git/base_hooks_service_spec.rb
@@ -8,7 +8,7 @@ RSpec.describe Git::BaseHooksService, feature_category: :source_code_management
let_it_be(:user) { create(:user) }
let_it_be(:project) { create(:project, :repository) }
- let(:oldrev) { Gitlab::Git::BLANK_SHA }
+ let(:oldrev) { Gitlab::Git::SHA1_BLANK_SHA }
let(:newrev) { "8a2a6eb295bb170b34c24c76c49ed0e9b2eaf34b" } # gitlab-test: git rev-parse refs/tags/v1.1.0
let(:ref) { 'refs/tags/v1.1.0' }
let(:checkout_sha) { '5937ac0a7beb003549fc5fd26fc247adbce4a52e' }
diff --git a/spec/services/git/branch_hooks_service_spec.rb b/spec/services/git/branch_hooks_service_spec.rb
index 8fd542542ae..39a5f28060c 100644
--- a/spec/services/git/branch_hooks_service_spec.rb
+++ b/spec/services/git/branch_hooks_service_spec.rb
@@ -160,7 +160,7 @@ RSpec.describe Git::BranchHooksService, :clean_gitlab_redis_shared_state, featur
end
context "with a new default branch" do
- let(:oldrev) { Gitlab::Git::BLANK_SHA }
+ let(:oldrev) { Gitlab::Git::SHA1_BLANK_SHA }
it 'generates a push event with more than one commit' do
execute_service
@@ -178,7 +178,7 @@ RSpec.describe Git::BranchHooksService, :clean_gitlab_redis_shared_state, featur
end
context "with a new non-default branch" do
- let(:oldrev) { Gitlab::Git::BLANK_SHA }
+ let(:oldrev) { Gitlab::Git::SHA1_BLANK_SHA }
let(:branch) { 'fix' }
let(:commit_id) { project.commit(branch).id }
@@ -198,7 +198,7 @@ RSpec.describe Git::BranchHooksService, :clean_gitlab_redis_shared_state, featur
end
context 'removing a branch' do
- let(:newrev) { Gitlab::Git::BLANK_SHA }
+ let(:newrev) { Gitlab::Git::SHA1_BLANK_SHA }
it 'generates a push event with no commits' do
execute_service
@@ -222,7 +222,7 @@ RSpec.describe Git::BranchHooksService, :clean_gitlab_redis_shared_state, featur
)
end
- let(:blank_sha) { Gitlab::Git::BLANK_SHA }
+ let(:blank_sha) { Gitlab::Git::SHA1_BLANK_SHA }
def clears_cache(extended: [])
expect(service).to receive(:invalidated_file_types).and_return(extended)
@@ -361,7 +361,7 @@ RSpec.describe Git::BranchHooksService, :clean_gitlab_redis_shared_state, featur
end
context 'creating the default branch' do
- let(:oldrev) { Gitlab::Git::BLANK_SHA }
+ let(:oldrev) { Gitlab::Git::SHA1_BLANK_SHA }
it 'processes a limited number of commit messages' do
expect(project.repository)
@@ -414,7 +414,7 @@ RSpec.describe Git::BranchHooksService, :clean_gitlab_redis_shared_state, featur
end
context 'removing the default branch' do
- let(:newrev) { Gitlab::Git::BLANK_SHA }
+ let(:newrev) { Gitlab::Git::SHA1_BLANK_SHA }
it 'does not process commit messages' do
expect(project.repository).not_to receive(:commits)
@@ -429,7 +429,7 @@ RSpec.describe Git::BranchHooksService, :clean_gitlab_redis_shared_state, featur
context 'creating a normal branch' do
let(:branch) { 'fix' }
- let(:oldrev) { Gitlab::Git::BLANK_SHA }
+ let(:oldrev) { Gitlab::Git::SHA1_BLANK_SHA }
it 'processes a limited number of commit messages' do
expect(project.repository)
@@ -463,7 +463,7 @@ RSpec.describe Git::BranchHooksService, :clean_gitlab_redis_shared_state, featur
context 'removing a normal branch' do
let(:branch) { 'fix' }
- let(:newrev) { Gitlab::Git::BLANK_SHA }
+ let(:newrev) { Gitlab::Git::SHA1_BLANK_SHA }
it 'does not process commit messages' do
expect(project.repository).not_to receive(:commits)
@@ -530,7 +530,7 @@ RSpec.describe Git::BranchHooksService, :clean_gitlab_redis_shared_state, featur
let(:branch) { 'fix' }
context 'oldrev is the blank SHA' do
- let(:oldrev) { Gitlab::Git::BLANK_SHA }
+ let(:oldrev) { Gitlab::Git::SHA1_BLANK_SHA }
it 'is treated as a new branch' do
expect(service).to receive(:branch_create_hooks)
diff --git a/spec/services/git/branch_push_service_spec.rb b/spec/services/git/branch_push_service_spec.rb
index db4f3ace64b..bb5fe1b7b11 100644
--- a/spec/services/git/branch_push_service_spec.rb
+++ b/spec/services/git/branch_push_service_spec.rb
@@ -8,7 +8,7 @@ RSpec.describe Git::BranchPushService, :use_clean_rails_redis_caching, services:
let_it_be(:user) { create(:user) }
let_it_be_with_refind(:project) { create(:project, :repository) }
- let(:blankrev) { Gitlab::Git::BLANK_SHA }
+ let(:blankrev) { Gitlab::Git::SHA1_BLANK_SHA }
let(:oldrev) { sample_commit.parent_id }
let(:newrev) { sample_commit.id }
let(:branch) { 'master' }
diff --git a/spec/services/git/process_ref_changes_service_spec.rb b/spec/services/git/process_ref_changes_service_spec.rb
index 93d65b0b344..c117988f0a1 100644
--- a/spec/services/git/process_ref_changes_service_spec.rb
+++ b/spec/services/git/process_ref_changes_service_spec.rb
@@ -21,9 +21,9 @@ RSpec.describe Git::ProcessRefChangesService, feature_category: :source_code_man
let(:changes) do
[
- { index: 0, oldrev: Gitlab::Git::BLANK_SHA, newrev: '789012', ref: "#{ref_prefix}/create" },
+ { index: 0, oldrev: Gitlab::Git::SHA1_BLANK_SHA, newrev: '789012', ref: "#{ref_prefix}/create" },
{ index: 1, oldrev: '123456', newrev: '789012', ref: "#{ref_prefix}/update" },
- { index: 2, oldrev: '123456', newrev: Gitlab::Git::BLANK_SHA, ref: "#{ref_prefix}/delete" }
+ { index: 2, oldrev: '123456', newrev: Gitlab::Git::SHA1_BLANK_SHA, ref: "#{ref_prefix}/delete" }
]
end
@@ -71,9 +71,9 @@ RSpec.describe Git::ProcessRefChangesService, feature_category: :source_code_man
let(:changes) do
[
- { oldrev: Gitlab::Git::BLANK_SHA, newrev: '789012', ref: "#{ref_prefix}/create" },
+ { oldrev: Gitlab::Git::SHA1_BLANK_SHA, newrev: '789012', ref: "#{ref_prefix}/create" },
{ oldrev: '123456', newrev: '789012', ref: "#{ref_prefix}/update" },
- { oldrev: '123456', newrev: Gitlab::Git::BLANK_SHA, ref: "#{ref_prefix}/delete" }
+ { oldrev: '123456', newrev: Gitlab::Git::SHA1_BLANK_SHA, ref: "#{ref_prefix}/delete" }
].map do |change|
multiple_changes(change, push_event_activities_limit + 1)
end.flatten
@@ -216,19 +216,19 @@ RSpec.describe Git::ProcessRefChangesService, feature_category: :source_code_man
context 'when there are merge requests associated with branches' do
let(:tag_changes) do
[
- { index: 0, oldrev: Gitlab::Git::BLANK_SHA, newrev: '789012', ref: "refs/tags/v10.0.0" }
+ { index: 0, oldrev: Gitlab::Git::SHA1_BLANK_SHA, newrev: '789012', ref: "refs/tags/v10.0.0" }
]
end
let(:branch_changes) do
[
- { index: 0, oldrev: Gitlab::Git::BLANK_SHA, newrev: '789012', ref: "#{ref_prefix}/create1" },
- { index: 1, oldrev: Gitlab::Git::BLANK_SHA, newrev: '789013', ref: "#{ref_prefix}/create2" },
- { index: 2, oldrev: Gitlab::Git::BLANK_SHA, newrev: '789014', ref: "#{ref_prefix}/create3" },
+ { index: 0, oldrev: Gitlab::Git::SHA1_BLANK_SHA, newrev: '789012', ref: "#{ref_prefix}/create1" },
+ { index: 1, oldrev: Gitlab::Git::SHA1_BLANK_SHA, newrev: '789013', ref: "#{ref_prefix}/create2" },
+ { index: 2, oldrev: Gitlab::Git::SHA1_BLANK_SHA, newrev: '789014', ref: "#{ref_prefix}/create3" },
{ index: 3, oldrev: '789015', newrev: '789016', ref: "#{ref_prefix}/changed1" },
{ index: 4, oldrev: '789017', newrev: '789018', ref: "#{ref_prefix}/changed2" },
- { index: 5, oldrev: '789019', newrev: Gitlab::Git::BLANK_SHA, ref: "#{ref_prefix}/removed1" },
- { index: 6, oldrev: '789020', newrev: Gitlab::Git::BLANK_SHA, ref: "#{ref_prefix}/removed2" }
+ { index: 5, oldrev: '789019', newrev: Gitlab::Git::SHA1_BLANK_SHA, ref: "#{ref_prefix}/removed1" },
+ { index: 6, oldrev: '789020', newrev: Gitlab::Git::SHA1_BLANK_SHA, ref: "#{ref_prefix}/removed2" }
]
end
@@ -246,7 +246,7 @@ RSpec.describe Git::ProcessRefChangesService, feature_category: :source_code_man
expect(UpdateMergeRequestsWorker).to receive(:perform_async).with(
project.id,
user.id,
- Gitlab::Git::BLANK_SHA,
+ Gitlab::Git::SHA1_BLANK_SHA,
'789012',
"#{ref_prefix}/create1",
{ 'push_options' => nil }).ordered
@@ -254,7 +254,7 @@ RSpec.describe Git::ProcessRefChangesService, feature_category: :source_code_man
expect(UpdateMergeRequestsWorker).to receive(:perform_async).with(
project.id,
user.id,
- Gitlab::Git::BLANK_SHA,
+ Gitlab::Git::SHA1_BLANK_SHA,
'789013',
"#{ref_prefix}/create2",
{ 'push_options' => nil }).ordered
@@ -271,7 +271,7 @@ RSpec.describe Git::ProcessRefChangesService, feature_category: :source_code_man
project.id,
user.id,
'789020',
- Gitlab::Git::BLANK_SHA,
+ Gitlab::Git::SHA1_BLANK_SHA,
"#{ref_prefix}/removed2",
{ 'push_options' => nil }).ordered
diff --git a/spec/services/git/tag_hooks_service_spec.rb b/spec/services/git/tag_hooks_service_spec.rb
index 3e06443126b..afa8a4e72d3 100644
--- a/spec/services/git/tag_hooks_service_spec.rb
+++ b/spec/services/git/tag_hooks_service_spec.rb
@@ -6,7 +6,7 @@ RSpec.describe Git::TagHooksService, :service, feature_category: :source_code_ma
let(:user) { create(:user) }
let(:project) { create(:project, :repository) }
- let(:oldrev) { Gitlab::Git::BLANK_SHA }
+ let(:oldrev) { Gitlab::Git::SHA1_BLANK_SHA }
let(:newrev) { "8a2a6eb295bb170b34c24c76c49ed0e9b2eaf34b" } # gitlab-test: git rev-parse refs/tags/v1.1.0
let(:ref) { "refs/tags/#{tag_name}" }
let(:tag_name) { 'v1.1.0' }
diff --git a/spec/services/git/tag_push_service_spec.rb b/spec/services/git/tag_push_service_spec.rb
index 0d40c331d11..ba0f94d6fe6 100644
--- a/spec/services/git/tag_push_service_spec.rb
+++ b/spec/services/git/tag_push_service_spec.rb
@@ -9,7 +9,7 @@ RSpec.describe Git::TagPushService, feature_category: :source_code_management do
let(:project) { create(:project, :repository) }
let(:service) { described_class.new(project, user, change: { oldrev: oldrev, newrev: newrev, ref: ref }) }
- let(:blankrev) { Gitlab::Git::BLANK_SHA }
+ let(:blankrev) { Gitlab::Git::SHA1_BLANK_SHA }
let(:oldrev) { blankrev }
let(:newrev) { "8a2a6eb295bb170b34c24c76c49ed0e9b2eaf34b" } # gitlab-test: git rev-parse refs/tags/v1.1.0
let(:tag) { 'v1.1.0' }
diff --git a/spec/services/git/wiki_push_service_spec.rb b/spec/services/git/wiki_push_service_spec.rb
index b076b2d51ef..e584b0db63f 100644
--- a/spec/services/git/wiki_push_service_spec.rb
+++ b/spec/services/git/wiki_push_service_spec.rb
@@ -347,7 +347,7 @@ RSpec.describe Git::WikiPushService, services: true, feature_category: :wiki do
end
def current_sha
- repository.commit('master')&.id || Gitlab::Git::BLANK_SHA
+ repository.commit('master')&.id || Gitlab::Git::SHA1_BLANK_SHA
end
# It is important not to re-use the WikiPage services here, since they create
diff --git a/spec/services/integrations/google_cloud_platform/artifact_registry/list_docker_images_service_spec.rb b/spec/services/google_cloud_platform/artifact_registry/list_docker_images_service_spec.rb
index 3f57add10e3..f19cbaa21cd 100644
--- a/spec/services/integrations/google_cloud_platform/artifact_registry/list_docker_images_service_spec.rb
+++ b/spec/services/google_cloud_platform/artifact_registry/list_docker_images_service_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe ::Integrations::GoogleCloudPlatform::ArtifactRegistry::ListDockerImagesService, feature_category: :container_registry do
+RSpec.describe GoogleCloudPlatform::ArtifactRegistry::ListDockerImagesService, feature_category: :container_registry do
let_it_be(:project) { create(:project, :private) }
let(:user) { project.owner }
diff --git a/spec/services/groups/create_service_spec.rb b/spec/services/groups/create_service_spec.rb
index b2b27a1a075..8ce69d12b3f 100644
--- a/spec/services/groups/create_service_spec.rb
+++ b/spec/services/groups/create_service_spec.rb
@@ -3,45 +3,58 @@
require 'spec_helper'
RSpec.describe Groups::CreateService, '#execute', feature_category: :groups_and_projects do
- let!(:user) { create(:user) }
- let!(:group_params) { { path: "group_path", visibility_level: Gitlab::VisibilityLevel::PUBLIC } }
+ let_it_be(:user, reload: true) { create(:user) }
+ let(:current_user) { user }
+ let(:group_params) { { path: 'group_path', visibility_level: Gitlab::VisibilityLevel::PUBLIC }.merge(extra_params) }
+ let(:extra_params) { {} }
+ let(:created_group) { response }
- subject { service.execute }
+ subject(:response) { described_class.new(current_user, group_params).execute }
shared_examples 'has sync-ed traversal_ids' do
- specify { expect(subject.reload.traversal_ids).to eq([subject.parent&.traversal_ids, subject.id].flatten.compact) }
+ specify do
+ expect(created_group.traversal_ids).to eq([created_group.parent&.traversal_ids, created_group.id].flatten.compact)
+ end
+ end
+
+ shared_examples 'creating a group' do
+ specify do
+ expect { response }.to change { Group.count }
+ expect(created_group).to be_persisted
+ end
end
- describe 'visibility level restrictions' do
- let!(:service) { described_class.new(user, group_params) }
+ shared_examples 'does not create a group' do
+ specify do
+ expect { response }.not_to change { Group.count }
+ expect(created_group).not_to be_persisted
+ end
+ end
- context "create groups without restricted visibility level" do
- it { is_expected.to be_persisted }
+ context 'for visibility level restrictions' do
+ context 'without restricted visibility level' do
+ it_behaves_like 'creating a group'
end
- context "cannot create group with restricted visibility level" do
+ context 'with restricted visibility level' do
before do
- allow_any_instance_of(ApplicationSetting).to receive(:restricted_visibility_levels).and_return([Gitlab::VisibilityLevel::PUBLIC])
+ stub_application_setting(restricted_visibility_levels: [Gitlab::VisibilityLevel::PUBLIC])
end
- it { is_expected.not_to be_persisted }
+ it_behaves_like 'does not create a group'
end
end
- context 'when `setup_for_company:true` is passed' do
- let(:params) { group_params.merge(setup_for_company: true) }
- let(:service) { described_class.new(user, params) }
- let(:created_group) { service.execute }
+ context 'with `setup_for_company` attribute' do
+ let(:extra_params) { { setup_for_company: true } }
- it 'creates group with the specified setup_for_company' do
+ it 'has the specified setup_for_company' do
expect(created_group.setup_for_company).to eq(true)
end
end
- context 'creating a group with `default_branch_protection` attribute' do
- let(:params) { group_params.merge(default_branch_protection: Gitlab::Access::PROTECTION_NONE) }
- let(:service) { described_class.new(user, params) }
- let(:created_group) { service.execute }
+ context 'with `default_branch_protection` attribute' do
+ let(:extra_params) { { default_branch_protection: Gitlab::Access::PROTECTION_NONE } }
context 'for users who have the ability to create a group with `default_branch_protection`' do
it 'creates group with the specified branch protection level' do
@@ -52,23 +65,22 @@ RSpec.describe Groups::CreateService, '#execute', feature_category: :groups_and_
context 'for users who do not have the ability to create a group with `default_branch_protection`' do
it 'does not create the group with the specified branch protection level' do
allow(Ability).to receive(:allowed?).and_call_original
- allow(Ability).to receive(:allowed?).with(user, :create_group_with_default_branch_protection) { false }
+ allow(Ability).to receive(:allowed?).with(user, :create_group_with_default_branch_protection).and_return(false)
expect(created_group.default_branch_protection).not_to eq(Gitlab::Access::PROTECTION_NONE)
end
end
end
- context 'creating a group with `default_branch_protection_defaults` attribute' do
+ context 'with `default_branch_protection_defaults` attribute' do
let(:branch_protection) { ::Gitlab::Access::BranchProtection.protected_against_developer_pushes.stringify_keys }
- let(:params) { group_params.merge(default_branch_protection_defaults: branch_protection) }
- let(:service) { described_class.new(user, params) }
- let(:created_group) { service.execute }
+ let(:extra_params) { { default_branch_protection_defaults: branch_protection } }
context 'for users who have the ability to create a group with `default_branch_protection`' do
before do
allow(Ability).to receive(:allowed?).and_call_original
- allow(Ability).to receive(:allowed?).with(user, :update_default_branch_protection, an_instance_of(Group)).and_return(true)
+ allow(Ability)
+ .to receive(:allowed?).with(user, :update_default_branch_protection, an_instance_of(Group)).and_return(true)
end
it 'creates group with the specified default branch protection settings' do
@@ -79,31 +91,26 @@ RSpec.describe Groups::CreateService, '#execute', feature_category: :groups_and_
context 'for users who do not have the ability to create a group with `default_branch_protection_defaults`' do
it 'does not create the group with the specified default branch protection settings' do
allow(Ability).to receive(:allowed?).and_call_original
- allow(Ability).to receive(:allowed?).with(user, :create_group_with_default_branch_protection) { false }
+ allow(Ability).to receive(:allowed?).with(user, :create_group_with_default_branch_protection).and_return(false)
expect(created_group.default_branch_protection_defaults).not_to eq(Gitlab::Access::PROTECTION_NONE)
end
end
end
- context 'creating a group with `allow_mfa_for_subgroups` attribute' do
- let(:params) { group_params.merge(allow_mfa_for_subgroups: false) }
- let(:service) { described_class.new(user, params) }
+ context 'with `allow_mfa_for_subgroups` attribute' do
+ let(:extra_params) { { allow_mfa_for_subgroups: false } }
- it 'creates group without error' do
- expect(service.execute).to be_persisted
- end
+ it_behaves_like 'creating a group'
end
- describe 'creating a top level group' do
- let(:service) { described_class.new(user, group_params) }
-
+ context 'for a top level group' do
context 'when user can create a group' do
before do
user.update_attribute(:can_create_group, true)
end
- it { is_expected.to be_persisted }
+ it_behaves_like 'creating a group'
context 'with before_commit callback' do
it_behaves_like 'has sync-ed traversal_ids'
@@ -115,144 +122,167 @@ RSpec.describe Groups::CreateService, '#execute', feature_category: :groups_and_
user.update_attribute(:can_create_group, false)
end
- it { is_expected.not_to be_persisted }
+ it_behaves_like 'does not create a group'
end
end
- describe 'creating subgroup' do
- let!(:group) { create(:group) }
- let!(:service) { described_class.new(user, group_params.merge(parent_id: group.id)) }
+ context 'when creating a group within an organization' do
+ context 'when organization is provided' do
+ let_it_be(:organization) { create(:organization) }
+ let(:extra_params) { { organization_id: organization.id } }
- context 'as group owner' do
+ context 'when user can create the group' do
+ before do
+ create(:organization_user, user: user, organization: organization)
+ end
+
+ it_behaves_like 'creating a group'
+ end
+
+ context 'when user is an admin', :enable_admin_mode do
+ let(:current_user) { create(:admin) }
+
+ it_behaves_like 'creating a group'
+ end
+
+ context 'when user can not create the group' do
+ it_behaves_like 'does not create a group'
+
+ it 'returns an error and does not set organization_id' do
+ expect(created_group.errors[:organization_id].first)
+ .to eq(s_("CreateGroup|You don't have permission to create a group in the provided organization."))
+ expect(created_group.organization_id).to be_nil
+ end
+ end
+ end
+
+ context 'when organization is the default organization and not set by params' do
before do
- group.add_owner(user)
+ create(:organization, :default)
end
- it { is_expected.to be_persisted }
+ it_behaves_like 'creating a group'
+ end
+ end
+
+ context 'for a subgroup' do
+ let_it_be(:group) { create(:group) }
+ let(:extra_params) { { parent_id: group.id } }
+
+ context 'as group owner' do
+ before_all do
+ group.add_owner(user)
+ end
+ it_behaves_like 'creating a group'
it_behaves_like 'has sync-ed traversal_ids'
end
context 'as guest' do
- it 'does not save group and returns an error' do
- is_expected.not_to be_persisted
+ it_behaves_like 'does not create a group'
- expect(subject.errors[:parent_id].first).to eq(s_('CreateGroup|You don’t have permission to create a subgroup in this group.'))
- expect(subject.parent_id).to be_nil
+ it 'returns an error and does not set parent_id' do
+ expect(created_group.errors[:parent_id].first)
+ .to eq(s_('CreateGroup|You don’t have permission to create a subgroup in this group.'))
+ expect(created_group.parent_id).to be_nil
end
end
context 'as owner' do
- before do
+ before_all do
group.add_owner(user)
end
- it { is_expected.to be_persisted }
+ it_behaves_like 'creating a group'
end
context 'as maintainer' do
- before do
+ before_all do
group.add_maintainer(user)
end
- it { is_expected.to be_persisted }
+ it_behaves_like 'creating a group'
end
end
- describe "when visibility level is passed as a string" do
- let(:service) { described_class.new(user, group_params) }
- let(:group_params) { { path: 'group_path', visibility: 'public' } }
-
- it "assigns the correct visibility level" do
- group = service.execute
+ context 'when visibility level is passed as a string' do
+ let(:extra_params) { { visibility: 'public' } }
- expect(group.visibility_level).to eq(Gitlab::VisibilityLevel::PUBLIC)
+ it 'assigns the correct visibility level' do
+ expect(created_group.visibility_level).to eq(Gitlab::VisibilityLevel::PUBLIC)
end
end
- describe 'creating a mattermost team' do
- let!(:params) { group_params.merge(create_chat_team: "true") }
- let!(:service) { described_class.new(user, params) }
+ context 'for creating a mattermost team' do
+ let(:extra_params) { { create_chat_team: 'true' } }
before do
stub_mattermost_setting(enabled: true)
end
it 'create the chat team with the group' do
- allow_any_instance_of(::Mattermost::Team).to receive(:create)
- .and_return({ 'name' => 'tanuki', 'id' => 'lskdjfwlekfjsdifjj' })
+ allow_next_instance_of(::Mattermost::Team) do |instance|
+ allow(instance).to receive(:create).and_return({ 'name' => 'tanuki', 'id' => 'lskdjfwlekfjsdifjj' })
+ end
- expect { subject }.to change { ChatTeam.count }.from(0).to(1)
+ expect { response }.to change { ChatTeam.count }.from(0).to(1)
end
end
- describe 'creating a setting record' do
- let(:service) { described_class.new(user, group_params) }
-
+ context 'for creating a setting record' do
it 'create the settings record connected to the group' do
- group = subject
- expect(group.namespace_settings).to be_persisted
+ expect(created_group.namespace_settings).to be_persisted
end
end
- describe 'creating a details record' do
- let(:service) { described_class.new(user, group_params) }
-
+ context 'for creating a details record' do
it 'create the details record connected to the group' do
- group = subject
- expect(group.namespace_details).to be_persisted
+ expect(created_group.namespace_details).to be_persisted
end
end
- describe 'create service for the group' do
- let(:service) { described_class.new(user, group_params) }
- let(:created_group) { service.execute }
+ context 'with an active instance-level integration' do
+ let_it_be(:instance_integration) do
+ create(:prometheus_integration, :instance, api_url: 'https://prometheus.instance.com/')
+ end
+
+ it 'creates a service from the instance-level integration' do
+ expect(created_group.integrations.count).to eq(1)
+ expect(created_group.integrations.first.api_url).to eq(instance_integration.api_url)
+ expect(created_group.integrations.first.inherit_from_id).to eq(instance_integration.id)
+ end
- context 'with an active instance-level integration' do
- let!(:instance_integration) { create(:prometheus_integration, :instance, api_url: 'https://prometheus.instance.com/') }
+ context 'with an active group-level integration' do
+ let(:extra_params) { { parent_id: group.id } }
+ let_it_be(:group) { create(:group) { |g| g.add_owner(user) } }
+ let_it_be(:group_integration) do
+ create(:prometheus_integration, :group, group: group, api_url: 'https://prometheus.group.com/')
+ end
- it 'creates a service from the instance-level integration' do
+ it 'creates a service from the group-level integration' do
expect(created_group.integrations.count).to eq(1)
- expect(created_group.integrations.first.api_url).to eq(instance_integration.api_url)
- expect(created_group.integrations.first.inherit_from_id).to eq(instance_integration.id)
+ expect(created_group.integrations.first.api_url).to eq(group_integration.api_url)
+ expect(created_group.integrations.first.inherit_from_id).to eq(group_integration.id)
end
- context 'with an active group-level integration' do
- let(:service) { described_class.new(user, group_params.merge(parent_id: group.id)) }
- let!(:group_integration) { create(:prometheus_integration, :group, group: group, api_url: 'https://prometheus.group.com/') }
- let(:group) do
- create(:group).tap do |group|
- group.add_owner(user)
- end
+ context 'with an active subgroup' do
+ let(:extra_params) { { parent_id: subgroup.id } }
+ let_it_be(:subgroup) { create(:group, parent: group) { |g| g.add_owner(user) } }
+ let_it_be(:subgroup_integration) do
+ create(:prometheus_integration, :group, group: subgroup, api_url: 'https://prometheus.subgroup.com/')
end
- it 'creates a service from the group-level integration' do
+ it 'creates a service from the subgroup-level integration' do
expect(created_group.integrations.count).to eq(1)
- expect(created_group.integrations.first.api_url).to eq(group_integration.api_url)
- expect(created_group.integrations.first.inherit_from_id).to eq(group_integration.id)
- end
-
- context 'with an active subgroup' do
- let(:service) { described_class.new(user, group_params.merge(parent_id: subgroup.id)) }
- let!(:subgroup_integration) { create(:prometheus_integration, :group, group: subgroup, api_url: 'https://prometheus.subgroup.com/') }
- let(:subgroup) do
- create(:group, parent: group).tap do |subgroup|
- subgroup.add_owner(user)
- end
- end
-
- it 'creates a service from the subgroup-level integration' do
- expect(created_group.integrations.count).to eq(1)
- expect(created_group.integrations.first.api_url).to eq(subgroup_integration.api_url)
- expect(created_group.integrations.first.inherit_from_id).to eq(subgroup_integration.id)
- end
+ expect(created_group.integrations.first.api_url).to eq(subgroup_integration.api_url)
+ expect(created_group.integrations.first.inherit_from_id).to eq(subgroup_integration.id)
end
end
end
end
- context 'shared runners configuration' do
- context 'parent group present' do
+ context 'with shared runners configuration' do
+ context 'when parent group is present' do
using RSpec::Parameterized::TableSyntax
where(:shared_runners_config, :descendants_override_disabled_shared_runners_config) do
@@ -263,30 +293,31 @@ RSpec.describe Groups::CreateService, '#execute', feature_category: :groups_and_
end
with_them do
- let!(:group) { create(:group, shared_runners_enabled: shared_runners_config, allow_descendants_override_disabled_shared_runners: descendants_override_disabled_shared_runners_config) }
- let!(:service) { described_class.new(user, group_params.merge(parent_id: group.id)) }
+ let(:extra_params) { { parent_id: group.id } }
+ let(:group) do
+ create(
+ :group,
+ shared_runners_enabled: shared_runners_config,
+ allow_descendants_override_disabled_shared_runners: descendants_override_disabled_shared_runners_config
+ )
+ end
before do
group.add_owner(user)
end
it 'creates group following the parent config' do
- new_group = service.execute
-
- expect(new_group.shared_runners_enabled).to eq(shared_runners_config)
- expect(new_group.allow_descendants_override_disabled_shared_runners).to eq(descendants_override_disabled_shared_runners_config)
+ expect(created_group.shared_runners_enabled).to eq(shared_runners_config)
+ expect(created_group.allow_descendants_override_disabled_shared_runners)
+ .to eq(descendants_override_disabled_shared_runners_config)
end
end
end
- context 'root group' do
- let!(:service) { described_class.new(user) }
-
+ context 'for root group' do
it 'follows default config' do
- new_group = service.execute
-
- expect(new_group.shared_runners_enabled).to eq(true)
- expect(new_group.allow_descendants_override_disabled_shared_runners).to eq(false)
+ expect(created_group.shared_runners_enabled).to eq(true)
+ expect(created_group.allow_descendants_override_disabled_shared_runners).to eq(false)
end
end
end
diff --git a/spec/services/groups/participants_service_spec.rb b/spec/services/groups/participants_service_spec.rb
index e934921317d..beab7311b93 100644
--- a/spec/services/groups/participants_service_spec.rb
+++ b/spec/services/groups/participants_service_spec.rb
@@ -10,7 +10,8 @@ RSpec.describe Groups::ParticipantsService, feature_category: :groups_and_projec
let_it_be(:subgroup) { create(:group, parent: group) }
let_it_be(:subproject) { create(:project, group: subgroup) }
- let(:service) { described_class.new(group, developer) }
+ let(:params) { {} }
+ let(:service) { described_class.new(group, developer, params) }
subject(:service_result) { service.execute(nil) }
@@ -74,6 +75,19 @@ RSpec.describe Groups::ParticipantsService, feature_category: :groups_and_projec
it { is_expected.to include(private_group_member.username) }
end
+
+ context 'when search param is given' do
+ let(:params) { { search: 'johnd' } }
+
+ let_it_be(:member_1) { create(:user, name: 'John Doe').tap { |u| group.add_guest(u) } }
+ let_it_be(:member_2) { create(:user, name: 'Jane Doe ').tap { |u| group.add_guest(u) } }
+
+ it 'only returns matching members' do
+ users = service_result.select { |hash| hash[:type].eql?('User') }
+
+ expect(users.pluck(:username)).to eq([member_1.username])
+ end
+ end
end
def user_to_autocompletable(user)
diff --git a/spec/services/groups/update_service_spec.rb b/spec/services/groups/update_service_spec.rb
index 78deb3cf254..f50163041f8 100644
--- a/spec/services/groups/update_service_spec.rb
+++ b/spec/services/groups/update_service_spec.rb
@@ -444,6 +444,60 @@ RSpec.describe Groups::UpdateService, feature_category: :groups_and_projects do
end
end
+ context 'when setting enable_namespace_descendants_cache' do
+ let(:params) { { enable_namespace_descendants_cache: true } }
+
+ subject(:result) { described_class.new(public_group, user, params).execute }
+
+ context 'when the group_hierarchy_optimization feature flag is enabled' do
+ before do
+ stub_feature_flags(group_hierarchy_optimization: true)
+ end
+
+ context 'when enabling the setting' do
+ it 'creates the initial Namespaces::Descendants record' do
+ expect { result }.to change { public_group.reload.namespace_descendants.present? }.from(false).to(true)
+ end
+ end
+
+ context 'when accidentally enabling the setting again' do
+ it 'does nothing' do
+ namespace_descendants = create(:namespace_descendants, namespace: public_group)
+
+ expect { result }.not_to change { namespace_descendants.reload }
+ end
+ end
+
+ context 'when disabling the setting' do
+ before do
+ params[:enable_namespace_descendants_cache] = false
+ end
+
+ it 'removes the Namespaces::Descendants record' do
+ create(:namespace_descendants, namespace: public_group)
+
+ expect { result }.to change { public_group.reload.namespace_descendants }.to(nil)
+ end
+
+ context 'when the Namespaces::Descendants record is missing' do
+ it 'does not raise error' do
+ expect { result }.not_to raise_error
+ end
+ end
+ end
+ end
+
+ context 'when the group_hierarchy_optimization feature flag is disabled' do
+ before do
+ stub_feature_flags(group_hierarchy_optimization: false)
+ end
+
+ it 'does nothing' do
+ expect { result }.not_to change { public_group.reload.namespace_descendants.present? }.from(false)
+ end
+ end
+ end
+
context 'EventStore' do
let(:service) { described_class.new(group, user, **params) }
let(:root_group) { create(:group, path: 'root') }
diff --git a/spec/services/import/github_service_spec.rb b/spec/services/import/github_service_spec.rb
index fc649b61426..6fe17a31f3e 100644
--- a/spec/services/import/github_service_spec.rb
+++ b/spec/services/import/github_service_spec.rb
@@ -21,16 +21,24 @@ RSpec.describe Import::GithubService, feature_category: :importers do
}
end
+ let(:headers) do
+ {
+ 'x-oauth-scopes' => 'read:org'
+ }
+ end
+
let(:client) { Gitlab::GithubImport::Client.new(token) }
let(:project_double) { instance_double(Project, persisted?: true) }
subject(:github_importer) { described_class.new(client, user, params) }
before do
+ allow(client).to receive_message_chain(:octokit, :last_response, :headers).and_return(headers)
allow(Gitlab::GithubImport::Settings).to receive(:new).with(project_double).and_return(settings)
allow(settings)
.to receive(:write)
.with(
+ extended_events: true,
optional_stages: optional_stages,
timeout_strategy: timeout_strategy
)
@@ -92,6 +100,7 @@ RSpec.describe Import::GithubService, feature_category: :importers do
expect(settings)
.to have_received(:write)
.with(optional_stages: nil,
+ extended_events: true,
timeout_strategy: timeout_strategy
)
expect_snowplow_event(
@@ -117,6 +126,7 @@ RSpec.describe Import::GithubService, feature_category: :importers do
.to have_received(:write)
.with(
optional_stages: nil,
+ extended_events: true,
timeout_strategy: timeout_strategy
)
expect_snowplow_event(
@@ -149,6 +159,7 @@ RSpec.describe Import::GithubService, feature_category: :importers do
.to have_received(:write)
.with(
optional_stages: nil,
+ extended_events: true,
timeout_strategy: timeout_strategy
)
expect_snowplow_event(
@@ -185,11 +196,30 @@ RSpec.describe Import::GithubService, feature_category: :importers do
.to have_received(:write)
.with(
optional_stages: optional_stages,
+ extended_events: true,
timeout_strategy: timeout_strategy
)
end
end
+ context 'validates scopes when collaborator import is true' do
+ let(:optional_stages) do
+ {
+ collaborators_import: true
+ }
+ end
+
+ let(:headers) do
+ {
+ 'x-oauth-scopes' => 'read:user'
+ }
+ end
+
+ it 'returns error when scope is not adequate' do
+ expect(subject.execute(access_params, :github)).to include(scope_error)
+ end
+ end
+
context 'when timeout strategy param is present' do
let(:timeout_strategy) { 'pessimistic' }
@@ -200,6 +230,7 @@ RSpec.describe Import::GithubService, feature_category: :importers do
.to have_received(:write)
.with(
optional_stages: optional_stages,
+ extended_events: true,
timeout_strategy: timeout_strategy
)
end
@@ -213,10 +244,25 @@ RSpec.describe Import::GithubService, feature_category: :importers do
.to have_received(:write)
.with(
optional_stages: optional_stages,
+ extended_events: true,
timeout_strategy: timeout_strategy
)
end
end
+
+ context 'when `github_import_extended_events` feature flag is disabled' do
+ before do
+ stub_feature_flags(github_import_extended_events: false)
+ end
+
+ it 'saves extended_events to import_data' do
+ expect(settings)
+ .to receive(:write)
+ .with(a_hash_including(extended_events: false))
+
+ subject.execute(access_params, :github)
+ end
+ end
end
context 'when import source is disabled' do
@@ -309,6 +355,14 @@ RSpec.describe Import::GithubService, feature_category: :importers do
}
end
+ def scope_error
+ {
+ status: :error,
+ http_status: :unprocessable_entity,
+ message: 'Your GitHub access token does not have the correct scope to import collaborators.'
+ }
+ end
+
def blocked_url_error(url)
{
status: :error,
diff --git a/spec/services/issuable/common_system_notes_service_spec.rb b/spec/services/issuable/common_system_notes_service_spec.rb
index 3d83c9ec9c2..4ea7bb89d61 100644
--- a/spec/services/issuable/common_system_notes_service_spec.rb
+++ b/spec/services/issuable/common_system_notes_service_spec.rb
@@ -43,7 +43,7 @@ RSpec.describe Issuable::CommonSystemNotesService, feature_category: :team_plann
it_behaves_like 'system note creation', { title: 'New title' }, 'changed title'
it_behaves_like 'system note creation', { description: 'New description' }, 'changed the description'
it_behaves_like 'system note creation', { discussion_locked: true }, 'locked the discussion in this issue'
- it_behaves_like 'system note creation', { time_estimate: 5 }, 'changed time estimate'
+ it_behaves_like 'system note creation', { time_estimate: 5 }, 'added time estimate of 5s'
context 'when new label is added' do
let(:label) { create(:label, project: project) }
@@ -142,5 +142,9 @@ RSpec.describe Issuable::CommonSystemNotesService, feature_category: :team_plann
context 'when changing dates' do
it_behaves_like 'system note for issuable date changes'
end
+
+ context 'when setting an estimate' do
+ it_behaves_like 'system note creation', { time_estimate: 5 }, 'added time estimate of 5s', false
+ end
end
end
diff --git a/spec/services/issue_email_participants/create_service_spec.rb b/spec/services/issue_email_participants/create_service_spec.rb
index fcfdeeb08f3..dc8d5a6ea74 100644
--- a/spec/services/issue_email_participants/create_service_spec.rb
+++ b/spec/services/issue_email_participants/create_service_spec.rb
@@ -41,8 +41,8 @@ RSpec.describe IssueEmailParticipants::CreateService, feature_category: :service
let(:expected_emails) { emails }
let(:error_feature_flag) { "Feature flag issue_email_participants is not enabled for this project." }
- let(:error_underprivileged) { _("You don't have permission to add email participants.") }
- let(:error_no_participants) do
+ let(:error_underprivileged) { _("You don't have permission to manage email participants.") }
+ let(:error_no_participants_added) do
_("No email participants were added. Either none were provided, or they already exist.")
end
@@ -58,7 +58,7 @@ RSpec.describe IssueEmailParticipants::CreateService, feature_category: :service
end
context 'when no emails are provided' do
- let(:error_message) { error_no_participants }
+ let(:error_message) { error_no_participants_added }
it_behaves_like 'a failed service execution'
end
@@ -69,7 +69,7 @@ RSpec.describe IssueEmailParticipants::CreateService, feature_category: :service
it_behaves_like 'a successful service execution'
context 'when email is already a participant of the issue' do
- let(:error_message) { error_no_participants }
+ let(:error_message) { error_no_participants_added }
before do
issue.issue_email_participants.create!(email: emails.first)
@@ -89,7 +89,7 @@ RSpec.describe IssueEmailParticipants::CreateService, feature_category: :service
end
let(:emails) { ['over-max@example.com'] }
- let(:error_message) { error_no_participants }
+ let(:error_message) { error_no_participants_added }
it_behaves_like 'a failed service execution'
diff --git a/spec/services/issue_email_participants/destroy_service_spec.rb b/spec/services/issue_email_participants/destroy_service_spec.rb
new file mode 100644
index 00000000000..70e09bb8d3b
--- /dev/null
+++ b/spec/services/issue_email_participants/destroy_service_spec.rb
@@ -0,0 +1,147 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe IssueEmailParticipants::DestroyService, feature_category: :service_desk do
+ shared_examples 'a successful service execution' do
+ it 'removes participants', :aggregate_failures do
+ expect(response).to be_success
+
+ issue.reset
+ note = issue.notes.last
+ expect(note.system?).to be true
+ expect(note.author).to eq(user)
+
+ participants_emails = issue.email_participants_emails_downcase
+
+ expected_emails.each do |email|
+ expect(participants_emails).not_to include(email)
+ expect(response.message).to include(email)
+ expect(note.note).to include(email)
+ end
+ end
+ end
+
+ shared_examples 'a failed service execution' do
+ it 'returns error ServiceResponse with message', :aggregate_failures do
+ expect(response).to be_error
+ expect(response.message).to eq(error_message)
+ end
+ end
+
+ describe '#execute' do
+ let_it_be_with_reload(:project) { create(:project) }
+ let_it_be(:user) { create(:user) }
+ let_it_be_with_reload(:issue) { create(:issue, project: project) }
+
+ let(:emails) { nil }
+ let(:service) { described_class.new(target: issue, current_user: user, emails: emails) }
+ let(:expected_emails) { emails }
+
+ let(:error_feature_flag) { "Feature flag issue_email_participants is not enabled for this project." }
+ let(:error_underprivileged) { _("You don't have permission to manage email participants.") }
+ let(:error_no_participants_removed) do
+ _("No email participants were removed. Either none were provided, or they don't exist.")
+ end
+
+ subject(:response) { service.execute }
+
+ context 'when the user is not a project member' do
+ let(:error_message) { error_underprivileged }
+
+ it_behaves_like 'a failed service execution'
+ end
+
+ context 'when user has reporter role in project' do
+ before_all do
+ project.add_reporter(user)
+ end
+
+ context 'when no emails are provided' do
+ let(:error_message) { error_no_participants_removed }
+
+ it_behaves_like 'a failed service execution'
+ end
+
+ context 'when one email is provided' do
+ let(:emails) { ['user@example.com'] }
+ let(:error_message) { error_no_participants_removed }
+
+ it_behaves_like 'a failed service execution'
+
+ context 'when email is a participant of the issue' do
+ before do
+ issue.issue_email_participants.create!(email: 'user@example.com')
+ end
+
+ it_behaves_like 'a successful service execution'
+
+ context 'when email is formatted in a different case' do
+ let(:emails) { ['USER@example.com'] }
+ let(:expected_emails) { emails.map(&:downcase) }
+ let(:error_message) { error_no_participants_removed }
+
+ it_behaves_like 'a successful service execution'
+ end
+ end
+ end
+
+ context 'when multiple emails are provided' do
+ let(:emails) { ['user@example.com', 'user2@example.com'] }
+ let(:error_message) { error_no_participants_removed }
+
+ it_behaves_like 'a failed service execution'
+
+ context 'when duplicate email provided' do
+ let(:emails) { ['user@example.com', 'user@example.com'] }
+ let(:expected_emails) { emails[...-1] }
+
+ it_behaves_like 'a failed service execution'
+ end
+
+ context 'when one email is a participant of the issue' do
+ let(:expected_emails) { emails[...-1] }
+
+ before do
+ issue.issue_email_participants.create!(email: emails.first)
+ end
+
+ it_behaves_like 'a successful service execution'
+ end
+
+ context 'when both emails are a participant of the issue' do
+ before do
+ emails.each do |email|
+ issue.issue_email_participants.create!(email: email)
+ end
+ end
+
+ it_behaves_like 'a successful service execution'
+ end
+ end
+
+ context 'when more than the allowed number of emails are provided' do
+ let(:emails) { (1..7).map { |i| "user#{i}@example.com" } }
+ let(:expected_emails) { emails[...-1] }
+
+ before do
+ emails.each do |email|
+ issue.issue_email_participants.create!(email: email)
+ end
+ end
+
+ it_behaves_like 'a successful service execution'
+ end
+ end
+
+ context 'when feature flag issue_email_participants is disabled' do
+ let(:error_message) { error_feature_flag }
+
+ before do
+ stub_feature_flags(issue_email_participants: false)
+ end
+
+ it_behaves_like 'a failed service execution'
+ end
+ end
+end
diff --git a/spec/services/issue_links/list_service_spec.rb b/spec/services/issue_links/list_service_spec.rb
index b5cc8c4dcdc..f9e5e88aff0 100644
--- a/spec/services/issue_links/list_service_spec.rb
+++ b/spec/services/issue_links/list_service_spec.rb
@@ -33,7 +33,7 @@ RSpec.describe IssueLinks::ListService, feature_category: :team_planning do
end
it 'ensures no N+1 queries are made' do
- control_count = ActiveRecord::QueryRecorder.new { subject }.count
+ control = ActiveRecord::QueryRecorder.new { subject }
project = create :project, :public
milestone = create :milestone, project: project
@@ -44,7 +44,7 @@ RSpec.describe IssueLinks::ListService, feature_category: :team_planning do
create :issue_link, source: issue_x, target: issue_z
create :issue_link, source: issue_y, target: issue_z
- expect { subject }.not_to exceed_query_limit(control_count)
+ expect { subject }.not_to exceed_query_limit(control)
end
it 'returns related issues JSON' do
diff --git a/spec/services/issues/export_csv_service_spec.rb b/spec/services/issues/export_csv_service_spec.rb
index 83dfca923fb..016174f9888 100644
--- a/spec/services/issues/export_csv_service_spec.rb
+++ b/spec/services/issues/export_csv_service_spec.rb
@@ -175,11 +175,11 @@ RSpec.describe Issues::ExportCsvService, :with_license, feature_category: :team_
let(:labeled_issues) { create_list(:labeled_issue, 2, project: project, author: user, labels: [feature_label, idea_label]) }
it 'does not run a query for each label link' do
- control_count = ActiveRecord::QueryRecorder.new { csv }.count
+ control = ActiveRecord::QueryRecorder.new { csv }
labeled_issues
- expect { csv }.not_to exceed_query_limit(control_count)
+ expect { csv }.not_to exceed_query_limit(control)
expect(csv.count).to eq(4)
end
diff --git a/spec/services/issues/referenced_merge_requests_service_spec.rb b/spec/services/issues/referenced_merge_requests_service_spec.rb
index 4781daf7688..6748292d389 100644
--- a/spec/services/issues/referenced_merge_requests_service_spec.rb
+++ b/spec/services/issues/referenced_merge_requests_service_spec.rb
@@ -39,13 +39,13 @@ RSpec.describe Issues::ReferencedMergeRequestsService, feature_category: :team_p
context 'performance' do
it 'does not run extra queries when extra namespaces are included', :use_clean_rails_memory_store_caching do
service.execute(issue) # warm cache
- control_count = ActiveRecord::QueryRecorder.new { service.execute(issue) }.count
+ control = ActiveRecord::QueryRecorder.new { service.execute(issue) }
third_project = create(:project, :public)
create_closing_mr(source_project: third_project)
service.execute(issue) # warm cache
- expect { service.execute(issue) }.not_to exceed_query_limit(control_count)
+ expect { service.execute(issue) }.not_to exceed_query_limit(control)
end
it 'preloads the head pipeline for each merge request, and its routes' do
@@ -58,12 +58,12 @@ RSpec.describe Issues::ReferencedMergeRequestsService, feature_category: :team_p
end
closing_mr_other_project.update!(head_pipeline: create(:ci_pipeline))
- control_count = ActiveRecord::QueryRecorder.new { service.execute(reloaded_issue).each(&pipeline_routes) }
+ control = ActiveRecord::QueryRecorder.new { service.execute(reloaded_issue).each(&pipeline_routes) }
closing_mr.update!(head_pipeline: create(:ci_pipeline))
expect { service.execute(issue).each(&pipeline_routes) }
- .not_to exceed_query_limit(control_count)
+ .not_to exceed_query_limit(control)
end
it 'only loads issue notes once' do
@@ -95,12 +95,12 @@ RSpec.describe Issues::ReferencedMergeRequestsService, feature_category: :team_p
context 'performance' do
it 'does not run a query for each note author', :use_clean_rails_memory_store_caching do
service.referenced_merge_requests(issue) # warm cache
- control_count = ActiveRecord::QueryRecorder.new { service.referenced_merge_requests(issue) }.count
+ control = ActiveRecord::QueryRecorder.new { service.referenced_merge_requests(issue) }
create(:note, project: project, noteable: issue, author: create(:user))
service.referenced_merge_requests(issue) # warm cache
- expect { service.referenced_merge_requests(issue) }.not_to exceed_query_limit(control_count)
+ expect { service.referenced_merge_requests(issue) }.not_to exceed_query_limit(control)
end
end
end
@@ -121,12 +121,12 @@ RSpec.describe Issues::ReferencedMergeRequestsService, feature_category: :team_p
context 'performance' do
it 'does not run a query for each note author', :use_clean_rails_memory_store_caching do
service.closed_by_merge_requests(issue) # warm cache
- control_count = ActiveRecord::QueryRecorder.new { service.closed_by_merge_requests(issue) }.count
+ control = ActiveRecord::QueryRecorder.new { service.closed_by_merge_requests(issue) }
create(:note, :system, project: project, noteable: issue, author: create(:user))
service.closed_by_merge_requests(issue) # warm cache
- expect { service.closed_by_merge_requests(issue) }.not_to exceed_query_limit(control_count)
+ expect { service.closed_by_merge_requests(issue) }.not_to exceed_query_limit(control)
end
end
end
diff --git a/spec/services/issues/update_service_spec.rb b/spec/services/issues/update_service_spec.rb
index 0cb13bfb917..e8bcdc2c44b 100644
--- a/spec/services/issues/update_service_spec.rb
+++ b/spec/services/issues/update_service_spec.rb
@@ -592,11 +592,19 @@ RSpec.describe Issues::UpdateService, :mailer, feature_category: :team_planning
update_issue(confidential: true)
end
+ it 'allows assignment of guest users' do
+ update_issue(confidential: true)
+
+ update_issue(assignee_ids: [guest.id])
+
+ expect(issue.reload.assignees).to contain_exactly(guest)
+ end
+
it 'does not update assignee_id with unauthorized users' do
- project.update!(visibility_level: Gitlab::VisibilityLevel::PUBLIC)
update_issue(confidential: true)
+
non_member = create(:user)
- original_assignees = issue.assignees
+ original_assignees = issue.assignees.to_a
update_issue(assignee_ids: [non_member.id])
diff --git a/spec/services/labels/available_labels_service_spec.rb b/spec/services/labels/available_labels_service_spec.rb
index 2b398210034..3a1474e4fef 100644
--- a/spec/services/labels/available_labels_service_spec.rb
+++ b/spec/services/labels/available_labels_service_spec.rb
@@ -42,11 +42,15 @@ RSpec.describe Labels::AvailableLabelsService, feature_category: :team_planning
it 'do not cause additional query for finding labels' do
label_titles = [project_label.title]
- control_count = ActiveRecord::QueryRecorder.new { described_class.new(user, project, labels: label_titles).find_or_create_by_titles }
+ control = ActiveRecord::QueryRecorder.new do
+ described_class.new(user, project, labels: label_titles).find_or_create_by_titles
+ end
new_label = create(:label, project: project)
label_titles = [project_label.title, new_label.title]
- expect { described_class.new(user, project, labels: label_titles).find_or_create_by_titles }.not_to exceed_query_limit(control_count)
+ expect do
+ described_class.new(user, project, labels: label_titles).find_or_create_by_titles
+ end.not_to exceed_query_limit(control)
end
end
end
diff --git a/spec/services/members/create_service_spec.rb b/spec/services/members/create_service_spec.rb
index b977292bcf4..c08b40e9528 100644
--- a/spec/services/members/create_service_spec.rb
+++ b/spec/services/members/create_service_spec.rb
@@ -98,7 +98,7 @@ RSpec.describe Members::CreateService, :aggregate_failures, :clean_gitlab_redis_
it 'adds a user to members' do
expect(execute_service[:status]).to eq(:success)
- expect(source.users).to include member
+ expect(source).to have_user(member)
expect(Onboarding::Progress.completed?(source, :user_added)).to be(true)
end
@@ -119,14 +119,34 @@ RSpec.describe Members::CreateService, :aggregate_failures, :clean_gitlab_redis_
before do
# validations will fail because we try to invite them to the project as a guest
source.group.add_developer(member)
+ allow(Gitlab::EventStore).to receive(:publish)
end
- it 'triggers the members added and authorizations changed events' do
+ it 'triggers the authorizations changed events' do
expect(Gitlab::EventStore)
- .to receive(:publish)
- .with(an_instance_of(ProjectAuthorizations::AuthorizationsChangedEvent))
+ .to receive(:publish_group)
+ .with(array_including(an_instance_of(ProjectAuthorizations::AuthorizationsAddedEvent)))
.and_call_original
+ execute_service
+ end
+
+ context 'when feature flag "add_policy_approvers_to_rules" is disabled' do
+ before do
+ stub_feature_flags(add_policy_approvers_to_rules: false)
+ end
+
+ it 'triggers the authorizations changed event' do
+ expect(Gitlab::EventStore)
+ .to receive(:publish)
+ .with(an_instance_of(ProjectAuthorizations::AuthorizationsChangedEvent))
+ .and_call_original
+
+ execute_service
+ end
+ end
+
+ it 'triggers the members added event' do
expect(Gitlab::EventStore)
.to receive(:publish)
.with(an_instance_of(Members::MembersAddedEvent))
diff --git a/spec/services/members/update_service_spec.rb b/spec/services/members/update_service_spec.rb
index 3860543a85e..b23f5856575 100644
--- a/spec/services/members/update_service_spec.rb
+++ b/spec/services/members/update_service_spec.rb
@@ -263,7 +263,7 @@ RSpec.describe Members::UpdateService, feature_category: :groups_and_projects do
it 'emails the users that their group membership expiry has changed' do
members.each do |member|
- expect(notification_service).to receive(:updated_group_member_expiration).with(member)
+ expect(notification_service).to receive(:updated_member_expiration).with(member)
end
subject
diff --git a/spec/services/merge_requests/approval_service_spec.rb b/spec/services/merge_requests/approval_service_spec.rb
index 8761aba432f..6e20c42c8f6 100644
--- a/spec/services/merge_requests/approval_service_spec.rb
+++ b/spec/services/merge_requests/approval_service_spec.rb
@@ -16,11 +16,7 @@ RSpec.describe MergeRequests::ApprovalService, feature_category: :code_review_wo
stub_feature_flags ff_require_saml_auth_to_approve: false
end
- context 'with invalid approval' do
- before do
- allow(merge_request.approvals).to receive(:new).and_return(double(save: false))
- end
-
+ shared_examples 'no-op call' do
it 'does not reset approvals' do
expect(merge_request.approvals).not_to receive(:reset)
@@ -47,22 +43,34 @@ RSpec.describe MergeRequests::ApprovalService, feature_category: :code_review_wo
end
end
+ context 'with invalid approval' do
+ before do
+ allow(merge_request.approvals).to receive(:new).and_return(double(save: false))
+ end
+
+ it_behaves_like 'no-op call'
+ end
+
context 'with an already approved MR' do
before do
merge_request.approvals.create!(user: user)
end
- it 'does not create an approval' do
- expect { service.execute(merge_request) }.not_to change { merge_request.approvals.size }
- end
+ it_behaves_like 'no-op call'
+ end
- it_behaves_like 'does not trigger GraphQL subscription mergeRequestMergeStatusUpdated' do
- let(:action) { service.execute(merge_request) }
- end
+ context 'with a merged MR' do
+ let(:merge_request) { create(:merge_request, :merged) }
- it_behaves_like 'does not trigger GraphQL subscription mergeRequestReviewersUpdated' do
- let(:action) { service.execute(merge_request) }
+ it_behaves_like 'no-op call'
+ end
+
+ context 'user cannot update the merge request' do
+ before do
+ project.add_guest(user)
end
+
+ it_behaves_like 'no-op call'
end
context 'with valid approval' do
@@ -115,27 +123,5 @@ RSpec.describe MergeRequests::ApprovalService, feature_category: :code_review_wo
let(:action) { service.execute(merge_request) }
end
end
-
- context 'user cannot update the merge request' do
- before do
- project.add_guest(user)
- end
-
- it 'does not update approvals' do
- expect { service.execute(merge_request) }.not_to change { merge_request.approvals.size }
- end
-
- it_behaves_like 'does not trigger GraphQL subscription mergeRequestMergeStatusUpdated' do
- let(:action) { service.execute(merge_request) }
- end
-
- it_behaves_like 'does not trigger GraphQL subscription mergeRequestReviewersUpdated' do
- let(:action) { service.execute(merge_request) }
- end
-
- it_behaves_like 'does not trigger GraphQL subscription mergeRequestApprovalStateUpdated' do
- let(:action) { service.execute(merge_request) }
- end
- end
end
end
diff --git a/spec/services/merge_requests/conflicts/list_service_spec.rb b/spec/services/merge_requests/conflicts/list_service_spec.rb
index 5eb53b1bcba..416b28bff05 100644
--- a/spec/services/merge_requests/conflicts/list_service_spec.rb
+++ b/spec/services/merge_requests/conflicts/list_service_spec.rb
@@ -84,7 +84,7 @@ RSpec.describe MergeRequests::Conflicts::ListService, feature_category: :code_re
it 'returns a falsey value when the MR has a missing revision after a force push' do
merge_request = create_merge_request('conflict-resolvable')
service = conflicts_service(merge_request)
- allow(merge_request).to receive_message_chain(:target_branch_head, :raw, :id).and_return(Gitlab::Git::BLANK_SHA)
+ allow(merge_request).to receive_message_chain(:target_branch_head, :raw, :id).and_return(Gitlab::Git::SHA1_BLANK_SHA)
expect(service.can_be_resolved_in_ui?).to be_falsey
end
diff --git a/spec/services/merge_requests/get_urls_service_spec.rb b/spec/services/merge_requests/get_urls_service_spec.rb
index 31b3e513a51..85a84f07094 100644
--- a/spec/services/merge_requests/get_urls_service_spec.rb
+++ b/spec/services/merge_requests/get_urls_service_spec.rb
@@ -10,8 +10,8 @@ RSpec.describe MergeRequests::GetUrlsService, feature_category: :code_review_wor
let(:source_branch) { "merge-test" }
let(:new_merge_request_url) { "http://#{Gitlab.config.gitlab.host}/#{project.full_path}/-/merge_requests/new?merge_request%5Bsource_branch%5D=#{source_branch}" }
let(:show_merge_request_url) { "http://#{Gitlab.config.gitlab.host}/#{project.full_path}/-/merge_requests/#{merge_request.iid}" }
- let(:new_branch_changes) { "#{Gitlab::Git::BLANK_SHA} 570e7b2abdd848b95f2f578043fc23bd6f6fd24d refs/heads/#{source_branch}" }
- let(:deleted_branch_changes) { "d14d6c0abdd253381df51a723d58691b2ee1ab08 #{Gitlab::Git::BLANK_SHA} refs/heads/#{source_branch}" }
+ let(:new_branch_changes) { "#{Gitlab::Git::SHA1_BLANK_SHA} 570e7b2abdd848b95f2f578043fc23bd6f6fd24d refs/heads/#{source_branch}" }
+ let(:deleted_branch_changes) { "d14d6c0abdd253381df51a723d58691b2ee1ab08 #{Gitlab::Git::SHA1_BLANK_SHA} refs/heads/#{source_branch}" }
let(:existing_branch_changes) { "d14d6c0abdd253381df51a723d58691b2ee1ab08 570e7b2abdd848b95f2f578043fc23bd6f6fd24d refs/heads/#{source_branch}" }
let(:default_branch_changes) { "d14d6c0abdd253381df51a723d58691b2ee1ab08 570e7b2abdd848b95f2f578043fc23bd6f6fd24d refs/heads/master" }
@@ -131,7 +131,7 @@ RSpec.describe MergeRequests::GetUrlsService, feature_category: :code_review_wor
context 'pushing new branch and existing branch (with merge request created) at once' do
let!(:merge_request) { create(:merge_request, source_project: project, source_branch: "markdown") }
- let(:new_branch_changes) { "#{Gitlab::Git::BLANK_SHA} 570e7b2abdd848b95f2f578043fc23bd6f6fd24d refs/heads/new_branch" }
+ let(:new_branch_changes) { "#{Gitlab::Git::SHA1_BLANK_SHA} 570e7b2abdd848b95f2f578043fc23bd6f6fd24d refs/heads/new_branch" }
let(:existing_branch_changes) { "d14d6c0abdd253381df51a723d58691b2ee1ab08 570e7b2abdd848b95f2f578043fc23bd6f6fd24d refs/heads/markdown" }
let(:changes) { "#{new_branch_changes}\n#{existing_branch_changes}" }
let(:new_merge_request_url) { "http://#{Gitlab.config.gitlab.host}/#{project.full_path}/-/merge_requests/new?merge_request%5Bsource_branch%5D=new_branch" }
diff --git a/spec/services/merge_requests/push_options_handler_service_spec.rb b/spec/services/merge_requests/push_options_handler_service_spec.rb
index 038977e4fd0..e34eb804a82 100644
--- a/spec/services/merge_requests/push_options_handler_service_spec.rb
+++ b/spec/services/merge_requests/push_options_handler_service_spec.rb
@@ -34,9 +34,9 @@ RSpec.describe MergeRequests::PushOptionsHandlerService, feature_category: :sour
let(:label1) { 'mylabel1' }
let(:label2) { 'mylabel2' }
let(:label3) { 'mylabel3' }
- let(:new_branch_changes) { "#{Gitlab::Git::BLANK_SHA} 570e7b2abdd848b95f2f578043fc23bd6f6fd24d refs/heads/#{source_branch}" }
+ let(:new_branch_changes) { "#{Gitlab::Git::SHA1_BLANK_SHA} 570e7b2abdd848b95f2f578043fc23bd6f6fd24d refs/heads/#{source_branch}" }
let(:existing_branch_changes) { "d14d6c0abdd253381df51a723d58691b2ee1ab08 570e7b2abdd848b95f2f578043fc23bd6f6fd24d refs/heads/#{source_branch}" }
- let(:deleted_branch_changes) { "d14d6c0abdd253381df51a723d58691b2ee1ab08 #{Gitlab::Git::BLANK_SHA} refs/heads/#{source_branch}" }
+ let(:deleted_branch_changes) { "d14d6c0abdd253381df51a723d58691b2ee1ab08 #{Gitlab::Git::SHA1_BLANK_SHA} refs/heads/#{source_branch}" }
let(:default_branch_changes) { "d14d6c0abdd253381df51a723d58691b2ee1ab08 570e7b2abdd848b95f2f578043fc23bd6f6fd24d refs/heads/#{project.default_branch}" }
let(:error_mr_required) { "A merge_request.create push option is required to create a merge request for branch #{source_branch}" }
@@ -802,7 +802,7 @@ RSpec.describe MergeRequests::PushOptionsHandlerService, feature_category: :sour
let(:changes) do
[
new_branch_changes,
- "#{Gitlab::Git::BLANK_SHA} 570e7b2abdd848b95f2f578043fc23bd6f6fd24d refs/heads/feature_conflict"
+ "#{Gitlab::Git::SHA1_BLANK_SHA} 570e7b2abdd848b95f2f578043fc23bd6f6fd24d refs/heads/feature_conflict"
]
end
@@ -814,7 +814,7 @@ RSpec.describe MergeRequests::PushOptionsHandlerService, feature_category: :sour
let(:limit) { MergeRequests::PushOptionsHandlerService::LIMIT }
let(:changes) do
TestEnv::BRANCH_SHA.to_a[0..limit].map do |x|
- "#{Gitlab::Git::BLANK_SHA} #{x.first} refs/heads/#{x.last}"
+ "#{Gitlab::Git::SHA1_BLANK_SHA} #{x.first} refs/heads/#{x.last}"
end
end
diff --git a/spec/services/merge_requests/pushed_branches_service_spec.rb b/spec/services/merge_requests/pushed_branches_service_spec.rb
index de99fb244d3..bcde2fd5165 100644
--- a/spec/services/merge_requests/pushed_branches_service_spec.rb
+++ b/spec/services/merge_requests/pushed_branches_service_spec.rb
@@ -37,11 +37,11 @@ RSpec.describe MergeRequests::PushedBranchesService, feature_category: :source_c
end
it 'returns empty result without any SQL query performed' do
- control_count = ActiveRecord::QueryRecorder.new do
+ control = ActiveRecord::QueryRecorder.new do
expect(service.execute).to be_empty
- end.count
+ end
- expect(control_count).to be_zero
+ expect(control.count).to be_zero
end
end
end
diff --git a/spec/services/merge_requests/refresh_service_spec.rb b/spec/services/merge_requests/refresh_service_spec.rb
index dd50dfa49e0..e2b1c91d6eb 100644
--- a/spec/services/merge_requests/refresh_service_spec.rb
+++ b/spec/services/merge_requests/refresh_service_spec.rb
@@ -712,10 +712,10 @@ RSpec.describe MergeRequests::RefreshService, feature_category: :code_review_wor
it 'refreshes the merge request' do
expect(refresh_service).to receive(:execute_hooks)
- .with(@fork_merge_request, 'update', old_rev: Gitlab::Git::BLANK_SHA)
+ .with(@fork_merge_request, 'update', old_rev: Gitlab::Git::SHA1_BLANK_SHA)
allow_any_instance_of(Repository).to receive(:merge_base).and_return(@oldrev)
- refresh_service.execute(Gitlab::Git::BLANK_SHA, @newrev, 'refs/heads/master')
+ refresh_service.execute(Gitlab::Git::SHA1_BLANK_SHA, @newrev, 'refs/heads/master')
reload_mrs
expect(@merge_request.notes).to be_empty
diff --git a/spec/services/merge_requests/reload_diffs_service_spec.rb b/spec/services/merge_requests/reload_diffs_service_spec.rb
index 77056cbe541..a6654989374 100644
--- a/spec/services/merge_requests/reload_diffs_service_spec.rb
+++ b/spec/services/merge_requests/reload_diffs_service_spec.rb
@@ -45,11 +45,11 @@ RSpec.describe MergeRequests::ReloadDiffsService, :use_clean_rails_memory_store_
current_user
merge_request
- control_count = ActiveRecord::QueryRecorder.new do
+ control = ActiveRecord::QueryRecorder.new do
subject.execute
- end.count
+ end
- expect { subject.execute }.not_to exceed_query_limit(control_count)
+ expect { subject.execute }.not_to exceed_query_limit(control)
end
end
end
diff --git a/spec/services/merge_requests/remove_approval_service_spec.rb b/spec/services/merge_requests/remove_approval_service_spec.rb
index e4e54db5013..b0109a022eb 100644
--- a/spec/services/merge_requests/remove_approval_service_spec.rb
+++ b/spec/services/merge_requests/remove_approval_service_spec.rb
@@ -19,6 +19,34 @@ RSpec.describe MergeRequests::RemoveApprovalService, feature_category: :code_rev
project.add_developer(user)
end
+ shared_examples 'no-op call' do
+ it 'does not create an unapproval note and triggers web hook' do
+ expect(service).not_to receive(:execute_hooks)
+ expect(SystemNoteService).not_to receive(:unapprove_mr)
+
+ execute!
+ end
+
+ it 'does not track merge request unapprove action' do
+ expect(Gitlab::UsageDataCounters::MergeRequestActivityUniqueCounter)
+ .not_to receive(:track_unapprove_mr_action).with(user: user)
+
+ execute!
+ end
+
+ it_behaves_like 'does not trigger GraphQL subscription mergeRequestMergeStatusUpdated' do
+ let(:action) { execute! }
+ end
+
+ it_behaves_like 'does not trigger GraphQL subscription mergeRequestReviewersUpdated' do
+ let(:action) { execute! }
+ end
+
+ it_behaves_like 'does not trigger GraphQL subscription mergeRequestApprovalStateUpdated' do
+ let(:action) { execute! }
+ end
+ end
+
context 'with a user who has approved' do
let!(:approval) { create(:approval, user: user, merge_request: merge_request) }
let(:notification_service) { NotificationService.new }
@@ -27,6 +55,12 @@ RSpec.describe MergeRequests::RemoveApprovalService, feature_category: :code_rev
allow(service).to receive(:notification_service).and_return(notification_service)
end
+ context 'when the merge request is merged' do
+ let(:merge_request) { create(:merge_request, :merged, source_project: project) }
+
+ it_behaves_like 'no-op call'
+ end
+
it 'removes the approval' do
expect { execute! }.to change { merge_request.approvals.size }.from(2).to(1)
end
@@ -60,31 +94,7 @@ RSpec.describe MergeRequests::RemoveApprovalService, feature_category: :code_rev
end
context 'with a user who has not approved' do
- it 'does not create an unapproval note and triggers web hook' do
- expect(service).not_to receive(:execute_hooks)
- expect(SystemNoteService).not_to receive(:unapprove_mr)
-
- execute!
- end
-
- it 'does not track merge request unapprove action' do
- expect(Gitlab::UsageDataCounters::MergeRequestActivityUniqueCounter)
- .not_to receive(:track_unapprove_mr_action).with(user: user)
-
- execute!
- end
-
- it_behaves_like 'does not trigger GraphQL subscription mergeRequestMergeStatusUpdated' do
- let(:action) { execute! }
- end
-
- it_behaves_like 'does not trigger GraphQL subscription mergeRequestReviewersUpdated' do
- let(:action) { execute! }
- end
-
- it_behaves_like 'does not trigger GraphQL subscription mergeRequestApprovalStateUpdated' do
- let(:action) { execute! }
- end
+ it_behaves_like 'no-op call'
end
end
end
diff --git a/spec/services/merge_requests/request_review_service_spec.rb b/spec/services/merge_requests/request_review_service_spec.rb
index ef96bf11e0b..a5f0d5b5c5a 100644
--- a/spec/services/merge_requests/request_review_service_spec.rb
+++ b/spec/services/merge_requests/request_review_service_spec.rb
@@ -71,6 +71,14 @@ RSpec.describe MergeRequests::RequestReviewService, feature_category: :code_revi
service.execute(merge_request, user)
end
+ it 'creates a system note' do
+ expect(SystemNoteService)
+ .to receive(:request_review)
+ .with(merge_request, project, current_user, user)
+
+ service.execute(merge_request, user)
+ end
+
it_behaves_like 'triggers GraphQL subscription mergeRequestReviewersUpdated' do
let(:action) { result }
end
diff --git a/spec/services/milestones/destroy_service_spec.rb b/spec/services/milestones/destroy_service_spec.rb
index 209177c348b..a05e11c34d7 100644
--- a/spec/services/milestones/destroy_service_spec.rb
+++ b/spec/services/milestones/destroy_service_spec.rb
@@ -6,13 +6,14 @@ RSpec.describe Milestones::DestroyService, feature_category: :team_planning do
let(:user) { create(:user) }
let(:project) { create(:project, :repository) }
let(:milestone) { create(:milestone, title: 'Milestone v1.0', project: project) }
+ let(:container) { project }
before do
project.add_maintainer(user)
end
def service
- described_class.new(project, user, {})
+ described_class.new(container, user, {})
end
describe '#execute' do
@@ -45,6 +46,7 @@ RSpec.describe Milestones::DestroyService, feature_category: :team_planning do
context 'group milestones' do
let(:group) { create(:group) }
let(:group_milestone) { create(:milestone, group: group) }
+ let(:container) { group }
before do
project.update!(namespace: group)
diff --git a/spec/services/milestones/promote_service_spec.rb b/spec/services/milestones/promote_service_spec.rb
index 203ac2d3f40..caaf6488e40 100644
--- a/spec/services/milestones/promote_service_spec.rb
+++ b/spec/services/milestones/promote_service_spec.rb
@@ -62,13 +62,20 @@ RSpec.describe Milestones::PromoteService, feature_category: :team_planning do
it 'sets issuables with new promoted milestone' do
issue = create(:issue, milestone: milestone, project: project)
+ create(:resource_milestone_event, issue: issue, milestone: milestone)
+
merge_request = create(:merge_request, milestone: milestone, source_project: project)
+ create(:resource_milestone_event, merge_request: merge_request, milestone: milestone)
promoted_milestone = service.execute(milestone)
expect(promoted_milestone).to be_group_milestone
+
expect(issue.reload.milestone).to eq(promoted_milestone)
expect(merge_request.reload.milestone).to eq(promoted_milestone)
+
+ expect(ResourceMilestoneEvent.where(milestone_id: promoted_milestone).count).to eq(2)
+ expect(ResourceMilestoneEvent.where(milestone_id: milestone).count).to eq(0)
end
end
@@ -101,9 +108,14 @@ RSpec.describe Milestones::PromoteService, feature_category: :team_planning do
it 'sets all issuables with new promoted milestone' do
issue = create(:issue, milestone: milestone, project: project)
+ create(:resource_milestone_event, issue: issue, milestone: milestone)
issue_2 = create(:issue, milestone: milestone_2, project: project_2)
+ create(:resource_milestone_event, issue: issue_2, milestone: milestone_2)
+
merge_request = create(:merge_request, milestone: milestone, source_project: project)
+ create(:resource_milestone_event, merge_request: merge_request, milestone: milestone)
merge_request_2 = create(:merge_request, milestone: milestone_2, source_project: project_2)
+ create(:resource_milestone_event, merge_request: merge_request_2, milestone: milestone_2)
promoted_milestone = service.execute(milestone)
@@ -111,6 +123,10 @@ RSpec.describe Milestones::PromoteService, feature_category: :team_planning do
expect(issue_2.reload.milestone).to eq(promoted_milestone)
expect(merge_request.reload.milestone).to eq(promoted_milestone)
expect(merge_request_2.reload.milestone).to eq(promoted_milestone)
+
+ expect(ResourceMilestoneEvent.where(milestone_id: promoted_milestone).count).to eq(4)
+ expect(ResourceMilestoneEvent.where(milestone_id: milestone).count).to eq(0)
+ expect(ResourceMilestoneEvent.where(milestone_id: milestone_2).count).to eq(0)
end
end
end
diff --git a/spec/services/ml/create_model_service_spec.rb b/spec/services/ml/create_model_service_spec.rb
index 74c1dd5fec7..88e7c00d1f9 100644
--- a/spec/services/ml/create_model_service_spec.rb
+++ b/spec/services/ml/create_model_service_spec.rb
@@ -50,9 +50,10 @@ RSpec.describe ::Ml::CreateModelService, feature_category: :mlops do
let(:name) { existing_model.name }
let(:project) { existing_model.project }
- it 'raises an error', :aggregate_failures do
- expect { create_model }.to raise_error(ActiveRecord::RecordInvalid)
+ it 'returns a model with errors', :aggregate_failures do
+ expect(create_model).not_to be_persisted
expect(Gitlab::InternalEvents).not_to have_received(:track_event)
+ expect(create_model.errors.full_messages).to eq(["Name has already been taken"])
end
end
diff --git a/spec/services/ml/create_model_version_service_spec.rb b/spec/services/ml/create_model_version_service_spec.rb
index b3aead4a92c..be2bfc86b54 100644
--- a/spec/services/ml/create_model_version_service_spec.rb
+++ b/spec/services/ml/create_model_version_service_spec.rb
@@ -75,5 +75,60 @@ RSpec.describe ::Ml::CreateModelVersionService, feature_category: :mlops do
expect(model.reload.latest_version.package.name).to eq(model.name)
expect(model.latest_version.package.version).to eq(model.latest_version.version)
end
+
+ context 'when metadata are supplied, add them as metadata' do
+ let(:metadata) { [{ key: 'key1', value: 'value1' }, { key: 'key2', value: 'value2' }] }
+ let(:params) { { metadata: metadata } }
+
+ it 'creates metadata records', :aggregate_failures do
+ expect { service }.to change { Ml::ModelVersion.count }.by(1)
+
+ expect(service.metadata.count).to be 2
+ end
+ end
+
+ # TODO: Ensure consisted error responses https://gitlab.com/gitlab-org/gitlab/-/issues/429731
+ context 'for metadata with duplicate keys, it does not create duplicate records' do
+ let(:metadata) { [{ key: 'key1', value: 'value1' }, { key: 'key1', value: 'value2' }] }
+ let(:params) { { metadata: metadata } }
+
+ it 'raises an error', :aggregate_failures do
+ expect { service }.to raise_error(ActiveRecord::RecordInvalid)
+ end
+ end
+
+ # # TODO: Ensure consisted error responses https://gitlab.com/gitlab-org/gitlab/-/issues/429731
+ context 'for metadata with invalid keys, it does not create invalid records' do
+ let(:metadata) { [{ key: 'key1', value: 'value1' }, { key: '', value: 'value2' }] }
+ let(:params) { { metadata: metadata } }
+
+ it 'raises an error', :aggregate_failures do
+ expect { service }.to raise_error(ActiveRecord::RecordInvalid)
+ end
+ end
+ end
+
+ context 'when a version string is supplied during creation' do
+ let(:params) { { version: '1.2.3' } }
+
+ it 'creates a package' do
+ expect { service }.to change { Ml::ModelVersion.count }.by(1).and change {
+ Packages::MlModel::Package.count
+ }.by(1)
+ expect(model.reload.latest_version.version).to eq('1.2.3')
+ expect(model.latest_version.package.version).to eq('1.2.3')
+ end
+ end
+
+ context 'when a nil version string is supplied during creation' do
+ let(:params) { { version: nil } }
+
+ it 'creates a package' do
+ expect { service }.to change { Ml::ModelVersion.count }.by(1).and change {
+ Packages::MlModel::Package.count
+ }.by(1)
+ expect(model.reload.latest_version.version).to eq('1.0.0')
+ expect(model.latest_version.package.version).to eq('1.0.0')
+ end
end
end
diff --git a/spec/services/namespaces/package_settings/update_service_spec.rb b/spec/services/namespaces/package_settings/update_service_spec.rb
index 41f3499a1bb..002c7df9284 100644
--- a/spec/services/namespaces/package_settings/update_service_spec.rb
+++ b/spec/services/namespaces/package_settings/update_service_spec.rb
@@ -46,7 +46,9 @@ RSpec.describe ::Namespaces::PackageSettings::UpdateService, feature_category: :
lock_npm_package_requests_forwarding: false,
pypi_package_requests_forwarding: nil,
lock_pypi_package_requests_forwarding: false,
- nuget_symbol_server_enabled: false
+ nuget_symbol_server_enabled: false,
+ terraform_module_duplicates_allowed: false,
+ terraform_module_duplicate_exception_regex: 'foo'
}, to: {
maven_duplicates_allowed: false,
maven_duplicate_exception_regex: 'RELEASE',
@@ -60,7 +62,9 @@ RSpec.describe ::Namespaces::PackageSettings::UpdateService, feature_category: :
lock_npm_package_requests_forwarding: true,
pypi_package_requests_forwarding: true,
lock_pypi_package_requests_forwarding: true,
- nuget_symbol_server_enabled: true
+ nuget_symbol_server_enabled: true,
+ terraform_module_duplicates_allowed: true,
+ terraform_module_duplicate_exception_regex: 'bar'
}
it_behaves_like 'returning a success'
@@ -112,7 +116,9 @@ RSpec.describe ::Namespaces::PackageSettings::UpdateService, feature_category: :
lock_npm_package_requests_forwarding: true,
pypi_package_requests_forwarding: true,
lock_pypi_package_requests_forwarding: true,
- nuget_symbol_server_enabled: true
+ nuget_symbol_server_enabled: true,
+ terraform_module_duplicates_allowed: true,
+ terraform_module_duplicate_exception_regex: 'bar'
}
end
diff --git a/spec/services/notification_recipients/build_service_spec.rb b/spec/services/notification_recipients/build_service_spec.rb
index bfd1dcd7d80..b4788428f14 100644
--- a/spec/services/notification_recipients/build_service_spec.rb
+++ b/spec/services/notification_recipients/build_service_spec.rb
@@ -21,13 +21,13 @@ RSpec.describe NotificationRecipients::BuildService, feature_category: :team_pla
service.build_new_note_recipients(note)
- control_count = ActiveRecord::QueryRecorder.new do
+ control = ActiveRecord::QueryRecorder.new do
service.build_new_note_recipients(note)
end
create_user
- expect { service.build_new_note_recipients(note) }.not_to exceed_query_limit(control_count).with_threshold(threshold)
+ expect { service.build_new_note_recipients(note) }.not_to exceed_query_limit(control).with_threshold(threshold)
end
end
@@ -76,13 +76,15 @@ RSpec.describe NotificationRecipients::BuildService, feature_category: :team_pla
service.build_new_review_recipients(review)
- control_count = ActiveRecord::QueryRecorder.new do
+ control = ActiveRecord::QueryRecorder.new do
service.build_new_review_recipients(review)
end
create_user
- expect { service.build_new_review_recipients(review) }.not_to exceed_query_limit(control_count).with_threshold(threshold)
+ expect do
+ service.build_new_review_recipients(review)
+ end.not_to exceed_query_limit(control).with_threshold(threshold)
end
end
@@ -130,13 +132,13 @@ RSpec.describe NotificationRecipients::BuildService, feature_category: :team_pla
service.build_requested_review_recipients(note)
- control_count = ActiveRecord::QueryRecorder.new do
+ control = ActiveRecord::QueryRecorder.new do
service.build_requested_review_recipients(note)
end
create_user
- expect { service.build_requested_review_recipients(note) }.not_to exceed_query_limit(control_count)
+ expect { service.build_requested_review_recipients(note) }.not_to exceed_query_limit(control)
end
end
end
diff --git a/spec/services/notification_service_spec.rb b/spec/services/notification_service_spec.rb
index 40597c30c4a..15e7f794795 100644
--- a/spec/services/notification_service_spec.rb
+++ b/spec/services/notification_service_spec.rb
@@ -3179,6 +3179,22 @@ RSpec.describe NotificationService, :mailer, feature_category: :team_planning do
end
end
+ describe '#invite_member' do
+ let_it_be(:group_member) { create(:group_member) }
+
+ subject(:invite_member) { notification.invite_member(group_member, 'token') }
+
+ it 'sends exactly one email' do
+ expect(Notify)
+ .to receive(:member_invited_email).with('Group', group_member.id, 'token').at_least(:once).and_call_original
+
+ invite_member
+
+ expect_delivery_jobs_count(1)
+ expect_enqueud_email('Group', group_member.id, 'token', mail: 'member_invited_email')
+ end
+ end
+
describe '#new_instance_access_request', :deliver_mails_inline do
let_it_be(:user) { create(:user, :blocked_pending_approval) }
let_it_be(:admins) { create_list(:admin, 12, :with_sign_ins) }
@@ -3278,43 +3294,6 @@ RSpec.describe NotificationService, :mailer, feature_category: :team_planning do
let(:source) { group }
end
end
-
- describe '#new_group_member' do
- let(:group) { create(:group) }
-
- it 'sends a notification' do
- group.add_guest(added_user)
- should_only_email(added_user)
- end
-
- describe 'when notifications are disabled' do
- before do
- create_global_setting_for(added_user, :disabled)
- end
-
- it 'does not send a notification' do
- group.add_guest(added_user)
- should_not_email_anyone
- end
- end
-
- it_behaves_like 'group emails are disabled' do
- let(:notification_target) { group }
- let(:notification_trigger) { group.add_guest(added_user) }
- end
- end
-
- describe '#updated_group_member_expiration' do
- let_it_be(:group_member) { create(:group_member) }
-
- it 'emails the user that their group membership expiry has changed' do
- expect_next_instance_of(NotificationService) do |notification|
- allow(notification).to receive(:updated_group_member_expiration).with(group_member)
- end
-
- group_member.update!(expires_at: 5.days.from_now)
- end
- end
end
describe 'ProjectMember', :deliver_mails_inline do
@@ -3444,29 +3423,6 @@ RSpec.describe NotificationService, :mailer, feature_category: :team_planning do
end
end
- describe '#new_project_member' do
- it do
- create_member!
- should_only_email(added_user)
- end
-
- it_behaves_like 'project emails are disabled' do
- let(:notification_target) { project }
- let(:notification_trigger) { create_member! }
- end
-
- context 'when notifications are disabled' do
- before do
- create_global_setting_for(added_user, :disabled)
- end
-
- it do
- create_member!
- should_not_email_anyone
- end
- end
- end
-
describe '#member_about_to_expire' do
let_it_be(:group_member) { create(:group_member, expires_at: 7.days.from_now.to_date) }
let_it_be(:project_member) { create(:project_member, expires_at: 7.days.from_now.to_date) }
@@ -3487,9 +3443,92 @@ RSpec.describe NotificationService, :mailer, feature_category: :team_planning do
end
end
end
+ end
+
+ describe '#new_member', :deliver_mails_inline do
+ let_it_be(:source) { create(:group) }
+ let_it_be(:added_user) { create(:user) }
+
+ subject(:new_member) { notification.new_member(member) }
+
+ shared_examples_for 'new member added' do |source_type|
+ it 'triggers a notification about about the added access', deliver_mails_inline: false do
+ new_member
+
+ expect_delivery_jobs_count(1)
+ expect_enqueud_email(source_type, member.id, mail: 'member_access_granted_email')
+ end
+ end
+
+ context 'when source is a Group' do
+ it_behaves_like 'new member added', 'Group' do
+ let_it_be(:member) { create(:group_member, source: source) }
+ end
+
+ it_behaves_like 'group emails are disabled' do
+ let(:notification_target) { source }
+ let(:notification_trigger) { notification_target.add_guest(added_user) }
+ end
+ end
+
+ context 'when source is a Project' do
+ let_it_be(:source) { create(:project) }
+
+ it_behaves_like 'new member added', 'Project' do
+ let_it_be(:member) { create(:project_member, source: project) }
+ end
+
+ it_behaves_like 'project emails are disabled' do
+ let_it_be(:notification_target) { source }
+ let(:notification_trigger) { source.add_guest(added_user) }
+ end
+ end
- def create_member!
- create(:project_member, user: added_user, project: project)
+ context 'when notifications are disabled' do
+ before do
+ create_global_setting_for(added_user, :disabled)
+ end
+
+ it 'does not send a notification' do
+ source.add_guest(added_user)
+ should_not_email_anyone
+ end
+ end
+ end
+
+ describe '#updated_member_expiration' do
+ subject(:updated_member_expiration) { notification.updated_member_expiration(member) }
+
+ context 'for group member' do
+ let_it_be(:member) { create(:group_member) }
+
+ it 'triggers a notification about the expiration change' do
+ updated_member_expiration
+
+ expect_delivery_jobs_count(1)
+ expect_enqueud_email('Group', member.id, mail: 'member_expiration_date_updated_email')
+ end
+ end
+
+ context 'for project member' do
+ let_it_be(:member) { create(:project_member) }
+
+ it 'does not trigger a notification' do
+ updated_member_expiration
+
+ expect_delivery_jobs_count(0)
+ end
+ end
+ end
+
+ describe '#updated_member_access_level' do
+ let_it_be(:member) { create(:group_member) }
+
+ it 'triggers a notification about the access_level change' do
+ notification.updated_member_access_level(member)
+
+ expect_delivery_jobs_count(1)
+ expect_enqueud_email('Group', member.id, mail: 'member_access_granted_email')
end
end
diff --git a/spec/services/organizations/create_service_spec.rb b/spec/services/organizations/create_service_spec.rb
index aae89517c15..bbc0f3d7515 100644
--- a/spec/services/organizations/create_service_spec.rb
+++ b/spec/services/organizations/create_service_spec.rb
@@ -29,11 +29,13 @@ RSpec.describe Organizations::CreateService, feature_category: :cell do
shared_examples 'creating an organization' do
it 'creates the organization' do
expect { response }.to change { Organizations::Organization.count }
+ .and change { Organizations::OrganizationUser.count }.by(1)
expect(response).to be_success
expect(created_organization.name).to eq(params[:name])
expect(created_organization.path).to eq(params[:path])
expect(created_organization.description).to eq(params[:description])
expect(created_organization.avatar.filename).to eq(avatar_filename)
+ expect(created_organization.owner?(current_user)).to be(true)
end
end
diff --git a/spec/services/organizations/update_service_spec.rb b/spec/services/organizations/update_service_spec.rb
index 148840770db..30c07ae1d13 100644
--- a/spec/services/organizations/update_service_spec.rb
+++ b/spec/services/organizations/update_service_spec.rb
@@ -32,7 +32,7 @@ RSpec.describe Organizations::UpdateService, feature_category: :cell do
context 'when user has permission' do
before_all do
- create(:organization_user, organization: organization, user: current_user)
+ create(:organization_user, :owner, organization: organization, user: current_user)
end
shared_examples 'updating an organization' do
@@ -60,6 +60,14 @@ RSpec.describe Organizations::UpdateService, feature_category: :cell do
it_behaves_like 'updating an organization'
end
+ context 'when avatar is set to nil' do
+ let_it_be(:organization_detail) { create(:organization_detail, organization: organization) }
+ let(:extra_params) { { avatar: nil } }
+ let(:description) { organization_detail.description }
+
+ it_behaves_like 'updating an organization'
+ end
+
include_examples 'updating an organization'
context 'when the organization is not updated' do
diff --git a/spec/services/packages/npm/create_package_service_spec.rb b/spec/services/packages/npm/create_package_service_spec.rb
index f02e53b67cb..7a91fdfc5b9 100644
--- a/spec/services/packages/npm/create_package_service_spec.rb
+++ b/spec/services/packages/npm/create_package_service_spec.rb
@@ -25,7 +25,13 @@ RSpec.describe Packages::Npm::CreatePackageService, feature_category: :package_r
let(:version_data) { params.dig('versions', version) }
let(:lease_key) { "packages:npm:create_package_service:packages:#{project.id}_#{package_name}_#{version}" }
+ shared_examples 'valid service response' do
+ it { is_expected.to be_success }
+ end
+
shared_examples 'valid package' do
+ let(:package) { subject[:package] }
+
it 'creates a package' do
expect { subject }
.to change { Packages::Package.count }.by(1)
@@ -34,30 +40,27 @@ RSpec.describe Packages::Npm::CreatePackageService, feature_category: :package_r
.and change { Packages::Npm::Metadatum.count }.by(1)
end
- it_behaves_like 'assigns the package creator' do
- let(:package) { subject }
- end
-
- it { is_expected.to be_valid }
+ it_behaves_like 'assigns the package creator'
- it 'creates a package with name and version' do
- package = subject
+ it 'returns a valid package' do
+ subject
- expect(package.name).to eq(package_name)
- expect(package.version).to eq(version)
+ expect(package).to be_valid
+ .and have_attributes name: package_name, version: version
+ expect(package.npm_metadatum.package_json).to eq(version_data)
end
- it { expect(subject.npm_metadatum.package_json).to eq(version_data) }
-
- it { expect(subject.name).to eq(package_name) }
- it { expect(subject.version).to eq(version) }
-
context 'with build info' do
let_it_be(:job) { create(:ci_build, user: user) }
let(:params) { super().merge(build: job) }
- it_behaves_like 'assigns build to package'
- it_behaves_like 'assigns status to package'
+ it_behaves_like 'assigns build to package' do
+ subject { super().payload.fetch(:package) }
+ end
+
+ it_behaves_like 'assigns status to package' do
+ subject { super().payload.fetch(:package) }
+ end
it 'creates a package file build info' do
expect { subject }.to change { Packages::PackageFileBuildInfo.count }.by(1)
@@ -163,31 +166,35 @@ RSpec.describe Packages::Npm::CreatePackageService, feature_category: :package_r
.and change { Packages::Package.npm.count }.by(1)
.and change { Packages::Tag.count }.by(1)
.and change { Packages::Npm::Metadatum.count }.by(1)
- expect(subject.npm_metadatum.package_json[field]).to be_blank
+ expect(package.npm_metadatum.package_json[field]).to be_blank
end
end
end
end
context 'scoped package' do
+ it_behaves_like 'valid service response'
it_behaves_like 'valid package'
end
context 'when user is no project member' do
let_it_be(:user) { create(:user) }
+ it_behaves_like 'valid service response'
it_behaves_like 'valid package'
end
context 'scoped package not following the naming convention' do
let(:package_name) { '@any-scope/package' }
+ it_behaves_like 'valid service response'
it_behaves_like 'valid package'
end
context 'unscoped package' do
let(:package_name) { 'unscoped-package' }
+ it_behaves_like 'valid service response'
it_behaves_like 'valid package'
end
@@ -195,8 +202,8 @@ RSpec.describe Packages::Npm::CreatePackageService, feature_category: :package_r
let(:package_name) { "@#{namespace.path}/my_package" }
let!(:existing_package) { create(:npm_package, project: project, name: package_name, version: '1.0.1') }
- it { expect(subject[:http_status]).to eq 403 }
- it { expect(subject[:message]).to be 'Package already exists.' }
+ it { is_expected.to be_error }
+ it { is_expected.to have_attributes message: 'Package already exists.', reason: ::Packages::Npm::CreatePackageService::ERROR_REASON_PACKAGE_EXISTS }
context 'marked as pending_destruction' do
before do
@@ -217,10 +224,8 @@ RSpec.describe Packages::Npm::CreatePackageService, feature_category: :package_r
let(:max_file_size) { 5.bytes }
shared_examples_for 'max file size validation failure' do
- it 'returns a 400 error', :aggregate_failures do
- expect(subject[:http_status]).to eq 400
- expect(subject[:message]).to be 'File is too large.'
- end
+ it { is_expected.to be_error }
+ it { is_expected.to have_attributes message: 'File is too large.', reason: ::Packages::Npm::CreatePackageService::ERROR_REASON_INVALID_PARAMETER }
end
before do
@@ -280,8 +285,8 @@ RSpec.describe Packages::Npm::CreatePackageService, feature_category: :package_r
context 'with empty versions' do
let(:params) { super().merge!({ versions: {} }) }
- it { expect(subject[:http_status]).to eq 400 }
- it { expect(subject[:message]).to eq 'Version is empty.' }
+ it { is_expected.to be_error }
+ it { is_expected.to have_attributes message: 'Version is empty.', reason: ::Packages::Npm::CreatePackageService::ERROR_REASON_INVALID_PARAMETER }
end
context 'with invalid versions' do
@@ -303,8 +308,8 @@ RSpec.describe Packages::Npm::CreatePackageService, feature_category: :package_r
context 'with empty attachment data' do
let(:params) { super().merge({ _attachments: { "#{package_name}-#{version}.tgz" => { data: '' } } }) }
- it { expect(subject[:http_status]).to eq 400 }
- it { expect(subject[:message]).to eq 'Attachment data is empty.' }
+ it { is_expected.to be_error }
+ it { is_expected.to have_attributes message: 'Attachment data is empty.', reason: ::Packages::Npm::CreatePackageService::ERROR_REASON_INVALID_PARAMETER }
end
it 'obtains a lease to create a new package' do
@@ -318,8 +323,8 @@ RSpec.describe Packages::Npm::CreatePackageService, feature_category: :package_r
stub_exclusive_lease_taken(lease_key, timeout: described_class::DEFAULT_LEASE_TIMEOUT)
end
- it { expect(subject[:http_status]).to eq 400 }
- it { expect(subject[:message]).to eq 'Could not obtain package lease. Please try again.' }
+ it { is_expected.to be_error }
+ it { is_expected.to have_attributes message: 'Could not obtain package lease. Please try again.', reason: ::Packages::Npm::CreatePackageService::ERROR_REASON_PACKAGE_LEASE_TAKEN }
end
context 'when feature flag :packages_protected_packages disabled' do
@@ -364,7 +369,8 @@ RSpec.describe Packages::Npm::CreatePackageService, feature_category: :package_r
let(:service) { described_class.new(project, current_user, params) }
shared_examples 'protected package' do
- it { is_expected.to include http_status: 403, message: 'Package protected.' }
+ it { is_expected.to be_error }
+ it { is_expected.to have_attributes message: 'Package protected.', reason: ::Packages::Npm::CreatePackageService::ERROR_REASON_PACKAGE_PROTECTED }
it 'does not create any npm-related package records' do
expect { subject }
diff --git a/spec/services/packages/terraform_module/create_package_service_spec.rb b/spec/services/packages/terraform_module/create_package_service_spec.rb
index 3355dfcf5ec..c1a41cd9676 100644
--- a/spec/services/packages/terraform_module/create_package_service_spec.rb
+++ b/spec/services/packages/terraform_module/create_package_service_spec.rb
@@ -2,10 +2,11 @@
require 'spec_helper'
RSpec.describe Packages::TerraformModule::CreatePackageService, feature_category: :package_registry do
- let_it_be(:namespace) { create(:namespace) }
+ let_it_be(:namespace) { create(:group) }
let_it_be(:project) { create(:project, namespace: namespace) }
let_it_be(:user) { create(:user) }
let_it_be(:sha256) { '440e5e148a25331bbd7991575f7d54933c0ebf6cc735a18ee5066ac1381bb590' }
+ let_it_be(:package_settings) { create(:namespace_package_setting, namespace: namespace) }
let(:overrides) { {} }
@@ -36,10 +37,72 @@ RSpec.describe Packages::TerraformModule::CreatePackageService, feature_category
context 'package already exists elsewhere' do
let(:project2) { create(:project, namespace: namespace) }
- let!(:existing_package) { create(:terraform_module_package, project: project2, name: 'foo/bar', version: '1.0.0') }
+ let!(:existing_package) do
+ create(:terraform_module_package, project: project2, name: 'foo/bar', version: '1.0.0')
+ end
+
+ context 'when duplicates not allowed' do
+ it { expect(subject.reason).to eq :forbidden }
+ it { expect(subject.message).to be 'A package with the same name already exists in the namespace' }
+ end
+
+ context 'when duplicates allowed' do
+ before do
+ package_settings.update_column(:terraform_module_duplicates_allowed, true)
+ end
+
+ it_behaves_like 'creating a package'
+ end
+
+ context 'with duplicate regex exception' do
+ before do
+ package_settings.update_columns(
+ terraform_module_duplicates_allowed: false,
+ terraform_module_duplicate_exception_regex: regex
+ )
+ end
+
+ context 'when regex matches' do
+ let(:regex) { ".*#{existing_package.name.last(3)}.*" }
+
+ it_behaves_like 'creating a package'
+ end
- it { expect(subject[:http_status]).to eq 403 }
- it { expect(subject[:message]).to be 'Access Denied' }
+ context 'when regex does not match' do
+ let(:regex) { '.*not-a-match.*' }
+
+ it { expect(subject.reason).to eq :forbidden }
+ it { expect(subject.message).to be 'A package with the same name already exists in the namespace' }
+ end
+ end
+
+ context 'for ancestor namespace' do
+ let_it_be(:package_settings) { create(:namespace_package_setting, :group) }
+ let_it_be(:parent_namespace) { package_settings.namespace }
+
+ before do
+ namespace.update!(parent: parent_namespace)
+ end
+
+ context 'when duplicates allowed in an ancestor' do
+ before do
+ package_settings.update_column(:terraform_module_duplicates_allowed, true)
+ end
+
+ it_behaves_like 'creating a package'
+ end
+
+ context 'when duplicates allowed in an ancestor with exception' do
+ before do
+ package_settings.update_columns(
+ terraform_module_duplicates_allowed: false,
+ terraform_module_duplicate_exception_regex: ".*#{existing_package.name.last(3)}.*"
+ )
+ end
+
+ it_behaves_like 'creating a package'
+ end
+ end
context 'marked as pending_destruction' do
before do
@@ -53,7 +116,7 @@ RSpec.describe Packages::TerraformModule::CreatePackageService, feature_category
context 'version already exists' do
let!(:existing_version) { create(:terraform_module_package, project: project, name: 'foo/bar', version: '1.0.1') }
- it { expect(subject[:http_status]).to eq 403 }
+ it { expect(subject[:reason]).to eq :forbidden }
it { expect(subject[:message]).to be 'Package version already exists.' }
context 'marked as pending_destruction' do
@@ -68,7 +131,7 @@ RSpec.describe Packages::TerraformModule::CreatePackageService, feature_category
context 'with empty version' do
let(:overrides) { { module_version: '' } }
- it { expect(subject[:http_status]).to eq 400 }
+ it { expect(subject[:reason]).to eq :bad_request }
it { expect(subject[:message]).to eq 'Version is empty.' }
end
end
diff --git a/spec/services/pages_domains/obtain_lets_encrypt_certificate_service_spec.rb b/spec/services/pages_domains/obtain_lets_encrypt_certificate_service_spec.rb
index 63b5d54a18d..0e46391c0ad 100644
--- a/spec/services/pages_domains/obtain_lets_encrypt_certificate_service_spec.rb
+++ b/spec/services/pages_domains/obtain_lets_encrypt_certificate_service_spec.rb
@@ -188,28 +188,4 @@ RSpec.describe PagesDomains::ObtainLetsEncryptCertificateService, feature_catego
service.execute
end
end
-
- context 'when the domain URL is longer than 64 characters' do
- let(:long_domain) { "a.b.c.#{'d' * 63}" }
- let(:pages_domain) { create(:pages_domain, :without_certificate, :without_key, domain: long_domain) }
- let(:service) { described_class.new(pages_domain) }
-
- it 'logs an error and does not proceed with certificate acquisition' do
- expect(Gitlab::AppLogger).to receive(:error).with(
- hash_including(
- message: "Domain name too long for Let's Encrypt certificate",
- pages_domain: long_domain,
- pages_domain_bytesize: long_domain.bytesize,
- max_allowed_bytesize: described_class::MAX_DOMAIN_LENGTH,
- project_id: pages_domain.project_id
- )
- )
-
- # Ensure that the certificate acquisition is not attempted
- expect(::PagesDomains::CreateAcmeOrderService).not_to receive(:new)
- expect(PagesDomainSslRenewalWorker).not_to receive(:perform_in)
-
- service.execute
- end
- end
end
diff --git a/spec/services/post_receive_service_spec.rb b/spec/services/post_receive_service_spec.rb
index 167baed06e7..c0bc8e1cc6e 100644
--- a/spec/services/post_receive_service_spec.rb
+++ b/spec/services/post_receive_service_spec.rb
@@ -19,7 +19,7 @@ RSpec.describe PostReceiveService, feature_category: :team_planning do
let(:repository) { project.repository }
let(:changes) do
- "#{Gitlab::Git::BLANK_SHA} 570e7b2abdd848b95f2f578043fc23bd6f6fd24d refs/heads/#{branch_name}"
+ "#{Gitlab::Git::SHA1_BLANK_SHA} 570e7b2abdd848b95f2f578043fc23bd6f6fd24d refs/heads/#{branch_name}"
end
let(:params) do
diff --git a/spec/services/preview_markdown_service_spec.rb b/spec/services/preview_markdown_service_spec.rb
index f6aca9970c8..6c185024d28 100644
--- a/spec/services/preview_markdown_service_spec.rb
+++ b/spec/services/preview_markdown_service_spec.rb
@@ -126,7 +126,7 @@ RSpec.describe PreviewMarkdownService, feature_category: :team_planning do
result = service.execute
- expect(result[:text]).to eq "Please do it\n\n/assign #{user.to_reference}"
+ expect(result[:text]).to eq "Please do it\n<p>/assign #{user.to_reference}</p>"
end
end
diff --git a/spec/services/projects/cleanup_service_spec.rb b/spec/services/projects/cleanup_service_spec.rb
index 533a09f7bc7..90f360c5dbd 100644
--- a/spec/services/projects/cleanup_service_spec.rb
+++ b/spec/services/projects/cleanup_service_spec.rb
@@ -190,7 +190,7 @@ RSpec.describe Projects::CleanupService, feature_category: :source_code_manageme
Gitaly::ApplyBfgObjectMapStreamResponse::Entry.new(
type: :COMMIT,
old_oid: old_oid,
- new_oid: Gitlab::Git::BLANK_SHA
+ new_oid: Gitlab::Git::SHA1_BLANK_SHA
)
end
diff --git a/spec/services/projects/destroy_service_spec.rb b/spec/services/projects/destroy_service_spec.rb
index 3aea329a45f..e5dd17a3c7c 100644
--- a/spec/services/projects/destroy_service_spec.rb
+++ b/spec/services/projects/destroy_service_spec.rb
@@ -166,6 +166,14 @@ RSpec.describe Projects::DestroyService, :aggregate_failures, :event_store_publi
end
end
+ context 'deleting a project with deployments' do
+ let!(:deployment) { create(:deployment, project: project) }
+
+ it 'deletes deployments' do
+ expect { destroy_project(project, user, {}) }.to change(Deployment, :count).by(-1)
+ end
+ end
+
it_behaves_like 'deleting the project'
context 'personal projects count cache' do
diff --git a/spec/services/projects/fork_service_spec.rb b/spec/services/projects/fork_service_spec.rb
index e6418c7b4ea..949421c205f 100644
--- a/spec/services/projects/fork_service_spec.rb
+++ b/spec/services/projects/fork_service_spec.rb
@@ -3,165 +3,159 @@
require 'spec_helper'
RSpec.describe Projects::ForkService, feature_category: :source_code_management do
- include ProjectForksHelper
+ subject(:service) { described_class.new(project, user, params) }
+
+ let_it_be_with_reload(:project) { create(:project, :repository, star_count: 100, description: 'project') }
+ let_it_be_with_reload(:user) { create(:user) }
+
+ let(:params) { { namespace: namespace } }
+ let(:namespace) { user.namespace }
shared_examples 'forks count cache refresh' do
it 'flushes the forks count cache of the source project', :clean_gitlab_redis_cache do
expect(from_project.forks_count).to be_zero
- fork_project(from_project, to_user, using_service: true)
+ described_class.new(from_project, to_user, params).execute
+
BatchLoader::Executor.clear_current
- expect(from_project.forks_count).to eq(1)
+ expect(from_project.reload.forks_count).to eq(1)
end
end
- context 'when forking a new project' do
- describe 'fork by user' do
+ describe '#execute' do
+ subject(:fork_of_project) { service.execute }
+
+ before do
+ # NOTE: Avatar file is dropped after project reload. Explicitly re-add it for each test.
+ project.avatar = fixture_file_upload("spec/fixtures/dk.png", "image/png")
+ end
+
+ context 'when forker is a guest' do
before do
- @from_user = create(:user)
- @from_namespace = @from_user.namespace
- avatar = fixture_file_upload("spec/fixtures/dk.png", "image/png")
- @from_project = create(
- :project,
- :repository,
- creator_id: @from_user.id,
- namespace: @from_namespace,
- star_count: 107,
- avatar: avatar,
- description: 'wow such project',
- external_authorization_classification_label: 'classification-label'
- )
- @to_user = create(:user)
- @to_namespace = @to_user.namespace
- @from_project.add_member(@to_user, :developer)
+ project.add_member(user, :guest)
end
- context 'fork project' do
- context 'when forker is a guest' do
- before do
- @guest = create(:user)
- @from_project.add_member(@guest, :guest)
- end
- subject { fork_project(@from_project, @guest, using_service: true) }
+ it 'does not create a fork' do
+ is_expected.not_to be_persisted
+ expect(subject.errors[:forked_from_project_id]).to eq(['is forbidden'])
+ end
- it { is_expected.not_to be_persisted }
- it { expect(subject.errors[:forked_from_project_id]).to eq(['is forbidden']) }
+ it 'does not create a fork network' do
+ expect { subject }.not_to change { project.reload.fork_network }
+ end
+ end
- it 'does not create a fork network' do
- expect { subject }.not_to change { @from_project.reload.fork_network }
- end
- end
+ context 'when forker is a developer' do
+ before do
+ project.add_member(user, :developer)
+ end
- it_behaves_like 'forks count cache refresh' do
- let(:from_project) { @from_project }
- let(:to_user) { @to_user }
- end
-
- describe "successfully creates project in the user namespace" do
- let(:to_project) { fork_project(@from_project, @to_user, namespace: @to_user.namespace, using_service: true) }
-
- it { expect(to_project).to be_persisted }
- it { expect(to_project.errors).to be_empty }
- it { expect(to_project.first_owner).to eq(@to_user) }
- it { expect(to_project.namespace).to eq(@to_user.namespace) }
- it { expect(to_project.star_count).to be_zero }
- it { expect(to_project.description).to eq(@from_project.description) }
- it { expect(to_project.avatar.file).to be_exists }
- it { expect(to_project.ci_config_path).to eq(@from_project.ci_config_path) }
- it { expect(to_project.external_authorization_classification_label).to eq(@from_project.external_authorization_classification_label) }
- it { expect(to_project.suggestion_commit_message).to eq(@from_project.suggestion_commit_message) }
- it { expect(to_project.merge_commit_template).to eq(@from_project.merge_commit_template) }
- it { expect(to_project.squash_commit_template).to eq(@from_project.squash_commit_template) }
-
- # This test is here because we had a bug where the from-project lost its
- # avatar after being forked.
- # https://gitlab.com/gitlab-org/gitlab-foss/issues/26158
- it "after forking the from-project still has its avatar" do
- # If we do not fork the project first we cannot detect the bug.
- expect(to_project).to be_persisted
-
- expect(@from_project.avatar.file).to be_exists
- end
+ it 'creates a fork of the project' do
+ expect(fork_of_project).to be_persisted
+ expect(fork_of_project.errors).to be_empty
+ expect(fork_of_project.first_owner).to eq(user)
+ expect(fork_of_project.namespace).to eq(user.namespace)
+ expect(fork_of_project.star_count).to be_zero
+ expect(fork_of_project.description).to eq(project.description)
+ expect(fork_of_project.avatar.file).to be_exists
+ expect(fork_of_project.ci_config_path).to eq(project.ci_config_path)
+ expect(fork_of_project.external_authorization_classification_label).to eq(project.external_authorization_classification_label)
+ expect(fork_of_project.suggestion_commit_message).to eq(project.suggestion_commit_message)
+ expect(fork_of_project.merge_commit_template).to eq(project.merge_commit_template)
+ expect(fork_of_project.squash_commit_template).to eq(project.squash_commit_template)
+ end
- it_behaves_like 'forks count cache refresh' do
- let(:from_project) { @from_project }
- let(:to_user) { @to_user }
- end
+ # This test is here because we had a bug where the from-project lost its
+ # avatar after being forked.
+ # https://gitlab.com/gitlab-org/gitlab-foss/issues/26158
+ it 'after forking the original project still has its avatar' do
+ # If we do not fork the project first we cannot detect the bug.
+ expect(fork_of_project).to be_persisted
- it 'creates a fork network with the new project and the root project set' do
- to_project
- fork_network = @from_project.reload.fork_network
+ expect(project.avatar.file).to be_exists
+ end
- expect(fork_network).not_to be_nil
- expect(fork_network.root_project).to eq(@from_project)
- expect(fork_network.projects).to contain_exactly(@from_project, to_project)
- end
+ it_behaves_like 'forks count cache refresh' do
+ let(:from_project) { project }
+ let(:to_user) { user }
+ end
- it 'imports the repository of the forked project', :sidekiq_might_not_need_inline do
- to_project = fork_project(@from_project, @to_user, repository: true, using_service: true)
+ it 'creates a fork network with the new project and the root project set' do
+ subject
- expect(to_project.empty_repo?).to be_falsy
- end
- end
+ fork_network = project.reload.fork_network
- context 'creating a fork of a fork' do
- let(:from_forked_project) { fork_project(@from_project, @to_user, using_service: true) }
- let(:other_namespace) do
- group = create(:group)
- group.add_owner(@to_user)
- group
- end
+ expect(fork_network).not_to be_nil
+ expect(fork_network.root_project).to eq(project)
+ expect(fork_network.projects).to contain_exactly(project, fork_of_project)
+ end
- let(:to_project) { fork_project(from_forked_project, @to_user, namespace: other_namespace, using_service: true) }
+ it 'imports the repository of the forked project', :sidekiq_might_not_need_inline do
+ expect(fork_of_project).to be_persisted
- it 'sets the root of the network to the root project' do
- expect(to_project.fork_network.root_project).to eq(@from_project)
- end
+ # The call to project.repository.after_import in RepositoryForkWorker does
+ # not reset the @exists variable of this fork_of_project.repository
+ # so we have to explicitly call this method to clear the @exists variable
+ # of the instance we're returning here.
+ fork_of_project.repository.expire_content_cache
- it 'sets the forked_from_project on the membership' do
- expect(to_project.fork_network_member.forked_from_project).to eq(from_forked_project)
- end
+ expect(fork_of_project.empty_repo?).to be_falsey
+ end
- context 'when the forked project has higher visibility than the root project' do
- let(:root_project) { create(:project, :public) }
+ context 'when creating fork of the fork' do
+ let_it_be(:other_namespace) { create(:group).tap { |group| group.add_owner(user) } }
- it 'successfully creates a fork of the fork with correct visibility' do
- forked_project = fork_project(root_project, @to_user, using_service: true)
+ it 'creates a new project' do
+ fork_of_project = described_class.new(project, user, params).execute
+ expect(fork_of_project).to be_persisted
- root_project.update!(visibility_level: Gitlab::VisibilityLevel::INTERNAL)
+ fork_of_fork = described_class.new(fork_of_project, user, { namespace: other_namespace }).execute
+ expect(fork_of_fork).to be_persisted
- # Forked project visibility is not affected by root project visibility change
- expect(forked_project).to have_attributes(visibility_level: Gitlab::VisibilityLevel::PUBLIC)
+ expect(fork_of_fork).to be_valid
+ expect(fork_of_fork.fork_network.root_project).to eq(project)
+ expect(fork_of_fork.fork_network_member.forked_from_project).to eq(fork_of_project)
+ end
- fork_of_the_fork = fork_project(forked_project, @to_user, namespace: other_namespace, using_service: true)
+ context 'when the forked project has higher visibility than the root project' do
+ let_it_be(:root_project) { create(:project, :public) }
- expect(fork_of_the_fork).to be_valid
- expect(fork_of_the_fork).to have_attributes(visibility_level: Gitlab::VisibilityLevel::PUBLIC)
- end
- end
+ it 'successfully creates a fork of the fork with correct visibility' do
+ fork_of_project = described_class.new(root_project, user, params).execute
+ expect(fork_of_project).to be_persisted
- it_behaves_like 'forks count cache refresh' do
- let(:from_project) { from_forked_project }
- let(:to_user) { @to_user }
+ root_project.update!(visibility_level: Gitlab::VisibilityLevel::INTERNAL)
+
+ # Forked project visibility is not affected by root project visibility change
+ expect(fork_of_project).to have_attributes(visibility_level: Gitlab::VisibilityLevel::PUBLIC)
+
+ fork_of_fork = described_class.new(fork_of_project, user, { namespace: other_namespace }).execute
+ expect(fork_of_fork).to be_persisted
+
+ expect(fork_of_fork).to be_valid
+ expect(fork_of_fork).to have_attributes(visibility_level: Gitlab::VisibilityLevel::PUBLIC)
end
end
+
+ it_behaves_like 'forks count cache refresh' do
+ let(:from_project) { described_class.new(project, user, { namespace: other_namespace }).execute }
+ let(:to_user) { user }
+ end
end
- context 'project already exists' do
- it "fails due to validation, not transaction failure" do
- @existing_project = create(:project, :repository, creator_id: @to_user.id, path: @from_project.path, namespace: @to_namespace)
- @to_project = fork_project(@from_project, @to_user, namespace: @to_namespace, using_service: true)
- expect(@existing_project).to be_persisted
+ context 'when project already exists' do
+ it 'fails due to validation, not transaction failure' do
+ existing_project = create(:project, namespace: namespace, path: project.path)
+ expect(existing_project).to be_persisted
- expect(@to_project).not_to be_persisted
- expect(@to_project.errors[:path]).to eq(['has already been taken'])
+ expect(fork_of_project).not_to be_persisted
+ expect(fork_of_project.errors[:path]).to eq(['has already been taken'])
end
end
- context 'repository in legacy storage already exists' do
- let(:raw_fake_repo) { Gitlab::Git::Repository.new('default', File.join(@to_user.namespace.full_path, "#{@from_project.path}.git"), nil, nil) }
- let(:params) { { namespace: @to_user.namespace, using_service: true } }
+ context 'when repository in legacy storage already exists' do
+ let(:raw_fake_repo) { Gitlab::Git::Repository.new('default', File.join(user.namespace.full_path, "#{project.path}.git"), nil, nil) }
before do
stub_application_setting(hashed_storage_enabled: false)
@@ -172,59 +166,54 @@ RSpec.describe Projects::ForkService, feature_category: :source_code_management
raw_fake_repo.remove
end
- subject { fork_project(@from_project, @to_user, params) }
-
it 'does not allow creation' do
- expect(subject).not_to be_persisted
- expect(subject.errors.messages).to have_key(:base)
- expect(subject.errors.messages[:base].first).to match('There is already a repository with that name on disk')
+ fork_of_project
+
+ expect(fork_of_project).not_to be_persisted
+ expect(fork_of_project.errors.messages).to have_key(:base)
+ expect(fork_of_project.errors.messages[:base].first).to match('There is already a repository with that name on disk')
end
context 'when repository disk validation is explicitly skipped' do
let(:params) { super().merge(skip_disk_validation: true) }
it 'allows fork project creation' do
- expect(subject).to be_persisted
- expect(subject.errors.messages).to be_empty
+ expect(fork_of_project).to be_persisted
+ expect(fork_of_project.errors.messages).to be_empty
end
end
end
- context "CI/CD settings" do
- let(:to_project) { fork_project(@from_project, @to_user, using_service: true) }
+ context 'CI/CD settings' do
+ context 'when origin has git depth specified' do
+ it 'inherits default_git_depth from the origin project' do
+ project.update!(ci_default_git_depth: 42)
- context "when origin has git depth specified" do
- before do
- @from_project.update!(ci_default_git_depth: 42)
- end
-
- it "inherits default_git_depth from the origin project" do
- expect(to_project.ci_default_git_depth).to eq(42)
+ expect(fork_of_project).to be_persisted
+ expect(fork_of_project.ci_default_git_depth).to eq(42)
end
end
- context "when origin does not define git depth" do
- before do
- @from_project.update!(ci_default_git_depth: nil)
- end
+ context 'when origin does not define git depth' do
+ it 'the fork has git depth set to 0' do
+ project.update!(ci_default_git_depth: nil)
- it "the fork has git depth set to 0" do
- expect(to_project.ci_default_git_depth).to eq(0)
+ expect(fork_of_project).to be_persisted
+ expect(fork_of_project.ci_default_git_depth).to eq(0)
end
end
end
- context "when project has restricted visibility level" do
- context "and only one visibility level is restricted" do
+ context 'when project has restricted visibility level' do
+ context 'and only one visibility level is restricted' do
before do
- @from_project.update!(visibility_level: Gitlab::VisibilityLevel::INTERNAL)
+ project.update!(visibility_level: Gitlab::VisibilityLevel::INTERNAL)
stub_application_setting(restricted_visibility_levels: [Gitlab::VisibilityLevel::INTERNAL])
end
- it "creates fork with lowest level" do
- forked_project = fork_project(@from_project, @to_user, using_service: true)
-
- expect(forked_project.visibility_level).to eq(Gitlab::VisibilityLevel::PRIVATE)
+ it 'creates fork with lowest level' do
+ expect(fork_of_project).to be_persisted
+ expect(fork_of_project.visibility_level).to eq(Gitlab::VisibilityLevel::PRIVATE)
end
end
@@ -233,289 +222,284 @@ RSpec.describe Projects::ForkService, feature_category: :source_code_management
stub_application_setting(restricted_visibility_levels: [Gitlab::VisibilityLevel::PUBLIC, Gitlab::VisibilityLevel::INTERNAL, Gitlab::VisibilityLevel::PRIVATE])
end
- it "creates fork with private visibility levels" do
- forked_project = fork_project(@from_project, @to_user, using_service: true)
-
- expect(forked_project.visibility_level).to eq(Gitlab::VisibilityLevel::PRIVATE)
+ it "doesn't create a fork" do
+ expect(fork_of_project).not_to be_persisted
+ expect(fork_of_project.errors[:visibility_level]).to eq ['private has been restricted by your GitLab administrator']
end
end
end
context 'when forking is disabled' do
before do
- @from_project.project_feature.update_attribute(
- :forking_access_level, ProjectFeature::DISABLED)
+ project.project_feature.update_attribute(:forking_access_level, ProjectFeature::DISABLED)
end
- it 'fails' do
- to_project = fork_project(@from_project, @to_user, namespace: @to_user.namespace, using_service: true)
-
- expect(to_project.errors[:forked_from_project_id]).to eq(['is forbidden'])
+ it 'does not create a fork' do
+ expect(fork_of_project).not_to be_persisted
+ expect(fork_of_project.errors[:forked_from_project_id]).to eq(['is forbidden'])
end
end
- end
- describe 'fork to namespace' do
- before do
- @group_owner = create(:user)
- @developer = create(:user)
- @project = create(
- :project, :repository,
- creator_id: @group_owner.id,
- star_count: 777,
- description: 'Wow, such a cool project!',
- ci_config_path: 'debian/salsa-ci.yml'
- )
- @group = create(:group)
- @group.add_member(@group_owner, GroupMember::OWNER)
- @group.add_member(@developer, GroupMember::DEVELOPER)
- @project.add_member(@developer, :developer)
- @project.add_member(@group_owner, :developer)
- @opts = { namespace: @group, using_service: true }
- end
+ context 'when forking to the group namespace' do
+ context 'when user owns a target group' do
+ let_it_be_with_reload(:namespace) { create(:group).tap { |group| group.add_owner(user) } }
+
+ it 'creates a fork in the group' do
+ expect(fork_of_project).to be_persisted
+ expect(fork_of_project.first_owner).to eq(user)
+ expect(fork_of_project.namespace).to eq(namespace)
+ end
+
+ context 'when project already exists in group' do
+ it 'fails due to validation, not transaction failure' do
+ existing_project = create(:project, :repository, path: project.path, namespace: namespace)
+ expect(existing_project).to be_persisted
+
+ expect(fork_of_project).not_to be_persisted
+ expect(fork_of_project.errors[:path]).to eq(['has already been taken'])
+ end
+ end
- context 'fork project for group' do
- it 'group owner successfully forks project into the group' do
- to_project = fork_project(@project, @group_owner, @opts)
+ context 'when the namespace has a lower visibility level than the project' do
+ let_it_be(:namespace) { create(:group, :private).tap { |group| group.add_owner(user) } }
+ let_it_be(:project) { create(:project, :public) }
- expect(to_project).to be_persisted
- expect(to_project.errors).to be_empty
- expect(to_project.first_owner).to eq(@group_owner)
- expect(to_project.namespace).to eq(@group)
- expect(to_project.name).to eq(@project.name)
- expect(to_project.path).to eq(@project.path)
- expect(to_project.description).to eq(@project.description)
- expect(to_project.ci_config_path).to eq(@project.ci_config_path)
- expect(to_project.star_count).to be_zero
+ it 'creates the project with the lower visibility level' do
+ expect(fork_of_project).to be_persisted
+ expect(fork_of_project.visibility_level).to eq(Gitlab::VisibilityLevel::PRIVATE)
+ end
+ end
end
- end
- context 'fork project for group when user not owner' do
- it 'group developer fails to fork project into the group' do
- to_project = fork_project(@project, @developer, @opts)
+ context 'when user is not a group owner' do
+ let_it_be(:namespace) { create(:group).tap { |group| group.add_developer(user) } }
- expect(to_project.errors[:namespace]).to eq(['is not valid'])
+ it 'does not create a fork' do
+ expect(fork_of_project).not_to be_persisted
+ expect(fork_of_project.errors[:namespace]).to eq(['is not valid'])
+ end
end
end
- context 'project already exists in group' do
- it 'fails due to validation, not transaction failure' do
- existing_project = create(:project, :repository, path: @project.path, namespace: @group)
- to_project = fork_project(@project, @group_owner, @opts)
- expect(existing_project.persisted?).to be_truthy
- expect(to_project.errors[:path]).to eq(['has already been taken'])
+ context 'with optional attributes' do
+ let(:params) { super().merge(path: 'forked', name: 'My Fork', description: 'Description', visibility: 'private') }
+
+ it 'sets optional attributes to specified values' do
+ expect(fork_of_project).to be_persisted
+
+ expect(fork_of_project.path).to eq('forked')
+ expect(fork_of_project.name).to eq('My Fork')
+ expect(fork_of_project.description).to eq('Description')
+ expect(fork_of_project.visibility_level).to eq(Gitlab::VisibilityLevel::PRIVATE)
end
- end
- context 'when the namespace has a lower visibility level than the project' do
- it 'creates the project with the lower visibility level' do
- public_project = create(:project, :public)
- private_group = create(:group, :private)
- group_owner = create(:user)
- private_group.add_owner(group_owner)
+ context 'when an unknown visibility is requested' do
+ let_it_be(:project) { create(:project, :public) }
+
+ let(:params) { super().merge(visibility: 'unknown') }
- forked_project = fork_project(public_project, group_owner, namespace: private_group, using_service: true)
+ it 'sets visibility level to private' do
+ expect(fork_of_project).to be_persisted
- expect(forked_project.visibility_level).to eq(Gitlab::VisibilityLevel::PRIVATE)
+ expect(fork_of_project.visibility_level).to eq(Gitlab::VisibilityLevel::PRIVATE)
+ end
end
- end
- end
- describe 'fork with optional attributes' do
- let(:public_project) { create(:project, :public) }
-
- it 'sets optional attributes to specified values' do
- forked_project = fork_project(
- public_project,
- nil,
- namespace: public_project.namespace,
- path: 'forked',
- name: 'My Fork',
- description: 'Description',
- visibility: 'internal',
- using_service: true
- )
-
- expect(forked_project.path).to eq('forked')
- expect(forked_project.name).to eq('My Fork')
- expect(forked_project.description).to eq('Description')
- expect(forked_project.visibility_level).to eq(Gitlab::VisibilityLevel::INTERNAL)
- end
+ context 'when requested visibility level is greater than allowed' do
+ let_it_be(:project) { create(:project, :internal) }
- it 'sets visibility level to private if an unknown visibility is requested' do
- forked_project = fork_project(public_project, nil, using_service: true, visibility: 'unknown')
+ let(:params) { super().merge(visibility: 'public') }
- expect(forked_project.visibility_level).to eq(Gitlab::VisibilityLevel::PRIVATE)
- end
+ it 'sets visibility level to project visibility' do
+ expect(fork_of_project).to be_persisted
- it 'sets visibility level to project visibility level if requested visibility is greater' do
- private_project = create(:project, :private)
+ expect(fork_of_project.visibility_level).to eq(Gitlab::VisibilityLevel::INTERNAL)
+ end
+ end
- forked_project = fork_project(private_project, nil, using_service: true, visibility: 'public')
+ context 'when target namespace has lower visibility than a project' do
+ let_it_be(:project) { create(:project, :public) }
+ let_it_be(:namespace) { create(:group, :private).tap { |group| group.add_owner(user) } }
- expect(forked_project.visibility_level).to eq(Gitlab::VisibilityLevel::PRIVATE)
- end
+ it 'sets visibility level to target namespace visibility level' do
+ expect(fork_of_project).to be_persisted
+
+ expect(fork_of_project.visibility_level).to eq(Gitlab::VisibilityLevel::PRIVATE)
+ end
+ end
- it 'sets visibility level to target namespace visibility level if requested visibility is greater' do
- private_group = create(:group, :private)
+ context 'when project has custom visibility settings' do
+ let_it_be(:project) { create(:project, :public) }
- forked_project = fork_project(public_project, nil, namespace: private_group, using_service: true, visibility: 'public')
+ let(:attrs) do
+ ProjectFeature::FEATURES.to_h do |f|
+ ["#{f}_access_level", ProjectFeature::PRIVATE]
+ end
+ end
- expect(forked_project.visibility_level).to eq(Gitlab::VisibilityLevel::PRIVATE)
+ before do
+ project.project_feature.update!(attrs)
+ end
+
+ it 'copies project features visibility settings to the fork' do
+ expect(fork_of_project).to be_persisted
+
+ expect(fork_of_project.project_feature.slice(attrs.keys)).to eq(attrs)
+ end
+ end
end
- it 'copies project features visibility settings to the fork', :aggregate_failures do
- attrs = ProjectFeature::FEATURES.to_h do |f|
- ["#{f}_access_level", ProjectFeature::PRIVATE]
+ context 'when a project is already forked' do
+ let_it_be(:project) { create(:project, :public, :repository) }
+ let_it_be(:group) { create(:group).tap { |group| group.add_owner(user) } }
+
+ before do
+ # Stub everything required to move a project to a Gitaly shard that does not exist
+ allow(Gitlab::GitalyClient).to receive(:filesystem_id).with('default').and_call_original
+ allow(Gitlab::GitalyClient).to receive(:filesystem_id).with('test_second_storage').and_return(SecureRandom.uuid)
+ stub_storage_settings('test_second_storage' => {})
+ allow_any_instance_of(Gitlab::Git::Repository).to receive(:create_repository)
+ .and_return(true)
+ allow_any_instance_of(Gitlab::Git::Repository).to receive(:replicate)
+ allow_any_instance_of(Gitlab::Git::Repository).to receive(:checksum)
+ .and_return(::Gitlab::Git::SHA1_BLANK_SHA)
+ allow_next_instance_of(Gitlab::Git::ObjectPool) do |object_pool|
+ allow(object_pool).to receive(:link)
+ end
end
- public_project.project_feature.update!(attrs)
+ it 'creates a new pool repository after the project is moved to a new shard' do
+ fork_before_move = subject
- user = create(:user, developer_projects: [public_project])
- forked_project = described_class.new(public_project, user).execute
+ storage_move = create(
+ :project_repository_storage_move,
+ :scheduled,
+ container: project,
+ destination_storage_name: 'test_second_storage'
+ )
+ Projects::UpdateRepositoryStorageService.new(storage_move).execute
- expect(forked_project.project_feature.slice(attrs.keys)).to eq(attrs)
- end
- end
- end
+ fork_after_move = described_class.new(project.reload, user, namespace: group).execute
- context 'when a project is already forked' do
- it 'creates a new pool repository after the project is moved to a new shard' do
- project = create(:project, :public, :repository)
- fork_before_move = fork_project(project, nil, using_service: true)
-
- # Stub everything required to move a project to a Gitaly shard that does not exist
- allow(Gitlab::GitalyClient).to receive(:filesystem_id).with('default').and_call_original
- allow(Gitlab::GitalyClient).to receive(:filesystem_id).with('test_second_storage').and_return(SecureRandom.uuid)
- stub_storage_settings('test_second_storage' => {})
- allow_any_instance_of(Gitlab::Git::Repository).to receive(:create_repository)
- .and_return(true)
- allow_any_instance_of(Gitlab::Git::Repository).to receive(:replicate)
- allow_any_instance_of(Gitlab::Git::Repository).to receive(:checksum)
- .and_return(::Gitlab::Git::BLANK_SHA)
- allow_next_instance_of(Gitlab::Git::ObjectPool) do |object_pool|
- allow(object_pool).to receive(:link)
+ pool_repository_before_move = PoolRepository.joins(:shard)
+ .find_by(source_project: project, shards: { name: 'default' })
+ pool_repository_after_move = PoolRepository.joins(:shard)
+ .find_by(source_project: project, shards: { name: 'test_second_storage' })
+
+ expect(fork_before_move.pool_repository).to eq(pool_repository_before_move)
+ expect(fork_after_move.pool_repository).to eq(pool_repository_after_move)
+ end
end
- storage_move = create(
- :project_repository_storage_move,
- :scheduled,
- container: project,
- destination_storage_name: 'test_second_storage'
- )
- Projects::UpdateRepositoryStorageService.new(storage_move).execute
- fork_after_move = fork_project(project.reload, nil, using_service: true)
- pool_repository_before_move = PoolRepository.joins(:shard)
- .find_by(source_project: project, shards: { name: 'default' })
- pool_repository_after_move = PoolRepository.joins(:shard)
- .find_by(source_project: project, shards: { name: 'test_second_storage' })
-
- expect(fork_before_move.pool_repository).to eq(pool_repository_before_move)
- expect(fork_after_move.pool_repository).to eq(pool_repository_after_move)
- end
- end
+ context 'when forking with object pools' do
+ let_it_be(:project) { create(:project, :public, :repository) }
- context 'when forking with object pools' do
- let(:fork_from_project) { create(:project, :repository, :public) }
- let(:forker) { create(:user) }
+ context 'when no pool exists' do
+ it 'creates a new object pool' do
+ expect { fork_of_project }.to change { PoolRepository.count }.by(1)
- context 'when no pool exists' do
- it 'creates a new object pool' do
- forked_project = fork_project(fork_from_project, forker, using_service: true)
+ expect(fork_of_project.pool_repository).to eq(project.pool_repository)
+ end
- expect(forked_project.pool_repository).to eq(fork_from_project.pool_repository)
- end
- end
+ context 'when project is private' do
+ let_it_be(:project) { create(:project, :private, :repository) }
+
+ it 'does not create an object pool' do
+ expect { fork_of_project }.not_to change { PoolRepository.count }
- context 'when a pool already exists' do
- let!(:pool_repository) { create(:pool_repository, source_project: fork_from_project) }
+ expect(fork_of_project.pool_repository).to be_nil
+ end
+ end
+ end
- it 'joins the object pool' do
- forked_project = fork_project(fork_from_project, forker, using_service: true)
+ context 'when a pool already exists' do
+ let!(:pool_repository) { create(:pool_repository, source_project: project) }
- expect(forked_project.pool_repository).to eq(fork_from_project.pool_repository)
+ it 'joins the object pool' do
+ expect { fork_of_project }.not_to change { PoolRepository.count }
+
+ expect(fork_of_project.pool_repository).to eq(pool_repository)
+ end
+ end
end
- end
- end
- context 'when linking fork to an existing project' do
- let(:fork_from_project) { create(:project, :public) }
- let(:fork_to_project) { create(:project, :public) }
- let(:user) do
- create(:user).tap { |u| fork_to_project.add_maintainer(u) }
- end
+ context 'when linking fork to an existing project' do
+ let_it_be_with_reload(:unlinked_fork) { create(:project, :public) }
- subject { described_class.new(fork_from_project, user) }
+ before_all do
+ unlinked_fork.add_developer(user)
+ end
- def forked_from_project(project)
- project.fork_network_member&.forked_from_project
- end
+ def forked_from_project(project)
+ project.fork_network_member&.forked_from_project
+ end
- context 'if project is already forked' do
- it 'does not create fork relation' do
- allow(fork_to_project).to receive(:forked?).and_return(true)
- expect(forked_from_project(fork_to_project)).to be_nil
- expect(subject.execute(fork_to_project)).to be_nil
- expect(forked_from_project(fork_to_project)).to be_nil
- end
- end
+ context 'if project is already forked' do
+ it 'does not create fork relation' do
+ allow(unlinked_fork).to receive(:forked?).and_return(true)
- context 'if project is not forked' do
- it 'creates fork relation' do
- expect(fork_to_project.forked?).to be_falsy
- expect(forked_from_project(fork_to_project)).to be_nil
+ expect(forked_from_project(unlinked_fork)).to be_nil
- subject.execute(fork_to_project)
+ expect(service.execute(unlinked_fork)).to be_nil
- fork_to_project.reload
+ expect(forked_from_project(unlinked_fork)).to be_nil
+ end
+ end
- expect(fork_to_project.forked?).to be true
- expect(forked_from_project(fork_to_project)).to eq fork_from_project
- expect(fork_to_project.forked_from_project).to eq fork_from_project
- end
+ context 'if project is not forked' do
+ it 'creates fork relation' do
+ expect(unlinked_fork.forked?).to be_falsy
+ expect(forked_from_project(unlinked_fork)).to be_nil
- it 'flushes the forks count cache of the source project' do
- expect(fork_from_project.forks_count).to be_zero
+ service.execute(unlinked_fork)
- subject.execute(fork_to_project)
- BatchLoader::Executor.clear_current
+ unlinked_fork.reload
- expect(fork_from_project.forks_count).to eq(1)
- end
+ expect(unlinked_fork.forked?).to be true
+ expect(forked_from_project(unlinked_fork)).to eq project
+ expect(unlinked_fork.forked_from_project).to eq project
+ end
+
+ it 'flushes the forks count cache of the source project' do
+ expect(project.forks_count).to be_zero
+
+ service.execute(unlinked_fork)
+ BatchLoader::Executor.clear_current
+
+ expect(project.forks_count).to eq(1)
+ end
- context 'if the fork is not allowed' do
- let(:fork_from_project) { create(:project, :private) }
+ context 'if the fork is not allowed' do
+ let_it_be(:project) { create(:project, :private) }
- it 'does not delete the LFS objects' do
- create(:lfs_objects_project, project: fork_to_project)
+ it 'does not delete the LFS objects' do
+ create(:lfs_objects_project, project: unlinked_fork)
- expect { subject.execute(fork_to_project) }
- .not_to change { fork_to_project.lfs_objects_projects.size }
+ expect { service.execute(unlinked_fork) }
+ .not_to change { unlinked_fork.lfs_objects_projects.size }
+ end
+ end
end
end
end
end
describe '#valid_fork_targets' do
+ subject { service.valid_fork_targets }
+
let(:finder_mock) { instance_double('ForkTargetsFinder', execute: ['finder_return_value']) }
- let(:current_user) { instance_double('User') }
- let(:project) { instance_double('Project') }
before do
- allow(ForkTargetsFinder).to receive(:new).with(project, current_user).and_return(finder_mock)
+ allow(ForkTargetsFinder).to receive(:new).with(project, user).and_return(finder_mock)
end
it 'returns whatever finder returns' do
- expect(described_class.new(project, current_user).valid_fork_targets).to eq ['finder_return_value']
+ is_expected.to eq ['finder_return_value']
end
end
describe '#valid_fork_branch?' do
- let_it_be(:user) { create(:user) }
- let_it_be(:project) { create(:project, :small_repo, creator_id: user.id) }
- let_it_be(:branch) { nil }
-
- subject { described_class.new(project, user).valid_fork_branch?(branch) }
+ subject { service.valid_fork_branch?(branch) }
context 'when branch exists' do
let(:branch) { project.default_branch_or_main }
@@ -531,12 +515,11 @@ RSpec.describe Projects::ForkService, feature_category: :source_code_management
end
describe '#valid_fork_target?' do
- let(:project) { Project.new }
+ subject { service.valid_fork_target? }
+
let(:params) { {} }
context 'when target is not passed' do
- subject { described_class.new(project, user, params).valid_fork_target? }
-
context 'when current user is an admin' do
let(:user) { build(:user, :admin) }
@@ -547,7 +530,6 @@ RSpec.describe Projects::ForkService, feature_category: :source_code_management
let(:user) { create(:user) }
let(:finder_mock) { instance_double('ForkTargetsFinder', execute: [user.namespace]) }
- let(:project) { create(:project) }
before do
allow(ForkTargetsFinder).to receive(:new).with(project, user).and_return(finder_mock)
@@ -568,9 +550,9 @@ RSpec.describe Projects::ForkService, feature_category: :source_code_management
end
context 'when target is passed' do
- let(:target) { create(:group) }
+ subject { service.valid_fork_target?(target) }
- subject { described_class.new(project, user, params).valid_fork_target?(target) }
+ let(:target) { create(:group) }
context 'when current user is an admin' do
let(:user) { build(:user, :admin) }
diff --git a/spec/services/projects/participants_service_spec.rb b/spec/services/projects/participants_service_spec.rb
index 692f43eb205..167df7996ca 100644
--- a/spec/services/projects/participants_service_spec.rb
+++ b/spec/services/projects/participants_service_spec.rb
@@ -8,14 +8,18 @@ RSpec.describe Projects::ParticipantsService, feature_category: :groups_and_proj
let_it_be(:project) { create(:project, :public) }
let_it_be(:noteable) { create(:issue, project: project) }
+ let(:params) { {} }
+
before_all do
project.add_developer(user)
+ end
+ before do
stub_feature_flags(disable_all_mention: false)
end
def run_service
- described_class.new(project, user).execute(noteable)
+ described_class.new(project, user, params).execute(noteable)
end
it 'returns results in correct order' do
@@ -39,27 +43,27 @@ RSpec.describe Projects::ParticipantsService, feature_category: :groups_and_proj
it 'avoids N+1 UserDetail queries' do
project.add_developer(create(:user))
- control_count = ActiveRecord::QueryRecorder.new { run_service.to_a }.count
+ control = ActiveRecord::QueryRecorder.new { run_service.to_a }
BatchLoader::Executor.clear_current
project.add_developer(create(:user, status: build(:user_status, availability: :busy)))
- expect { run_service.to_a }.not_to exceed_query_limit(control_count)
+ expect { run_service.to_a }.not_to exceed_query_limit(control)
end
it 'avoids N+1 groups queries' do
group_1 = create(:group)
group_1.add_owner(user)
- control_count = ActiveRecord::QueryRecorder.new { run_service }.count
+ control = ActiveRecord::QueryRecorder.new { run_service }
BatchLoader::Executor.clear_current
group_2 = create(:group)
group_2.add_owner(user)
- expect { run_service }.not_to exceed_query_limit(control_count)
+ expect { run_service }.not_to exceed_query_limit(control)
end
end
@@ -129,6 +133,16 @@ RSpec.describe Projects::ParticipantsService, feature_category: :groups_and_proj
group_1.full_path, subgroup.full_path, group_2.full_path
])
end
+
+ context 'when search param is given' do
+ let(:params) { { search: 'bb' } }
+
+ it 'only returns matching groups' do
+ expect(group_items.pluck(:username)).to eq([
+ group_1.full_path, subgroup.full_path
+ ])
+ end
+ end
end
end
@@ -229,5 +243,17 @@ RSpec.describe Projects::ParticipantsService, feature_category: :groups_and_proj
end
end
end
+
+ context 'when search param is given' do
+ let_it_be(:project) { create(:project, :public) }
+ let_it_be(:member_1) { create(:user, name: 'John Doe').tap { |u| project.add_guest(u) } }
+ let_it_be(:member_2) { create(:user, name: 'Jane Doe ').tap { |u| project.add_guest(u) } }
+
+ let(:service) { described_class.new(project, create(:user), search: 'johnd') }
+
+ it 'only returns matching members' do
+ expect(usernames).to eq([member_1.username])
+ end
+ end
end
end
diff --git a/spec/services/projects/unlink_fork_service_spec.rb b/spec/services/projects/unlink_fork_service_spec.rb
index 2e1a6c03c90..3199614104f 100644
--- a/spec/services/projects/unlink_fork_service_spec.rb
+++ b/spec/services/projects/unlink_fork_service_spec.rb
@@ -70,14 +70,6 @@ RSpec.describe Projects::UnlinkForkService, :use_clean_rails_memory_store_cachin
subject.execute(refresh_statistics: false)
end
- it 'does not refresh project statistics when the feature flag is disabled' do
- stub_feature_flags(refresh_statistics_on_unlink_fork: false)
-
- expect(ProjectCacheWorker).not_to receive(:perform_async)
-
- subject.execute
- end
-
context 'when the original project was deleted' do
it 'does not fail when the original project is deleted' do
source = forked_project.forked_from_project
diff --git a/spec/services/projects/update_statistics_service_spec.rb b/spec/services/projects/update_statistics_service_spec.rb
index 5311b8daeb1..c90da48af8b 100644
--- a/spec/services/projects/update_statistics_service_spec.rb
+++ b/spec/services/projects/update_statistics_service_spec.rb
@@ -17,6 +17,12 @@ RSpec.describe Projects::UpdateStatisticsService, feature_category: :groups_and_
service.execute
end
+
+ it_behaves_like 'does not record an onboarding progress action' do
+ subject do
+ service.execute
+ end
+ end
end
context 'with an existing project' do
@@ -64,5 +70,33 @@ RSpec.describe Projects::UpdateStatisticsService, feature_category: :groups_and_
service.execute
end
end
+
+ context 'with an existing project with project repository' do
+ let_it_be(:project) { create(:project) }
+
+ subject { service.execute }
+
+ context 'when the repository is empty' do
+ it_behaves_like 'does not record an onboarding progress action'
+ end
+
+ context 'when the repository has more than one commit or more than one branch' do
+ where(:commit_count, :branch_count) do
+ 2 | 1
+ 1 | 2
+ 2 | 2
+ end
+
+ with_them do
+ before do
+ allow(project.repository).to receive_messages(commit_count: commit_count, branch_count: branch_count)
+ end
+
+ it_behaves_like 'records an onboarding progress action', :code_added do
+ let(:namespace) { project.namespace }
+ end
+ end
+ end
+ end
end
end
diff --git a/spec/services/push_event_payload_service_spec.rb b/spec/services/push_event_payload_service_spec.rb
index 999b71ff754..ef722fb34e7 100644
--- a/spec/services/push_event_payload_service_spec.rb
+++ b/spec/services/push_event_payload_service_spec.rb
@@ -70,7 +70,7 @@ RSpec.describe PushEventPayloadService, feature_category: :source_code_managemen
describe '#commit_from_id' do
it 'returns nil when creating a new ref' do
- service = described_class.new(event, before: Gitlab::Git::BLANK_SHA)
+ service = described_class.new(event, before: Gitlab::Git::SHA1_BLANK_SHA)
expect(service.commit_from_id).to be_nil
end
@@ -84,7 +84,7 @@ RSpec.describe PushEventPayloadService, feature_category: :source_code_managemen
describe '#commit_to_id' do
it 'returns nil when removing an existing ref' do
- service = described_class.new(event, after: Gitlab::Git::BLANK_SHA)
+ service = described_class.new(event, after: Gitlab::Git::SHA1_BLANK_SHA)
expect(service.commit_to_id).to be_nil
end
@@ -156,7 +156,7 @@ RSpec.describe PushEventPayloadService, feature_category: :source_code_managemen
describe '#create?' do
it 'returns true when creating a new ref' do
- service = described_class.new(event, before: Gitlab::Git::BLANK_SHA)
+ service = described_class.new(event, before: Gitlab::Git::SHA1_BLANK_SHA)
expect(service.create?).to eq(true)
end
@@ -170,7 +170,7 @@ RSpec.describe PushEventPayloadService, feature_category: :source_code_managemen
describe '#remove?' do
it 'returns true when removing an existing ref' do
- service = described_class.new(event, after: Gitlab::Git::BLANK_SHA)
+ service = described_class.new(event, after: Gitlab::Git::SHA1_BLANK_SHA)
expect(service.remove?).to eq(true)
end
@@ -184,13 +184,13 @@ RSpec.describe PushEventPayloadService, feature_category: :source_code_managemen
describe '#action' do
it 'returns :created when creating a ref' do
- service = described_class.new(event, before: Gitlab::Git::BLANK_SHA)
+ service = described_class.new(event, before: Gitlab::Git::SHA1_BLANK_SHA)
expect(service.action).to eq(:created)
end
it 'returns :removed when removing an existing ref' do
- service = described_class.new(event, before: '123', after: Gitlab::Git::BLANK_SHA)
+ service = described_class.new(event, before: '123', after: Gitlab::Git::SHA1_BLANK_SHA)
expect(service.action).to eq(:removed)
end
diff --git a/spec/services/quick_actions/interpret_service_spec.rb b/spec/services/quick_actions/interpret_service_spec.rb
index dc93fd96aee..8de71f2ddf8 100644
--- a/spec/services/quick_actions/interpret_service_spec.rb
+++ b/spec/services/quick_actions/interpret_service_spec.rb
@@ -564,7 +564,7 @@ RSpec.describe QuickActions::InterpretService, feature_category: :team_planning
it 'returns the reaction message' do
_, _, message = service.execute(content, issuable)
- expect(message).to eq('Toggled :100: emoji award.')
+ expect(message).to eq('Toggled :100: emoji reaction.')
end
end
@@ -1911,8 +1911,7 @@ RSpec.describe QuickActions::InterpretService, feature_category: :team_planning
let(:content) { "#{command} :100:" }
let(:issuable) { commit }
- # TODO: https://gitlab.com/gitlab-org/gitlab/-/issues/434446
- it_behaves_like 'failed command', "Could not apply award command."
+ it_behaves_like 'failed command', "Could not apply react command."
end
end
end
@@ -2325,7 +2324,7 @@ RSpec.describe QuickActions::InterpretService, feature_category: :team_planning
end
end
- context 'invite_email command' do
+ describe 'invite_email command' do
let_it_be(:issuable) { issue }
it_behaves_like 'failed command', "No email participants were added. Either none were provided, or they already exist." do
@@ -2455,6 +2454,102 @@ RSpec.describe QuickActions::InterpretService, feature_category: :team_planning
end
end
+ describe 'remove_email command' do
+ let_it_be_with_reload(:issuable) { issue }
+
+ it 'is not part of the available commands' do
+ expect(service.available_commands(issuable)).not_to include(a_hash_including(name: :remove_email))
+ end
+
+ context 'with existing email participant' do
+ let(:content) { '/remove_email user@example.com' }
+
+ subject(:remove_email) { service.execute(content, issuable) }
+
+ before do
+ issuable.issue_email_participants.create!(email: "user@example.com")
+ end
+
+ it 'returns message' do
+ _, _, message = service.execute(content, issuable)
+
+ expect(message).to eq('Removed user@example.com.')
+ end
+
+ it 'removes 1 participant' do
+ expect { remove_email }.to change { issue.issue_email_participants.count }.by(-1)
+ end
+
+ context 'with mixed case email' do
+ let(:content) { '/remove_email FirstLast@GitLab.com' }
+
+ before do
+ issuable.issue_email_participants.create!(email: "FirstLast@GitLab.com")
+ end
+
+ it 'returns correctly cased message' do
+ _, _, message = service.execute(content, issuable)
+
+ expect(message).to eq('Removed FirstLast@GitLab.com.')
+ end
+
+ it 'removes 1 participant' do
+ expect { remove_email }.to change { issue.issue_email_participants.count }.by(-1)
+ end
+ end
+
+ context 'with invalid email' do
+ let(:content) { '/remove_email user@example.com bad_email' }
+
+ it 'only removes valid emails' do
+ expect { remove_email }.to change { issue.issue_email_participants.count }.by(-1)
+ end
+ end
+
+ context 'with non-existing email address' do
+ let(:content) { '/remove_email NonExistent@gitlab.com' }
+
+ it 'returns message' do
+ _, _, message = service.execute(content, issuable)
+
+ expect(message).to eq("No email participants were removed. Either none were provided, or they don't exist.")
+ end
+ end
+
+ context 'with more than the max number of emails' do
+ let(:content) { '/remove_email user@example.com user1@example.com' }
+
+ before do
+ stub_const("IssueEmailParticipants::DestroyService::MAX_NUMBER_OF_EMAILS", 1)
+ # user@example.com has already been added above
+ issuable.issue_email_participants.create!(email: "user1@example.com")
+ end
+
+ it 'only removes the max allowed number of emails' do
+ expect { remove_email }.to change { issue.issue_email_participants.count }.by(-1)
+ end
+ end
+ end
+
+ context 'with non-persisted issue' do
+ let(:issuable) { build(:issue) }
+
+ it 'is not part of the available commands' do
+ expect(service.available_commands(issuable)).not_to include(a_hash_including(name: :remove_email))
+ end
+ end
+
+ context 'with feature flag disabled' do
+ before do
+ stub_feature_flags(issue_email_participants: false)
+ end
+
+ it 'is not part of the available commands' do
+ expect(service.available_commands(issuable)).not_to include(a_hash_including(name: :remove_email))
+ end
+ end
+ end
+
context 'severity command' do
let_it_be_with_reload(:issuable) { create(:incident, project: project) }
@@ -2533,6 +2628,16 @@ RSpec.describe QuickActions::InterpretService, feature_category: :team_planning
end
end
+ context 'when MR is already merged' do
+ before do
+ merge_request.mark_as_merged!
+ end
+
+ it_behaves_like 'approve command unavailable' do
+ let(:issuable) { merge_request }
+ end
+ end
+
it_behaves_like 'approve command unavailable' do
let(:issuable) { issue }
end
@@ -2574,11 +2679,21 @@ RSpec.describe QuickActions::InterpretService, feature_category: :team_planning
expect(merge_request.approved_by_users).to eq([developer])
end
+ end
+
+ context 'when MR is already merged' do
+ before do
+ merge_request.mark_as_merged!
+ end
it_behaves_like 'unapprove command unavailable' do
- let(:issuable) { issue }
+ let(:issuable) { merge_request }
end
end
+
+ it_behaves_like 'unapprove command unavailable' do
+ let(:issuable) { issue }
+ end
end
context 'crm_contact commands' do
@@ -2877,7 +2992,7 @@ RSpec.describe QuickActions::InterpretService, feature_category: :team_planning
it 'includes the emoji' do
_, explanations = service.explain(content, issue)
- expect(explanations).to eq(['Toggles :confetti_ball: emoji award.'])
+ expect(explanations).to eq(['Toggles :confetti_ball: emoji reaction.'])
end
end
@@ -3097,7 +3212,7 @@ RSpec.describe QuickActions::InterpretService, feature_category: :team_planning
it 'keeps quick actions' do
content_result, explanations = service.explain(content, issue, keep_actions: true)
- expect(content_result).to eq("\n/close")
+ expect(content_result).to eq("<p>/close</p>")
expect(explanations).to eq(['Closes this issue.'])
end
diff --git a/spec/services/repositories/changelog_service_spec.rb b/spec/services/repositories/changelog_service_spec.rb
index 1b5300672e3..d77a68288a5 100644
--- a/spec/services/repositories/changelog_service_spec.rb
+++ b/spec/services/repositories/changelog_service_spec.rb
@@ -164,7 +164,7 @@ RSpec.describe Repositories::ChangelogService, feature_category: :source_code_ma
RequestStore.clear!
- expect { request.call(sha3) }.not_to exceed_query_limit(control.count)
+ expect { request.call(sha3) }.not_to exceed_query_limit(control)
end
context 'when one of commits does not exist' do
diff --git a/spec/services/resource_access_tokens/revoke_service_spec.rb b/spec/services/resource_access_tokens/revoke_service_spec.rb
index 060697cd1df..aab22cb2815 100644
--- a/spec/services/resource_access_tokens/revoke_service_spec.rb
+++ b/spec/services/resource_access_tokens/revoke_service_spec.rb
@@ -26,7 +26,7 @@ RSpec.describe ResourceAccessTokens::RevokeService, feature_category: :system_ac
it 'removes membership of bot user' do
subject
- expect(resource.reload.users).not_to include(resource_bot)
+ expect(resource.reload).not_to have_user(resource_bot)
end
it 'initiates user removal' do
@@ -56,7 +56,7 @@ RSpec.describe ResourceAccessTokens::RevokeService, feature_category: :system_ac
it 'does not remove bot from member list' do
subject
- expect(resource.reload.users).to include(resource_bot)
+ expect(resource.reload).to have_user(resource_bot)
end
it 'does not transfer issuables of bot user to ghost user' do
diff --git a/spec/services/routes/rename_descendants_service_spec.rb b/spec/services/routes/rename_descendants_service_spec.rb
new file mode 100644
index 00000000000..72e43ddca26
--- /dev/null
+++ b/spec/services/routes/rename_descendants_service_spec.rb
@@ -0,0 +1,208 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Routes::RenameDescendantsService, feature_category: :groups_and_projects do
+ let_it_be(:parent_group) { create(:group, name: 'old-name', path: 'old-path') }
+ let_it_be(:parent_route) { parent_group.route }
+ let_it_be(:subgroups) { create_list(:group, 4, parent: parent_group) }
+ let_it_be(:subgroup_projects) { subgroups.map { |subgroup| create(:project, group: subgroup) } }
+
+ let(:subgroup_routes) { Route.for_routable(subgroups) }
+ let(:subgroup_projects_routes) { Route.for_routable(subgroup_projects) }
+
+ let(:subgroup_routes_with_old_path) { subgroup_routes.where('path LIKE ?', '%old-path%') }
+ let(:subgroup_projects_routes_with_old_path) { subgroup_projects_routes.where('path LIKE ?', '%old-path%') }
+ let(:subgroup_routes_with_new_path) { subgroup_routes.where('path LIKE ?', '%new-path%') }
+ let(:subgroup_projects_routes_with_new_path) { subgroup_projects_routes.where('path LIKE ?', '%new-path%') }
+
+ let(:subgroup_routes_with_old_name) { subgroup_routes.where('name LIKE ?', '%old-name%') }
+ let(:subgroup_projects_routes_with_old_name) { subgroup_projects_routes.where('name LIKE ?', '%old-name%') }
+ let(:subgroup_routes_with_new_name) { subgroup_routes.where('name LIKE ?', '%new-name%') }
+ let(:subgroup_projects_routes_with_new_name) { subgroup_projects_routes.where('name LIKE ?', '%new-name%') }
+
+ describe '#execute' do
+ shared_examples_for 'descendant paths are updated' do
+ it do
+ expect { execute }.to change {
+ subgroup_routes_with_old_path.size
+ }.from(4).to(0).and change {
+ subgroup_projects_routes_with_old_path.size
+ }.from(4).to(0).and change {
+ subgroup_routes_with_new_path.size
+ }.from(0).to(4).and change {
+ subgroup_projects_routes_with_new_path.size
+ }.from(0).to(4)
+ end
+ end
+
+ shared_examples_for 'descendant paths are not updated' do
+ it do
+ expect { execute }.to change {
+ subgroup_routes_with_old_path.size
+ }.by(0).and change {
+ subgroup_projects_routes_with_old_path.size
+ }.by(0).and change {
+ subgroup_routes_with_new_path.size
+ }.by(0).and change {
+ subgroup_projects_routes_with_new_path.size
+ }.by(0)
+ end
+ end
+
+ shared_examples_for 'descendant names are updated' do
+ it do
+ expect { execute }.to change {
+ subgroup_routes_with_old_name.size
+ }.from(4).to(0).and change {
+ subgroup_projects_routes_with_old_name.size
+ }.from(4).to(0).and change {
+ subgroup_routes_with_new_name.size
+ }.from(0).to(4).and change {
+ subgroup_projects_routes_with_new_name.size
+ }.from(0).to(4)
+ end
+ end
+
+ shared_examples_for 'descendant names are not updated' do
+ it do
+ expect { execute }.to change {
+ subgroup_routes_with_old_name.size
+ }.by(0).and change {
+ subgroup_projects_routes_with_old_name.size
+ }.by(0).and change {
+ subgroup_routes_with_new_name.size
+ }.by(0).and change {
+ subgroup_projects_routes_with_new_name.size
+ }.by(0)
+ end
+ end
+
+ shared_examples_for 'creates redirect_routes for all descendants' do
+ let(:subgroup_redirect_routes) { RedirectRoute.where(source: subgroups) }
+ let(:subgroup_projects_redirect_routes) { RedirectRoute.where(source: subgroup_projects) }
+
+ it do
+ expect { execute }.to change {
+ subgroup_redirect_routes.where('path LIKE ?', '%old-path%').size
+ }.from(0).to(4).and change {
+ subgroup_projects_redirect_routes.where('path LIKE ?', '%old-path%').size
+ }.from(0).to(4)
+ end
+ end
+
+ shared_examples_for 'does not create any redirect_routes' do
+ it do
+ expect { execute }.not_to change { RedirectRoute.count }
+ end
+ end
+
+ subject(:execute) do
+ described_class.new(parent_route).execute(changes)
+ end
+
+ before do
+ parent_route.name = 'new-name'
+ parent_route.path = 'new-path'
+ end
+
+ context 'on updating both name and path' do
+ let!(:changes) do
+ {
+ path: { saved: true, old_value: 'old-path' },
+ name: { saved: true, old_value: 'old-name' }
+ }
+ end
+
+ it_behaves_like 'descendant paths are updated'
+ it_behaves_like 'descendant names are updated'
+ it_behaves_like 'creates redirect_routes for all descendants'
+ end
+
+ context 'on updating only path' do
+ let!(:changes) do
+ {
+ path: { saved: true, old_value: 'old-path' },
+ name: { saved: false, old_value: 'old-name' }
+ }
+ end
+
+ it_behaves_like 'descendant paths are updated'
+ it_behaves_like 'descendant names are not updated'
+ it_behaves_like 'creates redirect_routes for all descendants'
+ end
+
+ context 'on updating only name' do
+ let!(:changes) do
+ {
+ path: { saved: false, old_value: 'old-path' },
+ name: { saved: true, old_value: 'old-name' }
+ }
+ end
+
+ it_behaves_like 'descendant paths are not updated'
+ it_behaves_like 'descendant names are updated'
+ it_behaves_like 'does not create any redirect_routes'
+ end
+
+ context 'on not updating both path and name' do
+ let!(:changes) do
+ {
+ path: { saved: false, old_value: 'old-path' },
+ name: { saved: false, old_value: 'old-name' }
+ }
+ end
+
+ it_behaves_like 'descendant paths are not updated'
+ it_behaves_like 'descendant names are not updated'
+ it_behaves_like 'does not create any redirect_routes'
+ end
+
+ context 'when `changes` are not in the expected format' do
+ let!(:changes) do
+ {
+ not_path: { saved: false, old_value: 'old-path' },
+ name: { saved: true, old_value: 'old-name' }
+ }
+ end
+
+ it 'errors out' do
+ expect { execute }.to raise_error(KeyError)
+ end
+ end
+
+ context 'for batching' do
+ before do
+ stub_const("#{described_class.name}::BATCH_SIZE", 2)
+ end
+
+ let!(:changes) do
+ {
+ path: { saved: true, old_value: 'old-path' },
+ name: { saved: true, old_value: 'old-name' }
+ }
+ end
+
+ it 'bulk updates and bulk inserts records in batches' do
+ query_recorder = ActiveRecord::QueryRecorder.new do
+ execute
+ end
+
+ # There are 8 descendants to this group.
+ # 4 subgroups, and 1 project each in each subgroup == total of 8.
+ # With a batch size of 2, that is
+ # 4 queries to update `routes` and 4 queries to insert `redirect_routes`
+ update_routes_queries = query_recorder.log.grep(
+ /INSERT INTO "routes" .* ON CONFLICT \("id"\) DO UPDATE SET/
+ )
+
+ insert_redirect_routes_queries = query_recorder.log.grep(
+ /INSERT INTO "redirect_routes" .* ON CONFLICT \(lower\(\(path\)::text\) varchar_pattern_ops\) DO NOTHING/
+ )
+
+ expect(update_routes_queries.count).to eq(4)
+ expect(insert_redirect_routes_queries.count).to eq(4)
+ end
+ end
+ end
+end
diff --git a/spec/services/security/merge_reports_service_spec.rb b/spec/services/security/merge_reports_service_spec.rb
index c141bbe5b5a..a65e73bd8ce 100644
--- a/spec/services/security/merge_reports_service_spec.rb
+++ b/spec/services/security/merge_reports_service_spec.rb
@@ -20,7 +20,8 @@ RSpec.describe Security::MergeReportsService, '#execute', feature_category: :cod
:ci_reports_security_finding,
identifiers: [identifier_1_primary, identifier_1_cve],
scanner: scanner_1,
- severity: :low
+ severity: :low,
+ uuid: '61eb8e3e-3be1-4d6c-ba26-4e0dd4f94610'
)
end
@@ -29,7 +30,8 @@ RSpec.describe Security::MergeReportsService, '#execute', feature_category: :cod
:ci_reports_security_finding,
identifiers: [identifier_1_primary, identifier_1_cve],
scanner: scanner_1,
- severity: :low
+ severity: :low,
+ uuid: '61eb8e3e-3be1-4d6c-ba26-4e0dd4f94611'
)
end
@@ -39,7 +41,8 @@ RSpec.describe Security::MergeReportsService, '#execute', feature_category: :cod
identifiers: [identifier_2_primary, identifier_2_cve],
location: build(:ci_reports_security_locations_sast, start_line: 32, end_line: 34),
scanner: scanner_2,
- severity: :medium
+ severity: :medium,
+ uuid: '61eb8e3e-3be1-4d6c-ba26-4e0dd4f94614'
)
end
@@ -49,7 +52,8 @@ RSpec.describe Security::MergeReportsService, '#execute', feature_category: :cod
identifiers: [identifier_2_primary, identifier_2_cve],
location: build(:ci_reports_security_locations_sast, start_line: 32, end_line: 34),
scanner: scanner_2,
- severity: :medium
+ severity: :medium,
+ uuid: '61eb8e3e-3be1-4d6c-ba26-4e0dd4f94613'
)
end
@@ -59,7 +63,8 @@ RSpec.describe Security::MergeReportsService, '#execute', feature_category: :cod
identifiers: [identifier_2_primary, identifier_2_cve],
location: build(:ci_reports_security_locations_sast, start_line: 42, end_line: 44),
scanner: scanner_2,
- severity: :medium
+ severity: :medium,
+ uuid: '61eb8e3e-3be1-4d6c-ba26-4e0dd4f94612'
)
end
@@ -68,7 +73,8 @@ RSpec.describe Security::MergeReportsService, '#execute', feature_category: :cod
:ci_reports_security_finding,
identifiers: [identifier_cwe],
scanner: scanner_3,
- severity: :high
+ severity: :high,
+ uuid: '61eb8e3e-3be1-4d6c-ba26-4e0dd4f94615'
)
end
@@ -77,7 +83,8 @@ RSpec.describe Security::MergeReportsService, '#execute', feature_category: :cod
:ci_reports_security_finding,
identifiers: [identifier_cwe],
scanner: scanner_1,
- severity: :critical
+ severity: :critical,
+ uuid: '61eb8e3e-3be1-4d6c-ba26-4e0dd4f94616'
)
end
@@ -86,7 +93,8 @@ RSpec.describe Security::MergeReportsService, '#execute', feature_category: :cod
:ci_reports_security_finding,
identifiers: [identifier_wasc],
scanner: scanner_1,
- severity: :medium
+ severity: :medium,
+ uuid: '61eb8e3e-3be1-4d6c-ba26-4e0dd4f94617'
)
end
@@ -95,7 +103,8 @@ RSpec.describe Security::MergeReportsService, '#execute', feature_category: :cod
:ci_reports_security_finding,
identifiers: [identifier_wasc],
scanner: scanner_2,
- severity: :critical
+ severity: :critical,
+ uuid: '61eb8e3e-3be1-4d6c-ba26-4e0dd4f94618'
)
end
@@ -226,9 +235,32 @@ RSpec.describe Security::MergeReportsService, '#execute', feature_category: :cod
let(:identifier_cve) { build(:ci_reports_security_identifier, external_id: 'CVE-2019-123', external_type: 'cve') }
let(:identifier_semgrep) { build(:ci_reports_security_identifier, external_id: 'rules.bandit.B105', external_type: 'semgrep_id') }
- let(:finding_id_1) { build(:ci_reports_security_finding, identifiers: [identifier_bandit, identifier_cve], scanner: bandit_scanner, report_type: :sast) }
- let(:finding_id_2) { build(:ci_reports_security_finding, identifiers: [identifier_cve], scanner: semgrep_scanner, report_type: :sast) }
- let(:finding_id_3) { build(:ci_reports_security_finding, identifiers: [identifier_semgrep], scanner: semgrep_scanner, report_type: :sast) }
+ let(:finding_id_1) do
+ build(
+ :ci_reports_security_finding,
+ identifiers: [identifier_bandit, identifier_cve],
+ scanner: bandit_scanner,
+ report_type: :sast,
+ uuid: '21ab978a-7052-5428-af0b-c7a4b3fe5020')
+ end
+
+ let(:finding_id_2) do
+ build(
+ :ci_reports_security_finding,
+ identifiers: [identifier_cve],
+ scanner: semgrep_scanner,
+ report_type: :sast,
+ uuid: '21ab978a-7052-5428-af0b-c7a4b3fe5021')
+ end
+
+ let(:finding_id_3) do
+ build(
+ :ci_reports_security_finding,
+ identifiers: [identifier_semgrep],
+ scanner: semgrep_scanner,
+ report_type: :sast,
+ uuid: '21ab978a-7052-5428-af0b-c7a4b3fe5022')
+ end
let(:bandit_report) do
build(:ci_reports_security_report,
diff --git a/spec/services/spam/spam_verdict_service_spec.rb b/spec/services/spam/spam_verdict_service_spec.rb
index 361742699b0..8669bca90bd 100644
--- a/spec/services/spam/spam_verdict_service_spec.rb
+++ b/spec/services/spam/spam_verdict_service_spec.rb
@@ -263,11 +263,10 @@ RSpec.describe Spam::SpamVerdictService, feature_category: :instance_resiliency
end
context 'if the endpoint is accessible' do
- let(:user_scores) { Abuse::UserTrustScore.new(user) }
-
before do
allow(service).to receive(:spamcheck_client).and_return(spam_client)
allow(spam_client).to receive(:spam?).and_return(spam_client_result)
+ allow(Labkit::Correlation::CorrelationId).to receive(:current_id).and_return('cid')
end
context 'if the result is a NOOP verdict' do
@@ -275,8 +274,8 @@ RSpec.describe Spam::SpamVerdictService, feature_category: :instance_resiliency
let(:verdict_value) { ::Spamcheck::SpamVerdict::Verdict::NOOP }
it 'returns the verdict' do
+ expect(Abuse::TrustScoreWorker).not_to receive(:perform_async)
is_expected.to eq(NOOP)
- expect(user_scores.spam_score).to eq(0.0)
end
end
@@ -286,8 +285,8 @@ RSpec.describe Spam::SpamVerdictService, feature_category: :instance_resiliency
context 'the result was evaluated' do
it 'returns the verdict and updates the spam score' do
+ expect(Abuse::TrustScoreWorker).to receive(:perform_async).once.with(user.id, :spamcheck, instance_of(Float), 'cid')
is_expected.to eq(ALLOW)
- expect(user_scores.spam_score).to be_within(0.000001).of(verdict_score)
end
end
@@ -295,8 +294,8 @@ RSpec.describe Spam::SpamVerdictService, feature_category: :instance_resiliency
let(:verdict_evaluated) { false }
it 'returns the verdict and does not update the spam score' do
+ expect(Abuse::TrustScoreWorker).not_to receive(:perform_async)
expect(subject).to eq(ALLOW)
- expect(user_scores.spam_score).to eq(0.0)
end
end
end
@@ -317,8 +316,8 @@ RSpec.describe Spam::SpamVerdictService, feature_category: :instance_resiliency
with_them do
it "returns expected spam constant and updates the spam score" do
+ expect(Abuse::TrustScoreWorker).to receive(:perform_async).once.with(user.id, :spamcheck, instance_of(Float), 'cid')
is_expected.to eq(expected)
- expect(user_scores.spam_score).to be_within(0.000001).of(verdict_score)
end
end
end
diff --git a/spec/services/system_note_service_spec.rb b/spec/services/system_note_service_spec.rb
index 1eb11c80264..2e5545b610a 100644
--- a/spec/services/system_note_service_spec.rb
+++ b/spec/services/system_note_service_spec.rb
@@ -77,6 +77,18 @@ RSpec.describe SystemNoteService, feature_category: :shared do
end
end
+ describe '.request_review' do
+ let(:reviewer) { double }
+
+ it 'calls IssuableService' do
+ expect_next_instance_of(::SystemNotes::IssuablesService) do |service|
+ expect(service).to receive(:request_review).with(reviewer)
+ end
+
+ described_class.request_review(noteable, project, author, reviewer)
+ end
+ end
+
describe '.change_issuable_contacts' do
let(:added_count) { 5 }
let(:removed_count) { 3 }
@@ -515,6 +527,18 @@ RSpec.describe SystemNoteService, feature_category: :shared do
end
end
+ describe '.email_participants' do
+ let(:body) { 'added user@example.com' }
+
+ it 'calls IssuableService' do
+ expect_next_instance_of(::SystemNotes::IssuablesService) do |service|
+ expect(service).to receive(:email_participants).with(body)
+ end
+
+ described_class.email_participants(noteable, project, author, body)
+ end
+ end
+
describe '.discussion_lock' do
let(:issuable) { double }
diff --git a/spec/services/system_notes/issuables_service_spec.rb b/spec/services/system_notes/issuables_service_spec.rb
index 0ba20ee5be1..2b48b24b2b4 100644
--- a/spec/services/system_notes/issuables_service_spec.rb
+++ b/spec/services/system_notes/issuables_service_spec.rb
@@ -213,6 +213,21 @@ RSpec.describe ::SystemNotes::IssuablesService, feature_category: :team_planning
end
end
+ describe '#request_review' do
+ subject(:request_review) { service.request_review(reviewer) }
+
+ let_it_be(:reviewer) { create(:user) }
+ let_it_be(:noteable) { create(:merge_request, :simple, source_project: project, reviewers: [reviewer]) }
+
+ it_behaves_like 'a system note' do
+ let(:action) { 'reviewer' }
+ end
+
+ it 'builds a correct phrase when a reviewer has been requested from a reviewer' do
+ expect(request_review.note).to eq "requested review from #{reviewer.to_reference}"
+ end
+ end
+
describe '#change_issuable_contacts' do
subject { service.change_issuable_contacts(1, 1) }
@@ -770,6 +785,14 @@ RSpec.describe ::SystemNotes::IssuablesService, feature_category: :team_planning
end
end
+ describe '#email_participants' do
+ let(:body) { "added user@example.com" }
+
+ subject(:system_note) { service.email_participants(body) }
+
+ it { expect(system_note.note).to eq(body) }
+ end
+
describe '#discussion_lock' do
subject { service.discussion_lock }
diff --git a/spec/services/system_notes/time_tracking_service_spec.rb b/spec/services/system_notes/time_tracking_service_spec.rb
index cf994220946..6502aa5d2a2 100644
--- a/spec/services/system_notes/time_tracking_service_spec.rb
+++ b/spec/services/system_notes/time_tracking_service_spec.rb
@@ -198,11 +198,13 @@ RSpec.describe ::SystemNotes::TimeTrackingService, feature_category: :team_plann
let(:action) { 'time_tracking' }
end
- context 'with a time estimate' do
- it 'sets the note text' do
+ context 'when adding a time estimate' do
+ before do
noteable.update_attribute(:time_estimate, 277200)
+ end
- expect(subject.note).to eq "changed time estimate to 1w 4d 5h"
+ it 'sets the note text' do
+ expect(subject.note).to eq "added time estimate of 1w 4d 5h"
end
context 'when time_tracking_limit_to_hours setting is true' do
@@ -211,16 +213,32 @@ RSpec.describe ::SystemNotes::TimeTrackingService, feature_category: :team_plann
end
it 'sets the note text' do
- noteable.update_attribute(:time_estimate, 277200)
-
- expect(subject.note).to eq "changed time estimate to 77h"
+ expect(subject.note).to eq "added time estimate of 77h"
end
end
end
- context 'without a time estimate' do
+ context 'when removing a time estimate' do
+ before do
+ noteable.update_attribute(:time_estimate, 277200)
+ noteable.save!
+ noteable.update_attribute(:time_estimate, 0)
+ end
+
+ it 'sets the note text' do
+ expect(subject.note).to eq "removed time estimate of 1w 4d 5h"
+ end
+ end
+
+ context 'when changing a time estimate' do
+ before do
+ noteable.update_attribute(:time_estimate, 277200)
+ noteable.save!
+ noteable.update_attribute(:time_estimate, 3600)
+ end
+
it 'sets the note text' do
- expect(subject.note).to eq "removed time estimate"
+ expect(subject.note).to eq "changed time estimate to 1h from 1w 4d 5h"
end
end
diff --git a/spec/services/todo_service_spec.rb b/spec/services/todo_service_spec.rb
index 0b4cf9e53db..df00859fd52 100644
--- a/spec/services/todo_service_spec.rb
+++ b/spec/services/todo_service_spec.rb
@@ -675,6 +675,8 @@ RSpec.describe TodoService, feature_category: :team_planning do
service.mark_todo(unassigned_issue, author)
should_create_todo(user: author, target: unassigned_issue, action: Todo::MARKED)
+ expect(Gitlab::UsageDataCounters::WorkItemActivityUniqueCounter)
+ .not_to receive(:track_work_item_todo_marked_action)
end
context 'when issue belongs to a group' do
@@ -690,6 +692,8 @@ RSpec.describe TodoService, feature_category: :team_planning do
project: nil,
group: group
)
+ expect(Gitlab::UsageDataCounters::WorkItemActivityUniqueCounter)
+ .not_to receive(:track_work_item_todo_marked_action)
end
end
end
@@ -748,10 +752,13 @@ RSpec.describe TodoService, feature_category: :team_planning do
end
describe 'Work Items' do
- let_it_be(:work_item) { create(:work_item, :task, project: project, author: author) }
+ let(:work_item) { create(:work_item, :objective, project: project, author: author) }
+ let(:activity_counter_class) { Gitlab::UsageDataCounters::WorkItemActivityUniqueCounter }
describe '#mark_todo' do
it 'creates a todo from a work item' do
+ expect(activity_counter_class).to receive(:track_work_item_mark_todo_action).with(author: author)
+
service.mark_todo(work_item, author)
should_create_todo(user: author, target: work_item, action: Todo::MARKED)
@@ -760,6 +767,9 @@ RSpec.describe TodoService, feature_category: :team_planning do
context 'when work item belongs to a group' do
it 'creates a todo from a work item' do
group_work_item = create(:work_item, :group_level, namespace: group)
+
+ expect(activity_counter_class).to receive(:track_work_item_mark_todo_action).with(author: group_work_item.author)
+
service.mark_todo(group_work_item, group_work_item.author)
should_create_todo(
@@ -1120,6 +1130,8 @@ RSpec.describe TodoService, feature_category: :team_planning do
service.mark_todo(unassigned_mr, author)
should_create_todo(user: author, target: unassigned_mr, action: Todo::MARKED)
+ expect(Gitlab::UsageDataCounters::WorkItemActivityUniqueCounter)
+ .not_to receive(:track_work_item_todo_marked_action)
end
end
@@ -1264,9 +1276,9 @@ RSpec.describe TodoService, feature_category: :team_planning do
# Excluding queries for user permissions because those do execute N+1 queries
allow_any_instance_of(User).to receive(:can?).and_return(true)
- control_count = ActiveRecord::QueryRecorder.new { service.update_note(note_mentioning_1_user, author, skip_users) }.count
+ control = ActiveRecord::QueryRecorder.new { service.update_note(note_mentioning_1_user, author, skip_users) }
- expect { service.update_note(note_mentioning_3_users, author, skip_users) }.not_to exceed_query_limit(control_count)
+ expect { service.update_note(note_mentioning_3_users, author, skip_users) }.not_to exceed_query_limit(control)
end
end
diff --git a/spec/services/todos/destroy/destroyed_issuable_service_spec.rb b/spec/services/todos/destroy/destroyed_issuable_service_spec.rb
index 63ff189ede5..cccf1a2cfa8 100644
--- a/spec/services/todos/destroy/destroyed_issuable_service_spec.rb
+++ b/spec/services/todos/destroy/destroyed_issuable_service_spec.rb
@@ -14,7 +14,7 @@ RSpec.describe Todos::Destroy::DestroyedIssuableService, feature_category: :team
let_it_be(:done_todo) { create(:todo, :done, project: target.project, target: target, user: user) }
it 'deletes todos for specified target ID and type' do
- control_count = ActiveRecord::QueryRecorder.new { subject }.count
+ control = ActiveRecord::QueryRecorder.new { subject }
# Create more todos for the target
create(:todo, :pending, project: target.project, target: target, user: user)
@@ -22,7 +22,7 @@ RSpec.describe Todos::Destroy::DestroyedIssuableService, feature_category: :team
create(:todo, :done, project: target.project, target: target, user: user)
create(:todo, :done, project: target.project, target: target, user: user)
- expect { subject }.not_to exceed_query_limit(control_count)
+ expect { subject }.not_to exceed_query_limit(control)
end
it 'invalidates todos cache counts of todo users', :use_clean_rails_redis_caching do
diff --git a/spec/services/user_project_access_changed_service_spec.rb b/spec/services/user_project_access_changed_service_spec.rb
index a50bd3ee2f1..8236d892072 100644
--- a/spec/services/user_project_access_changed_service_spec.rb
+++ b/spec/services/user_project_access_changed_service_spec.rb
@@ -77,7 +77,7 @@ RSpec.describe UserProjectAccessChangedService, feature_category: :system_access
it 'avoids N+1 cached queries', :use_sql_query_cache, :request_store do
# Run this once to establish a baseline
- control_count = ActiveRecord::QueryRecorder.new(skip_cached: false) do
+ control = ActiveRecord::QueryRecorder.new(skip_cached: false) do
service.execute
end
@@ -87,7 +87,7 @@ RSpec.describe UserProjectAccessChangedService, feature_category: :system_access
.with([[1], [2], [3], [4], [5]])
.and_return(10)
- expect { service.execute }.not_to exceed_all_query_limit(control_count.count)
+ expect { service.execute }.not_to exceed_all_query_limit(control)
end
end
end
diff --git a/spec/services/users/migrate_records_to_ghost_user_service_spec.rb b/spec/services/users/migrate_records_to_ghost_user_service_spec.rb
index 57378c07dd7..522b793036b 100644
--- a/spec/services/users/migrate_records_to_ghost_user_service_spec.rb
+++ b/spec/services/users/migrate_records_to_ghost_user_service_spec.rb
@@ -150,6 +150,16 @@ RSpec.describe Users::MigrateRecordsToGhostUserService, feature_category: :user_
let(:created_record) { create(:user_achievement, awarded_by_user: user, revoked_by_user: user) }
end
end
+
+ context 'when user is a bot user and has associated access tokens' do
+ let_it_be(:user) { create(:user, :project_bot) }
+ let_it_be(:token) { create(:personal_access_token, user: user) }
+
+ it "deletes the access token" do
+ service.execute
+ expect(PersonalAccessToken.find_by(id: token.id)).to eq nil
+ end
+ end
end
context 'on post-migrate cleanups' do
diff --git a/spec/services/users/update_todo_count_cache_service_spec.rb b/spec/services/users/update_todo_count_cache_service_spec.rb
index eec637cf5b4..d69a4ba99b7 100644
--- a/spec/services/users/update_todo_count_cache_service_spec.rb
+++ b/spec/services/users/update_todo_count_cache_service_spec.rb
@@ -44,9 +44,9 @@ RSpec.describe Users::UpdateTodoCountCacheService, feature_category: :team_plann
end
it 'avoids N+1 queries' do
- control_count = ActiveRecord::QueryRecorder.new { execute_single }.count
+ control = ActiveRecord::QueryRecorder.new { execute_single }
- expect { execute_all }.not_to exceed_query_limit(control_count)
+ expect { execute_all }.not_to exceed_query_limit(control)
end
it 'executes one query per batch of users' do
diff --git a/spec/services/work_items/widgets/assignees_service/update_service_spec.rb b/spec/services/work_items/callbacks/assignees_spec.rb
index 66e30e2f882..e6f57c54104 100644
--- a/spec/services/work_items/widgets/assignees_service/update_service_spec.rb
+++ b/spec/services/work_items/callbacks/assignees_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe WorkItems::Widgets::AssigneesService::UpdateService, :freeze_time, feature_category: :portfolio_management do
+RSpec.describe WorkItems::Callbacks::Assignees, :freeze_time, feature_category: :portfolio_management do
let_it_be(:reporter) { create(:user) }
let_it_be(:project) { create(:project, :private) }
let_it_be(:new_assignee) { create(:user) }
@@ -11,7 +11,6 @@ RSpec.describe WorkItems::Widgets::AssigneesService::UpdateService, :freeze_time
create(:work_item, project: project, updated_at: 1.day.ago)
end
- let(:widget) { work_item.widgets.find { |widget| widget.is_a?(WorkItems::Widgets::Assignees) } }
let(:current_user) { reporter }
let(:params) { { assignee_ids: [new_assignee.id] } }
@@ -20,13 +19,13 @@ RSpec.describe WorkItems::Widgets::AssigneesService::UpdateService, :freeze_time
project.add_guest(new_assignee)
end
- describe '#before_update_in_transaction' do
- let(:service) { described_class.new(widget: widget, current_user: current_user) }
+ describe '#before_update' do
+ let(:service) { described_class.new(issuable: work_item, current_user: current_user, params: params) }
- subject { service.before_update_in_transaction(params: params) }
+ subject(:before_update_callback) { service.before_update }
it 'updates the assignees and sets updated_at to the current time' do
- subject
+ before_update_callback
expect(work_item.assignee_ids).to contain_exactly(new_assignee.id)
expect(work_item.updated_at).to be_like_time(Time.current)
@@ -40,7 +39,7 @@ RSpec.describe WorkItems::Widgets::AssigneesService::UpdateService, :freeze_time
end
it 'removes existing assignees' do
- subject
+ before_update_callback
expect(work_item.assignee_ids).to be_empty
expect(work_item.updated_at).to be_like_time(Time.current)
@@ -51,7 +50,7 @@ RSpec.describe WorkItems::Widgets::AssigneesService::UpdateService, :freeze_time
let(:current_user) { create(:user) }
it 'does not update the assignees' do
- subject
+ before_update_callback
expect(work_item.assignee_ids).to be_empty
expect(work_item.updated_at).to be_like_time(1.day.ago)
@@ -67,7 +66,7 @@ RSpec.describe WorkItems::Widgets::AssigneesService::UpdateService, :freeze_time
end
it 'sets all the given assignees' do
- subject
+ before_update_callback
expect(work_item.assignee_ids).to contain_exactly(new_assignee.id, reporter.id)
expect(work_item.updated_at).to be_like_time(Time.current)
@@ -80,7 +79,7 @@ RSpec.describe WorkItems::Widgets::AssigneesService::UpdateService, :freeze_time
end
it 'only sets the first assignee' do
- subject
+ before_update_callback
expect(work_item.assignee_ids).to contain_exactly(new_assignee.id)
expect(work_item.updated_at).to be_like_time(Time.current)
@@ -92,7 +91,7 @@ RSpec.describe WorkItems::Widgets::AssigneesService::UpdateService, :freeze_time
let(:params) { { assignee_ids: [create(:user).id] } }
it 'does not set the assignee' do
- subject
+ before_update_callback
expect(work_item.assignee_ids).to be_empty
expect(work_item.updated_at).to be_like_time(1.day.ago)
@@ -105,7 +104,7 @@ RSpec.describe WorkItems::Widgets::AssigneesService::UpdateService, :freeze_time
end
it 'does not touch updated_at' do
- subject
+ before_update_callback
expect(work_item.assignee_ids).to contain_exactly(new_assignee.id)
expect(work_item.updated_at).to be_like_time(1.day.ago)
@@ -116,12 +115,12 @@ RSpec.describe WorkItems::Widgets::AssigneesService::UpdateService, :freeze_time
let(:params) { {} }
before do
- allow(service).to receive(:new_type_excludes_widget?).and_return(true)
+ allow(service).to receive(:excluded_in_new_type?).and_return(true)
work_item.assignee_ids = [new_assignee.id]
end
it "resets the work item's assignees" do
- subject
+ before_update_callback
expect(work_item.assignee_ids).to be_empty
end
diff --git a/spec/services/work_items/widgets/current_user_todos_service/update_service_spec.rb b/spec/services/work_items/callbacks/current_user_todos_spec.rb
index aa7257e9e62..0f16687e620 100644
--- a/spec/services/work_items/widgets/current_user_todos_service/update_service_spec.rb
+++ b/spec/services/work_items/callbacks/current_user_todos_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe WorkItems::Widgets::CurrentUserTodosService::UpdateService, feature_category: :team_planning do
+RSpec.describe WorkItems::Callbacks::CurrentUserTodos, feature_category: :team_planning do
let_it_be(:reporter) { create(:user) }
let_it_be(:project) { create(:project, :private) }
let_it_be(:current_user) { reporter }
@@ -25,16 +25,16 @@ RSpec.describe WorkItems::Widgets::CurrentUserTodosService::UpdateService, featu
create(:todo, state: :pending, target: work_item, target_type: work_item.class.name, user: create(:user))
end
- let(:widget) { work_item.widgets.find { |widget| widget.is_a?(WorkItems::Widgets::CurrentUserTodos) } }
+ let(:widget) { work_item.widgets.find { |widget| widget.is_a?(WorkItems::Callbacks::CurrentUserTodos) } }
before_all do
project.add_reporter(reporter)
end
describe '#before_update_in_transaction' do
- subject do
- described_class.new(widget: widget, current_user: current_user)
- .before_update_in_transaction(params: params)
+ subject(:service) do
+ described_class.new(issuable: work_item, current_user: current_user, params: params)
+ .before_update
end
context 'when adding a todo' do
@@ -44,7 +44,7 @@ RSpec.describe WorkItems::Widgets::CurrentUserTodosService::UpdateService, featu
let(:current_user) { create(:user) }
it 'does add a todo' do
- expect { subject }.not_to change { Todo.count }
+ expect { service }.not_to change { Todo.count }
end
end
@@ -52,7 +52,7 @@ RSpec.describe WorkItems::Widgets::CurrentUserTodosService::UpdateService, featu
let(:params) { { action: "add" } }
it 'creates a new todo for the user and the work item' do
- expect { subject }.to change { current_user.todos.count }.by(1)
+ expect { service }.to change { current_user.todos.count }.by(1)
todo = current_user.todos.last
@@ -69,7 +69,7 @@ RSpec.describe WorkItems::Widgets::CurrentUserTodosService::UpdateService, featu
let(:current_user) { create(:user) }
it 'does not change todo status' do
- subject
+ service
expect(pending_todo1.reload).to be_pending
expect(pending_todo2.reload).to be_pending
@@ -80,7 +80,7 @@ RSpec.describe WorkItems::Widgets::CurrentUserTodosService::UpdateService, featu
context 'when resolving all todos of the work item', :aggregate_failures do
it 'resolves todos of the user for the work item' do
- subject
+ service
expect(pending_todo1.reload).to be_done
expect(pending_todo2.reload).to be_done
@@ -93,7 +93,7 @@ RSpec.describe WorkItems::Widgets::CurrentUserTodosService::UpdateService, featu
let(:params) { { action: "mark_as_done", todo_id: pending_todo1.id } }
it 'resolves todos of the user for the work item' do
- subject
+ service
expect(pending_todo1.reload).to be_done
expect(pending_todo2.reload).to be_pending
diff --git a/spec/services/work_items/widgets/description_service/update_service_spec.rb b/spec/services/work_items/callbacks/description_spec.rb
index 84704d3e002..27413c9ab14 100644
--- a/spec/services/work_items/widgets/description_service/update_service_spec.rb
+++ b/spec/services/work_items/callbacks/description_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe WorkItems::Widgets::DescriptionService::UpdateService, feature_category: :portfolio_management do
+RSpec.describe WorkItems::Callbacks::Description, feature_category: :portfolio_management do
let_it_be(:random_user) { create(:user) }
let_it_be(:author) { create(:user) }
let_it_be(:guest) { create(:user) }
@@ -22,12 +22,10 @@ RSpec.describe WorkItems::Widgets::DescriptionService::UpdateService, feature_ca
)
end
- let(:widget) { work_item.widgets.find { |widget| widget.is_a?(WorkItems::Widgets::Description) } }
-
describe '#update' do
- let(:service) { described_class.new(widget: widget, current_user: current_user) }
+ let(:service) { described_class.new(issuable: work_item, current_user: current_user, params: params) }
- subject(:before_update_callback) { service.before_update_callback(params: params) }
+ subject(:before_update_callback) { service.before_update }
shared_examples 'sets work item description' do
it 'correctly sets work item description value' do
@@ -59,7 +57,7 @@ RSpec.describe WorkItems::Widgets::DescriptionService::UpdateService, feature_ca
context 'when user is a project reporter' do
let(:current_user) { reporter }
- before do
+ before_all do
project.add_reporter(reporter)
end
@@ -91,7 +89,7 @@ RSpec.describe WorkItems::Widgets::DescriptionService::UpdateService, feature_ca
let(:params) { {} }
before do
- allow(service).to receive(:new_type_excludes_widget?).and_return(true)
+ allow(service).to receive(:excluded_in_new_type?).and_return(true)
work_item.update!(description: 'test')
end
@@ -108,7 +106,7 @@ RSpec.describe WorkItems::Widgets::DescriptionService::UpdateService, feature_ca
context 'when user is a project guest' do
let(:current_user) { guest }
- before do
+ before_all do
project.add_guest(guest)
end
diff --git a/spec/services/work_items/widgets/notifications_service/update_service_spec.rb b/spec/services/work_items/callbacks/notifications_spec.rb
index 9615020fe49..2d11dc46fcb 100644
--- a/spec/services/work_items/widgets/notifications_service/update_service_spec.rb
+++ b/spec/services/work_items/callbacks/notifications_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe WorkItems::Widgets::NotificationsService::UpdateService, feature_category: :team_planning do
+RSpec.describe WorkItems::Callbacks::Notifications, feature_category: :team_planning do
let_it_be(:group) { create(:group) }
let_it_be(:project) { create(:project, :private, group: group) }
let_it_be(:guest) { create(:user).tap { |u| project.add_guest(u) } }
@@ -10,13 +10,13 @@ RSpec.describe WorkItems::Widgets::NotificationsService::UpdateService, feature_
let_it_be_with_reload(:work_item) { create(:work_item, project: project, author: author) }
let_it_be(:current_user) { guest }
- let(:widget) { work_item.widgets.find { |widget| widget.is_a?(WorkItems::Widgets::Notifications) } }
- let(:service) { described_class.new(widget: widget, current_user: current_user) }
+ let(:widget) { work_item.widgets.find { |widget| widget.is_a?(WorkItems::Callbacks::Notifications) } }
+ let(:service) { described_class.new(issuable: work_item, current_user: current_user, params: params) }
describe '#before_update_in_transaction' do
let(:expected) { params[:subscribed] }
- subject(:update_notifications) { service.before_update_in_transaction(params: params) }
+ subject(:update_notifications) { service.before_update }
shared_examples 'failing to update subscription' do
context 'when user is subscribed with a subscription record' do
diff --git a/spec/services/work_items/widgets/start_and_due_date_service/update_service_spec.rb b/spec/services/work_items/callbacks/start_and_due_date_spec.rb
index f9708afd313..b26a33976fa 100644
--- a/spec/services/work_items/widgets/start_and_due_date_service/update_service_spec.rb
+++ b/spec/services/work_items/callbacks/start_and_due_date_spec.rb
@@ -2,19 +2,19 @@
require 'spec_helper'
-RSpec.describe WorkItems::Widgets::StartAndDueDateService::UpdateService, feature_category: :portfolio_management do
+RSpec.describe WorkItems::Callbacks::StartAndDueDate, feature_category: :portfolio_management do
let_it_be(:project) { create(:project) }
let_it_be(:user) { create(:user).tap { |user| project.add_reporter(user) } }
let_it_be_with_reload(:work_item) { create(:work_item, project: project) }
- let(:widget) { work_item.widgets.find { |widget| widget.is_a?(WorkItems::Widgets::StartAndDueDate) } }
+ let(:widget) { work_item.widgets.find { |widget| widget.is_a?(WorkItems::Callbacks::StartAndDueDate) } }
describe '#before_update_callback' do
let(:start_date) { Date.today }
let(:due_date) { 1.week.from_now.to_date }
- let(:service) { described_class.new(widget: widget, current_user: user) }
+ let(:service) { described_class.new(issuable: work_item, current_user: user, params: params) }
- subject(:update_params) { service.before_update_callback(params: params) }
+ subject(:update_params) { service.before_update }
context 'when start and due date params are present' do
let(:params) { { start_date: Date.today, due_date: 1.week.from_now.to_date } }
@@ -22,8 +22,8 @@ RSpec.describe WorkItems::Widgets::StartAndDueDateService::UpdateService, featur
it 'correctly sets date values' do
expect do
update_params
- end.to change(work_item, :start_date).from(nil).to(start_date).and(
- change(work_item, :due_date).from(nil).to(due_date)
+ end.to change { work_item.start_date }.from(nil).to(start_date).and(
+ change { work_item.due_date }.from(nil).to(due_date)
)
end
@@ -59,7 +59,7 @@ RSpec.describe WorkItems::Widgets::StartAndDueDateService::UpdateService, featur
it 'sets only one date to null' do
expect do
update_params
- end.to change(work_item, :start_date).from(start_date).to(nil).and(
+ end.to change { work_item.start_date }.from(start_date).to(nil).and(
not_change(work_item, :due_date).from(due_date)
)
end
@@ -70,15 +70,15 @@ RSpec.describe WorkItems::Widgets::StartAndDueDateService::UpdateService, featur
let(:params) { {} }
before do
- allow(service).to receive(:new_type_excludes_widget?).and_return(true)
+ allow(service).to receive(:excluded_in_new_type?).and_return(true)
work_item.update!(start_date: start_date, due_date: due_date)
end
it 'sets both dates to null' do
expect do
update_params
- end.to change(work_item, :start_date).from(start_date).to(nil).and(
- change(work_item, :due_date).from(due_date).to(nil)
+ end.to change { work_item.start_date }.from(start_date).to(nil).and(
+ change { work_item.due_date }.from(due_date).to(nil)
)
end
end
diff --git a/spec/services/work_items/update_service_spec.rb b/spec/services/work_items/update_service_spec.rb
index 557617f61bb..591dc1c1034 100644
--- a/spec/services/work_items/update_service_spec.rb
+++ b/spec/services/work_items/update_service_spec.rb
@@ -191,14 +191,14 @@ RSpec.describe WorkItems::UpdateService, feature_category: :team_planning do
let(:supported_widgets) do
[
- { klass: WorkItems::Widgets::DescriptionService::UpdateService, callback: :before_update_callback, params: { description: 'foo' } },
+ { klass: WorkItems::Callbacks::Description, callback: :before_update },
{ klass: WorkItems::Widgets::HierarchyService::UpdateService, callback: :before_update_in_transaction, params: { parent: parent } }
]
end
end
context 'when updating widgets' do
- let(:widget_service_class) { WorkItems::Widgets::DescriptionService::UpdateService }
+ let(:widget_service_class) { WorkItems::Callbacks::Description }
let(:widget_params) { { description_widget: { description: 'changed' } } }
context 'when widget service is not present' do
@@ -215,8 +215,8 @@ RSpec.describe WorkItems::UpdateService, feature_category: :team_planning do
before do
allow_next_instance_of(widget_service_class) do |instance|
allow(instance)
- .to receive(:before_update_callback)
- .with(params: { description: 'changed' }).and_return(nil)
+ .to receive(:before_update)
+ .and_return(nil)
end
end
@@ -269,7 +269,10 @@ RSpec.describe WorkItems::UpdateService, feature_category: :team_planning do
expect(service).to receive(:update).and_call_original
expect(service).not_to receive(:execute_widgets).with(callback: :update, widget_params: widget_params)
- expect { update_work_item }.not_to change(work_item, :description)
+ expect do
+ update_work_item
+ work_item.reload
+ end.not_to change(work_item, :description)
end
end
end
diff --git a/spec/spec_helper.rb b/spec/spec_helper.rb
index 7317b512ae4..3dfc27b8511 100644
--- a/spec/spec_helper.rb
+++ b/spec/spec_helper.rb
@@ -203,6 +203,8 @@ RSpec.configure do |config|
config.include PendingDirectUploadHelpers, :direct_uploads
config.include LabelsHelper, type: :feature
config.include UnlockPipelinesHelpers, :unlock_pipelines
+ config.include UserWithNamespaceShim
+ config.include OrphanFinalArtifactsCleanupHelpers, :orphan_final_artifacts_cleanup
config.include_context 'when rendered has no HTML escapes', type: :view
@@ -320,9 +322,6 @@ RSpec.configure do |config|
# Postgres is the primary data source, and ClickHouse only when enabled in certain cases.
stub_feature_flags(clickhouse_data_collection: false)
-
- # This is going to be removed with https://gitlab.com/gitlab-org/gitlab/-/issues/431041
- stub_feature_flags(vite: false)
else
unstub_all_feature_flags
end
@@ -423,7 +422,7 @@ RSpec.configure do |config|
config.after do
Fog.unmock! if Fog.mock?
- Gitlab::CurrentSettings.clear_in_memory_application_settings!
+ Gitlab::ApplicationSettingFetcher.clear_in_memory_application_settings!
# Reset all feature flag stubs to default for testing
stub_all_feature_flags
diff --git a/spec/support/before_all_adapter.rb b/spec/support/before_all_adapter.rb
deleted file mode 100644
index 35846fcecb8..00000000000
--- a/spec/support/before_all_adapter.rb
+++ /dev/null
@@ -1,33 +0,0 @@
-# frozen_string_literal: true
-
-module TestProfBeforeAllAdapter
- module MultipleDatabaseAdapter
- def self.all_connection_classes
- @all_connection_classes ||= [ActiveRecord::Base] + ActiveRecord::Base.descendants.select(&:connection_class?) # rubocop: disable Database/MultipleDatabases
- end
-
- def self.begin_transaction
- self.all_connection_classes.each do |connection_class|
- connection_class.connection.begin_transaction(joinable: false)
- end
- end
-
- def self.rollback_transaction
- self.all_connection_classes.each do |connection_class|
- if connection_class.connection.open_transactions.zero?
- warn "!!! before_all transaction has been already rollbacked and " \
- "could work incorrectly"
- next
- end
-
- connection_class.connection.rollback_transaction
- end
- end
- end
-
- def self.default_adapter
- MultipleDatabaseAdapter
- end
-end
-
-TestProf::BeforeAll.adapter = ::TestProfBeforeAllAdapter.default_adapter
diff --git a/spec/support/database/prevent_cross_database_modification.rb b/spec/support/database/prevent_cross_database_modification.rb
index 02572d011f7..948bc21dafa 100644
--- a/spec/support/database/prevent_cross_database_modification.rb
+++ b/spec/support/database/prevent_cross_database_modification.rb
@@ -68,4 +68,11 @@ RSpec.configure do |config|
::ApplicationRecord.gitlab_transactions_stack.clear
end
+
+ config.before(:suite) do
+ ActiveSupport::Notifications.subscribe("factory_bot.run_factory") do |_name, _start, _finish, _id, payload|
+ strategy = payload[:strategy]
+ Thread.current[:factory_bot_objects] -= 1 if strategy == :create
+ end
+ end
end
diff --git a/spec/support/db_cleaner.rb b/spec/support/db_cleaner.rb
index 0a1d68a744c..b6539d6215b 100644
--- a/spec/support/db_cleaner.rb
+++ b/spec/support/db_cleaner.rb
@@ -2,7 +2,7 @@
module DbCleaner
def all_connection_classes
- ::TestProfBeforeAllAdapter::MultipleDatabaseAdapter.all_connection_classes
+ ::TestProf::BeforeAll::Adapters::ActiveRecord.all_connections.map(&:connection_class).uniq
end
def delete_from_all_tables!(except: [])
diff --git a/spec/support/factory_bot.rb b/spec/support/factory_bot.rb
index d30098a5cc0..00aa54990ef 100644
--- a/spec/support/factory_bot.rb
+++ b/spec/support/factory_bot.rb
@@ -4,13 +4,14 @@ FactoryBot.define do
after(:build) do |object, _|
next unless object.respond_to?(:factory_bot_built=)
+ # This will help the PreventCrossDatabaseModification to temporarily
+ # allow the object table when it's saved later.
object.factory_bot_built = true
end
- before(:create) do |object, _|
- next unless object.respond_to?(:factory_bot_built=)
-
- object.factory_bot_built = false
+ before(:create) do |_object, _|
+ Thread.current[:factory_bot_objects] ||= 0
+ Thread.current[:factory_bot_objects] += 1
end
end
diff --git a/spec/support/finder_collection.rb b/spec/support/finder_collection.rb
index 494dd4bdca1..93363943449 100644
--- a/spec/support/finder_collection.rb
+++ b/spec/support/finder_collection.rb
@@ -1,6 +1,6 @@
# frozen_string_literal: true
-require 'set'
+require 'set' # rubocop:disable Lint/RedundantRequireStatement -- Ruby 3.1 and earlier needs this. Drop this line after Ruby 3.2+ is only supported.
module Support
# Ensure that finders' `execute` method always returns
diff --git a/spec/support/finder_collection_allowlist.yml b/spec/support/finder_collection_allowlist.yml
index e60cc4278af..37c9da4e4a8 100644
--- a/spec/support/finder_collection_allowlist.yml
+++ b/spec/support/finder_collection_allowlist.yml
@@ -63,6 +63,7 @@
- Security::PipelineVulnerabilitiesFinder
- Security::ScanExecutionPoliciesFinder
- Security::ScanResultPoliciesFinder
+- Security::SecurityPoliciesFinder
- SentryIssueFinder
- ServerlessDomainFinder
- TagsFinder
diff --git a/spec/support/formatters/json_formatter.rb b/spec/support/formatters/json_formatter.rb
index a54004b3024..398ff0187a1 100644
--- a/spec/support/formatters/json_formatter.rb
+++ b/spec/support/formatters/json_formatter.rb
@@ -89,7 +89,13 @@ module Support
[metadata[:file_path], metadata[:line_number]]
else
# If there are nested shared examples, the outermost location is last in the array
- metadata[:shared_group_inclusion_backtrace].last.formatted_inclusion_location.split(':')
+ (
+ metadata[:shared_group_inclusion_backtrace].last.formatted_inclusion_location ||
+ # RSpec ignores some paths by default, e.g. bin/, which result in the above being nil.
+ # Source: https://github.com/rspec/rspec-core/blob/v3.12.2/lib/rspec/core/backtrace_formatter.rb#L11
+ # In that case, we fallback to use the raw `#inclusion_location`.
+ metadata[:shared_group_inclusion_backtrace].last.inclusion_location
+ ).split(':')
end
end
diff --git a/spec/support/helpers/admin_mode_helpers.rb b/spec/support/helpers/admin_mode_helpers.rb
index a6e31791127..8b71552f928 100644
--- a/spec/support/helpers/admin_mode_helpers.rb
+++ b/spec/support/helpers/admin_mode_helpers.rb
@@ -5,17 +5,30 @@
module AdminModeHelper
# Administrators are logged in by default in user mode and have to switch to admin
# mode for accessing any administrative functionality. This helper lets a user
- # be in admin mode without requiring a second authentication step (provided
- # the user is an admin)
+ # access the admin area in two different ways:
+ #
+ # * Fast (use_ui: false) and suitable for most use cases: fakes calls and grants
+ # access to the admin area without requiring a second authentication step (provided the
+ # user is an admin)
+ # * Slow (use_ui: true): visits the admin UI and enters the users password. A second
+ # authentication step may be needed.
#
# See also tag :enable_admin_mode in spec/spec_helper.rb for a spec-wide
# alternative
- def enable_admin_mode!(user)
- fake_user_mode = instance_double(Gitlab::Auth::CurrentUserMode)
+ def enable_admin_mode!(user, use_ui: false)
+ if use_ui
+ visit new_admin_session_path
+ fill_in 'user_password', with: user.password
+ click_button 'Enter admin mode'
+
+ wait_for_requests
+ else
+ fake_user_mode = instance_double(Gitlab::Auth::CurrentUserMode)
- allow(Gitlab::Auth::CurrentUserMode).to receive(:new).and_call_original
+ allow(Gitlab::Auth::CurrentUserMode).to receive(:new).and_call_original
- allow(Gitlab::Auth::CurrentUserMode).to receive(:new).with(user).and_return(fake_user_mode)
- allow(fake_user_mode).to receive(:admin_mode?).and_return(user&.admin?)
+ allow(Gitlab::Auth::CurrentUserMode).to receive(:new).with(user).and_return(fake_user_mode)
+ allow(fake_user_mode).to receive(:admin_mode?).and_return(user&.admin?)
+ end
end
end
diff --git a/spec/support/helpers/cycle_analytics_helpers.rb b/spec/support/helpers/cycle_analytics_helpers.rb
index 890fefcc7de..d54dcc8a31d 100644
--- a/spec/support/helpers/cycle_analytics_helpers.rb
+++ b/spec/support/helpers/cycle_analytics_helpers.rb
@@ -14,12 +14,14 @@ module CycleAnalyticsHelpers
page.all('.gl-path-button').collect(&:text).map { |name_with_median| name_with_median.split("\n")[0] }
end
- def fill_in_custom_stage_fields
+ def fill_in_custom_stage_fields(stage_name = nil)
index = page.all('[data-testid="value-stream-stage-fields"]').length
last_stage = page.all('[data-testid="value-stream-stage-fields"]').last
+ stage_name = "Cool custom stage - name #{index}" if stage_name.blank?
+
within last_stage do
- find('[name*="custom-stage-name-"]').fill_in with: "Cool custom stage - name #{index}"
+ find('[name*="custom-stage-name-"]').fill_in with: stage_name
select_dropdown_option_by_value "custom-stage-start-event-", 'Merge request created'
select_dropdown_option_by_value "custom-stage-end-event-", 'Merge request merged'
end
@@ -94,8 +96,8 @@ module CycleAnalyticsHelpers
wait_for_requests
end
- def create_value_stream_aggregation(group_or_project_namespace)
- aggregation = Analytics::CycleAnalytics::Aggregation.safe_create_for_namespace(group_or_project_namespace)
+ def create_value_stream_aggregation(namespace)
+ aggregation = Analytics::CycleAnalytics::Aggregation.safe_create_for_namespace(namespace)
Analytics::CycleAnalytics::AggregatorService.new(aggregation: aggregation).execute
end
@@ -123,7 +125,7 @@ module CycleAnalyticsHelpers
def create_commit(message, project, user, branch_name, count: 1, commit_time: nil, skip_push_handler: false)
repository = project.repository
- oldrev = repository.commit(branch_name)&.sha || Gitlab::Git::BLANK_SHA
+ oldrev = repository.commit(branch_name)&.sha || Gitlab::Git::SHA1_BLANK_SHA
commit_shas = Array.new(count) do |index|
commit_sha = repository.create_file(user, generate(:branch), "content", message: message, branch_name: branch_name)
diff --git a/spec/support/helpers/database/duplicate_indexes.yml b/spec/support/helpers/database/duplicate_indexes.yml
index ab9935a7a98..80d409f233d 100644
--- a/spec/support/helpers/database/duplicate_indexes.yml
+++ b/spec/support/helpers/database/duplicate_indexes.yml
@@ -27,6 +27,9 @@ boards_epic_board_recent_visits:
boards_epic_user_preferences:
index_boards_epic_user_preferences_on_board_user_epic_unique:
- index_boards_epic_user_preferences_on_board_id
+ci_build_trace_metadata:
+ index_ci_build_trace_metadata_on_trace_artifact_id:
+ - index_ci_build_trace_metadata_on_trace_artifact_id_partition_id
ci_job_artifacts:
index_ci_job_artifacts_on_id_project_id_and_created_at:
- index_ci_job_artifacts_on_project_id
@@ -63,9 +66,6 @@ error_tracking_errors:
geo_node_namespace_links:
index_geo_node_namespace_links_on_geo_node_id_and_namespace_id:
- index_geo_node_namespace_links_on_geo_node_id
-in_product_marketing_emails:
- index_in_product_marketing_emails_on_user_track_series:
- - index_in_product_marketing_emails_on_user_id
incident_management_oncall_participants:
index_inc_mgmnt_oncall_participants_on_user_id_and_rotation_id:
- index_inc_mgmnt_oncall_participants_on_oncall_user_id
@@ -102,19 +102,6 @@ ml_models:
p_ci_runner_machine_builds:
index_p_ci_runner_machine_builds_on_runner_machine_id:
- index_ci_runner_machine_builds_on_runner_machine_id
-packages_debian_group_distributions:
- uniq_pkgs_debian_group_distributions_group_id_and_codename:
- - index_packages_debian_group_distributions_on_group_id
- uniq_pkgs_debian_group_distributions_group_id_and_suite:
- - index_packages_debian_group_distributions_on_group_id
-packages_debian_project_distributions:
- uniq_pkgs_debian_project_distributions_project_id_and_codename:
- - index_packages_debian_project_distributions_on_project_id
- uniq_pkgs_debian_project_distributions_project_id_and_suite:
- - index_packages_debian_project_distributions_on_project_id
-packages_tags:
- index_packages_tags_on_package_id_and_updated_at:
- - index_packages_tags_on_package_id
pages_domains:
index_pages_domains_on_project_id_and_enabled_until:
- index_pages_domains_on_project_id
diff --git a/spec/support/helpers/database/multiple_databases_helpers.rb b/spec/support/helpers/database/multiple_databases_helpers.rb
index bccd6979af1..9df0685e46a 100644
--- a/spec/support/helpers/database/multiple_databases_helpers.rb
+++ b/spec/support/helpers/database/multiple_databases_helpers.rb
@@ -85,7 +85,7 @@ module Database
# The usage of this method switches temporarily used `connection_handler`
# allowing full manipulation of ActiveRecord::Base connections without
# having side effects like:
- # - misaligned transactions since this is managed by `BeforeAllAdapter`
+ # - misaligned transactions since this is managed by `TestProf::BeforeAll::Adapters::ActiveRecord`
# - removal of primary connections
#
# The execution within a block ensures safe cleanup of all allocated resources.
diff --git a/spec/support/helpers/debug_with_puts.rb b/spec/support/helpers/debug_with_puts.rb
new file mode 100644
index 00000000000..b8599cc7d40
--- /dev/null
+++ b/spec/support/helpers/debug_with_puts.rb
@@ -0,0 +1,13 @@
+# frozen_string_literal: true
+
+# TODO: Remove the debug_with_puts statements below! Used for debugging purposes.
+# TODO: https://gitlab.com/gitlab-org/quality/engineering-productivity/team/-/issues/323#note_1688925316
+module DebugWithPuts
+ def debug_with_puts(message)
+ return unless ENV['CI'] # rubocop:disable RSpec/AvoidConditionalStatements -- Debug information only in the CI
+
+ warn "[#{Time.current}] #{message}"
+ end
+
+ module_function :debug_with_puts
+end
diff --git a/spec/support/helpers/dns_helpers.rb b/spec/support/helpers/dns_helpers.rb
index be26c80d217..0250e432609 100644
--- a/spec/support/helpers/dns_helpers.rb
+++ b/spec/support/helpers/dns_helpers.rb
@@ -1,11 +1,15 @@
# frozen_string_literal: true
module DnsHelpers
+ include ViteHelper
+
def block_dns!
stub_all_dns!
stub_invalid_dns!
permit_local_dns!
permit_postgresql!
+ permit_redis!
+ permit_vite!
end
def permit_dns!
@@ -53,6 +57,26 @@ module DnsHelpers
ActiveRecord::Base.configurations.configs_for(env_name: Rails.env).map(&:host).compact.uniq
end
+ def permit_redis!
+ # https://github.com/redis-rb/redis-client/blob/v0.11.2/lib/redis_client/ruby_connection.rb#L51 uses Socket.tcp that
+ # calls Addrinfo.getaddrinfo internally.
+ hosts = Gitlab::Redis::ALL_CLASSES.map do |redis_instance|
+ redis_instance.redis_client_params[:host]
+ end.uniq.compact
+
+ hosts.each do |host|
+ allow(Addrinfo).to receive(:getaddrinfo).with(host, anything, nil, :STREAM, anything, anything, any_args).and_call_original
+ end
+ end
+
+ def permit_vite!
+ # https://github.com/ElMassimo/vite_ruby/blob/7d2f558c9760802e5d763bfa40efe87607eb166a/vite_ruby/lib/vite_ruby.rb#L91
+ # uses Socket.tcp to connect to vite dev server - this won't necessarily be localhost
+ return unless vite_enabled?
+
+ allow(Addrinfo).to receive(:getaddrinfo).with(ViteRuby.instance.config.host, ViteRuby.instance.config.port, nil, :STREAM, anything, anything, any_args).and_call_original
+ end
+
def stub_resolver(stubbed_lookups = {})
resolver = instance_double('Resolv::DNS')
allow(resolver).to receive(:timeouts=)
diff --git a/spec/support/helpers/features/invite_members_modal_helpers.rb b/spec/support/helpers/features/invite_members_modal_helpers.rb
index c40e060bc8e..fd37abd7381 100644
--- a/spec/support/helpers/features/invite_members_modal_helpers.rb
+++ b/spec/support/helpers/features/invite_members_modal_helpers.rb
@@ -66,6 +66,7 @@ module Features
def choose_options(role, expires_at)
page.within role_dropdown_selector do
+ wait_for_requests
toggle_listbox
select_listbox_item(role, exact_text: true)
end
diff --git a/spec/support/helpers/login_helpers.rb b/spec/support/helpers/login_helpers.rb
index 913316c8622..83849df73dc 100644
--- a/spec/support/helpers/login_helpers.rb
+++ b/spec/support/helpers/login_helpers.rb
@@ -49,18 +49,6 @@ module LoginHelpers
@current_user = user
end
- def gitlab_enable_admin_mode_sign_in(user, use_mock_admin_mode: true)
- if use_mock_admin_mode
- enable_admin_mode!(user)
- else
- visit new_admin_session_path
- fill_in 'user_password', with: user.password
- click_button 'Enter admin mode'
-
- wait_for_requests
- end
- end
-
def gitlab_sign_in_via(provider, user, uid, saml_response = nil)
mock_auth_hash_with_saml_xml(provider, uid, user.email, saml_response)
visit new_user_session_path
@@ -90,8 +78,8 @@ module LoginHelpers
# Requires Javascript driver.
def gitlab_disable_admin_mode
- click_on 'Search or go to…'
- click_on 'Leave admin mode'
+ find_by_testid('user-menu-toggle').click
+ click_on 'Leave Admin Mode'
end
private
@@ -122,7 +110,7 @@ module LoginHelpers
def login_via(provider, user, uid, remember_me: false, additional_info: {})
mock_auth_hash(provider, uid, user.email, additional_info: additional_info)
visit new_user_session_path
- expect(page).to have_css('.omniauth-container')
+ expect(page).to have_css('.js-oauth-login')
check 'remember_me_omniauth' if remember_me
@@ -157,7 +145,7 @@ module LoginHelpers
mock_auth_hash(provider, uid, email, response_object: response_object)
end
- def configure_mock_auth(provider, uid, email, response_object: nil, additional_info: {}, name: 'mockuser')
+ def configure_mock_auth(provider, uid, email, response_object: nil, additional_info: {}, name: 'mockuser', groups: [])
# The mock_auth configuration allows you to set per-provider (or default)
# authentication hashes to return during integration testing.
@@ -180,7 +168,8 @@ module LoginHelpers
name: 'mockuser',
email: email,
image: 'mock_user_thumbnail_url'
- }
+ },
+ 'groups' => groups
}
),
response_object: response_object
@@ -188,9 +177,9 @@ module LoginHelpers
}).merge(additional_info) { |_, old_hash, new_hash| old_hash.merge(new_hash) }
end
- def mock_auth_hash(provider, uid, email, additional_info: {}, response_object: nil, name: 'mockuser')
+ def mock_auth_hash(provider, uid, email, additional_info: {}, response_object: nil, name: 'mockuser', groups: [])
configure_mock_auth(
- provider, uid, email, additional_info: additional_info, response_object: response_object, name: name
+ provider, uid, email, additional_info: additional_info, response_object: response_object, name: name, groups: groups
)
original_env_config_omniauth_auth = Rails.application.env_config['omniauth.auth']
diff --git a/spec/support/helpers/models/ci/partitioning_testing/partition_identifiers.rb b/spec/support/helpers/models/ci/partitioning_testing/partition_identifiers.rb
index aa091095fb6..e139f0c9fb3 100644
--- a/spec/support/helpers/models/ci/partitioning_testing/partition_identifiers.rb
+++ b/spec/support/helpers/models/ci/partitioning_testing/partition_identifiers.rb
@@ -8,6 +8,10 @@ module Ci
def ci_testing_partition_id
99999
end
+
+ def ci_testing_partition_id_for_check_constraints
+ 101
+ end
end
end
end
diff --git a/spec/support/helpers/navbar_structure_helper.rb b/spec/support/helpers/navbar_structure_helper.rb
index 5519a6910a2..2d3f7a1b8a9 100644
--- a/spec/support/helpers/navbar_structure_helper.rb
+++ b/spec/support/helpers/navbar_structure_helper.rb
@@ -76,6 +76,14 @@ module NavbarStructureHelper
)
end
+ def insert_google_artifact_registry_nav
+ insert_after_sub_nav_item(
+ _('Container Registry'),
+ within: _('Deploy'),
+ new_sub_nav_item_name: _('Google Artifact Registry')
+ )
+ end
+
def insert_dependency_proxy_nav
insert_before_sub_nav_item(
_('Kubernetes'),
@@ -124,6 +132,14 @@ module NavbarStructureHelper
)
end
+ def insert_model_registry_nav(within)
+ insert_after_sub_nav_item(
+ within,
+ within: _('Deploy'),
+ new_sub_nav_item_name: _('Model registry')
+ )
+ end
+
def project_analytics_sub_nav_item
[
_('Value stream analytics'),
diff --git a/spec/support/helpers/orphan_final_artifacts_cleanup_helpers.rb b/spec/support/helpers/orphan_final_artifacts_cleanup_helpers.rb
new file mode 100644
index 00000000000..9711a5d7c54
--- /dev/null
+++ b/spec/support/helpers/orphan_final_artifacts_cleanup_helpers.rb
@@ -0,0 +1,82 @@
+# frozen_string_literal: true
+
+module OrphanFinalArtifactsCleanupHelpers
+ def create_fog_file(final: true)
+ path = if final
+ JobArtifactUploader.generate_final_store_path(root_id: 123)
+ else
+ JobArtifactUploader.generate_remote_id
+ end
+
+ fog_connection.directories.new(key: remote_directory)
+ .files
+ .create( # rubocop:disable Rails/SaveBang -- not the AR method
+ key: path_with_bucket_prefix(path),
+ body: 'content'
+ )
+ end
+
+ def path_without_bucket_prefix(path)
+ Pathname.new(path).relative_path_from(bucket_prefix.to_s).to_s
+ end
+
+ def path_with_bucket_prefix(path)
+ File.join([bucket_prefix, path].compact)
+ end
+
+ def expect_object_to_exist(fog_file)
+ expect { fog_connection.get_object(remote_directory, fog_file.key) }.not_to raise_error
+ end
+
+ def expect_object_to_be_deleted(fog_file)
+ expect { fog_connection.get_object(remote_directory, fog_file.key) }.to raise_error(Excon::Error::NotFound)
+ end
+
+ def expect_start_log_message
+ expect_log_message("Looking for orphan job artifact objects")
+ end
+
+ def expect_done_log_message
+ expect_log_message("Done")
+ end
+
+ def expect_first_page_loading_log_message
+ expect_log_message("Loading page (first page)", times: 1)
+ end
+
+ def expect_page_loading_via_marker_log_message(times:)
+ expect_log_message("Loading page (marker:", times: times)
+ end
+
+ def expect_resuming_from_marker_log_message(marker)
+ expect_log_message("Resuming from last page marker: #{marker}", times: 1)
+ end
+
+ def expect_no_resuming_from_marker_log_message
+ expect(Gitlab::AppLogger).not_to have_received(:info).with(a_string_including("Resuming"))
+ end
+
+ def expect_delete_log_message(fog_file)
+ expect_log_message("Delete #{fog_file.key} (#{fog_file.content_length} bytes)")
+ end
+
+ def expect_no_delete_log_message(fog_file)
+ expect_no_log_message("Delete #{fog_file.key} (#{fog_file.content_length} bytes)")
+ end
+
+ def expect_log_message(message, times: 1)
+ message = "[DRY RUN] #{message}" if dry_run
+ expect(Gitlab::AppLogger).to have_received(:info).with(a_string_including(message)).exactly(times).times
+ end
+
+ def expect_no_log_message(message)
+ message = "[DRY RUN] #{message}" if dry_run
+ expect(Gitlab::AppLogger).not_to have_received(:info).with(a_string_including(message))
+ end
+
+ def fetch_saved_marker
+ Gitlab::Redis::SharedState.with do |redis|
+ redis.get(described_class::LAST_PAGE_MARKER_REDIS_KEY)
+ end
+ end
+end
diff --git a/spec/support/helpers/stub_configuration.rb b/spec/support/helpers/stub_configuration.rb
index e043d1249b9..e888f858a2d 100644
--- a/spec/support/helpers/stub_configuration.rb
+++ b/spec/support/helpers/stub_configuration.rb
@@ -1,5 +1,6 @@
# frozen_string_literal: true
+require 'gitlab_edition'
require 'active_support/hash_with_indifferent_access'
require 'active_support/dependencies'
@@ -120,6 +121,10 @@ module StubConfiguration
.to receive(:sentry_clientside_dsn) { clientside_dsn }
end
+ def clear_sentry_settings
+ Sentry.get_current_scope.clear
+ end
+
def stub_microsoft_graph_mailer_setting(messages)
allow(Gitlab.config.microsoft_graph_mailer).to receive_messages(to_settings(messages))
end
@@ -194,6 +199,6 @@ module StubConfiguration
end
require_relative '../../../ee/spec/support/helpers/ee/stub_configuration' if
- Dir.exist?("#{__dir__}/../../../ee")
+ GitlabEdition.ee?
StubConfiguration.prepend_mod_with('StubConfiguration')
diff --git a/spec/support/helpers/stub_gitlab_calls.rb b/spec/support/helpers/stub_gitlab_calls.rb
index 0c92502ba99..33661ce353c 100644
--- a/spec/support/helpers/stub_gitlab_calls.rb
+++ b/spec/support/helpers/stub_gitlab_calls.rb
@@ -102,7 +102,7 @@ module StubGitlabCalls
end
def stub_commonmark_sourcepos_disabled
- engine = Banzai::Filter::MarkdownFilter.render_engine(nil)
+ engine = Banzai::Filter::MarkdownFilter.new('foo', {}).render_engine
allow_next_instance_of(engine) do |instance|
allow(instance).to receive(:sourcepos_disabled?).and_return(true)
@@ -110,7 +110,7 @@ module StubGitlabCalls
end
def stub_commonmark_sourcepos_enabled
- engine = Banzai::Filter::MarkdownFilter.render_engine(nil)
+ engine = Banzai::Filter::MarkdownFilter.new('foo', {}).render_engine
allow_next_instance_of(engine) do |instance|
allow(instance).to receive(:sourcepos_disabled?).and_return(false)
diff --git a/spec/support/helpers/stub_requests.rb b/spec/support/helpers/stub_requests.rb
index bde5535705e..b77b366e037 100644
--- a/spec/support/helpers/stub_requests.rb
+++ b/spec/support/helpers/stub_requests.rb
@@ -18,15 +18,15 @@ module StubRequests
end
def stub_dns(url, ip_address:, port: 80)
- debug_with_puts "beginning of stub_dns"
+ DebugWithPuts.debug_with_puts "beginning of stub_dns"
url = parse_url(url)
- debug_with_puts "before socket = Socket.sockaddr_in"
+ DebugWithPuts.debug_with_puts "before socket = Socket.sockaddr_in"
socket = Socket.sockaddr_in(port, ip_address)
- debug_with_puts "after socket = Socket.sockaddr_in"
+ DebugWithPuts.debug_with_puts "after socket = Socket.sockaddr_in"
- debug_with_puts "before addr = Addrinfo.new(socket)"
+ DebugWithPuts.debug_with_puts "before addr = Addrinfo.new(socket)"
addr = Addrinfo.new(socket)
- debug_with_puts "after addr = Addrinfo.new(socket)"
+ DebugWithPuts.debug_with_puts "after addr = Addrinfo.new(socket)"
# See Gitlab::UrlBlocker
allow(Addrinfo).to receive(:getaddrinfo)
@@ -58,12 +58,4 @@ module StubRequests
def parse_url(url)
url.is_a?(URI) ? url : URI(url)
end
-
- # TODO: Remove the debug_with_puts statements below! Used for debugging purposes.
- # TODO: https://gitlab.com/gitlab-org/quality/engineering-productivity/team/-/issues/323#note_1688925316
- def debug_with_puts(message)
- return unless ENV['CI'] # rubocop:disable RSpec/AvoidConditionalStatements -- Debug information only in the CI
-
- puts "[#{Time.current}] #{message}"
- end
end
diff --git a/spec/support/helpers/user_with_namespace_shim.rb b/spec/support/helpers/user_with_namespace_shim.rb
new file mode 100644
index 00000000000..c4c988b33a0
--- /dev/null
+++ b/spec/support/helpers/user_with_namespace_shim.rb
@@ -0,0 +1,73 @@
+# frozen_string_literal: true
+
+# Modify the FactoryBot user build process to assign a personal namespace.
+# The complement to this shim is in the User factory where we assign_personal_namespace.
+#
+# This is needed to assist with the transition to optional personal namespaces.
+# See https://gitlab.com/gitlab-org/gitlab/-/merge_requests/137065
+module UserWithNamespaceShim
+ extend ActiveSupport::Concern
+
+ USER_WITH_NAMESPACE_SHIM_YAML = File.join(__dir__, 'user_with_namespace_shim.yml')
+
+ class << self
+ include Gitlab::Utils::StrongMemoize
+
+ def enabled?
+ self.enabled ||= false
+ end
+
+ def shim(spec_file)
+ self.enabled = spec_file_shimmed?(spec_file)
+ end
+
+ def unshim
+ self.enabled = false
+ end
+
+ # Determine the spec filename from the current backtrace.
+ def get_spec_file
+ caller.find do |line|
+ match = line.match(%r{^(.+_spec\.rb|.+/frontend/fixtures/.+\.rb):\d+:in})
+ match[1] if match
+ end
+
+ path = ::Regexp.last_match(1)
+ return unless path
+
+ Pathname.new(path)
+ .relative_path_from(Rails.root)
+ .to_s
+ end
+
+ private
+
+ def spec_file_shimmed?(spec_file)
+ shimmed_spec_list.include?(spec_file)
+ end
+
+ def shimmed_spec_list
+ YAML.load_file(USER_WITH_NAMESPACE_SHIM_YAML) || []
+ end
+ strong_memoize_attr :shimmed_spec_list
+
+ attr_accessor :enabled
+ end
+
+ included do
+ # This is our only chance to determine the spec filename.
+ spec_file = UserWithNamespaceShim.get_spec_file
+
+ # We need to use before(:all) instead of before_all otherwise we open a transaction before running the example
+ # which interferes with examples using the table deletion strategy like those marked as `:delete`.
+ # rubocop:disable RSpec/BeforeAll -- reason above
+ before(:all) do
+ UserWithNamespaceShim.shim(spec_file)
+ end
+ # rubocop:enable RSpec/BeforeAll
+
+ after(:all) do
+ UserWithNamespaceShim.unshim
+ end
+ end
+end
diff --git a/spec/support/helpers/user_with_namespace_shim.yml b/spec/support/helpers/user_with_namespace_shim.yml
new file mode 100644
index 00000000000..7b3dc099cf9
--- /dev/null
+++ b/spec/support/helpers/user_with_namespace_shim.yml
@@ -0,0 +1,1000 @@
+---
+- ee/spec/components/namespaces/storage/user_pre_enforcement_alert_component_spec.rb
+- ee/spec/controllers/admin/users_controller_spec.rb
+- ee/spec/controllers/autocomplete_controller_spec.rb
+- ee/spec/controllers/ee/profiles/preferences_controller_spec.rb
+- ee/spec/controllers/ee/search_controller_spec.rb
+- ee/spec/controllers/groups/sso_controller_spec.rb
+- ee/spec/controllers/profiles/billings_controller_spec.rb
+- ee/spec/controllers/projects/iterations_controller_spec.rb
+- ee/spec/controllers/projects_controller_spec.rb
+- ee/spec/controllers/users_controller_spec.rb
+- ee/spec/features/account_recovery_regular_check_spec.rb
+- ee/spec/features/admin/admin_audit_logs_spec.rb
+- ee/spec/features/admin/admin_groups_spec.rb
+- ee/spec/features/admin/admin_reset_pipeline_minutes_spec.rb
+- ee/spec/features/admin/admin_users_spec.rb
+- ee/spec/features/admin/groups/admin_changes_plan_spec.rb
+- ee/spec/features/admin/users/users_spec.rb
+- ee/spec/features/analytics/code_analytics_spec.rb
+- ee/spec/features/billings/billing_plans_spec.rb
+- ee/spec/features/boards/boards_licensed_features_spec.rb
+- ee/spec/features/boards/boards_spec.rb
+- ee/spec/features/boards/swimlanes/epics_swimlanes_spec.rb
+- ee/spec/features/ci/ci_minutes_spec.rb
+- ee/spec/features/ci_shared_runner_warnings_spec.rb
+- ee/spec/features/code_suggestions_ga_non_owner_alert_spec.rb
+- ee/spec/features/dashboards/todos_spec.rb
+- ee/spec/features/epics/gfm_autocomplete_spec.rb
+- ee/spec/features/epics/issue_promotion_spec.rb
+- ee/spec/features/epics/referencing_epics_spec.rb
+- ee/spec/features/epics/update_epic_spec.rb
+- ee/spec/features/groups/analytics/productivity_analytics_spec.rb
+- ee/spec/features/groups/member_roles_spec.rb
+- ee/spec/features/groups/members/list_members_spec.rb
+- ee/spec/features/groups/security/policies_list_spec.rb
+- ee/spec/features/groups/security/policy_editor_spec.rb
+- ee/spec/features/groups/usage_quotas/code_suggestions_usage_tab_spec.rb
+- ee/spec/features/groups/wikis_spec.rb
+- ee/spec/features/incidents/incident_details_spec.rb
+- ee/spec/features/issues/epic_in_issue_sidebar_spec.rb
+- ee/spec/features/issues/issue_sidebar_spec.rb
+- ee/spec/features/issues/user_bulk_edits_issues_spec.rb
+- ee/spec/features/issues/user_sees_empty_state_spec.rb
+- ee/spec/features/issues/user_uses_quick_actions_spec.rb
+- ee/spec/features/issues/user_views_issues_spec.rb
+- ee/spec/features/merge_request/code_owner_approvals_reset_after_merging_to_source_branch_spec.rb
+- ee/spec/features/merge_request/draft_comments_spec.rb
+- ee/spec/features/merge_request/user_approves_with_password_spec.rb
+- ee/spec/features/merge_request/user_approves_with_saml_auth_spec.rb
+- ee/spec/features/merge_request/user_comments_on_merge_request_spec.rb
+- ee/spec/features/merge_request/user_creates_merge_request_spec.rb
+- ee/spec/features/merge_request/user_creates_multiple_assignees_mr_spec.rb
+- ee/spec/features/merge_request/user_creates_multiple_reviewers_mr_spec.rb
+- ee/spec/features/merge_request/user_edits_multiple_assignees_mr_spec.rb
+- ee/spec/features/merge_request/user_edits_multiple_reviewers_mr_spec.rb
+- ee/spec/features/merge_request/user_merges_immediately_spec.rb
+- ee/spec/features/merge_request/user_merges_with_namespace_storage_limits_spec.rb
+- ee/spec/features/merge_request/user_merges_with_push_rules_spec.rb
+- ee/spec/features/merge_request/user_sees_approve_via_custom_role_spec.rb
+- ee/spec/features/merge_request/user_sees_merge_widget_spec.rb
+- ee/spec/features/merge_request/user_sees_security_policy_rules_licence_compliance_spec.rb
+- ee/spec/features/merge_request/user_sees_status_checks_widget_spec.rb
+- ee/spec/features/merge_request/user_sets_approval_rules_spec.rb
+- ee/spec/features/merge_request/user_sets_approvers_spec.rb
+- ee/spec/features/merge_request/user_uses_slash_commands_spec.rb
+- ee/spec/features/merge_request/user_views_blocked_merge_request_spec.rb
+- ee/spec/features/merge_trains/user_adds_to_merge_train_when_pipeline_succeeds_spec.rb
+- ee/spec/features/namespace_user_cap_reached_alert_spec.rb
+- ee/spec/features/pending_project_memberships_spec.rb
+- ee/spec/features/profiles/account_spec.rb
+- ee/spec/features/profiles/password_spec.rb
+- ee/spec/features/profiles/usage_quotas_spec.rb
+- ee/spec/features/profiles/user_visits_profile_spec.rb
+- ee/spec/features/profiles/user_visits_public_profile_spec.rb
+- ee/spec/features/projects/active_tabs_spec.rb
+- ee/spec/features/projects/audit_events_spec.rb
+- ee/spec/features/projects/custom_projects_template_spec.rb
+- ee/spec/features/projects/environments/environment_spec.rb
+- ee/spec/features/projects/feature_flags/feature_flag_issues_spec.rb
+- ee/spec/features/projects/feature_flags/user_creates_feature_flag_spec.rb
+- ee/spec/features/projects/feature_flags/user_deletes_feature_flag_spec.rb
+- ee/spec/features/projects/feature_flags/user_sees_feature_flag_list_spec.rb
+- ee/spec/features/projects/feature_flags/user_updates_feature_flag_spec.rb
+- ee/spec/features/projects/members/member_leaves_project_spec.rb
+- ee/spec/features/projects/merge_requests/user_approves_merge_request_spec.rb
+- ee/spec/features/projects/milestones/milestone_spec.rb
+- ee/spec/features/projects/navbar_spec.rb
+- ee/spec/features/projects/new_project_from_template_spec.rb
+- ee/spec/features/projects/new_project_spec.rb
+- ee/spec/features/projects/path_locks_spec.rb
+- ee/spec/features/projects/push_rules_spec.rb
+- ee/spec/features/projects/security/policies_list_spec.rb
+- ee/spec/features/projects/security/policy_editor_spec.rb
+- ee/spec/features/projects/settings/ee/repository_mirrors_settings_spec.rb
+- ee/spec/features/projects/settings/merge_requests/user_manages_merge_requests_template_spec.rb
+- ee/spec/features/projects/settings/packages_spec.rb
+- ee/spec/features/projects/settings/user_changes_default_branch_spec.rb
+- ee/spec/features/projects/settings/user_manages_issues_template_spec.rb
+- ee/spec/features/projects/settings/user_manages_merge_requests_template_spec.rb
+- ee/spec/features/projects/show_project_spec.rb
+- ee/spec/features/projects/show_spec.rb
+- ee/spec/features/projects/view_blob_with_code_owners_spec.rb
+- ee/spec/features/projects/wiki/user_views_wiki_empty_spec.rb
+- ee/spec/features/projects_spec.rb
+- ee/spec/features/protected_tags_spec.rb
+- ee/spec/features/remote_development/workspaces_dropdown_group_spec.rb
+- ee/spec/features/search/elastic/global_search_spec.rb
+- ee/spec/features/search/elastic/project_search_spec.rb
+- ee/spec/features/search/elastic/snippet_search_spec.rb
+- ee/spec/features/security/dashboard_access_spec.rb
+- ee/spec/features/security/profile_access_spec.rb
+- ee/spec/features/security/project/discover_spec.rb
+- ee/spec/features/security/project/internal_access_spec.rb
+- ee/spec/features/security/project/private_access_spec.rb
+- ee/spec/features/security/project/public_access_spec.rb
+- ee/spec/features/security/project/snippet/internal_access_spec.rb
+- ee/spec/features/security/project/snippet/public_access_spec.rb
+- ee/spec/features/subscriptions/subscription_flow_for_existing_user_with_eligible_group_spec.rb
+- ee/spec/features/subscriptions_spec.rb
+- ee/spec/features/trials/show_trial_banner_spec.rb
+- ee/spec/features/user_sees_marketing_header_spec.rb
+- ee/spec/features/user_settings/password_spec.rb
+- ee/spec/features/users/login_spec.rb
+- ee/spec/finders/compliance_management/merge_requests/compliance_violations_finder_spec.rb
+- ee/spec/finders/ee/fork_targets_finder_spec.rb
+- ee/spec/finders/issues_finder_spec.rb
+- ee/spec/finders/security/approval_groups_finder_spec.rb
+- ee/spec/frontend/fixtures/namespace.rb
+- ee/spec/frontend/fixtures/search.rb
+- ee/spec/graphql/ee/resolvers/board_lists_resolver_spec.rb
+- ee/spec/graphql/mutations/namespaces/increase_storage_temporarily_spec.rb
+- ee/spec/graphql/mutations/security_policy/assign_security_policy_project_spec.rb
+- ee/spec/graphql/mutations/security_policy/commit_scan_execution_policy_spec.rb
+- ee/spec/graphql/mutations/security_policy/create_security_policy_project_spec.rb
+- ee/spec/graphql/mutations/security_policy/unassign_security_policy_project_spec.rb
+- ee/spec/helpers/ee/users/callouts_helper_spec.rb
+- ee/spec/helpers/projects/security/discover_helper_spec.rb
+- ee/spec/helpers/subscriptions_helper_spec.rb
+- ee/spec/lib/ee/api/entities/user_with_admin_spec.rb
+- ee/spec/lib/ee/gitlab/background_migration/migrate_approver_to_approval_rules_spec.rb
+- ee/spec/lib/ee/gitlab/background_migration/migrate_vulnerabilities_feedback_to_vulnerabilities_state_transition_spec.rb
+- ee/spec/lib/ee/gitlab/ci/pipeline/chain/validate/external_spec.rb
+- ee/spec/lib/ee/gitlab/import_export/project/tree_restorer_spec.rb
+- ee/spec/lib/ee/gitlab/issuable_metadata_spec.rb
+- ee/spec/lib/gitlab/auth/smartcard/certificate_spec.rb
+- ee/spec/lib/gitlab/auth/smartcard/ldap_certificate_spec.rb
+- ee/spec/lib/gitlab/background_migration/create_vulnerability_links_spec.rb
+- ee/spec/lib/gitlab/geo_spec.rb
+- ee/spec/lib/gitlab/graphql/aggregations/security_orchestration_policies/lazy_dast_profile_aggregate_spec.rb
+- ee/spec/lib/gitlab/llm/stage_check_spec.rb
+- ee/spec/lib/gitlab/llm/tanuki_bot_spec.rb
+- ee/spec/lib/gitlab/search/zoekt/client_spec.rb
+- ee/spec/lib/gitlab/subscription_portal/clients/graphql_spec.rb
+- ee/spec/lib/sidebars/groups/menus/analytics_menu_spec.rb
+- ee/spec/migrations/20220831132802_delete_approval_rules_for_vulnerability_spec.rb
+- ee/spec/migrations/20220907122648_populate_security_orchestration_policy_configuration_id_spec.rb
+- ee/spec/migrations/20221130192239_fix_approval_project_rules_without_protected_branches_spec.rb
+- ee/spec/migrations/20230113201308_backfill_namespace_ldap_settings_spec.rb
+- ee/spec/migrations/20230127155217_add_id_column_to_package_metadata_join_table_spec.rb
+- ee/spec/migrations/20230310213308_sync_security_policy_rule_schedules_that_may_have_been_deleted_by_a_bug_spec.rb
+- ee/spec/migrations/20230403221928_resync_scan_result_policies_for_namespaces_spec.rb
+- ee/spec/migrations/20230501165244_remove_software_license_policies_without_scan_result_policy_id_spec.rb
+- ee/spec/migrations/20230612162643_pm_checkpoints_remove_advisory_entries_spec.rb
+- ee/spec/migrations/20231030154117_insert_new_ultimate_trial_plan_into_plans_spec.rb
+- ee/spec/migrations/cleanup_orphan_software_licenses_spec.rb
+- ee/spec/migrations/geo/migrate_ci_job_artifacts_to_separate_registry_spec.rb
+- ee/spec/migrations/geo/migrate_lfs_objects_to_separate_registry_spec.rb
+- ee/spec/migrations/geo/resync_direct_upload_job_artifact_registry_spec.rb
+- ee/spec/migrations/update_can_create_group_application_setting_spec.rb
+- ee/spec/migrations/update_ci_max_total_yaml_size_bytes_default_value_spec.rb
+- ee/spec/models/concerns/ee/mentionable_spec.rb
+- ee/spec/models/concerns/elastic/note_spec.rb
+- ee/spec/models/ee/member_spec.rb
+- ee/spec/models/ee/namespace/root_storage_statistics_spec.rb
+- ee/spec/models/ee/namespace_spec.rb
+- ee/spec/models/ee/namespace_statistics_spec.rb
+- ee/spec/models/ee/namespaces/user_namespace_spec.rb
+- ee/spec/models/ee/pages_domain_spec.rb
+- ee/spec/models/ee/project_member_spec.rb
+- ee/spec/models/ee/project_spec.rb
+- ee/spec/models/ee/project_wiki_spec.rb
+- ee/spec/models/elastic/migration_record_spec.rb
+- ee/spec/models/epic_spec.rb
+- ee/spec/models/factories_spec.rb
+- ee/spec/models/gitlab_subscription_spec.rb
+- ee/spec/models/iteration_spec.rb
+- ee/spec/models/member_spec.rb
+- ee/spec/models/namespace_setting_spec.rb
+- ee/spec/models/namespaces/free_user_cap/enforcement_spec.rb
+- ee/spec/models/project_member_spec.rb
+- ee/spec/models/projects/compliance_standards/adherence_spec.rb
+- ee/spec/models/push_rule_spec.rb
+- ee/spec/policies/ci/minutes/namespace_monthly_usage_policy_spec.rb
+- ee/spec/policies/dependency_proxy/packages/setting_policy_spec.rb
+- ee/spec/policies/global_policy_spec.rb
+- ee/spec/policies/packages/policies/project_policy_spec.rb
+- ee/spec/policies/project_policy_spec.rb
+- ee/spec/policies/requirements_management/requirement_policy_spec.rb
+- ee/spec/policies/vulnerabilities/feedback_policy_spec.rb
+- ee/spec/policies/vulnerabilities/merge_request_link_policy_spec.rb
+- ee/spec/presenters/audit_event_presenter_spec.rb
+- ee/spec/requests/admin/users_controller_spec.rb
+- ee/spec/requests/api/discussions_spec.rb
+- ee/spec/requests/api/epics_spec.rb
+- ee/spec/requests/api/graphql/ci/minutes/usage_spec.rb
+- ee/spec/requests/api/graphql/compliance_management/merge_requests/compliance_violations_spec.rb
+- ee/spec/requests/api/graphql/mutations/security/finding/create_merge_request_spec.rb
+- ee/spec/requests/api/graphql/mutations/security_policy/assign_security_policy_project_spec.rb
+- ee/spec/requests/api/graphql/mutations/security_policy/commit_scan_execution_policy_spec.rb
+- ee/spec/requests/api/graphql/mutations/security_policy/create_security_policy_project_spec.rb
+- ee/spec/requests/api/graphql/mutations/security_policy/unassign_security_policy_project_spec.rb
+- ee/spec/requests/api/graphql/project/path_locks_spec.rb
+- ee/spec/requests/api/graphql/project/pipelines/dast_profile_spec.rb
+- ee/spec/requests/api/graphql/project/push_rules_spec.rb
+- ee/spec/requests/api/graphql/project/requirements_management/requirements_spec.rb
+- ee/spec/requests/api/graphql/work_item_spec.rb
+- ee/spec/requests/api/groups_spec.rb
+- ee/spec/requests/api/internal/ai/x_ray/scan_spec.rb
+- ee/spec/requests/api/internal/base_spec.rb
+- ee/spec/requests/api/issues_spec.rb
+- ee/spec/requests/api/merge_request_approval_rules_spec.rb
+- ee/spec/requests/api/merge_request_approvals_spec.rb
+- ee/spec/requests/api/merge_requests_spec.rb
+- ee/spec/requests/api/namespaces_spec.rb
+- ee/spec/requests/api/project_approval_rules_spec.rb
+- ee/spec/requests/api/project_approval_settings_spec.rb
+- ee/spec/requests/api/project_approvals_spec.rb
+- ee/spec/requests/api/project_milestones_spec.rb
+- ee/spec/requests/api/project_push_rule_spec.rb
+- ee/spec/requests/api/projects_spec.rb
+- ee/spec/requests/api/resource_weight_events_spec.rb
+- ee/spec/requests/api/search_spec.rb
+- ee/spec/requests/api/users_spec.rb
+- ee/spec/requests/custom_roles/admin_merge_request/request_spec.rb
+- ee/spec/requests/custom_roles/admin_vulnerability/request_spec.rb
+- ee/spec/requests/custom_roles/manage_project_access_tokens/request_spec.rb
+- ee/spec/requests/projects/analytics/code_reviews_controller_spec.rb
+- ee/spec/requests/projects/issues_controller_spec.rb
+- ee/spec/requests/projects/metrics_controller_spec.rb
+- ee/spec/requests/projects/security/policies_controller_spec.rb
+- ee/spec/requests/projects/tracing_controller_spec.rb
+- ee/spec/requests/subscriptions/hand_raise_leads_spec.rb
+- ee/spec/requests/trial_registrations_controller_spec.rb
+- ee/spec/requests/users_controller_spec.rb
+- ee/spec/serializers/clusters/environment_serializer_spec.rb
+- ee/spec/services/analytics/cycle_analytics/data_loader_service_spec.rb
+- ee/spec/services/ci/minutes/additional_packs/change_namespace_service_spec.rb
+- ee/spec/services/ci/minutes/update_project_and_namespace_usage_service_spec.rb
+- ee/spec/services/ee/auth/container_registry_authentication_service_spec.rb
+- ee/spec/services/ee/commits/create_service_spec.rb
+- ee/spec/services/ee/issues/create_service_spec.rb
+- ee/spec/services/ee/issues/update_service_spec.rb
+- ee/spec/services/ee/notes/create_service_spec.rb
+- ee/spec/services/ee/notes/quick_actions_service_spec.rb
+- ee/spec/services/ee/notification_service_spec.rb
+- ee/spec/services/ee/post_receive_service_spec.rb
+- ee/spec/services/ee/projects/remove_paid_features_service_spec.rb
+- ee/spec/services/ee/users/destroy_service_spec.rb
+- ee/spec/services/ee/users/migrate_records_to_ghost_user_service_spec.rb
+- ee/spec/services/epic_issues/create_service_spec.rb
+- ee/spec/services/epics/create_service_spec.rb
+- ee/spec/services/epics/issue_promote_service_spec.rb
+- ee/spec/services/epics/transfer_service_spec.rb
+- ee/spec/services/epics/update_service_spec.rb
+- ee/spec/services/namespaces/service_accounts/create_service_spec.rb
+- ee/spec/services/projects/create_from_template_service_spec.rb
+- ee/spec/services/projects/create_service_spec.rb
+- ee/spec/services/projects/destroy_service_spec.rb
+- ee/spec/services/projects/fork_service_spec.rb
+- ee/spec/services/projects/mark_for_deletion_service_spec.rb
+- ee/spec/services/projects/restore_service_spec.rb
+- ee/spec/services/projects/transfer_service_spec.rb
+- ee/spec/services/projects/update_service_spec.rb
+- ee/spec/services/quick_actions/interpret_service_spec.rb
+- ee/spec/services/todo_service_spec.rb
+- ee/spec/services/users/service_accounts/create_service_spec.rb
+- ee/spec/services/vulnerability_exports/exporters/csv_service_spec.rb
+- ee/spec/tasks/gitlab/elastic_rake_spec.rb
+- ee/spec/views/admin/dashboard/index.html.haml_spec.rb
+- ee/spec/views/admin/users/show.html.haml_spec.rb
+- ee/spec/views/compliance_management/compliance_framework/_project_settings.html.haml_spec.rb
+- ee/spec/views/profiles/preferences/show.html.haml_spec.rb
+- ee/spec/views/shared/billings/_eoa_bronze_plan_banner.html.haml_spec.rb
+- ee/spec/views/shared/promotions/_promotion_link_project.html.haml_spec.rb
+- ee/spec/workers/groups/enterprise_users/associate_worker_spec.rb
+- ee/spec/workers/groups/enterprise_users/bulk_associate_by_domain_worker_spec.rb
+- ee/spec/workers/new_epic_worker_spec.rb
+- spec/controllers/admin/users_controller_spec.rb
+- spec/controllers/concerns/checks_collaboration_spec.rb
+- spec/controllers/concerns/routable_actions_spec.rb
+- spec/controllers/dashboard/snippets_controller_spec.rb
+- spec/controllers/dashboard/todos_controller_spec.rb
+- spec/controllers/explore/snippets_controller_spec.rb
+- spec/controllers/groups/shared_projects_controller_spec.rb
+- spec/controllers/import/bitbucket_controller_spec.rb
+- spec/controllers/import/bitbucket_server_controller_spec.rb
+- spec/controllers/import/gitea_controller_spec.rb
+- spec/controllers/import/github_controller_spec.rb
+- spec/controllers/oauth/applications_controller_spec.rb
+- spec/controllers/oauth/authorizations_controller_spec.rb
+- spec/controllers/profiles/notifications_controller_spec.rb
+- spec/controllers/profiles/preferences_controller_spec.rb
+- spec/controllers/profiles_controller_spec.rb
+- spec/controllers/projects/blame_controller_spec.rb
+- spec/controllers/projects/blob_controller_spec.rb
+- spec/controllers/projects/ci/pipeline_editor_controller_spec.rb
+- spec/controllers/projects/commit_controller_spec.rb
+- spec/controllers/projects/forks_controller_spec.rb
+- spec/controllers/projects/labels_controller_spec.rb
+- spec/controllers/projects/merge_requests/drafts_controller_spec.rb
+- spec/controllers/projects/milestones_controller_spec.rb
+- spec/controllers/projects/project_members_controller_spec.rb
+- spec/controllers/projects/snippets_controller_spec.rb
+- spec/controllers/projects/tree_controller_spec.rb
+- spec/controllers/projects/web_ide_schemas_controller_spec.rb
+- spec/controllers/projects/web_ide_terminals_controller_spec.rb
+- spec/controllers/projects/wikis_controller_spec.rb
+- spec/controllers/projects_controller_spec.rb
+- spec/controllers/search_controller_spec.rb
+- spec/controllers/snippets/notes_controller_spec.rb
+- spec/features/abuse_report_spec.rb
+- spec/features/admin/admin_abuse_reports_spec.rb
+- spec/features/admin/admin_appearance_spec.rb
+- spec/features/admin/admin_disables_two_factor_spec.rb
+- spec/features/admin/admin_groups_spec.rb
+- spec/features/admin/admin_mode/workers_spec.rb
+- spec/features/admin/admin_mode_spec.rb
+- spec/features/admin/admin_projects_spec.rb
+- spec/features/admin/users/admin_impersonates_user_spec.rb
+- spec/features/admin/users/admin_sees_unconfirmed_user_spec.rb
+- spec/features/admin/users/admin_sees_user_spec.rb
+- spec/features/admin/users/user_spec.rb
+- spec/features/admin/users/users_spec.rb
+- spec/features/atom/users_spec.rb
+- spec/features/boards/boards_spec.rb
+- spec/features/breadcrumbs_schema_markup_spec.rb
+- spec/features/calendar_spec.rb
+- spec/features/canonical_link_spec.rb
+- spec/features/commits_spec.rb
+- spec/features/contextual_sidebar_spec.rb
+- spec/features/dashboard/datetime_on_tooltips_spec.rb
+- spec/features/dashboard/issuables_counter_spec.rb
+- spec/features/dashboard/issues_filter_spec.rb
+- spec/features/dashboard/milestones_spec.rb
+- spec/features/dashboard/project_member_activity_index_spec.rb
+- spec/features/dashboard/projects_spec.rb
+- spec/features/dashboard/user_filters_projects_spec.rb
+- spec/features/discussion_comments/merge_request_spec.rb
+- spec/features/discussion_comments/snippets_spec.rb
+- spec/features/expand_collapse_diffs_spec.rb
+- spec/features/explore/catalog/catalog_settings_spec.rb
+- spec/features/file_uploads/project_import_spec.rb
+- spec/features/file_uploads/user_avatar_spec.rb
+- spec/features/frequently_visited_projects_and_groups_spec.rb
+- spec/features/global_search_spec.rb
+- spec/features/groups/group_settings_spec.rb
+- spec/features/groups/participants_autocomplete_spec.rb
+- spec/features/groups_spec.rb
+- spec/features/help_dropdown_spec.rb
+- spec/features/ide/user_opens_merge_request_spec.rb
+- spec/features/ide_spec.rb
+- spec/features/import/manifest_import_spec.rb
+- spec/features/incidents/user_creates_new_incident_spec.rb
+- spec/features/incidents/user_views_incident_spec.rb
+- spec/features/issuables/markdown_references/jira_spec.rb
+- spec/features/issuables/shortcuts_issuable_spec.rb
+- spec/features/issuables/user_sees_sidebar_spec.rb
+- spec/features/issues/confidential_notes_spec.rb
+- spec/features/issues/create_issue_for_discussions_in_merge_request_spec.rb
+- spec/features/issues/create_issue_for_single_discussion_in_merge_request_spec.rb
+- spec/features/issues/csv_spec.rb
+- spec/features/issues/discussion_lock_spec.rb
+- spec/features/issues/form_spec.rb
+- spec/features/issues/issue_detail_spec.rb
+- spec/features/issues/issue_sidebar_spec.rb
+- spec/features/issues/markdown_toolbar_spec.rb
+- spec/features/issues/move_spec.rb
+- spec/features/issues/note_polling_spec.rb
+- spec/features/issues/notes_on_issues_spec.rb
+- spec/features/issues/related_issues_spec.rb
+- spec/features/issues/user_comments_on_issue_spec.rb
+- spec/features/issues/user_creates_branch_and_merge_request_spec.rb
+- spec/features/issues/user_creates_issue_spec.rb
+- spec/features/issues/user_edits_issue_spec.rb
+- spec/features/issues/user_interacts_with_awards_spec.rb
+- spec/features/issues/user_resets_their_incoming_email_token_spec.rb
+- spec/features/issues/user_sorts_issues_spec.rb
+- spec/features/issues/user_toggles_subscription_spec.rb
+- spec/features/issues/user_uses_quick_actions_spec.rb
+- spec/features/jira_connect/branches_spec.rb
+- spec/features/markdown/gitlab_flavored_markdown_spec.rb
+- spec/features/markdown/markdown_spec.rb
+- spec/features/merge_request/admin_views_hidden_merge_request_spec.rb
+- spec/features/merge_request/batch_comments_spec.rb
+- spec/features/merge_request/close_reopen_report_toggle_spec.rb
+- spec/features/merge_request/hide_default_award_emojis_spec.rb
+- spec/features/merge_request/maintainer_edits_fork_spec.rb
+- spec/features/merge_request/merge_request_discussion_lock_spec.rb
+- spec/features/merge_request/user_accepts_merge_request_spec.rb
+- spec/features/merge_request/user_allows_commits_from_memebers_who_can_merge_spec.rb
+- spec/features/merge_request/user_approves_spec.rb
+- spec/features/merge_request/user_assigns_themselves_reviewer_spec.rb
+- spec/features/merge_request/user_assigns_themselves_spec.rb
+- spec/features/merge_request/user_closes_reopens_merge_request_state_spec.rb
+- spec/features/merge_request/user_comments_on_commit_spec.rb
+- spec/features/merge_request/user_comments_on_diff_spec.rb
+- spec/features/merge_request/user_comments_on_merge_request_spec.rb
+- spec/features/merge_request/user_comments_on_whitespace_hidden_diff_spec.rb
+- spec/features/merge_request/user_creates_custom_emoji_spec.rb
+- spec/features/merge_request/user_creates_discussion_on_diff_file_spec.rb
+- spec/features/merge_request/user_creates_merge_request_spec.rb
+- spec/features/merge_request/user_creates_mr_spec.rb
+- spec/features/merge_request/user_edits_assignees_sidebar_spec.rb
+- spec/features/merge_request/user_edits_merge_request_spec.rb
+- spec/features/merge_request/user_edits_mr_spec.rb
+- spec/features/merge_request/user_edits_reviewers_sidebar_spec.rb
+- spec/features/merge_request/user_locks_discussion_spec.rb
+- spec/features/merge_request/user_manages_subscription_spec.rb
+- spec/features/merge_request/user_marks_merge_request_as_draft_spec.rb
+- spec/features/merge_request/user_opens_checkout_branch_modal_spec.rb
+- spec/features/merge_request/user_opens_context_commits_modal_spec.rb
+- spec/features/merge_request/user_resolves_diff_notes_and_discussions_resolve_spec.rb
+- spec/features/merge_request/user_reverts_merge_request_spec.rb
+- spec/features/merge_request/user_sees_check_out_branch_modal_spec.rb
+- spec/features/merge_request/user_sees_cherry_pick_modal_spec.rb
+- spec/features/merge_request/user_sees_deployment_widget_spec.rb
+- spec/features/merge_request/user_sees_diff_spec.rb
+- spec/features/merge_request/user_sees_merge_request_file_tree_sidebar_spec.rb
+- spec/features/merge_request/user_sees_merge_widget_spec.rb
+- spec/features/merge_request/user_sees_pipelines_spec.rb
+- spec/features/merge_request/user_selects_branches_for_new_mr_spec.rb
+- spec/features/merge_request/user_squashes_merge_request_spec.rb
+- spec/features/merge_request/user_suggests_changes_on_diff_spec.rb
+- spec/features/merge_request/user_tries_to_access_private_project_info_through_new_mr_spec.rb
+- spec/features/merge_request/user_uses_quick_actions_spec.rb
+- spec/features/merge_request/user_views_auto_expanding_diff_spec.rb
+- spec/features/merge_request/user_views_comment_on_diff_file_spec.rb
+- spec/features/merge_request/user_views_diffs_file_by_file_spec.rb
+- spec/features/merge_request/user_views_open_merge_request_spec.rb
+- spec/features/merge_requests/user_sees_empty_state_spec.rb
+- spec/features/monitor_sidebar_link_spec.rb
+- spec/features/nav/new_nav_for_everyone_callout_spec.rb
+- spec/features/nav/new_nav_invite_members_spec.rb
+- spec/features/nav/pinned_nav_items_spec.rb
+- spec/features/oauth_provider_authorize_spec.rb
+- spec/features/participants_autocomplete_spec.rb
+- spec/features/profile_spec.rb
+- spec/features/profiles/account_spec.rb
+- spec/features/profiles/active_sessions_spec.rb
+- spec/features/profiles/chat_names_spec.rb
+- spec/features/profiles/emails_spec.rb
+- spec/features/profiles/gpg_keys_spec.rb
+- spec/features/profiles/keys_spec.rb
+- spec/features/profiles/list_users_comment_template_spec.rb
+- spec/features/profiles/oauth_applications_spec.rb
+- spec/features/profiles/password_spec.rb
+- spec/features/profiles/personal_access_tokens_spec.rb
+- spec/features/profiles/two_factor_auths_spec.rb
+- spec/features/profiles/user_changes_notified_of_own_activity_spec.rb
+- spec/features/profiles/user_creates_comment_template_spec.rb
+- spec/features/profiles/user_deletes_comment_template_spec.rb
+- spec/features/profiles/user_edit_preferences_spec.rb
+- spec/features/profiles/user_edit_profile_spec.rb
+- spec/features/profiles/user_manages_applications_spec.rb
+- spec/features/profiles/user_manages_emails_spec.rb
+- spec/features/profiles/user_search_settings_spec.rb
+- spec/features/profiles/user_updates_comment_template_spec.rb
+- spec/features/profiles/user_uses_comment_template_spec.rb
+- spec/features/profiles/user_visits_notifications_tab_spec.rb
+- spec/features/profiles/user_visits_profile_authentication_log_spec.rb
+- spec/features/profiles/user_visits_profile_preferences_page_spec.rb
+- spec/features/profiles/user_visits_profile_spec.rb
+- spec/features/projects/active_tabs_spec.rb
+- spec/features/projects/activity/user_sees_private_activity_spec.rb
+- spec/features/projects/blobs/blame_spec.rb
+- spec/features/projects/blobs/blob_show_spec.rb
+- spec/features/projects/blobs/edit_spec.rb
+- spec/features/projects/blobs/user_views_pipeline_editor_button_spec.rb
+- spec/features/projects/branches/user_creates_branch_spec.rb
+- spec/features/projects/branches_spec.rb
+- spec/features/projects/commit/cherry_pick_spec.rb
+- spec/features/projects/commit/user_reverts_commit_spec.rb
+- spec/features/projects/commits/user_browses_commits_spec.rb
+- spec/features/projects/confluence/user_views_confluence_page_spec.rb
+- spec/features/projects/environments/environment_spec.rb
+- spec/features/projects/environments/environments_spec.rb
+- spec/features/projects/feature_flags/user_creates_feature_flag_spec.rb
+- spec/features/projects/feature_flags/user_deletes_feature_flag_spec.rb
+- spec/features/projects/feature_flags/user_sees_feature_flag_list_spec.rb
+- spec/features/projects/feature_flags/user_updates_feature_flag_spec.rb
+- spec/features/projects/features_visibility_spec.rb
+- spec/features/projects/files/editing_a_file_spec.rb
+- spec/features/projects/files/project_owner_creates_license_file_spec.rb
+- spec/features/projects/files/user_browses_files_spec.rb
+- spec/features/projects/files/user_creates_directory_spec.rb
+- spec/features/projects/files/user_creates_files_spec.rb
+- spec/features/projects/files/user_deletes_files_spec.rb
+- spec/features/projects/files/user_edits_files_spec.rb
+- spec/features/projects/files/user_find_file_spec.rb
+- spec/features/projects/files/user_reads_pipeline_status_spec.rb
+- spec/features/projects/files/user_replaces_files_spec.rb
+- spec/features/projects/files/user_searches_for_files_spec.rb
+- spec/features/projects/files/user_uploads_files_spec.rb
+- spec/features/projects/fork_spec.rb
+- spec/features/projects/forks/fork_list_spec.rb
+- spec/features/projects/graph_spec.rb
+- spec/features/projects/import_export/import_file_spec.rb
+- spec/features/projects/issuable_templates_spec.rb
+- spec/features/projects/issues/design_management/user_links_to_designs_in_issue_spec.rb
+- spec/features/projects/issues/design_management/user_views_design_spec.rb
+- spec/features/projects/issues/email_participants_spec.rb
+- spec/features/projects/jobs/permissions_spec.rb
+- spec/features/projects/jobs/user_browses_job_spec.rb
+- spec/features/projects/jobs/user_triggers_manual_job_with_variables_spec.rb
+- spec/features/projects/jobs_spec.rb
+- spec/features/projects/labels/issues_sorted_by_priority_spec.rb
+- spec/features/projects/labels/update_prioritization_spec.rb
+- spec/features/projects/labels/user_views_labels_spec.rb
+- spec/features/projects/members/group_member_cannot_request_access_to_his_group_project_spec.rb
+- spec/features/projects/members/group_requester_cannot_request_access_to_project_spec.rb
+- spec/features/projects/members/manage_members_spec.rb
+- spec/features/projects/members/member_leaves_project_spec.rb
+- spec/features/projects/members/sorting_spec.rb
+- spec/features/projects/members/tabs_spec.rb
+- spec/features/projects/members/user_requests_access_spec.rb
+- spec/features/projects/merge_request_button_spec.rb
+- spec/features/projects/milestones/milestone_editing_spec.rb
+- spec/features/projects/milestones/milestone_showing_spec.rb
+- spec/features/projects/milestones/milestone_spec.rb
+- spec/features/projects/milestones/milestones_sorting_spec.rb
+- spec/features/projects/milestones/new_spec.rb
+- spec/features/projects/milestones/user_interacts_with_labels_spec.rb
+- spec/features/projects/navbar_spec.rb
+- spec/features/projects/network_graph_spec.rb
+- spec/features/projects/new_project_from_template_spec.rb
+- spec/features/projects/new_project_spec.rb
+- spec/features/projects/pipeline_schedules_spec.rb
+- spec/features/projects/settings/merge_requests_settings_spec.rb
+- spec/features/projects/settings/monitor_settings_spec.rb
+- spec/features/projects/settings/registry_settings_cleanup_tags_spec.rb
+- spec/features/projects/settings/registry_settings_spec.rb
+- spec/features/projects/settings/user_archives_project_spec.rb
+- spec/features/projects/settings/user_changes_default_branch_spec.rb
+- spec/features/projects/settings/user_manages_merge_requests_settings_spec.rb
+- spec/features/projects/settings/user_renames_a_project_spec.rb
+- spec/features/projects/settings/user_searches_in_settings_spec.rb
+- spec/features/projects/settings/user_tags_project_spec.rb
+- spec/features/projects/settings/user_transfers_a_project_spec.rb
+- spec/features/projects/settings/visibility_settings_spec.rb
+- spec/features/projects/show/clone_button_spec.rb
+- spec/features/projects/show/download_buttons_spec.rb
+- spec/features/projects/show/no_password_spec.rb
+- spec/features/projects/show/redirects_spec.rb
+- spec/features/projects/show/rss_spec.rb
+- spec/features/projects/show/user_interacts_with_auto_devops_banner_spec.rb
+- spec/features/projects/show/user_interacts_with_stars_spec.rb
+- spec/features/projects/show/user_sees_collaboration_links_spec.rb
+- spec/features/projects/show/user_sees_git_instructions_spec.rb
+- spec/features/projects/show/user_sees_setup_shortcut_buttons_spec.rb
+- spec/features/projects/show/user_uploads_files_spec.rb
+- spec/features/projects/snippets/show_spec.rb
+- spec/features/projects/snippets/user_updates_snippet_spec.rb
+- spec/features/projects/sourcegraph_csp_spec.rb
+- spec/features/projects/tags/user_views_tag_spec.rb
+- spec/features/projects/tags/user_views_tags_spec.rb
+- spec/features/projects/tree/create_directory_spec.rb
+- spec/features/projects/tree/create_file_spec.rb
+- spec/features/projects/tree/rss_spec.rb
+- spec/features/projects/tree/tree_show_spec.rb
+- spec/features/projects/user_creates_project_spec.rb
+- spec/features/projects/user_sees_sidebar_spec.rb
+- spec/features/projects/user_sees_user_popover_spec.rb
+- spec/features/projects/user_uses_shortcuts_spec.rb
+- spec/features/projects/wiki/user_views_wiki_empty_spec.rb
+- spec/features/projects/wikis_spec.rb
+- spec/features/projects/work_items/work_item_spec.rb
+- spec/features/projects_spec.rb
+- spec/features/reportable_note/merge_request_spec.rb
+- spec/features/search/user_searches_for_code_spec.rb
+- spec/features/search/user_searches_for_comments_spec.rb
+- spec/features/search/user_searches_for_commits_spec.rb
+- spec/features/search/user_searches_for_issues_spec.rb
+- spec/features/search/user_searches_for_merge_requests_spec.rb
+- spec/features/search/user_searches_for_milestones_spec.rb
+- spec/features/search/user_searches_for_wiki_pages_spec.rb
+- spec/features/search/user_uses_header_search_field_spec.rb
+- spec/features/search/user_uses_search_filters_spec.rb
+- spec/features/security/dashboard_access_spec.rb
+- spec/features/security/profile_access_spec.rb
+- spec/features/security/project/internal_access_spec.rb
+- spec/features/security/project/private_access_spec.rb
+- spec/features/security/project/public_access_spec.rb
+- spec/features/security/project/snippet/internal_access_spec.rb
+- spec/features/security/project/snippet/private_access_spec.rb
+- spec/features/security/project/snippet/public_access_spec.rb
+- spec/features/snippets/notes_on_personal_snippets_spec.rb
+- spec/features/tags/developer_views_tags_spec.rb
+- spec/features/task_lists_spec.rb
+- spec/features/unsubscribe_links_spec.rb
+- spec/features/uploads/user_uploads_avatar_to_profile_spec.rb
+- spec/features/uploads/user_uploads_file_to_note_spec.rb
+- spec/features/user_opens_link_to_comment_spec.rb
+- spec/features/user_sees_active_nav_items_spec.rb
+- spec/features/user_settings/active_sessions_spec.rb
+- spec/features/user_settings/password_spec.rb
+- spec/features/user_settings/personal_access_tokens_spec.rb
+- spec/features/users/login_spec.rb
+- spec/features/users/overview_spec.rb
+- spec/features/users/rss_spec.rb
+- spec/features/users/show_spec.rb
+- spec/features/users/signup_spec.rb
+- spec/features/users/snippets_spec.rb
+- spec/features/users/user_browses_projects_on_user_page_spec.rb
+- spec/features/webauthn_spec.rb
+- spec/features/whats_new_spec.rb
+- spec/finders/admin/projects_finder_spec.rb
+- spec/finders/autocomplete/move_to_project_finder_spec.rb
+- spec/finders/autocomplete/routes_finder_spec.rb
+- spec/finders/events_finder_spec.rb
+- spec/finders/fork_targets_finder_spec.rb
+- spec/finders/issues_finder_spec.rb
+- spec/finders/members_finder_spec.rb
+- spec/finders/notes_finder_spec.rb
+- spec/finders/packages/npm/package_finder_spec.rb
+- spec/finders/personal_projects_finder_spec.rb
+- spec/finders/projects/topics_finder_spec.rb
+- spec/finders/work_items/work_items_finder_spec.rb
+- spec/frontend/fixtures/autocomplete_sources.rb
+- spec/frontend/fixtures/issues.rb
+- spec/frontend/fixtures/namespaces.rb
+- spec/frontend/fixtures/pipeline_details.rb
+- spec/frontend/fixtures/snippet.rb
+- spec/frontend/fixtures/users.rb
+- spec/frontend/fixtures/webauthn.rb
+- spec/graphql/mutations/users/set_namespace_commit_email_spec.rb
+- spec/graphql/resolvers/board_list_issues_resolver_spec.rb
+- spec/graphql/resolvers/board_lists_resolver_spec.rb
+- spec/graphql/resolvers/board_resolver_spec.rb
+- spec/graphql/resolvers/boards_resolver_spec.rb
+- spec/graphql/resolvers/namespace_projects_resolver_spec.rb
+- spec/graphql/resolvers/projects/fork_targets_resolver_spec.rb
+- spec/graphql/resolvers/recent_boards_resolver_spec.rb
+- spec/graphql/types/project_type_spec.rb
+- spec/helpers/avatars_helper_spec.rb
+- spec/helpers/award_emoji_helper_spec.rb
+- spec/helpers/blob_helper_spec.rb
+- spec/helpers/boards_helper_spec.rb
+- spec/helpers/events_helper_spec.rb
+- spec/helpers/gitlab_routing_helper_spec.rb
+- spec/helpers/markup_helper_spec.rb
+- spec/helpers/notes_helper_spec.rb
+- spec/helpers/projects_helper_spec.rb
+- spec/helpers/search_helper_spec.rb
+- spec/helpers/submodule_helper_spec.rb
+- spec/helpers/tree_helper_spec.rb
+- spec/lib/banzai/filter/references/user_reference_filter_spec.rb
+- spec/lib/banzai/filter/truncate_visible_filter_spec.rb
+- spec/lib/banzai/reference_parser/project_parser_spec.rb
+- spec/lib/bulk_imports/groups/loaders/group_loader_spec.rb
+- spec/lib/constraints/user_url_constrainer_spec.rb
+- spec/lib/feature_spec.rb
+- spec/lib/generators/gitlab/partitioning/foreign_keys_generator_spec.rb
+- spec/lib/gitlab/analytics/usage_trends/workers_argument_builder_spec.rb
+- spec/lib/gitlab/background_migration/backfill_environment_tiers_spec.rb
+- spec/lib/gitlab/background_migration/backfill_root_storage_statistics_fork_storage_sizes_spec.rb
+- spec/lib/gitlab/background_migration/backfill_snippet_repositories_spec.rb
+- spec/lib/gitlab/background_migration/backfill_user_details_fields_spec.rb
+- spec/lib/gitlab/background_migration/job_coordinator_spec.rb
+- spec/lib/gitlab/checks/container_moved_spec.rb
+- spec/lib/gitlab/checks/project_created_spec.rb
+- spec/lib/gitlab/ci/trace/archive_spec.rb
+- spec/lib/gitlab/database/decomposition/migrate_spec.rb
+- spec/lib/gitlab/database/load_balancing/transaction_leaking_spec.rb
+- spec/lib/gitlab/database/load_balancing_spec.rb
+- spec/lib/gitlab/database/lock_writes_manager_spec.rb
+- spec/lib/gitlab/database/migration_helpers/loose_foreign_key_helpers_spec.rb
+- spec/lib/gitlab/database/migration_helpers/v2_spec.rb
+- spec/lib/gitlab/database/migration_helpers_spec.rb
+- spec/lib/gitlab/database/migrations/observers/transaction_duration_spec.rb
+- spec/lib/gitlab/database/migrations/test_batched_background_runner_spec.rb
+- spec/lib/gitlab/database/migrations/timeout_helpers_spec.rb
+- spec/lib/gitlab/database/partitioning/list/convert_table_spec.rb
+- spec/lib/gitlab/database/partitioning/monthly_strategy_spec.rb
+- spec/lib/gitlab/database/partitioning/sliding_list_strategy_spec.rb
+- spec/lib/gitlab/database/partitioning_migration_helpers/backfill_partitioned_table_spec.rb
+- spec/lib/gitlab/database/partitioning_migration_helpers/table_management_helpers_spec.rb
+- spec/lib/gitlab/database/reflection_spec.rb
+- spec/lib/gitlab/database/transaction/observer_spec.rb
+- spec/lib/gitlab/database/with_lock_retries_spec.rb
+- spec/lib/gitlab/error_tracking/processor/sidekiq_processor_spec.rb
+- spec/lib/gitlab/fogbugz_import/project_creator_spec.rb
+- spec/lib/gitlab/git_access_project_spec.rb
+- spec/lib/gitlab/git_access_spec.rb
+- spec/lib/gitlab/gitaly_client/with_feature_flag_actors_spec.rb
+- spec/lib/gitlab/import_export/importer_spec.rb
+- spec/lib/gitlab/import_export/project/export_task_spec.rb
+- spec/lib/gitlab/import_export/project/import_task_spec.rb
+- spec/lib/gitlab/import_export/snippet_repo_restorer_spec.rb
+- spec/lib/gitlab/import_export/snippet_repo_saver_spec.rb
+- spec/lib/gitlab/import_export/snippets_repo_restorer_spec.rb
+- spec/lib/gitlab/issuable_metadata_spec.rb
+- spec/lib/gitlab/legacy_github_import/project_creator_spec.rb
+- spec/lib/gitlab/noteable_metadata_spec.rb
+- spec/lib/gitlab/patch/redis_cache_store_spec.rb
+- spec/lib/gitlab/project_search_results_spec.rb
+- spec/lib/gitlab/project_template_spec.rb
+- spec/lib/gitlab/reference_extractor_spec.rb
+- spec/lib/gitlab/relative_positioning/mover_spec.rb
+- spec/lib/gitlab/sample_data_template_spec.rb
+- spec/lib/gitlab/themes_spec.rb
+- spec/lib/gitlab/utils/username_and_email_generator_spec.rb
+- spec/mailers/emails/service_desk_spec.rb
+- spec/mailers/notify_spec.rb
+- spec/migrations/20221002234454_finalize_group_member_namespace_id_migration_spec.rb
+- spec/migrations/20221018050323_add_objective_and_keyresult_to_work_item_types_spec.rb
+- spec/migrations/20221018062308_schedule_backfill_project_namespace_details_spec.rb
+- spec/migrations/20221018095434_schedule_disable_legacy_open_source_license_for_projects_less_than_five_mb_spec.rb
+- spec/migrations/20221018193635_ensure_task_note_renaming_background_migration_finished_spec.rb
+- spec/migrations/20221021145820_create_routing_table_for_builds_metadata_v2_spec.rb
+- spec/migrations/20221025043930_change_default_value_on_password_last_changed_at_to_user_details_spec.rb
+- spec/migrations/20221028022627_add_index_on_password_last_changed_at_to_user_details_spec.rb
+- spec/migrations/20221101032521_add_default_preferred_language_to_application_settings_spec.rb
+- spec/migrations/20221101032600_add_text_limit_to_default_preferred_language_on_application_settings_spec.rb
+- spec/migrations/20221102090943_create_second_partition_for_builds_metadata_spec.rb
+- spec/migrations/20221102231130_finalize_backfill_user_details_fields_spec.rb
+- spec/migrations/20221104115712_backfill_project_statistics_storage_size_without_uploads_size_spec.rb
+- spec/migrations/20221110152133_delete_orphans_approval_rules_spec.rb
+- spec/migrations/20221115173607_ensure_work_item_type_backfill_migration_finished_spec.rb
+- spec/migrations/20221122132812_schedule_prune_stale_project_export_jobs_spec.rb
+- spec/migrations/20221123133054_queue_reset_status_on_container_repositories_spec.rb
+- spec/migrations/20221209110934_update_import_sources_on_application_settings_spec.rb
+- spec/migrations/20221209110935_fix_update_import_sources_on_application_settings_spec.rb
+- spec/migrations/20221209235940_cleanup_o_auth_access_tokens_with_null_expires_in_spec.rb
+- spec/migrations/20221210154044_update_active_billable_users_index_spec.rb
+- spec/migrations/20221215151822_schedule_backfill_releases_author_id_spec.rb
+- spec/migrations/20221219122320_copy_clickhouse_connection_string_to_encrypted_var_spec.rb
+- spec/migrations/20221220131020_bump_default_partition_id_value_for_ci_tables_spec.rb
+- spec/migrations/20221221110733_remove_temp_index_for_project_statistics_upload_size_migration_spec.rb
+- spec/migrations/20221223123019_delete_queued_jobs_for_vulnerabilities_feedback_migration_spec.rb
+- spec/migrations/20221226153252_queue_fix_incoherent_packages_size_on_project_statistics_spec.rb
+- spec/migrations/20230116111252_finalize_todo_sanitization_spec.rb
+- spec/migrations/20230117114739_clear_duplicate_jobs_cookies_spec.rb
+- spec/migrations/20230118144623_schedule_migration_for_remediation_spec.rb
+- spec/migrations/20230125195503_queue_backfill_compliance_violations_spec.rb
+- spec/migrations/20230130182412_schedule_create_vulnerability_links_migration_spec.rb
+- spec/migrations/20230131125844_add_project_id_name_id_version_index_to_installable_npm_packages_spec.rb
+- spec/migrations/20230201171450_finalize_backfill_environment_tier_migration_spec.rb
+- spec/migrations/20230208125736_schedule_migration_for_links_spec.rb
+- spec/migrations/20230209222452_schedule_remove_project_group_link_with_missing_groups_spec.rb
+- spec/migrations/20230220102212_swap_columns_ci_build_needs_big_int_conversion_spec.rb
+- spec/migrations/20230221093533_add_tmp_partial_index_on_vulnerability_report_types_spec.rb
+- spec/migrations/20230223065753_finalize_nullify_creator_id_of_orphaned_projects_spec.rb
+- spec/migrations/20230224085743_update_issues_internal_id_scope_spec.rb
+- spec/migrations/20230224144233_migrate_evidences_from_raw_metadata_spec.rb
+- spec/migrations/20230228142350_add_notifications_work_item_widget_spec.rb
+- spec/migrations/20230302811133_re_migrate_redis_slot_keys_spec.rb
+- spec/migrations/20230309071242_delete_security_policy_bot_users_spec.rb
+- spec/migrations/20230313142631_backfill_ml_candidates_package_id_spec.rb
+- spec/migrations/20230313150531_reschedule_migration_for_remediation_spec.rb
+- spec/migrations/20230317004428_migrate_daily_redis_hll_events_to_weekly_aggregation_spec.rb
+- spec/migrations/20230317162059_add_current_user_todos_work_item_widget_spec.rb
+- spec/migrations/20230321170823_backfill_ml_candidates_internal_id_spec.rb
+- spec/migrations/20230322085041_remove_user_namespace_records_from_vsa_aggregation_spec.rb
+- spec/migrations/20230327123333_backfill_product_analytics_data_collector_host_spec.rb
+- spec/migrations/20230328030101_add_secureflag_training_provider_spec.rb
+- spec/migrations/20230328100534_truncate_error_tracking_tables_spec.rb
+- spec/migrations/20230412141541_reschedule_links_avoiding_duplication_spec.rb
+- spec/migrations/20230412185837_queue_populate_vulnerability_dismissal_fields_spec.rb
+- spec/migrations/20230419105225_remove_phabricator_from_application_settings_spec.rb
+- spec/migrations/20230426085615_queue_backfill_resource_link_events_spec.rb
+- spec/migrations/20230426102200_fix_import_sources_on_application_settings_after_phabricator_removal_spec.rb
+- spec/migrations/20230428085332_remove_shimo_zentao_integration_records_spec.rb
+- spec/migrations/20230515153600_finalize_back_fill_prepared_at_merge_requests_spec.rb
+- spec/migrations/20230522220709_ensure_incident_work_item_type_backfill_is_finished_spec.rb
+- spec/migrations/add_namespaces_emails_enabled_column_data_spec.rb
+- spec/migrations/add_okr_hierarchy_restrictions_spec.rb
+- spec/migrations/add_projects_emails_enabled_column_data_spec.rb
+- spec/migrations/cleanup_bigint_conversion_for_merge_request_metrics_for_self_hosts_spec.rb
+- spec/migrations/cleanup_vulnerability_state_transitions_with_same_from_state_to_state_spec.rb
+- spec/migrations/delete_migrate_shared_vulnerability_scanners_spec.rb
+- spec/migrations/drop_packages_events_table_spec.rb
+- spec/migrations/ensure_commit_user_mentions_note_id_bigint_backfill_is_finished_for_gitlab_dot_com_spec.rb
+- spec/migrations/ensure_design_user_mentions_note_id_bigint_backfill_is_finished_for_gitlab_dot_com_spec.rb
+- spec/migrations/ensure_epic_user_mentions_bigint_backfill_is_finished_for_gitlab_dot_com_spec.rb
+- spec/migrations/ensure_issue_user_mentions_bigint_backfill_is_finished_for_gitlab_dot_com_spec.rb
+- spec/migrations/ensure_merge_request_metrics_id_bigint_backfill_is_finished_for_gitlab_dot_com_spec.rb
+- spec/migrations/ensure_mr_user_mentions_note_id_bigint_backfill_is_finished_for_gitlab_dot_com_spec.rb
+- spec/migrations/ensure_suggestions_note_id_bigint_backfill_is_finished_for_gitlab_dot_com_spec.rb
+- spec/migrations/ensure_timelogs_note_id_bigint_backfill_is_finished_for_gitlab_dot_com_spec.rb
+- spec/migrations/ensure_unique_debian_packages_spec.rb
+- spec/migrations/ensure_vum_bigint_backfill_is_finished_for_gl_dot_com_spec.rb
+- spec/migrations/finalize_invalid_member_cleanup_spec.rb
+- spec/migrations/finalize_issues_iid_scoping_to_namespace_spec.rb
+- spec/migrations/finalize_issues_namespace_id_backfilling_spec.rb
+- spec/migrations/insert_daily_invites_trial_plan_limits_spec.rb
+- spec/migrations/nullify_last_error_from_project_mirror_data_spec.rb
+- spec/migrations/queue_backfill_prepared_at_data_spec.rb
+- spec/migrations/queue_backfill_user_details_fields_spec.rb
+- spec/migrations/queue_populate_projects_star_count_spec.rb
+- spec/migrations/recount_epic_cache_counts_spec.rb
+- spec/migrations/recount_epic_cache_counts_v3_spec.rb
+- spec/migrations/remove_flowdock_integration_records_spec.rb
+- spec/migrations/requeue_backfill_admin_mode_scope_for_personal_access_tokens_spec.rb
+- spec/migrations/reschedule_incident_work_item_type_id_backfill_spec.rb
+- spec/migrations/reschedule_migrate_shared_vulnerability_scanners_spec.rb
+- spec/migrations/schedule_fixing_security_scan_statuses_spec.rb
+- spec/migrations/second_recount_epic_cache_counts_spec.rb
+- spec/migrations/set_email_confirmation_setting_before_removing_send_user_confirmation_email_column_spec.rb
+- spec/migrations/set_email_confirmation_setting_from_send_user_confirmation_email_setting_spec.rb
+- spec/migrations/set_email_confirmation_setting_from_soft_email_confirmation_ff_spec.rb
+- spec/migrations/swap_epic_user_mentions_note_id_to_bigint_for_gitlab_dot_com_spec.rb
+- spec/migrations/swap_merge_request_user_mentions_note_id_to_bigint_2_spec.rb
+- spec/migrations/swap_note_diff_files_note_id_to_bigint_for_gitlab_dot_com_spec.rb
+- spec/migrations/swap_sent_notifications_id_columns_spec.rb
+- spec/migrations/swap_snippet_user_mentions_note_id_to_bigint_for_gitlab_dot_com_spec.rb
+- spec/migrations/swap_suggestions_note_id_to_bigint_for_gitlab_dot_com_spec.rb
+- spec/migrations/swap_timelogs_note_id_to_bigint_for_gitlab_dot_com_spec.rb
+- spec/models/abuse_report_spec.rb
+- spec/models/analytics/cycle_analytics/aggregation_spec.rb
+- spec/models/analytics/cycle_analytics/issue_stage_event_spec.rb
+- spec/models/analytics/cycle_analytics/merge_request_stage_event_spec.rb
+- spec/models/application_record_spec.rb
+- spec/models/concerns/mentionable_spec.rb
+- spec/models/concerns/pg_full_text_searchable_spec.rb
+- spec/models/concerns/reset_on_column_errors_spec.rb
+- spec/models/concerns/routable_spec.rb
+- spec/models/container_repository_spec.rb
+- spec/models/deploy_key_spec.rb
+- spec/models/design_management/design_spec.rb
+- spec/models/event_spec.rb
+- spec/models/hooks/system_hook_spec.rb
+- spec/models/issue_spec.rb
+- spec/models/member_spec.rb
+- spec/models/namespace/root_storage_statistics_spec.rb
+- spec/models/namespace_setting_spec.rb
+- spec/models/namespace_spec.rb
+- spec/models/namespace_statistics_spec.rb
+- spec/models/note_spec.rb
+- spec/models/notification_recipient_spec.rb
+- spec/models/project_authorization_spec.rb
+- spec/models/project_spec.rb
+- spec/models/project_team_spec.rb
+- spec/models/project_wiki_spec.rb
+- spec/models/review_spec.rb
+- spec/models/snippet_spec.rb
+- spec/models/user_spec.rb
+- spec/policies/ci/build_policy_spec.rb
+- spec/policies/design_management/design_policy_spec.rb
+- spec/policies/global_policy_spec.rb
+- spec/policies/namespace/root_storage_statistics_policy_spec.rb
+- spec/policies/note_policy_spec.rb
+- spec/policies/packages/policies/project_policy_spec.rb
+- spec/policies/project_policy_spec.rb
+- spec/presenters/merge_request_presenter_spec.rb
+- spec/requests/admin/users_controller_spec.rb
+- spec/requests/api/access_requests_spec.rb
+- spec/requests/api/alert_management_alerts_spec.rb
+- spec/requests/api/boards_spec.rb
+- spec/requests/api/clusters/agent_tokens_spec.rb
+- spec/requests/api/clusters/agents_spec.rb
+- spec/requests/api/commits_spec.rb
+- spec/requests/api/discussions_spec.rb
+- spec/requests/api/doorkeeper_access_spec.rb
+- spec/requests/api/environments_spec.rb
+- spec/requests/api/events_spec.rb
+- spec/requests/api/files_spec.rb
+- spec/requests/api/graphql/boards/board_list_issues_query_spec.rb
+- spec/requests/api/graphql/ci/config_spec.rb
+- spec/requests/api/graphql/ci/pipeline_schedules_spec.rb
+- spec/requests/api/graphql/current_user_query_spec.rb
+- spec/requests/api/graphql/merge_request/merge_request_spec.rb
+- spec/requests/api/graphql/mutations/award_emojis/add_spec.rb
+- spec/requests/api/graphql/mutations/award_emojis/toggle_spec.rb
+- spec/requests/api/graphql/namespace/projects_spec.rb
+- spec/requests/api/graphql/namespace/root_storage_statistics_spec.rb
+- spec/requests/api/graphql/project/fork_targets_spec.rb
+- spec/requests/api/graphql/project/merge_request_spec.rb
+- spec/requests/api/graphql/project/merge_requests_spec.rb
+- spec/requests/api/graphql/project/project_members_spec.rb
+- spec/requests/api/graphql/project/recent_issue_boards_query_spec.rb
+- spec/requests/api/graphql/users/set_namespace_commit_email_spec.rb
+- spec/requests/api/groups_spec.rb
+- spec/requests/api/import_bitbucket_server_spec.rb
+- spec/requests/api/import_github_spec.rb
+- spec/requests/api/integrations_spec.rb
+- spec/requests/api/internal/base_spec.rb
+- spec/requests/api/invitations_spec.rb
+- spec/requests/api/issues/get_project_issues_spec.rb
+- spec/requests/api/issues/issues_spec.rb
+- spec/requests/api/issues/post_projects_issues_spec.rb
+- spec/requests/api/issues/put_projects_issues_spec.rb
+- spec/requests/api/keys_spec.rb
+- spec/requests/api/labels_spec.rb
+- spec/requests/api/maven_packages_spec.rb
+- spec/requests/api/merge_request_approvals_spec.rb
+- spec/requests/api/merge_requests_spec.rb
+- spec/requests/api/metrics/dashboard/annotations_spec.rb
+- spec/requests/api/metrics/user_starred_dashboards_spec.rb
+- spec/requests/api/namespaces_spec.rb
+- spec/requests/api/notes_spec.rb
+- spec/requests/api/project_events_spec.rb
+- spec/requests/api/project_hooks_spec.rb
+- spec/requests/api/project_import_spec.rb
+- spec/requests/api/project_milestones_spec.rb
+- spec/requests/api/project_packages_spec.rb
+- spec/requests/api/projects_spec.rb
+- spec/requests/api/protected_tags_spec.rb
+- spec/requests/api/resource_access_tokens_spec.rb
+- spec/requests/api/resource_label_events_spec.rb
+- spec/requests/api/resource_milestone_events_spec.rb
+- spec/requests/api/resource_state_events_spec.rb
+- spec/requests/api/submodules_spec.rb
+- spec/requests/api/task_completion_status_spec.rb
+- spec/requests/api/users_preferences_spec.rb
+- spec/requests/api/users_spec.rb
+- spec/requests/git_http_spec.rb
+- spec/requests/ide_controller_spec.rb
+- spec/requests/lfs_http_spec.rb
+- spec/requests/oauth/applications_controller_spec.rb
+- spec/requests/oauth/authorizations_controller_spec.rb
+- spec/requests/profiles/comment_templates_controller_spec.rb
+- spec/requests/profiles/notifications_controller_spec.rb
+- spec/requests/projects/harbor/repositories_controller_spec.rb
+- spec/requests/projects/settings/packages_and_registries_controller_spec.rb
+- spec/requests/projects/tags_controller_spec.rb
+- spec/requests/projects/wikis_controller_spec.rb
+- spec/requests/recursive_webhook_detection_spec.rb
+- spec/requests/search_controller_spec.rb
+- spec/requests/user_settings_spec.rb
+- spec/requests/users_controller_spec.rb
+- spec/requests/warden_spec.rb
+- spec/serializers/admin/abuse_report_details_entity_spec.rb
+- spec/serializers/ci/pipeline_entity_spec.rb
+- spec/serializers/diff_file_base_entity_spec.rb
+- spec/serializers/merge_request_current_user_entity_spec.rb
+- spec/services/admin/plan_limits/update_service_spec.rb
+- spec/services/admin/set_feature_flag_service_spec.rb
+- spec/services/auth/container_registry_authentication_service_spec.rb
+- spec/services/award_emojis/add_service_spec.rb
+- spec/services/ci/abort_pipelines_service_spec.rb
+- spec/services/draft_notes/publish_service_spec.rb
+- spec/services/environments/schedule_to_delete_review_apps_service_spec.rb
+- spec/services/import/bitbucket_server_service_spec.rb
+- spec/services/import/fogbugz_service_spec.rb
+- spec/services/import/github_service_spec.rb
+- spec/services/import/gitlab_projects/create_project_service_spec.rb
+- spec/services/issuable/bulk_update_service_spec.rb
+- spec/services/issues/create_service_spec.rb
+- spec/services/issues/update_service_spec.rb
+- spec/services/merge_requests/build_service_spec.rb
+- spec/services/merge_requests/create_service_spec.rb
+- spec/services/merge_requests/push_options_handler_service_spec.rb
+- spec/services/merge_requests/update_service_spec.rb
+- spec/services/milestones/promote_service_spec.rb
+- spec/services/milestones/transfer_service_spec.rb
+- spec/services/namespace_settings/update_service_spec.rb
+- spec/services/notes/build_service_spec.rb
+- spec/services/notes/copy_service_spec.rb
+- spec/services/notes/create_service_spec.rb
+- spec/services/notes/update_service_spec.rb
+- spec/services/notification_service_spec.rb
+- spec/services/packages/npm/create_package_service_spec.rb
+- spec/services/post_receive_service_spec.rb
+- spec/services/preview_markdown_service_spec.rb
+- spec/services/projects/create_from_template_service_spec.rb
+- spec/services/projects/create_service_spec.rb
+- spec/services/projects/destroy_service_spec.rb
+- spec/services/projects/download_service_spec.rb
+- spec/services/projects/fork_service_spec.rb
+- spec/services/projects/move_access_service_spec.rb
+- spec/services/projects/move_deploy_keys_projects_service_spec.rb
+- spec/services/projects/move_forks_service_spec.rb
+- spec/services/projects/move_lfs_objects_projects_service_spec.rb
+- spec/services/projects/move_notification_settings_service_spec.rb
+- spec/services/projects/move_project_authorizations_service_spec.rb
+- spec/services/projects/move_project_group_links_service_spec.rb
+- spec/services/projects/move_project_members_service_spec.rb
+- spec/services/projects/move_users_star_projects_service_spec.rb
+- spec/services/projects/overwrite_project_service_spec.rb
+- spec/services/projects/participants_service_spec.rb
+- spec/services/projects/transfer_service_spec.rb
+- spec/services/projects/update_service_spec.rb
+- spec/services/repositories/destroy_service_spec.rb
+- spec/services/resource_access_tokens/create_service_spec.rb
+- spec/services/snippets/create_service_spec.rb
+- spec/services/todo_service_spec.rb
+- spec/services/upload_service_spec.rb
+- spec/services/users/destroy_service_spec.rb
+- spec/services/users/migrate_records_to_ghost_user_service_spec.rb
+- spec/services/users/set_namespace_commit_email_service_spec.rb
+- spec/services/users/update_service_spec.rb
+- spec/services/work_items/update_service_spec.rb
+- spec/tasks/gitlab/backup_rake_spec.rb
+- spec/tasks/gitlab/check_rake_spec.rb
+- spec/tasks/import_rake_spec.rb
+- spec/views/layouts/header/_new_dropdown.haml_spec.rb
+- spec/views/layouts/profile.html.haml_spec.rb
+- spec/views/profiles/preferences/show.html.haml_spec.rb
+- spec/workers/new_issue_worker_spec.rb
+- spec/workers/new_merge_request_worker_spec.rb
diff --git a/spec/support/matchers/have_user.rb b/spec/support/matchers/have_user.rb
new file mode 100644
index 00000000000..64fc84a75cf
--- /dev/null
+++ b/spec/support/matchers/have_user.rb
@@ -0,0 +1,13 @@
+# frozen_string_literal: true
+
+RSpec::Matchers.define :have_user do |user|
+ match do |resource|
+ raise ArgumentError, 'Unknown resource type' unless resource.is_a?(Group) || resource.is_a?(Project)
+
+ expect(resource.has_user?(user)).to be_truthy
+ end
+
+ failure_message do |group|
+ "Expected #{group} to have the user #{user} among its members"
+ end
+end
diff --git a/spec/support/migration.rb b/spec/support/migration.rb
index fc8a4bb12fb..dadcbb1941e 100644
--- a/spec/support/migration.rb
+++ b/spec/support/migration.rb
@@ -17,7 +17,7 @@ RSpec.configure do |config|
end
config.after(:context, :migration) do
- Gitlab::CurrentSettings.clear_in_memory_application_settings!
+ Gitlab::ApplicationSettingFetcher.clear_in_memory_application_settings!
end
config.append_after(:context, :migration) do
diff --git a/spec/support/rspec_order_todo.yml b/spec/support/rspec_order_todo.yml
index 81196fdcbfa..7256fdb4018 100644
--- a/spec/support/rspec_order_todo.yml
+++ b/spec/support/rspec_order_todo.yml
@@ -94,7 +94,6 @@
- './ee/spec/controllers/groups/security/merge_commit_reports_controller_spec.rb'
- './ee/spec/controllers/groups/security/policies_controller_spec.rb'
- './ee/spec/controllers/groups/security/vulnerabilities_controller_spec.rb'
-- './ee/spec/controllers/groups/sso_controller_spec.rb'
- './ee/spec/controllers/groups/todos_controller_spec.rb'
- './ee/spec/controllers/groups/wikis_controller_spec.rb'
- './ee/spec/controllers/ldap/omniauth_callbacks_controller_spec.rb'
@@ -284,7 +283,6 @@
- './ee/spec/features/groups/settings/user_configures_insights_spec.rb'
- './ee/spec/features/groups/settings/user_searches_in_settings_spec.rb'
- './ee/spec/features/groups_spec.rb'
-- './ee/spec/features/groups/sso_spec.rb'
- './ee/spec/features/groups/wikis_spec.rb'
- './ee/spec/features/groups/wiki/user_views_wiki_empty_spec.rb'
- './ee/spec/features/ide/user_opens_ide_spec.rb'
@@ -918,7 +916,6 @@
- './ee/spec/helpers/gitlab_subscriptions/upcoming_reconciliation_helper_spec.rb'
- './ee/spec/helpers/groups/ldap_sync_helper_spec.rb'
- './ee/spec/helpers/groups/security_features_helper_spec.rb'
-- './ee/spec/helpers/groups/sso_helper_spec.rb'
- './ee/spec/helpers/kerberos_helper_spec.rb'
- './ee/spec/helpers/license_helper_spec.rb'
- './ee/spec/helpers/license_monitoring_helper_spec.rb'
@@ -1691,7 +1688,7 @@
- './ee/spec/models/gitlab_subscription_spec.rb'
- './ee/spec/models/gitlab_subscriptions/upcoming_reconciliation_spec.rb'
- './ee/spec/models/group_deletion_schedule_spec.rb'
-- './ee/spec/models/group_member_spec.rb'
+- './ee/spec/models/ee/group_member_spec.rb'
- './ee/spec/models/group_merge_request_approval_setting_spec.rb'
- './ee/spec/models/groups/repository_storage_move_spec.rb'
- './ee/spec/models/group_wiki_repository_spec.rb'
@@ -1715,7 +1712,7 @@
- './ee/spec/models/label_note_spec.rb'
- './ee/spec/models/ldap_group_link_spec.rb'
- './ee/spec/models/license_spec.rb'
-- './ee/spec/models/member_spec.rb'
+- './ee/spec/models/ee/member_spec.rb'
- './ee/spec/models/merge_request/blocking_spec.rb'
- './ee/spec/models/merge_request_block_spec.rb'
- './ee/spec/models/merge_requests/compliance_violation_spec.rb'
@@ -1740,7 +1737,7 @@
- './ee/spec/models/project_feature_spec.rb'
- './ee/spec/models/project_import_data_spec.rb'
- './ee/spec/models/project_import_state_spec.rb'
-- './ee/spec/models/project_member_spec.rb'
+- './ee/spec/models/ee/project_member_spec.rb'
- './ee/spec/models/project_repository_state_spec.rb'
- './ee/spec/models/project_security_setting_spec.rb'
- './ee/spec/models/protected_branch/required_code_owners_section_spec.rb'
@@ -2619,14 +2616,11 @@
- './ee/spec/services/gitlab_subscriptions/preview_billable_user_change_service_spec.rb'
- './ee/spec/services/gitlab_subscriptions/reconciliations/calculate_seat_count_data_service_spec.rb'
- './ee/spec/services/gitlab_subscriptions/reconciliations/check_seat_usage_alerts_eligibility_service_spec.rb'
-- './ee/spec/services/group_saml/group_managed_accounts/clean_up_members_service_spec.rb'
-- './ee/spec/services/group_saml/group_managed_accounts/transfer_membership_service_spec.rb'
- './ee/spec/services/group_saml/identity/destroy_service_spec.rb'
- './ee/spec/services/group_saml/saml_group_links/create_service_spec.rb'
- './ee/spec/services/group_saml/saml_group_links/destroy_service_spec.rb'
- './ee/spec/services/group_saml/saml_provider/create_service_spec.rb'
- './ee/spec/services/group_saml/saml_provider/update_service_spec.rb'
-- './ee/spec/services/group_saml/sign_up_service_spec.rb'
- './ee/spec/services/groups/create_service_spec.rb'
- './ee/spec/services/groups/destroy_service_spec.rb'
- './ee/spec/services/groups/epics_count_service_spec.rb'
@@ -3338,7 +3332,6 @@
- './spec/db/schema_spec.rb'
- './spec/dependencies/omniauth_saml_spec.rb'
- './spec/experiments/application_experiment_spec.rb'
-- './spec/experiments/in_product_guidance_environments_webide_experiment_spec.rb'
- './spec/features/abuse_report_spec.rb'
- './spec/features/action_cable_logging_spec.rb'
- './spec/features/admin/admin_abuse_reports_spec.rb'
@@ -6116,7 +6109,6 @@
- './spec/lib/gitlab/github_import/markdown_text_spec.rb'
- './spec/lib/gitlab/github_import/milestone_finder_spec.rb'
- './spec/lib/gitlab/github_import/object_counter_spec.rb'
-- './spec/lib/gitlab/github_import/page_counter_spec.rb'
- './spec/lib/gitlab/github_import/parallel_importer_spec.rb'
- './spec/lib/gitlab/github_import/parallel_scheduling_spec.rb'
- './spec/lib/gitlab/github_import/representation/diff_note_spec.rb'
@@ -6244,6 +6236,7 @@
- './spec/lib/gitlab/i18n/translation_entry_spec.rb'
- './spec/lib/gitlab/identifier_spec.rb'
- './spec/lib/gitlab/import/database_helpers_spec.rb'
+- './spec/lib/gitlab/import/page_counter_spec.rb'
- './spec/lib/gitlab/import_export/after_export_strategies/base_after_export_strategy_spec.rb'
- './spec/lib/gitlab/import_export/after_export_strategies/web_upload_strategy_spec.rb'
- './spec/lib/gitlab/import_export/after_export_strategy_builder_spec.rb'
@@ -7116,7 +7109,6 @@
- './spec/models/concerns/counter_attribute_spec.rb'
- './spec/models/concerns/cron_schedulable_spec.rb'
- './spec/models/concerns/cross_database_modification_spec.rb'
-- './spec/models/concerns/database_event_tracking_spec.rb'
- './spec/models/concerns/database_reflection_spec.rb'
- './spec/models/concerns/delete_with_limit_spec.rb'
- './spec/models/concerns/deployment_platform_spec.rb'
@@ -8117,7 +8109,6 @@
- './spec/requests/api/tags_spec.rb'
- './spec/requests/api/task_completion_status_spec.rb'
- './spec/requests/api/templates_spec.rb'
-- './spec/requests/api/terraform/modules/v1/packages_spec.rb'
- './spec/requests/api/terraform/state_spec.rb'
- './spec/requests/api/terraform/state_version_spec.rb'
- './spec/requests/api/todos_spec.rb'
@@ -9469,7 +9460,6 @@
- './spec/workers/concerns/cronjob_queue_spec.rb'
- './spec/workers/concerns/gitlab/github_import/object_importer_spec.rb'
- './spec/workers/concerns/gitlab/github_import/rescheduling_methods_spec.rb'
-- './spec/workers/concerns/gitlab/github_import/stage_methods_spec.rb'
- './spec/workers/concerns/limited_capacity/job_tracker_spec.rb'
- './spec/workers/concerns/limited_capacity/worker_spec.rb'
- './spec/workers/concerns/packages/cleanup_artifact_worker_spec.rb'
diff --git a/spec/support/rspec_run_time.rb b/spec/support/rspec_run_time.rb
index 977d4885624..2cfa4008fd3 100644
--- a/spec/support/rspec_run_time.rb
+++ b/spec/support/rspec_run_time.rb
@@ -87,7 +87,8 @@ module Support
unless @last_elapsed_seconds.nil? || elapsed_seconds - @last_elapsed_seconds < 1
output.puts \
"# [RSpecRunTime] RSpec elapsed time: #{readable_duration(elapsed_seconds)}. " \
- "#{current_rss_in_megabytes}\n\n"
+ "#{current_rss_in_megabytes}. " \
+ "#{load_average}\n\n"
end
@last_elapsed_seconds = elapsed_seconds
@@ -98,6 +99,14 @@ module Support
"Current RSS: ~#{rss_in_megabytes.round}M"
end
+
+ def load_average
+ if File.exist?('/proc/loadavg')
+ "load average: #{File.read('/proc/loadavg')}"
+ else
+ `uptime`[/(load average:[^\n]+)/, 1] || '(uptime failed)'
+ end
+ end
end
end
end
diff --git a/spec/support/shared_contexts/ci/catalog/resources/version_shared_context.rb b/spec/support/shared_contexts/ci/catalog/resources/version_shared_context.rb
index 3eeaa52d221..a451608a5cc 100644
--- a/spec/support/shared_contexts/ci/catalog/resources/version_shared_context.rb
+++ b/spec/support/shared_contexts/ci/catalog/resources/version_shared_context.rb
@@ -1,33 +1,42 @@
# frozen_string_literal: true
+# This context generates two catalog resources, each with two releases/versions.
+# `resource1` has versions `v1.0` and `v1.1`, with releases that have real commit shas.
+# `resource2` has versions `v2.0` and `v2.1`.
RSpec.shared_context 'when there are catalog resources with versions' do
let_it_be(:current_user) { create(:user) }
- let_it_be(:project1) { create(:project, :repository, name: 'A') }
- let_it_be(:project2) { create(:project, :repository, name: 'Z') }
- let_it_be(:project3) { create(:project, :repository, name: 'L', description: 'Z') }
+ let_it_be(:project1) { create(:project, :custom_repo, files: { 'README.md' => 'Readme v1.0' }) }
+ let_it_be(:project2) { create(:project, :repository) }
+
let_it_be_with_reload(:resource1) { create(:ci_catalog_resource, project: project1) }
let_it_be_with_reload(:resource2) { create(:ci_catalog_resource, project: project2) }
- let_it_be(:resource3) { create(:ci_catalog_resource, project: project3) }
- let_it_be_with_reload(:release_v1_0) { create(:release, project: project1, tag: 'v1.0', released_at: 4.days.ago) }
- let_it_be(:release_v1_1) { create(:release, project: project1, tag: 'v1.1', released_at: 3.days.ago) }
- let_it_be(:release_v2_0) { create(:release, project: project2, tag: 'v2.0', released_at: 2.days.ago) }
- let_it_be(:release_v2_1) { create(:release, project: project2, tag: 'v2.1', released_at: 1.day.ago) }
+ let(:v1_0) { resource1.versions.by_name('v1.0').first }
+ let(:v1_1) { resource1.versions.by_name('v1.1').first }
+ let(:v2_0) { resource2.versions.by_name('v2.0').first }
+ let(:v2_1) { resource2.versions.by_name('v2.1').first }
- let_it_be(:v1_0) do
- create(:ci_catalog_resource_version, catalog_resource: resource1, release: release_v1_0, created_at: 1.day.ago)
- end
+ before_all do
+ project1.repository.create_branch('branch_v1.1', project1.default_branch)
- let_it_be(:v1_1) do
- create(:ci_catalog_resource_version, catalog_resource: resource1, release: release_v1_1, created_at: 2.days.ago)
- end
+ project1.repository.update_file(
+ current_user, 'README.md', 'Readme v1.1', message: 'Update readme', branch_name: 'branch_v1.1')
- let_it_be(:v2_0) do
- create(:ci_catalog_resource_version, catalog_resource: resource2, release: release_v2_0, created_at: 3.days.ago)
- end
+ tag_v1_0 = project1.repository.add_tag(current_user, 'v1.0', project1.default_branch)
+ tag_v1_1 = project1.repository.add_tag(current_user, 'v1.1', 'branch_v1.1')
+
+ release_v1_0 = create(:release, project: project1, tag: 'v1.0', released_at: 4.days.ago,
+ sha: tag_v1_0.dereferenced_target.sha)
+ release_v1_1 = create(:release, project: project1, tag: 'v1.1', released_at: 3.days.ago,
+ sha: tag_v1_1.dereferenced_target.sha)
+
+ release_v2_0 = create(:release, project: project2, tag: 'v2.0', released_at: 2.days.ago)
+ release_v2_1 = create(:release, project: project2, tag: 'v2.1', released_at: 1.day.ago)
- let_it_be(:v2_1) do
+ create(:ci_catalog_resource_version, catalog_resource: resource1, release: release_v1_0, created_at: 1.day.ago)
+ create(:ci_catalog_resource_version, catalog_resource: resource1, release: release_v1_1, created_at: 2.days.ago)
+ create(:ci_catalog_resource_version, catalog_resource: resource2, release: release_v2_0, created_at: 3.days.ago)
create(:ci_catalog_resource_version, catalog_resource: resource2, release: release_v2_1, created_at: 4.days.ago)
end
end
diff --git a/spec/support/shared_contexts/features/integrations/instance_integrations_shared_context.rb b/spec/support/shared_contexts/features/integrations/instance_integrations_shared_context.rb
index c740917cec4..678199a35ea 100644
--- a/spec/support/shared_contexts/features/integrations/instance_integrations_shared_context.rb
+++ b/spec/support/shared_contexts/features/integrations/instance_integrations_shared_context.rb
@@ -7,7 +7,7 @@ RSpec.shared_context 'instance integration activation' do
before do
sign_in(user)
- gitlab_enable_admin_mode_sign_in(user)
+ enable_admin_mode!(user)
end
def visit_instance_integrations
diff --git a/spec/support/shared_contexts/graphql/types/query_type_shared_context.rb b/spec/support/shared_contexts/graphql/types/query_type_shared_context.rb
index 6ab41d87f44..391336526e3 100644
--- a/spec/support/shared_contexts/graphql/types/query_type_shared_context.rb
+++ b/spec/support/shared_contexts/graphql/types/query_type_shared_context.rb
@@ -26,6 +26,8 @@ RSpec.shared_context 'with FOSS query type fields' do
:milestone,
:namespace,
:note,
+ :organization,
+ :organizations,
:package,
:project,
:projects,
diff --git a/spec/support/shared_contexts/lib/gitlab/database/partitioning/list_partitioning_shared_context.rb b/spec/support/shared_contexts/lib/gitlab/database/partitioning/list_partitioning_shared_context.rb
index 3d978a6fde4..fec11349b62 100644
--- a/spec/support/shared_contexts/lib/gitlab/database/partitioning/list_partitioning_shared_context.rb
+++ b/spec/support/shared_contexts/lib/gitlab/database/partitioning/list_partitioning_shared_context.rb
@@ -13,6 +13,8 @@ RSpec.shared_context 'with a table structure for converting a table to a list pa
let(:table_identifier) { "#{connection.current_schema}.#{table_name}" }
let(:partitioning_column) { :partition_number }
let(:partitioning_default) { 1 }
+ let(:single_partitioning_value) { 1 }
+ let(:multiple_partitioning_values) { [1, 2, 3, 4] }
let(:referenced_table_name) { '_test_referenced_table' }
let(:other_referenced_table_name) { '_test_other_referenced_table' }
let(:referencing_table_name) { '_test_referencing_table' }
diff --git a/spec/support/shared_contexts/lib/gitlab/sidekiq_logging/structured_logger_shared_context.rb b/spec/support/shared_contexts/lib/gitlab/sidekiq_logging/structured_logger_shared_context.rb
index 69c20a00c5a..060976eba2d 100644
--- a/spec/support/shared_contexts/lib/gitlab/sidekiq_logging/structured_logger_shared_context.rb
+++ b/spec/support/shared_contexts/lib/gitlab/sidekiq_logging/structured_logger_shared_context.rb
@@ -93,8 +93,6 @@ RSpec.shared_context 'structured_logger' do
end
before do
- allow(Sidekiq).to receive(:logger).and_return(logger)
-
allow(subject).to receive(:current_time).and_return(timestamp.to_f)
allow(Process).to receive(:clock_gettime).with(Process::CLOCK_REALTIME, :float_second)
@@ -103,7 +101,7 @@ RSpec.shared_context 'structured_logger' do
.and_return(clock_thread_cputime_start, clock_thread_cputime_end)
end
- subject { described_class.new }
+ subject { described_class.new(logger) }
def call_subject(job, queue)
# This structured logger strongly depends on execution of `InstrumentationLogger`
diff --git a/spec/support/shared_contexts/lib/gitlab/sidekiq_middleware/server_metrics_shared_context.rb b/spec/support/shared_contexts/lib/gitlab/sidekiq_middleware/server_metrics_shared_context.rb
index 85ee3ed4183..d541dee438e 100644
--- a/spec/support/shared_contexts/lib/gitlab/sidekiq_middleware/server_metrics_shared_context.rb
+++ b/spec/support/shared_contexts/lib/gitlab/sidekiq_middleware/server_metrics_shared_context.rb
@@ -55,6 +55,7 @@ RSpec.shared_context 'server metrics with mocked prometheus' do
allow(Gitlab::Metrics).to receive(:gauge).with(:sidekiq_mem_total_bytes, anything, {}, :all).and_return(sidekiq_mem_total_bytes)
allow(concurrency_metric).to receive(:set)
+ allow(completion_seconds_metric).to receive(:get)
end
end
diff --git a/spec/support/shared_examples/analytics/cycle_analytics/parentable_examples.rb b/spec/support/shared_examples/analytics/cycle_analytics/parentable_examples.rb
index 5fd0e685c67..a6f028a5a04 100644
--- a/spec/support/shared_examples/analytics/cycle_analytics/parentable_examples.rb
+++ b/spec/support/shared_examples/analytics/cycle_analytics/parentable_examples.rb
@@ -14,15 +14,14 @@ RSpec.shared_examples 'value stream analytics namespace models' do
end
end
- context 'when Namespace is given' do
- it 'fails' do
- namespace = create(:namespace)
+ context 'when personal namespace is given' do
+ it 'is valid' do
+ namespace = create(:namespace, owner: create(:user))
model = build(factory_name, namespace: namespace)
- expect(model).to be_invalid
-
- error_message = s_('CycleAnalytics|the assigned object is not supported')
- expect(model.errors.messages_for(:namespace)).to eq([error_message])
+ expect(model).to be_valid
+ expect(model.save).to be(true)
+ expect(model.namespace).to eq(namespace)
end
end
end
diff --git a/spec/support/shared_examples/ci/runner_with_status_scope_shared_examples.rb b/spec/support/shared_examples/ci/runner_with_status_scope_shared_examples.rb
new file mode 100644
index 00000000000..510721d66b2
--- /dev/null
+++ b/spec/support/shared_examples/ci/runner_with_status_scope_shared_examples.rb
@@ -0,0 +1,29 @@
+# frozen_string_literal: true
+
+RSpec.shared_examples 'runner with status scope' do
+ describe '.with_status' do
+ subject(:scope) { described_class.with_status(status) }
+
+ described_class::AVAILABLE_STATUSES.each do |status|
+ context "with #{status} status" do
+ let(:status) { status }
+
+ it "calls corresponding :#{status} scope" do
+ expect(described_class).to receive(status.to_sym).and_call_original
+
+ scope
+ end
+ end
+ end
+
+ context 'with invalid status' do
+ let(:status) { :invalid_status }
+
+ it 'returns all records' do
+ expect(described_class).to receive(:all).at_least(:once).and_call_original
+
+ expect { scope }.not_to raise_error
+ end
+ end
+ end
+end
diff --git a/spec/support/shared_examples/controllers/base_action_controller_shared_examples.rb b/spec/support/shared_examples/controllers/base_action_controller_shared_examples.rb
index 2eab533ef7f..97748fe9e89 100644
--- a/spec/support/shared_examples/controllers/base_action_controller_shared_examples.rb
+++ b/spec/support/shared_examples/controllers/base_action_controller_shared_examples.rb
@@ -53,7 +53,7 @@ RSpec.shared_examples 'Base action controller' do
skip: 'https://gitlab.com/gitlab-org/gitlab/-/issues/424334' do
before do
stub_rails_env('development')
- stub_feature_flags(vite: true)
+ allow(ViteHelper).to receive(:vite_enabled?).and_return(true)
end
it 'adds vite csp' do
@@ -65,7 +65,7 @@ RSpec.shared_examples 'Base action controller' do
context 'when vite disabled' do
before do
- stub_feature_flags(vite: false)
+ allow(ViteHelper).to receive(:vite_enabled?).and_return(false)
end
it "doesn't add vite csp" do
diff --git a/spec/support/shared_examples/controllers/githubish_import_controller_shared_examples.rb b/spec/support/shared_examples/controllers/githubish_import_controller_shared_examples.rb
index c921da10347..94208e29d77 100644
--- a/spec/support/shared_examples/controllers/githubish_import_controller_shared_examples.rb
+++ b/spec/support/shared_examples/controllers/githubish_import_controller_shared_examples.rb
@@ -125,9 +125,9 @@ RSpec.shared_examples 'a GitHub-ish import controller: GET status' do
group_a.add_owner(user)
create(:project, :import_started, import_type: provider, namespace: user.namespace)
- control_count = ActiveRecord::QueryRecorder.new(skip_cached: false) do
+ control = ActiveRecord::QueryRecorder.new(skip_cached: false) do
get :status, format: :json
- end.count
+ end
stub_client(repos: [repo, org_repo], orgs: [])
group_b = create(:group)
@@ -135,7 +135,7 @@ RSpec.shared_examples 'a GitHub-ish import controller: GET status' do
create(:project, :import_started, import_type: provider, namespace: user.namespace)
expect { get :status, format: :json }
- .not_to exceed_all_query_limit(control_count)
+ .not_to exceed_all_query_limit(control)
end
context 'when user is not allowed to import projects' do
diff --git a/spec/support/shared_examples/controllers/issuables_list_metadata_shared_examples.rb b/spec/support/shared_examples/controllers/issuables_list_metadata_shared_examples.rb
index 446bc4cd92f..461dcf2fcb6 100644
--- a/spec/support/shared_examples/controllers/issuables_list_metadata_shared_examples.rb
+++ b/spec/support/shared_examples/controllers/issuables_list_metadata_shared_examples.rb
@@ -65,7 +65,7 @@ RSpec.shared_examples 'issuables list meta-data' do |issuable_type, action = nil
issuable.update!(source_project: fork_project(project))
end
- expect { get_action(action, project) }.not_to exceed_query_limit(control.count)
+ expect { get_action(action, project) }.not_to exceed_query_limit(control)
end
describe "when given empty collection" do
diff --git a/spec/support/shared_examples/controllers/snippet_shared_examples.rb b/spec/support/shared_examples/controllers/snippet_shared_examples.rb
index f49cc979368..bf8183525a9 100644
--- a/spec/support/shared_examples/controllers/snippet_shared_examples.rb
+++ b/spec/support/shared_examples/controllers/snippet_shared_examples.rb
@@ -17,12 +17,12 @@ RSpec.shared_examples 'snippets views' do
project = create(:project, namespace: user.namespace)
create(:project_snippet, project: project, author: user)
- control_count = ActiveRecord::QueryRecorder.new { get(:index, params: params) }.count
+ control = ActiveRecord::QueryRecorder.new { get(:index, params: params) }
project = create(:project, namespace: user.namespace)
create(:project_snippet, project: project, author: user)
- expect { get(:index, params: params) }.not_to exceed_query_limit(control_count)
+ expect { get(:index, params: params) }.not_to exceed_query_limit(control)
end
end
end
diff --git a/spec/support/shared_examples/controllers/variables_shared_examples.rb b/spec/support/shared_examples/controllers/variables_shared_examples.rb
index d979683cce7..c807ffaba6f 100644
--- a/spec/support/shared_examples/controllers/variables_shared_examples.rb
+++ b/spec/support/shared_examples/controllers/variables_shared_examples.rb
@@ -82,7 +82,7 @@ RSpec.shared_examples 'PATCH #update updates variables' do
context 'with valid new variable parameters' do
let(:variables_attributes) do
[
- variable_attributes.merge(secret_value: 'other_value'),
+ variable_attributes.merge(secret_value: 'other_value', description: 'other_description'),
new_variable_attributes
]
end
@@ -94,6 +94,7 @@ RSpec.shared_examples 'PATCH #update updates variables' do
variable.reload
expect(variable.value).to eq('other_value')
+ expect(variable.description).to eq('other_description')
expect(variable.raw?).not_to be(old_raw)
end
diff --git a/spec/support/shared_examples/features/inviting_groups_shared_examples.rb b/spec/support/shared_examples/features/inviting_groups_shared_examples.rb
index 4921676a065..d21e69b72e1 100644
--- a/spec/support/shared_examples/features/inviting_groups_shared_examples.rb
+++ b/spec/support/shared_examples/features/inviting_groups_shared_examples.rb
@@ -9,7 +9,7 @@ RSpec.shared_examples 'inviting groups search results' do
before do
sign_in(admin)
- gitlab_enable_admin_mode_sign_in(admin)
+ enable_admin_mode!(admin)
end
it 'shows groups where the admin has no direct membership' do
diff --git a/spec/support/shared_examples/features/variable_list_drawer_shared_examples.rb b/spec/support/shared_examples/features/variable_list_drawer_shared_examples.rb
index 04e73cfeee7..1fb4c42ee16 100644
--- a/spec/support/shared_examples/features/variable_list_drawer_shared_examples.rb
+++ b/spec/support/shared_examples/features/variable_list_drawer_shared_examples.rb
@@ -99,7 +99,7 @@ RSpec.shared_examples 'variable list drawer' do
toggle_protected
toggle_masked
toggle_expanded
- click_button('Edit variable')
+ find_by_testid('ci-variable-confirm-button').click
wait_for_requests
diff --git a/spec/support/shared_examples/features/work_items_shared_examples.rb b/spec/support/shared_examples/features/work_items_shared_examples.rb
index 3dfd7604914..0f35681ca7d 100644
--- a/spec/support/shared_examples/features/work_items_shared_examples.rb
+++ b/spec/support/shared_examples/features/work_items_shared_examples.rb
@@ -1,16 +1,45 @@
# frozen_string_literal: true
+RSpec.shared_context 'with work_items_mvc_2' do |flag|
+ before do
+ stub_feature_flags(work_items_mvc_2: flag)
+
+ page.refresh
+ wait_for_all_requests
+ end
+end
+
RSpec.shared_examples 'work items title' do
let(:title_selector) { '[data-testid="work-item-title"]' }
+ let(:title_with_edit_selector) { '[data-testid="work-item-title-with-edit"]' }
- it 'successfully shows and changes the title of the work item' do
- expect(work_item.reload.title).to eq work_item.title
+ context 'when the work_items_mvc_2 FF is disabled' do
+ include_context 'with work_items_mvc_2', false
- find(title_selector).set("Work item title")
- find(title_selector).native.send_keys(:return)
- wait_for_requests
+ it 'successfully shows and changes the title of the work item' do
+ expect(work_item.reload.title).to eq work_item.title
+
+ find(title_selector).set("Work item title")
+ find(title_selector).native.send_keys(:return)
+ wait_for_requests
+
+ expect(work_item.reload.title).to eq 'Work item title'
+ end
+ end
+
+ context 'when the work_items_mvc_2 FF is enabled' do
+ include_context 'with work_items_mvc_2', true
+
+ it 'successfully shows and changes the title of the work item' do
+ expect(work_item.reload.title).to eq work_item.title
+
+ click_button 'Edit', match: :first
+ find(title_with_edit_selector).set("Work item title")
+ send_keys([:command, :enter])
+ wait_for_requests
- expect(work_item.reload.title).to eq 'Work item title'
+ expect(work_item.reload.title).to eq 'Work item title'
+ end
end
end
@@ -299,54 +328,62 @@ RSpec.shared_examples 'work items labels' do
end
RSpec.shared_examples 'work items description' do
- it 'shows GFM autocomplete', :aggregate_failures do
- click_button "Edit description"
- fill_in _('Description'), with: "@#{user.username}"
+ context 'for work_items_mvc_2 FF' do
+ [true, false].each do |work_items_mvc_2_flag| # rubocop:disable RSpec/UselessDynamicDefinition -- check it for both off and on
+ let(:edit_button) { work_items_mvc_2_flag ? 'Edit' : 'Edit description' }
- page.within('.atwho-container') do
- expect(page).to have_text(user.name)
- end
- end
+ include_context 'with work_items_mvc_2', work_items_mvc_2_flag
- it 'autocompletes available quick actions', :aggregate_failures do
- click_button "Edit description"
- fill_in _('Description'), with: '/'
+ it 'shows GFM autocomplete', :aggregate_failures do
+ click_button edit_button, match: :first
+ fill_in _('Description'), with: "@#{user.username}"
- page.within('#at-view-commands') do
- expect(page).to have_text("title")
- expect(page).to have_text("shrug")
- expect(page).to have_text("tableflip")
- expect(page).to have_text("close")
- expect(page).to have_text("cc")
- end
- end
+ page.within('.atwho-container') do
+ expect(page).to have_text(user.name)
+ end
+ end
- context 'on conflict' do
- let_it_be(:other_user) { create(:user) }
- let(:expected_warning) { 'Someone edited the description at the same time you did.' }
+ it 'autocompletes available quick actions', :aggregate_failures do
+ click_button edit_button, match: :first
+ fill_in _('Description'), with: '/'
- before do
- project.add_developer(other_user)
- end
+ page.within('#at-view-commands') do
+ expect(page).to have_text("title")
+ expect(page).to have_text("shrug")
+ expect(page).to have_text("tableflip")
+ expect(page).to have_text("close")
+ expect(page).to have_text("cc")
+ end
+ end
- it 'shows conflict message when description changes', :aggregate_failures do
- click_button "Edit description"
+ context 'on conflict' do
+ let_it_be(:other_user) { create(:user) }
+ let(:expected_warning) { 'Someone edited the description at the same time you did.' }
- ::WorkItems::UpdateService.new(
- container: work_item.project,
- current_user: other_user,
- params: { description: "oh no!" }
- ).execute(work_item)
+ before do
+ project.add_developer(other_user)
+ end
- wait_for_requests
+ it 'shows conflict message when description changes', :aggregate_failures do
+ click_button edit_button, match: :first
+
+ ::WorkItems::UpdateService.new(
+ container: work_item.project,
+ current_user: other_user,
+ params: { description: "oh no!" }
+ ).execute(work_item)
+
+ wait_for_requests
- fill_in _('Description'), with: 'oh yeah!'
+ fill_in _('Description'), with: 'oh yeah!'
- expect(page).to have_text(expected_warning)
+ expect(page).to have_text(expected_warning)
- click_button s_('WorkItem|Save and overwrite')
+ click_button s_('WorkItem|Save and overwrite')
- expect(page.find('[data-testid="work-item-description"]')).to have_text("oh yeah!")
+ expect(page.find('[data-testid="work-item-description"]')).to have_text("oh yeah!")
+ end
+ end
end
end
end
@@ -368,17 +405,61 @@ RSpec.shared_examples 'work items invite members' do
end
RSpec.shared_examples 'work items milestone' do
- it 'searches and sets or removes milestone for the work item' do
- click_button s_('WorkItem|Add to milestone')
- send_keys "\"#{milestone.title}\""
- select_listbox_item(milestone.title, exact_text: true)
+ context 'on work_items_mvc_2 FF off' do
+ include_context 'with work_items_mvc_2', false
+
+ it 'searches and sets or removes milestone for the work item' do
+ click_button s_('WorkItem|Add to milestone')
+ send_keys "\"#{milestone.title}\""
+ select_listbox_item(milestone.title, exact_text: true)
+
+ expect(page).to have_button(milestone.title)
+
+ click_button milestone.title
+ select_listbox_item(s_('WorkItem|No milestone'), exact_text: true)
+
+ expect(page).to have_button(s_('WorkItem|Add to milestone'))
+ end
+ end
+
+ context 'on work_items_mvc_2 FF on' do
+ let(:work_item_milestone_selector) { '[data-testid="work-item-milestone-with-edit"]' }
+
+ include_context 'with work_items_mvc_2', true
- expect(page).to have_button(milestone.title)
+ it 'passes axe automated accessibility testing in closed state' do
+ expect(page).to be_axe_clean.within(work_item_milestone_selector)
+ end
+
+ context 'when edit is clicked' do
+ it 'selects and updates the right milestone', :aggregate_failures do
+ find_and_click_edit(work_item_milestone_selector)
+
+ select_listbox_item(milestones[10].title)
+
+ wait_for_requests
+ within(work_item_milestone_selector) do
+ expect(page).to have_text(milestones[10].title)
+ end
+
+ find_and_click_edit(work_item_milestone_selector)
- click_button milestone.title
- select_listbox_item(s_('WorkItem|No milestone'), exact_text: true)
+ find_and_click_clear(work_item_milestone_selector)
- expect(page).to have_button(s_('WorkItem|Add to milestone'))
+ expect(find(work_item_milestone_selector)).to have_content('None')
+ end
+
+ it 'searches and sets or removes milestone for the work item' do
+ find_and_click_edit(work_item_milestone_selector)
+ within(work_item_milestone_selector) do
+ send_keys "\"#{milestones[11].title}\""
+ wait_for_requests
+
+ select_listbox_item(milestones[11].title)
+ expect(page).to have_text(milestones[11].title)
+ end
+ end
+ end
end
end
@@ -522,3 +603,101 @@ RSpec.shared_examples 'work items parent' do |type|
expect(find_by_testid('work-item-parent-none')).to have_text('None')
end
end
+
+def find_and_click_edit(selector)
+ within(selector) do
+ click_button 'Edit'
+ end
+end
+
+def find_and_click_clear(selector)
+ within(selector) do
+ click_button 'Clear'
+ end
+end
+
+RSpec.shared_examples 'work items iteration' do
+ let(:work_item_iteration_selector) { '[data-testid="work-item-iteration-with-edit"]' }
+ let_it_be(:iteration_cadence) { create(:iterations_cadence, group: group, active: true) }
+ let_it_be(:iteration) do
+ create(
+ :iteration,
+ iterations_cadence: iteration_cadence,
+ group: group,
+ start_date: 1.day.from_now,
+ due_date: 2.days.from_now
+ )
+ end
+
+ let_it_be(:iteration2) do
+ create(
+ :iteration,
+ iterations_cadence: iteration_cadence,
+ group: group,
+ start_date: 2.days.ago,
+ due_date: 1.day.ago,
+ state: 'closed',
+ skip_future_date_validation: true
+ )
+ end
+
+ include_context 'with work_items_mvc_2', true
+
+ context 'for accessibility' do
+ it 'has the work item iteration with edit' do
+ expect(page).to have_selector(work_item_iteration_selector)
+ end
+
+ it 'passes axe automated accessibility testing in closed state' do
+ expect(page).to be_axe_clean.within(work_item_iteration_selector)
+ end
+
+ # TODO, add test for automated accessibility after it is fixed in GlCollapsibleListBox
+ # Invalid ARIA attribute value: aria-owns="listbox-##" when searchable
+ # it 'passes axe automated accessibility testing in open state' do
+ # within(work_item_iteration) do
+ # click_button _('Edit')
+ # wait_for_requests
+
+ # expect(page).to be_axe_clean.within(work_item_iteration)
+ # end
+ # end
+ end
+
+ context 'when edit is clicked' do
+ it 'selects and updates the right iteration', :aggregate_failures do
+ find_and_click_edit(work_item_iteration_selector)
+
+ within(work_item_iteration_selector) do
+ expect(page).to have_text(iteration_cadence.title)
+ expect(page).to have_text(iteration.period)
+ end
+
+ select_listbox_item(iteration.period)
+
+ wait_for_requests
+
+ within(work_item_iteration_selector) do
+ expect(page).to have_text(iteration_cadence.title)
+ expect(page).to have_text(iteration.period)
+ end
+
+ find_and_click_edit(work_item_iteration_selector)
+
+ find_and_click_clear(work_item_iteration_selector)
+
+ expect(find(work_item_iteration_selector)).to have_content('None')
+ end
+
+ it 'searches and sets or removes iteration for the work item' do
+ find_and_click_edit(work_item_iteration_selector)
+ within(work_item_iteration_selector) do
+ send_keys(iteration.title)
+ wait_for_requests
+
+ select_listbox_item(iteration.period)
+ expect(page).to have_text(iteration.period)
+ end
+ end
+ end
+end
diff --git a/spec/support/shared_examples/lib/gitlab/import_export/import_failure_service_shared_examples.rb b/spec/support/shared_examples/lib/gitlab/import_export/import_failure_service_shared_examples.rb
index 67afd2035c4..afb1bfb6dc9 100644
--- a/spec/support/shared_examples/lib/gitlab/import_export/import_failure_service_shared_examples.rb
+++ b/spec/support/shared_examples/lib/gitlab/import_export/import_failure_service_shared_examples.rb
@@ -34,6 +34,7 @@ RSpec.shared_examples 'log import failure' do |importable_column|
expect(import_failure.exception_message).to eq(standard_error_message)
expect(import_failure.correlation_id_value).to eq(correlation_id)
expect(import_failure.retry_count).to eq(retry_count)
+ expect(import_failure.external_identifiers).to eq("iid" => 1234)
end
end
end
diff --git a/spec/support/shared_examples/migrations/add_work_item_widget_shared_examples.rb b/spec/support/shared_examples/migrations/add_work_item_widget_shared_examples.rb
index 37c338a7712..5681dbc158d 100644
--- a/spec/support/shared_examples/migrations/add_work_item_widget_shared_examples.rb
+++ b/spec/support/shared_examples/migrations/add_work_item_widget_shared_examples.rb
@@ -1,9 +1,9 @@
# frozen_string_literal: true
-RSpec.shared_examples 'migration that adds widget to work items definitions' do |widget_name:|
+RSpec.shared_examples 'migration that adds widget to work items definitions' do |widget_name:, work_item_types:|
let(:migration) { described_class.new }
let(:work_item_definitions) { table(:work_item_widget_definitions) }
- let(:work_item_type_count) { 7 }
+ let(:work_item_type_count) { work_item_types.size }
describe '#up' do
it "creates widget definition in all types" do
@@ -14,11 +14,13 @@ RSpec.shared_examples 'migration that adds widget to work items definitions' do
end
it 'logs a warning if the type is missing' do
+ type_name = work_item_types.first
+
allow(described_class::WorkItemType).to receive(:find_by_name_and_namespace_id).and_call_original
allow(described_class::WorkItemType).to receive(:find_by_name_and_namespace_id)
- .with('Issue', nil).and_return(nil)
+ .with(type_name, nil).and_return(nil)
- expect(Gitlab::AppLogger).to receive(:warn).with('type Issue is missing, not adding widget')
+ expect(Gitlab::AppLogger).to receive(:warn).with("type #{type_name} is missing, not adding widget")
migrate!
end
end
diff --git a/spec/support/shared_examples/models/concerns/integrations/base_slack_notification_shared_examples.rb b/spec/support/shared_examples/models/concerns/integrations/base_slack_notification_shared_examples.rb
index 2dad35dc46e..d05c29c39c3 100644
--- a/spec/support/shared_examples/models/concerns/integrations/base_slack_notification_shared_examples.rb
+++ b/spec/support/shared_examples/models/concerns/integrations/base_slack_notification_shared_examples.rb
@@ -111,7 +111,7 @@ RSpec.shared_examples Integrations::BaseSlackNotification do |factory:|
end
context 'for tag_push notification' do
- let(:oldrev) { Gitlab::Git::BLANK_SHA }
+ let(:oldrev) { Gitlab::Git::SHA1_BLANK_SHA }
let(:newrev) { '8a2a6eb295bb170b34c24c76c49ed0e9b2eaf34b' } # gitlab-test: git rev-parse refs/tags/v1.1.0
let(:ref) { 'refs/tags/v1.1.0' }
let(:data) do
diff --git a/spec/support/shared_examples/models/concerns/integrations/slack_mattermost_notifier_shared_examples.rb b/spec/support/shared_examples/models/concerns/integrations/slack_mattermost_notifier_shared_examples.rb
index 2985763426f..9e5b5ecfb48 100644
--- a/spec/support/shared_examples/models/concerns/integrations/slack_mattermost_notifier_shared_examples.rb
+++ b/spec/support/shared_examples/models/concerns/integrations/slack_mattermost_notifier_shared_examples.rb
@@ -143,7 +143,7 @@ RSpec.shared_examples Integrations::SlackMattermostNotifier do |integration_name
end
context 'tag_push events' do
- let(:oldrev) { Gitlab::Git::BLANK_SHA }
+ let(:oldrev) { Gitlab::Git::SHA1_BLANK_SHA }
let(:newrev) { '8a2a6eb295bb170b34c24c76c49ed0e9b2eaf34b' } # gitlab-test: git rev-parse refs/tags/v1.1.0
let(:ref) { 'refs/tags/v1.1.0' }
let(:data) { Git::TagHooksService.new(project, user, change: { oldrev: oldrev, newrev: newrev, ref: ref }).send(:push_data) }
diff --git a/spec/support/shared_examples/models/concerns/repository_storage_movable_shared_examples.rb b/spec/support/shared_examples/models/concerns/repository_storage_movable_shared_examples.rb
index 993c94e2695..ff1d1f66ac4 100644
--- a/spec/support/shared_examples/models/concerns/repository_storage_movable_shared_examples.rb
+++ b/spec/support/shared_examples/models/concerns/repository_storage_movable_shared_examples.rb
@@ -76,7 +76,7 @@ RSpec.shared_examples 'handles repository moves' do
context 'and transitions to scheduled' do
it 'triggers the corresponding repository storage worker' do
- expect(repository_storage_worker).to receive(:perform_async).with(container.id, 'test_second_storage', storage_move.id)
+ expect(repository_storage_worker).to receive(:perform_async).with(storage_move.id)
storage_move.schedule!
diff --git a/spec/support/shared_examples/models/database_event_tracking_shared_examples.rb b/spec/support/shared_examples/models/database_event_tracking_shared_examples.rb
deleted file mode 100644
index 56b36b3ea07..00000000000
--- a/spec/support/shared_examples/models/database_event_tracking_shared_examples.rb
+++ /dev/null
@@ -1,49 +0,0 @@
-# frozen_string_literal: true
-
-RSpec.shared_examples 'database events tracking' do
- describe 'events tracking' do
- # required definitions:
- # :record, :update_params
- #
- # other available attributes:
- # :project, :namespace
-
- let(:user) { nil }
- let(:category) { described_class.to_s }
- let(:label) { described_class.table_name }
- let(:action) { "database_event_#{property}" }
- let(:record_tracked_attributes) { record.attributes.slice(*described_class::SNOWPLOW_ATTRIBUTES.map(&:to_s)) }
- let(:base_extra) { record_tracked_attributes.merge(project: try(:project), namespace: try(:namespace)) }
-
- before do
- allow(Gitlab::Tracking).to receive(:database_event).and_call_original
- end
-
- describe '#create' do
- it_behaves_like 'Snowplow event tracking', overrides: { tracking_method: :database_event } do
- subject(:create_record) { record }
-
- let(:extra) { base_extra }
- let(:property) { 'create' }
- end
- end
-
- describe '#update', :freeze_time do
- it_behaves_like 'Snowplow event tracking', overrides: { tracking_method: :database_event } do
- subject(:update_record) { record.update!(update_params) }
-
- let(:extra) { base_extra.merge(update_params.stringify_keys) }
- let(:property) { 'update' }
- end
- end
-
- describe '#destroy' do
- it_behaves_like 'Snowplow event tracking', overrides: { tracking_method: :database_event } do
- subject(:delete_record) { record.destroy! }
-
- let(:extra) { base_extra }
- let(:property) { 'destroy' }
- end
- end
- end
-end
diff --git a/spec/support/shared_examples/models/member_shared_examples.rb b/spec/support/shared_examples/models/member_shared_examples.rb
index 731500c4510..01d6642e814 100644
--- a/spec/support/shared_examples/models/member_shared_examples.rb
+++ b/spec/support/shared_examples/models/member_shared_examples.rb
@@ -54,6 +54,25 @@ RSpec.shared_examples 'inherited access level as a member of entity' do
expect { non_member.update!(access_level: Gitlab::Access::GUEST) }
.to change { non_member.reload.access_level }
end
+
+ context 'when access request to entity is pending' do
+ before do
+ parent_entity.members.where(user: user).update!(requested_at: Time.current)
+ end
+
+ it 'is allowed to be a reporter of the entity' do
+ entity.add_reporter(user)
+
+ expect(member.access_level).to eq(Gitlab::Access::REPORTER)
+ end
+
+ it 'is allowed to change to be a guest of the entity' do
+ entity.add_maintainer(user)
+
+ expect { member.update!(access_level: Gitlab::Access::GUEST) }
+ .to change { member.reload.access_level }.from(Gitlab::Access::MAINTAINER).to(Gitlab::Access::GUEST)
+ end
+ end
end
end
@@ -63,10 +82,9 @@ RSpec.shared_examples '#valid_level_roles' do |entity_name|
let(:entity) { create(entity_name) } # rubocop:disable Rails/SaveBang
let(:entity_member) { create("#{entity_name}_member", :developer, source: entity, user: member_user) }
let(:presenter) { described_class.new(entity_member, current_user: member_user) }
+ let(:all_permissible_roles) { entity_member.class.permissible_access_level_roles(member_user, entity) }
context 'when no parent member is present' do
- let(:all_permissible_roles) { entity_member.class.permissible_access_level_roles(member_user, entity) }
-
it 'returns all permissible roles' do
expect(presenter.valid_level_roles).to eq(all_permissible_roles)
end
@@ -80,6 +98,16 @@ RSpec.shared_examples '#valid_level_roles' do |entity_name|
it 'returns higher roles when a parent member is present' do
expect(presenter.valid_level_roles).to eq(expected_roles)
end
+
+ context 'when access request to parent is pending' do
+ before do
+ group.members.with_user(member_user).update!(requested_at: Time.current)
+ end
+
+ it 'returns all permissible roles' do
+ expect(presenter.valid_level_roles).to eq(all_permissible_roles)
+ end
+ end
end
end
@@ -108,7 +136,7 @@ RSpec.shared_examples_for "member creation" do
it 'does not update the member' do
member = described_class.add_member(source, project_bot, :maintainer, current_user: user)
- expect(source.users.reload).to include(project_bot)
+ expect(source.reload).to have_user(project_bot)
expect(member).to be_persisted
expect(member.access_level).to eq(Gitlab::Access::DEVELOPER)
expect(member.errors.full_messages).to include(/not authorized to update member/)
@@ -119,7 +147,7 @@ RSpec.shared_examples_for "member creation" do
it 'adds the member' do
member = described_class.add_member(source, project_bot, :maintainer, current_user: user)
- expect(source.users.reload).to include(project_bot)
+ expect(source.reload).to have_user(project_bot)
expect(member).to be_persisted
end
end
@@ -130,7 +158,7 @@ RSpec.shared_examples_for "member creation" do
member = described_class.add_member(source, user, :maintainer, current_user: admin)
expect(member).to be_persisted
- expect(source.users.reload).to include(user)
+ expect(source.reload).to have_user(user)
expect(member.created_by).to eq(admin)
end
end
@@ -140,7 +168,7 @@ RSpec.shared_examples_for "member creation" do
member = described_class.add_member(source, user, :maintainer, current_user: admin)
expect(member).not_to be_persisted
- expect(source.users.reload).not_to include(user)
+ expect(source).not_to have_user(user)
expect(member.errors.full_messages).to include(/not authorized to create member/)
end
end
@@ -153,52 +181,52 @@ RSpec.shared_examples_for "member creation" do
described_class.access_levels.each do |sym_key, int_access_level|
it "accepts the :#{sym_key} symbol as access level", :aggregate_failures do
- expect(source.users).not_to include(user)
+ expect(source).not_to have_user(user)
member = described_class.add_member(source, user.id, sym_key)
expect(member.access_level).to eq(int_access_level)
- expect(source.users.reload).to include(user)
+ expect(source.reload).to have_user(user)
end
it "accepts the #{int_access_level} integer as access level", :aggregate_failures do
- expect(source.users).not_to include(user)
+ expect(source).not_to have_user(user)
member = described_class.add_member(source, user.id, int_access_level)
expect(member.access_level).to eq(int_access_level)
- expect(source.users.reload).to include(user)
+ expect(source.reload).to have_user(user)
end
end
context 'with no current_user' do
context 'when called with a known user id' do
it 'adds the user as a member' do
- expect(source.users).not_to include(user)
+ expect(source).not_to have_user(user)
described_class.add_member(source, user.id, :maintainer)
- expect(source.users.reload).to include(user)
+ expect(source.reload).to have_user(user)
end
end
context 'when called with an unknown user id' do
it 'does not add the user as a member' do
- expect(source.users).not_to include(user)
+ expect(source).not_to have_user(user)
described_class.add_member(source, non_existing_record_id, :maintainer)
- expect(source.users.reload).not_to include(user)
+ expect(source.reload).not_to have_user(user)
end
end
context 'when called with a user object' do
it 'adds the user as a member' do
- expect(source.users).not_to include(user)
+ expect(source).not_to have_user(user)
described_class.add_member(source, user, :maintainer)
- expect(source.users.reload).to include(user)
+ expect(source.reload).to have_user(user)
end
end
@@ -208,29 +236,29 @@ RSpec.shared_examples_for "member creation" do
end
it 'adds the requester as a member', :aggregate_failures do
- expect(source.users).not_to include(user)
+ expect(source.reload).not_to have_user(user)
expect(source.requesters.exists?(user_id: user)).to eq(true)
described_class.add_member(source, user, :maintainer)
- expect(source.users.reload).to include(user)
- expect(source.requesters.reload.exists?(user_id: user)).to eq(false)
+ expect(source.reload).to have_user(user)
+ expect(source.requesters.exists?(user_id: user)).to eq(false)
end
end
context 'when called with a known user email' do
it 'adds the user as a member' do
- expect(source.users).not_to include(user)
+ expect(source).not_to have_user(user)
described_class.add_member(source, user.email, :maintainer)
- expect(source.users.reload).to include(user)
+ expect(source.reload).to have_user(user)
end
end
context 'when called with an unknown user email' do
it 'creates an invited member' do
- expect(source.users).not_to include(user)
+ expect(source).not_to have_user(user)
described_class.add_member(source, 'user@example.com', :maintainer)
@@ -245,18 +273,18 @@ RSpec.shared_examples_for "member creation" do
described_class.add_member(source, email_starting_with_number, :maintainer)
expect(source.members.invite.pluck(:invite_email)).to include(email_starting_with_number)
- expect(source.users.reload).not_to include(user)
+ expect(source.reload).not_to have_user(user)
end
end
end
context 'when current_user can update member', :enable_admin_mode do
it 'creates the member' do
- expect(source.users).not_to include(user)
+ expect(source).not_to have_user(user)
described_class.add_member(source, user, :maintainer, current_user: admin)
- expect(source.users.reload).to include(user)
+ expect(source.reload).to have_user(user)
end
context 'when called with a requester user object' do
@@ -265,12 +293,12 @@ RSpec.shared_examples_for "member creation" do
end
it 'adds the requester as a member', :aggregate_failures do
- expect(source.users).not_to include(user)
+ expect(source).not_to have_user(user)
expect(source.requesters.exists?(user_id: user)).to be_truthy
described_class.add_member(source, user, :maintainer, current_user: admin)
- expect(source.users.reload).to include(user)
+ expect(source.reload).to have_user(user)
expect(source.requesters.reload.exists?(user_id: user)).to be_falsy
end
end
@@ -278,11 +306,11 @@ RSpec.shared_examples_for "member creation" do
context 'when current_user cannot update member' do
it 'does not create the member', :aggregate_failures do
- expect(source.users).not_to include(user)
+ expect(source).not_to have_user(user)
member = described_class.add_member(source, user, :maintainer, current_user: user)
- expect(source.users.reload).not_to include(user)
+ expect(source.reload).not_to have_user(user)
expect(member).not_to be_persisted
end
@@ -292,12 +320,12 @@ RSpec.shared_examples_for "member creation" do
end
it 'does not destroy the requester', :aggregate_failures do
- expect(source.users).not_to include(user)
+ expect(source).not_to have_user(user)
expect(source.requesters.exists?(user_id: user)).to be_truthy
described_class.add_member(source, user, :maintainer, current_user: user)
- expect(source.users.reload).not_to include(user)
+ expect(source.reload).not_to have_user(user)
expect(source.requesters.exists?(user_id: user)).to be_truthy
end
end
@@ -311,7 +339,7 @@ RSpec.shared_examples_for "member creation" do
context 'with no current_user' do
it 'updates the member' do
- expect(source.users).to include(user)
+ expect(source).to have_user(user)
described_class.add_member(source, user, :maintainer)
@@ -321,7 +349,7 @@ RSpec.shared_examples_for "member creation" do
context 'when current_user can update member', :enable_admin_mode do
it 'updates the member' do
- expect(source.users).to include(user)
+ expect(source).to have_user(user)
described_class.add_member(source, user, :maintainer, current_user: admin)
@@ -331,7 +359,7 @@ RSpec.shared_examples_for "member creation" do
context 'when current_user cannot update member' do
it 'does not update the member' do
- expect(source.users).to include(user)
+ expect(source).to have_user(user)
described_class.add_member(source, user, :maintainer, current_user: user)
diff --git a/spec/support/shared_examples/models/members_notifications_shared_example.rb b/spec/support/shared_examples/models/members_notifications_shared_example.rb
index 5c783b5cfa7..01212ed950d 100644
--- a/spec/support/shared_examples/models/members_notifications_shared_example.rb
+++ b/spec/support/shared_examples/models/members_notifications_shared_example.rb
@@ -9,32 +9,6 @@ RSpec.shared_examples 'members notifications' do |entity_type|
allow(member).to receive(:notification_service).and_return(notification_service)
end
- describe "#after_create" do
- let(:member) { build(:"#{entity_type}_member", "#{entity_type}": create(entity_type.to_s), user: user) }
-
- it "sends email to user" do
- expect(notification_service).to receive(:"new_#{entity_type}_member").with(member)
-
- member.save!
- end
- end
-
- describe "#after_update" do
- let(:member) { create(:"#{entity_type}_member", :developer) }
-
- it "calls NotificationService.update_#{entity_type}_member" do
- expect(notification_service).to receive(:"update_#{entity_type}_member").with(member)
-
- member.update_attribute(:access_level, Member::MAINTAINER)
- end
-
- it "does not send an email when the access level has not changed" do
- expect(notification_service).not_to receive(:"update_#{entity_type}_member")
-
- member.touch
- end
- end
-
describe '#after_commit' do
context 'on creation of a member requesting access' do
let(:member) do
@@ -52,27 +26,17 @@ RSpec.shared_examples 'members notifications' do |entity_type|
describe '#accept_request' do
let(:member) { create(:"#{entity_type}_member", :access_request) }
- it "calls NotificationService.new_#{entity_type}_member" do
- expect(notification_service).to receive(:"new_#{entity_type}_member").with(member)
+ it "calls NotificationService.new_member" do
+ expect(notification_service).to receive(:new_member).with(member)
member.accept_request(create(:user))
end
end
- describe "#accept_invite!" do
- let(:member) { create(:"#{entity_type}_member", :invited) }
-
- it "calls NotificationService.accept_#{entity_type}_invite" do
- expect(notification_service).to receive(:"accept_#{entity_type}_invite").with(member)
-
- member.accept_invite!(build(:user))
- end
- end
-
describe "#decline_invite!" do
let(:member) { create(:"#{entity_type}_member", :invited) }
- it "calls NotificationService.decline_#{entity_type}_invite" do
+ it "calls NotificationService.decline_invite" do
expect(notification_service).to receive(:decline_invite).with(member)
member.decline_invite!
diff --git a/spec/support/shared_examples/models/relative_positioning_shared_examples.rb b/spec/support/shared_examples/models/relative_positioning_shared_examples.rb
index 2b46c8c8fb9..692320d45d5 100644
--- a/spec/support/shared_examples/models/relative_positioning_shared_examples.rb
+++ b/spec/support/shared_examples/models/relative_positioning_shared_examples.rb
@@ -175,15 +175,15 @@ RSpec.shared_examples 'a class that supports relative positioning' do
create_items_with_positions(10..12)
a, b, c, d, e, f, *xs = create_items_with_positions([nil] * 10)
- baseline = ActiveRecord::QueryRecorder.new do
+ control = ActiveRecord::QueryRecorder.new do
described_class.move_nulls_to_end([a, b])
end
expect { described_class.move_nulls_to_end([c, d, e, f]) }
- .not_to exceed_query_limit(baseline)
+ .not_to exceed_query_limit(control)
expect { described_class.move_nulls_to_end(xs) }
- .not_to exceed_query_limit(baseline.count)
+ .not_to exceed_query_limit(control)
end
end
diff --git a/spec/support/shared_examples/quick_actions/merge_request/rebase_quick_action_shared_examples.rb b/spec/support/shared_examples/quick_actions/merge_request/rebase_quick_action_shared_examples.rb
index 92705fc1b4d..2c38c716736 100644
--- a/spec/support/shared_examples/quick_actions/merge_request/rebase_quick_action_shared_examples.rb
+++ b/spec/support/shared_examples/quick_actions/merge_request/rebase_quick_action_shared_examples.rb
@@ -59,7 +59,7 @@ RSpec.shared_examples 'rebase quick action' do
it 'tells the user a rebase is in progress' do
add_note('/rebase')
- expect(page).to have_content 'A rebase is already in progress.'
+ expect(page).to have_content Gitlab::QuickActions::MergeRequestActions::REBASE_FAILURE_REBASE_IN_PROGRESS
expect(page).not_to have_content 'Scheduled a rebase'
end
end
@@ -70,7 +70,7 @@ RSpec.shared_examples 'rebase quick action' do
it 'does not rebase the MR' do
add_note("/rebase")
- expect(page).to have_content 'This merge request cannot be rebased while there are conflicts.'
+ expect(page).to have_content Gitlab::QuickActions::MergeRequestActions::REBASE_FAILURE_UNMERGEABLE
end
end
@@ -89,7 +89,7 @@ RSpec.shared_examples 'rebase quick action' do
it 'does not rebase the MR' do
add_note("/rebase")
- expect(page).to have_content 'This merge request branch is protected from force push.'
+ expect(page).to have_content Gitlab::QuickActions::MergeRequestActions::REBASE_FAILURE_PROTECTED_BRANCH
end
end
end
diff --git a/spec/support/shared_examples/redis/multi_store_wrapper_shared_examples.rb b/spec/support/shared_examples/redis/multi_store_wrapper_shared_examples.rb
new file mode 100644
index 00000000000..a53af59f1e6
--- /dev/null
+++ b/spec/support/shared_examples/redis/multi_store_wrapper_shared_examples.rb
@@ -0,0 +1,74 @@
+# frozen_string_literal: true
+
+RSpec.shared_examples "multi_store_wrapper_shared_examples" do
+ let(:config_file_name) { instance_specific_config_file }
+ let_it_be(:pool_name) { "#{described_class.store_name.underscore}_multi_store" }
+
+ before do
+ allow(described_class).to receive(:config_file_name).and_return(Rails.root.join(config_file_name).to_s)
+ allow(described_class).to receive(:redis_yml_path).and_return('/dev/null')
+
+ clear_multistore_pool
+ end
+
+ after do
+ clear_multistore_pool
+ end
+
+ describe '.with' do
+ it 'yields a MultiStore' do
+ described_class.with do |conn|
+ expect(conn).to be_instance_of(Gitlab::Redis::MultiStore)
+ end
+ end
+
+ it 'borrows connection' do
+ described_class.with do |conn|
+ expect(Thread.current[conn.borrow_counter]).to eq(1)
+ end
+ end
+
+ context 'when running on single-threaded runtime' do
+ before do
+ allow(Gitlab::Runtime).to receive(:multi_threaded?).and_return(false)
+ end
+
+ it 'instantiates a connection pool with size 5' do
+ expect(ConnectionPool).to receive(:new).with(size: 5, name: pool_name).and_call_original
+
+ described_class.with { |_redis_shared_example| true }
+ end
+ end
+
+ context 'when running on multi-threaded runtime' do
+ before do
+ allow(Gitlab::Runtime).to receive(:multi_threaded?).and_return(true)
+ allow(Gitlab::Runtime).to receive(:max_threads).and_return(18)
+ end
+
+ it 'instantiates a connection pool with a size based on the concurrency of the worker' do
+ expect(ConnectionPool).to receive(:new).with(size: 18 + 5, name: pool_name).and_call_original
+
+ described_class.with { |_redis_shared_example| true }
+ end
+ end
+
+ context 'when there is no config at all' do
+ before do
+ # Undo top-level stub of config_file_name because we are testing that method now.
+ allow(described_class).to receive(:config_file_name).and_call_original
+ allow(described_class).to receive(:rails_root).and_return(rails_root)
+ end
+
+ it 'can run an empty block' do
+ expect { described_class.with { nil } }.not_to raise_error
+ end
+ end
+ end
+
+ def clear_multistore_pool
+ described_class.remove_instance_variable(:@multistore_pool)
+ rescue NameError
+    # raised if @multistore_pool was not set; ignore
+ end
+end
diff --git a/spec/support/shared_examples/redis/redis_new_instance_shared_examples.rb b/spec/support/shared_examples/redis/redis_new_instance_shared_examples.rb
index 4a3732efe13..9b774449379 100644
--- a/spec/support/shared_examples/redis/redis_new_instance_shared_examples.rb
+++ b/spec/support/shared_examples/redis/redis_new_instance_shared_examples.rb
@@ -15,6 +15,37 @@ RSpec.shared_examples "redis_new_instance_shared_examples" do |name, fallback_cl
it_behaves_like "redis_shared_examples"
+ describe '.pool' do
+ before do
+ allow(described_class).to receive(:config_file_name).and_call_original
+ allow(fallback_class).to receive(:params).and_return({})
+
+ clear_class_pool(described_class)
+ clear_class_pool(fallback_class)
+ end
+
+ after do
+ clear_class_pool(described_class)
+ clear_class_pool(fallback_class)
+ end
+
+ context 'when not using fallback config' do
+ it 'creates its own connection pool' do
+ expect(fallback_class.pool == described_class.pool).to eq(false)
+ end
+ end
+
+ context 'when using fallback config' do
+ before do
+ allow(described_class).to receive(:params).and_return({})
+ end
+
+ it 'uses the fallback class connection pool' do
+ expect(fallback_class.pool == described_class.pool).to eq(true)
+ end
+ end
+ end
+
describe '#fetch_config' do
subject { described_class.new('test').send(:fetch_config) }
@@ -81,4 +112,10 @@ RSpec.shared_examples "redis_new_instance_shared_examples" do |name, fallback_cl
end
end
end
+
+ def clear_class_pool(klass)
+ klass.remove_instance_variable(:@pool)
+ rescue NameError
+ # raised if @pool was not set; ignore
+ end
end
diff --git a/spec/support/shared_examples/redis/redis_shared_examples.rb b/spec/support/shared_examples/redis/redis_shared_examples.rb
index 796b483820b..1f7834a4d7c 100644
--- a/spec/support/shared_examples/redis/redis_shared_examples.rb
+++ b/spec/support/shared_examples/redis/redis_shared_examples.rb
@@ -86,6 +86,67 @@ RSpec.shared_examples "redis_shared_examples" do
end
end
+ describe '.redis_client_params' do
+  # .redis_client_params wraps `.redis_store_options` by modifying its outputs
+ # to be compatible with `RedisClient`. We test for compatibility in this block while
+ # the contents of redis_store_options are tested in the `.params` block.
+
+ subject { described_class.new(rails_env).redis_client_params }
+
+ let(:rails_env) { 'development' }
+ let(:config_file_name) { config_old_format_socket }
+
+ shared_examples 'instrumentation_class in custom key' do
+ it 'moves instrumentation class into custom' do
+ expect(subject[:custom][:instrumentation_class]).to eq(described_class.store_name)
+ expect(subject[:instrumentation_class]).to be_nil
+ end
+ end
+
+ context 'when url is host based' do
+ context 'with old format' do
+ let(:config_file_name) { config_old_format_host }
+
+ it 'does not raise ArgumentError for invalid keywords' do
+ expect { RedisClient.config(**subject) }.not_to raise_error
+ end
+
+ it_behaves_like 'instrumentation_class in custom key'
+ end
+
+ context 'with new format' do
+ let(:config_file_name) { config_new_format_host }
+
+ where(:rails_env, :host) do
+ [
+ %w[development development-host],
+ %w[test test-host],
+ %w[production production-host]
+ ]
+ end
+
+ with_them do
+ it 'does not raise ArgumentError for invalid keywords in SentinelConfig' do
+ expect(subject[:name]).to eq(host)
+ expect { RedisClient.sentinel(**subject) }.not_to raise_error
+ end
+
+ it_behaves_like 'instrumentation_class in custom key'
+ end
+ end
+ end
+
+ context 'when url contains unix socket reference' do
+ let(:config_file_name) { config_old_format_socket }
+
+ it 'does not raise ArgumentError for invalid keywords' do
+ expect { RedisClient.config(**subject) }.not_to raise_error
+ end
+
+ it_behaves_like 'instrumentation_class in custom key'
+ end
+ end
+
describe '.params' do
subject { described_class.new(rails_env).params }
@@ -206,7 +267,7 @@ RSpec.shared_examples "redis_shared_examples" do
end
end
- describe '.with' do
+ describe '.with', if: !(described_class <= Gitlab::Redis::MultiStoreWrapper) do
let(:config_file_name) { config_old_format_socket }
before do
@@ -217,6 +278,10 @@ RSpec.shared_examples "redis_shared_examples" do
clear_pool
end
+ it 'yields a ::Redis' do
+ described_class.with { |conn| expect(conn).to be_instance_of(::Redis) }
+ end
+
context 'when running on single-threaded runtime' do
before do
allow(Gitlab::Runtime).to receive(:multi_threaded?).and_return(false)
diff --git a/spec/support/shared_examples/requests/api/terraform/modules/v1/packages_shared_examples.rb b/spec/support/shared_examples/requests/api/terraform/modules/v1/packages_shared_examples.rb
index ae2855083f6..b56de050d1e 100644
--- a/spec/support/shared_examples/requests/api/terraform/modules/v1/packages_shared_examples.rb
+++ b/spec/support/shared_examples/requests/api/terraform/modules/v1/packages_shared_examples.rb
@@ -150,8 +150,15 @@ RSpec.shared_examples 'grants terraform module package file access' do |user_typ
project.send("add_#{user_type}", user) if add_member && user_type != :anonymous
end
- it_behaves_like 'returning response status', status
it_behaves_like 'a package tracking event', described_class.name, 'pull_package'
+
+ it 'returns a valid response' do
+ subject
+
+ expect(response).to have_gitlab_http_status(status)
+ expect(response.media_type).to eq('application/octet-stream')
+ expect(response.body).to eq(package.package_files.last.file.read)
+ end
end
end
@@ -273,3 +280,169 @@ RSpec.shared_examples 'process terraform module upload' do |user_type, status, a
end
end
end
+
+RSpec.shared_examples 'handling project level terraform module download requests' do
+ using RSpec::Parameterized::TableSyntax
+ let(:project_id) { project.id }
+ let(:package_name) { package.name }
+ let(:url) { "/projects/#{project_id}/packages/terraform/modules/#{package_name}/#{module_version}?archive=tgz" }
+
+ subject { get api(url), headers: headers }
+
+ it { is_expected.to have_request_urgency(:low) }
+
+ context 'with valid project' do
+ where(:visibility, :user_role, :member, :token_type, :shared_examples_name, :expected_status) do
+ :public | :anonymous | false | nil | 'grants terraform module package file access' | :success
+ :private | :anonymous | false | nil | 'rejects terraform module packages access' | :unauthorized
+
+ :public | :developer | true | :invalid | 'rejects terraform module packages access' | :unauthorized
+ :public | :guest | true | :invalid | 'rejects terraform module packages access' | :unauthorized
+ :public | :developer | false | :invalid | 'rejects terraform module packages access' | :unauthorized
+ :public | :guest | false | :invalid | 'rejects terraform module packages access' | :unauthorized
+ :private | :developer | true | :invalid | 'rejects terraform module packages access' | :unauthorized
+ :private | :guest | true | :invalid | 'rejects terraform module packages access' | :unauthorized
+ :private | :developer | false | :invalid | 'rejects terraform module packages access' | :unauthorized
+ :private | :guest | false | :invalid | 'rejects terraform module packages access' | :unauthorized
+ :internal | :developer | true | :invalid | 'rejects terraform module packages access' | :unauthorized
+ :internal | :guest | true | :invalid | 'rejects terraform module packages access' | :unauthorized
+ :internal | :developer | false | :invalid | 'rejects terraform module packages access' | :unauthorized
+ :internal | :guest | false | :invalid | 'rejects terraform module packages access' | :unauthorized
+
+ :public | :developer | true | :personal_access_token | 'grants terraform module package file access' | :success
+ :public | :guest | true | :personal_access_token | 'grants terraform module package file access' | :success
+ :public | :developer | false | :personal_access_token | 'grants terraform module package file access' | :success
+ :public | :guest | false | :personal_access_token | 'grants terraform module package file access' | :success
+ :private | :developer | true | :personal_access_token | 'grants terraform module package file access' | :success
+ :private | :guest | true | :personal_access_token | 'rejects terraform module packages access' | :forbidden
+ :private | :developer | false | :personal_access_token | 'rejects terraform module packages access' | :not_found
+ :private | :guest | false | :personal_access_token | 'rejects terraform module packages access' | :not_found
+ :internal | :developer | true | :personal_access_token | 'grants terraform module package file access' | :success
+ :internal | :guest | true | :personal_access_token | 'grants terraform module package file access' | :success
+ :internal | :developer | false | :personal_access_token | 'grants terraform module package file access' | :success
+ :internal | :guest | false | :personal_access_token | 'grants terraform module package file access' | :success
+
+ :public | :developer | true | :job_token | 'grants terraform module package file access' | :success
+ :public | :guest | true | :job_token | 'grants terraform module package file access' | :success
+ :public | :developer | false | :job_token | 'grants terraform module package file access' | :success
+ :public | :guest | false | :job_token | 'grants terraform module package file access' | :success
+ :private | :developer | true | :job_token | 'grants terraform module package file access' | :success
+ :private | :guest | true | :job_token | 'rejects terraform module packages access' | :forbidden
+ :private | :developer | false | :job_token | 'rejects terraform module packages access' | :not_found
+ :private | :guest | false | :job_token | 'rejects terraform module packages access' | :not_found
+ :internal | :developer | true | :job_token | 'grants terraform module package file access' | :success
+ :internal | :guest | true | :job_token | 'grants terraform module package file access' | :success
+ :internal | :developer | false | :job_token | 'grants terraform module package file access' | :success
+ :internal | :guest | false | :job_token | 'grants terraform module package file access' | :success
+
+ :public | :anonymous | false | :deploy_token | 'grants terraform module package file access' | :success
+ :private | :anonymous | false | :deploy_token | 'grants terraform module package file access' | :success
+ :internal | :anonymous | false | :deploy_token | 'grants terraform module package file access' | :success
+ end
+
+ with_them do
+ let(:headers) do
+ case token_type
+ when :personal_access_token, :invalid
+ basic_auth_headers(user.username, token)
+ when :deploy_token
+ basic_auth_headers(deploy_token.username, token)
+ when :job_token
+ basic_auth_headers(::Gitlab::Auth::CI_JOB_USER, token)
+ else
+ {}
+ end
+ end
+
+ let(:snowplow_gitlab_standard_context) do
+ {
+ project: project,
+ namespace: project.namespace,
+ property: 'i_package_terraform_module_user'
+ }.tap do |context|
+ context[:user] = user if token_type && token_type != :deploy_token
+ context[:user] = deploy_token if token_type == :deploy_token
+ end
+ end
+
+ before do
+ project.update!(visibility: visibility.to_s)
+ end
+
+ it_behaves_like params[:shared_examples_name], params[:user_role], params[:expected_status], params[:member]
+ end
+ end
+
+ context 'with/without module version' do
+ let(:headers) { basic_auth_headers }
+ let(:finder_params) do
+ { package_name: package_name }.tap do |p|
+ p[:package_version] = module_version if module_version
+ end
+ end
+
+ before do
+ project.add_developer(user)
+ end
+
+ it 'calls the finder with the correct params' do
+ expect_next_instance_of(::Packages::TerraformModule::PackagesFinder, project, finder_params) do |finder|
+ expect(finder).to receive(:execute).and_call_original
+ end
+
+ subject
+ end
+ end
+
+ context 'with non-existent module version' do
+ let(:headers) { basic_auth_headers }
+ let(:module_version) { '1.99.322' }
+
+ before do
+ project.add_developer(user)
+ end
+
+ it_behaves_like 'returning response status', :not_found
+ end
+
+ context 'with invalid project' do
+ let(:project_id) { '123456' }
+
+ let(:headers) { basic_auth_headers }
+
+ it_behaves_like 'rejects terraform module packages access', :anonymous, :not_found
+ end
+
+ context 'with invalid package name' do
+ let(:headers) { basic_auth_headers }
+
+ [nil, '', '%20', 'unknown', '..%2F..', '../..'].each do |pkg_name|
+ context "with package name #{pkg_name}" do
+ let(:package_name) { pkg_name }
+
+ it_behaves_like 'rejects terraform module packages access', :anonymous, :not_found
+ end
+ end
+ end
+
+ context 'when terraform-get param is received' do
+ let(:headers) { basic_auth_headers }
+ let(:url) { "#{super().split('?').first}?terraform-get=1" }
+
+ before do
+ project.add_developer(user)
+ end
+
+ it 'returns a valid response' do
+ subject
+
+ expect(response.headers).to include 'X-Terraform-Get'
+ expect(response.headers['X-Terraform-Get']).to include '?archive=tgz'
+ expect(response.headers['X-Terraform-Get']).not_to include 'terraform-get=1'
+ end
+ end
+
+ def basic_auth_headers(username = user.username, password = personal_access_token.token)
+ { Authorization: "Basic #{Base64.strict_encode64("#{username}:#{password}")}" }
+ end
+end
diff --git a/spec/support/shared_examples/serializers/environment_serializer_shared_examples.rb b/spec/support/shared_examples/serializers/environment_serializer_shared_examples.rb
index b7247f1f243..2976018b60f 100644
--- a/spec/support/shared_examples/serializers/environment_serializer_shared_examples.rb
+++ b/spec/support/shared_examples/serializers/environment_serializer_shared_examples.rb
@@ -12,7 +12,7 @@ RSpec.shared_examples 'avoid N+1 on environments serialization' do
# See also: https://gitlab.com/gitlab-org/gitlab/-/issues/373151
relax_count = 4
- expect { serialize(grouping: true) }.not_to exceed_query_limit(control.count + relax_count)
+ expect { serialize(grouping: true) }.not_to exceed_query_limit(control).with_threshold(relax_count)
end
it 'avoids N+1 database queries without grouping', :request_store do
@@ -27,7 +27,7 @@ RSpec.shared_examples 'avoid N+1 on environments serialization' do
# See also: https://gitlab.com/gitlab-org/gitlab/-/issues/373151
relax_count = 5
- expect { serialize(grouping: false) }.not_to exceed_query_limit(control.count + relax_count)
+ expect { serialize(grouping: false) }.not_to exceed_query_limit(control).with_threshold(relax_count)
end
it 'does not preload for environments that does not exist in the page', :request_store do
diff --git a/spec/support/shared_examples/services/boards/issues_move_service_shared_examples.rb b/spec/support/shared_examples/services/boards/issues_move_service_shared_examples.rb
index 9d016e4830e..38c0670880d 100644
--- a/spec/support/shared_examples/services/boards/issues_move_service_shared_examples.rb
+++ b/spec/support/shared_examples/services/boards/issues_move_service_shared_examples.rb
@@ -43,11 +43,11 @@ RSpec.shared_examples 'issues move service' do |group|
described_class.new(parent, user, params).execute(issue)
end
- it 'removes all list-labels from boards and close the issue' do
+ it 'does not change labels and close the issue' do
described_class.new(parent, user, params).execute(issue)
issue.reload
- expect(issue.labels).to contain_exactly(bug, regression)
+ expect(issue.labels).to contain_exactly(bug, development, testing, regression)
expect(issue).to be_closed
end
end
@@ -59,13 +59,11 @@ RSpec.shared_examples 'issues move service' do |group|
let(:issue) { create(:labeled_issue, project: project, labels: [bug, development, testing, regression], milestone: milestone) }
let(:params) { { board_id: board1.id, from_list_id: list2.id, to_list_id: backlog.id } }
- it_behaves_like 'updating timestamps'
-
it 'keeps labels and milestone' do
described_class.new(parent, user, params).execute(issue)
issue.reload
- expect(issue.labels).to contain_exactly(bug, regression)
+ expect(issue.labels).to contain_exactly(bug, development, testing, regression)
expect(issue.milestone).to eq(milestone)
end
end
diff --git a/spec/support/shared_examples/services/common_system_notes_shared_examples.rb b/spec/support/shared_examples/services/common_system_notes_shared_examples.rb
index 1887b38b50e..14b0aa1ab08 100644
--- a/spec/support/shared_examples/services/common_system_notes_shared_examples.rb
+++ b/spec/support/shared_examples/services/common_system_notes_shared_examples.rb
@@ -1,7 +1,9 @@
# frozen_string_literal: true
-RSpec.shared_examples 'system note creation' do |update_params, note_text|
- subject { described_class.new(project: project, current_user: user).execute(issuable, old_labels: []) }
+RSpec.shared_examples 'system note creation' do |update_params, note_text, is_update = true|
+ subject do
+ described_class.new(project: project, current_user: user).execute(issuable, old_labels: [], is_update: is_update)
+ end
before do
issuable.assign_attributes(update_params)
diff --git a/spec/support/shared_examples/services/count_service_shared_examples.rb b/spec/support/shared_examples/services/count_service_shared_examples.rb
index 54c6ff79976..42fe170d2c4 100644
--- a/spec/support/shared_examples/services/count_service_shared_examples.rb
+++ b/spec/support/shared_examples/services/count_service_shared_examples.rb
@@ -10,10 +10,10 @@ RSpec.shared_examples 'a counter caching service' do
describe '#count' do
it 'caches the count', :request_store do
subject.delete_cache
- control_count = ActiveRecord::QueryRecorder.new { subject.count }.count
+ control = ActiveRecord::QueryRecorder.new { subject.count }
subject.delete_cache
- expect { 2.times { subject.count } }.not_to exceed_query_limit(control_count)
+ expect { 2.times { subject.count } }.not_to exceed_query_limit(control)
end
end
diff --git a/spec/support/shared_examples/services/destroy_label_links_shared_examples.rb b/spec/support/shared_examples/services/destroy_label_links_shared_examples.rb
index d2b52468c25..459c957091c 100644
--- a/spec/support/shared_examples/services/destroy_label_links_shared_examples.rb
+++ b/spec/support/shared_examples/services/destroy_label_links_shared_examples.rb
@@ -8,13 +8,13 @@ RSpec.shared_examples_for 'service deleting label links of an issuable' do
end
it 'deletes label links for specified target ID and type' do
- control_count = ActiveRecord::QueryRecorder.new { execute }.count
+ control = ActiveRecord::QueryRecorder.new { execute }
# Create more label links for the target
create(:label_link, target: target)
create(:label_link, target: target)
- expect { execute }.not_to exceed_query_limit(control_count)
+ expect { execute }.not_to exceed_query_limit(control)
expect(target.reload.label_links.count).to eq(0)
end
end
diff --git a/spec/support/shared_examples/services/jira/requests/base_shared_examples.rb b/spec/support/shared_examples/services/jira/requests/base_shared_examples.rb
index 5e49bdd706c..a51215f6c89 100644
--- a/spec/support/shared_examples/services/jira/requests/base_shared_examples.rb
+++ b/spec/support/shared_examples/services/jira/requests/base_shared_examples.rb
@@ -11,6 +11,7 @@ RSpec.shared_examples 'a service that handles Jira API errors' do
Timeout::Error | '' | 'A timeout error occurred'
URI::InvalidURIError | '' | 'The Jira API URL'
SocketError | '' | 'The Jira API URL'
+ Gitlab::HTTP::BlockedUrlError | '' | 'Unable to connect to the Jira URL. Please verify your'
OpenSSL::SSL::SSLError | 'foo' | 'An SSL error occurred while connecting to Jira: foo'
JIRA::HTTPError | 'Unauthorized' | 'The credentials for accessing Jira are not valid'
JIRA::HTTPError | 'Forbidden' | 'The credentials for accessing Jira are not allowed'
diff --git a/spec/support/shared_examples/services/namespace_package_settings_shared_examples.rb b/spec/support/shared_examples/services/namespace_package_settings_shared_examples.rb
index d288c74ae4b..f4bab4d0ad6 100644
--- a/spec/support/shared_examples/services/namespace_package_settings_shared_examples.rb
+++ b/spec/support/shared_examples/services/namespace_package_settings_shared_examples.rb
@@ -12,6 +12,8 @@ RSpec.shared_examples 'updating the namespace package setting attributes' do |to
.and change { namespace.package_settings.reload.nuget_duplicates_allowed }.from(from[:nuget_duplicates_allowed]).to(to[:nuget_duplicates_allowed])
.and change { namespace.package_settings.reload.nuget_duplicate_exception_regex }.from(from[:nuget_duplicate_exception_regex]).to(to[:nuget_duplicate_exception_regex])
.and change { namespace.package_settings.reload.nuget_symbol_server_enabled }.from(from[:nuget_symbol_server_enabled]).to(to[:nuget_symbol_server_enabled])
+ .and change { namespace.package_settings.reload.terraform_module_duplicates_allowed }.from(from[:terraform_module_duplicates_allowed]).to(to[:terraform_module_duplicates_allowed])
+ .and change { namespace.package_settings.reload.terraform_module_duplicate_exception_regex }.from(from[:terraform_module_duplicate_exception_regex]).to(to[:terraform_module_duplicate_exception_regex])
end
end
@@ -36,6 +38,8 @@ RSpec.shared_examples 'creating the namespace package setting' do
expect(namespace.package_setting_relation.nuget_duplicates_allowed).to eq(package_settings[:nuget_duplicates_allowed])
expect(namespace.package_setting_relation.nuget_duplicate_exception_regex).to eq(package_settings[:nuget_duplicate_exception_regex])
expect(namespace.package_setting_relation.nuget_symbol_server_enabled).to eq(package_settings[:nuget_symbol_server_enabled])
+ expect(namespace.package_setting_relation.terraform_module_duplicates_allowed).to eq(package_settings[:terraform_module_duplicates_allowed])
+ expect(namespace.package_setting_relation.terraform_module_duplicate_exception_regex).to eq(package_settings[:terraform_module_duplicate_exception_regex])
end
it_behaves_like 'returning a success'
diff --git a/spec/support/shared_examples/services/packages/debian/generate_distribution_shared_examples.rb b/spec/support/shared_examples/services/packages/debian/generate_distribution_shared_examples.rb
index cb544f42765..97dd2aa96d4 100644
--- a/spec/support/shared_examples/services/packages/debian/generate_distribution_shared_examples.rb
+++ b/spec/support/shared_examples/services/packages/debian/generate_distribution_shared_examples.rb
@@ -244,10 +244,10 @@ RSpec.shared_examples 'Generate Debian Distribution and component files' do
end
create_list(:debian_package, 10, project: project, published_in: project_distribution)
- control_count = ActiveRecord::QueryRecorder.new { subject2 }.count
+ control = ActiveRecord::QueryRecorder.new { subject2 }
create_list(:debian_package, 10, project: project, published_in: project_distribution)
- expect { subject3 }.not_to exceed_query_limit(control_count)
+ expect { subject3 }.not_to exceed_query_limit(control)
end
end
diff --git a/spec/support/shared_examples/services/protected_branches_shared_examples.rb b/spec/support/shared_examples/services/protected_branches_shared_examples.rb
index 6d4b82730da..980241ad586 100644
--- a/spec/support/shared_examples/services/protected_branches_shared_examples.rb
+++ b/spec/support/shared_examples/services/protected_branches_shared_examples.rb
@@ -1,6 +1,6 @@
# frozen_string_literal: true
-RSpec.shared_context 'with scan result policy blocking protected branches' do
+RSpec.shared_context 'with scan result policy' do
include RepoHelpers
let(:policy_path) { Security::OrchestrationPolicyConfiguration::POLICY_PATH }
@@ -8,12 +8,10 @@ RSpec.shared_context 'with scan result policy blocking protected branches' do
let(:default_branch) { policy_project.default_branch }
let(:policy_yaml) do
- build(:orchestration_policy_yaml, scan_execution_policy: [], scan_result_policy: [scan_result_policy])
+ build(:orchestration_policy_yaml, scan_execution_policy: [], scan_result_policy: scan_result_policies)
end
- let(:scan_result_policy) do
- build(:scan_result_policy, branches: [branch_name], approval_settings: { block_branch_modification: true })
- end
+ let(:scan_result_policies) { [scan_result_policy] }
before do
policy_configuration.update_attribute(:security_policy_management_project, policy_project)
@@ -24,25 +22,26 @@ RSpec.shared_context 'with scan result policy blocking protected branches' do
end
end
-RSpec.shared_context 'with scan result policy preventing force pushing' do
- include RepoHelpers
-
- let(:policy_path) { Security::OrchestrationPolicyConfiguration::POLICY_PATH }
- let(:default_branch) { policy_project.default_branch }
- let(:prevent_pushing_and_force_pushing) { true }
-
- let(:scan_result_policy) do
- build(:scan_result_policy, branches: [branch_name],
- approval_settings: { prevent_pushing_and_force_pushing: prevent_pushing_and_force_pushing })
+RSpec.shared_context 'with scan result policy blocking protected branches' do
+ include_context 'with scan result policy' do
+ let(:scan_result_policy) do
+ build(:scan_result_policy, branches: [branch_name], approval_settings: { block_branch_modification: true })
+ end
end
+end
- let(:policy_yaml) do
- build(:orchestration_policy_yaml, scan_result_policy: [scan_result_policy])
- end
+RSpec.shared_context 'with scan result policy preventing force pushing' do
+ include_context 'with scan result policy' do
+ let(:prevent_pushing_and_force_pushing) { true }
- before do
- create_file_in_repo(policy_project, default_branch, default_branch, policy_path, policy_yaml)
- stub_licensed_features(security_orchestration_policies: true)
+ let(:scan_result_policy) do
+ build(:scan_result_policy, branches: [branch_name],
+ approval_settings: { prevent_pushing_and_force_pushing: prevent_pushing_and_force_pushing })
+ end
+
+ let(:policy_yaml) do
+ build(:orchestration_policy_yaml, scan_result_policy: [scan_result_policy])
+ end
end
after do
diff --git a/spec/support/shared_examples/work_item_hierarchy_restrictions_importer.rb b/spec/support/shared_examples/work_item_hierarchy_restrictions_importer.rb
index 0545be7c741..4f6b27a99c6 100644
--- a/spec/support/shared_examples/work_item_hierarchy_restrictions_importer.rb
+++ b/spec/support/shared_examples/work_item_hierarchy_restrictions_importer.rb
@@ -7,12 +7,23 @@ RSpec.shared_examples 'work item hierarchy restrictions importer' do
end
end
+ shared_examples 'clears type reactive cache' do
+ specify do
+ expect_next_found_instances_of(WorkItems::Type, 7) do |instance|
+ expect(instance).to receive(:clear_reactive_cache!)
+ end
+
+ subject
+ end
+ end
+
context 'when restrictions are missing' do
before do
WorkItems::HierarchyRestriction.delete_all
end
it_behaves_like 'adds restrictions'
+ it_behaves_like 'clears type reactive cache'
end
context 'when base types are missing' do
@@ -41,6 +52,8 @@ RSpec.shared_examples 'work item hierarchy restrictions importer' do
change { restriction.maximum_depth }.from(depth + 1).to(depth)
)
end
+
+ it_behaves_like 'clears type reactive cache'
end
context 'when some restrictions are missing' do
@@ -55,6 +68,8 @@ RSpec.shared_examples 'work item hierarchy restrictions importer' do
)
expect(WorkItems::HierarchyRestriction.count).to eq(7)
end
+
+ it_behaves_like 'clears type reactive cache'
end
context 'when restrictions contain attributes not present in the table' do
@@ -70,5 +85,7 @@ RSpec.shared_examples 'work item hierarchy restrictions importer' do
subject
end
+
+ it_behaves_like 'clears type reactive cache'
end
end
diff --git a/spec/support/shared_examples/work_items/widgetable_service_shared_examples.rb b/spec/support/shared_examples/work_items/widgetable_service_shared_examples.rb
index 491662d17d3..26a5be5aea4 100644
--- a/spec/support/shared_examples/work_items/widgetable_service_shared_examples.rb
+++ b/spec/support/shared_examples/work_items/widgetable_service_shared_examples.rb
@@ -4,7 +4,11 @@ RSpec.shared_examples_for 'work item widgetable service' do
it 'executes callbacks for expected widgets' do
supported_widgets.each do |widget|
expect_next_instance_of(widget[:klass]) do |widget_instance|
- expect(widget_instance).to receive(widget[:callback]).with(params: widget[:params])
+ if widget[:params].present?
+ expect(widget_instance).to receive(widget[:callback]).with(params: widget[:params])
+ else
+ expect(widget_instance).to receive(widget[:callback])
+ end
end
end
diff --git a/spec/support/shared_examples/workers/gitlab/github_import/stage_methods_shared_examples.rb b/spec/support/shared_examples/workers/gitlab/github_import/stage_methods_shared_examples.rb
index af5bf33a9a6..b5e3589d86c 100644
--- a/spec/support/shared_examples/workers/gitlab/github_import/stage_methods_shared_examples.rb
+++ b/spec/support/shared_examples/workers/gitlab/github_import/stage_methods_shared_examples.rb
@@ -1,6 +1,8 @@
# frozen_string_literal: true
RSpec.shared_examples Gitlab::GithubImport::StageMethods do
+ let_it_be(:project) { create(:project, :import_started, import_url: 'https://t0ken@github.com/repo/repo.git') }
+
describe '.sidekiq_retries_exhausted' do
it 'tracks the exception and marks the import as failed' do
expect(Gitlab::Import::ImportFailureService).to receive(:track)
@@ -14,4 +16,183 @@ RSpec.shared_examples Gitlab::GithubImport::StageMethods do
described_class.sidekiq_retries_exhausted_block.call({ 'args' => [1] }, StandardError.new)
end
end
+
+ describe '.sidekiq_options' do
+ subject(:sidekiq_options) { worker.class.sidekiq_options }
+
+ it 'has a status_expiration' do
+ is_expected.to include('status_expiration' => Gitlab::Import::StuckImportJob::IMPORT_JOBS_EXPIRATION)
+ end
+
+ it 'has a retry of 6' do
+ is_expected.to include('retry' => 6)
+ end
+ end
+
+ describe '#perform' do
+ it 'returns if no project could be found' do
+ expect(worker).not_to receive(:import)
+
+ worker.perform(-1)
+ end
+
+ it 'returns if the import state is no longer in progress' do
+ project.import_state.fail_op!
+
+ expect(worker).not_to receive(:import)
+
+ expect(Gitlab::GithubImport::Logger)
+ .to receive(:info)
+ .with(
+ {
+ message: 'starting stage',
+ project_id: project.id,
+ import_stage: described_class.name
+ }
+ )
+
+ expect(Gitlab::GithubImport::Logger)
+ .to receive(:info)
+ .with(
+ {
+ message: 'Project import is no longer running. Stopping worker.',
+ project_id: project.id,
+ import_stage: described_class.name,
+ import_status: 'failed'
+ }
+ )
+
+ worker.perform(project.id)
+ end
+
+ it 'imports the data when the project exists' do
+ expect(worker)
+ .to receive(:import)
+ .with(
+ an_instance_of(Gitlab::GithubImport::Client),
+ an_instance_of(Project)
+ )
+
+ expect(Gitlab::GithubImport::Logger)
+ .to receive(:info)
+ .with(
+ {
+ message: 'starting stage',
+ project_id: project.id,
+ import_stage: described_class.name
+ }
+ )
+
+ expect(Gitlab::GithubImport::Logger)
+ .to receive(:info)
+ .with(
+ {
+ message: 'stage finished',
+ project_id: project.id,
+ import_stage: described_class.name
+ }
+ )
+
+ worker.perform(project.id)
+ end
+
+ it 'queues RefreshImportJidWorker' do
+ allow(worker).to receive(:import)
+ allow(worker).to receive(:jid).and_return('mock_jid')
+
+ expect(Gitlab::GithubImport::RefreshImportJidWorker)
+ .to receive(:perform_in_the_future)
+ .with(project.id, 'mock_jid')
+
+ worker.perform(project.id)
+ end
+
+ describe 'rescheduling the worker on certain errors' do
+ using RSpec::Parameterized::TableSyntax
+
+ where(:error) { [Gitlab::GithubImport::RateLimitError, Gitlab::ExclusiveLeaseHelpers::FailedToObtainLockError] }
+
+ with_them do
+ it 'reschedules the worker' do
+ rate_limit_reset = 10
+ client = instance_double(Gitlab::GithubImport::Client, rate_limit_resets_in: rate_limit_reset)
+
+ allow(Gitlab::GithubImport)
+ .to receive(:new_client_for)
+ .and_return(client)
+
+ expect(worker)
+ .to receive(:import)
+ .with(client, project)
+ .and_raise(error)
+
+ expect(worker.class)
+ .to receive(:perform_in)
+ .with(rate_limit_reset, project.id)
+
+ expect(Gitlab::GithubImport::Logger)
+ .to receive(:info)
+ .with(
+ {
+ message: 'starting stage',
+ project_id: project.id,
+ import_stage: described_class.name
+ }
+ )
+
+ expect(Gitlab::GithubImport::Logger)
+ .to receive(:info)
+ .with(
+ {
+ message: 'stage retrying',
+ project_id: project.id,
+ import_stage: described_class.name,
+ exception_class: error.name
+ }
+ )
+
+ worker.perform(project.id)
+ end
+ end
+ end
+
+ it 'logs error when import fails with a StandardError' do
+ exception = StandardError.new('some error')
+
+ expect(worker)
+ .to receive(:import)
+ .and_raise(exception)
+
+ expect(Gitlab::GithubImport::Logger)
+ .to receive(:info)
+ .with(
+ {
+ message: 'starting stage',
+ project_id: project.id,
+ import_stage: described_class.name
+ }
+ )
+
+ expect(Gitlab::Import::ImportFailureService)
+ .to receive(:track)
+ .with(
+ {
+ project_id: project.id,
+ exception: exception,
+ error_source: described_class.name,
+ fail_import: false,
+ metrics: true
+ }
+ ).and_call_original
+
+ expect { worker.perform(project.id) }
+ .to raise_error(exception)
+
+ expect(project.import_state.reload.status).to eq('started')
+
+ expect(project.import_failures).not_to be_empty
+ expect(project.import_failures.last.exception_class).to eq('StandardError')
+ expect(project.import_failures.last.exception_message).to eq('some error')
+ end
+ end
end
diff --git a/spec/support/sidekiq.rb b/spec/support/sidekiq.rb
index b25f39c5e74..6c354c780b2 100644
--- a/spec/support/sidekiq.rb
+++ b/spec/support/sidekiq.rb
@@ -1,13 +1,19 @@
# frozen_string_literal: true
RSpec.configure do |config|
- def gitlab_sidekiq_inline(&block)
+ def gitlab_sidekiq_inline
# We need to cleanup the queues before running jobs in specs because the
# middleware might have written to redis
redis_queues_cleanup!
redis_queues_metadata_cleanup!
- Sidekiq::Testing.inline!(&block)
+
+ # Scoped inline! is thread-safe which breaks capybara specs
+ # see https://github.com/sidekiq/sidekiq/issues/6069
+ Sidekiq::Testing.inline!
+
+ yield
ensure
+ Sidekiq::Testing.fake! # fake is the default so we reset it to that
redis_queues_cleanup!
redis_queues_metadata_cleanup!
end
diff --git a/spec/support/sidekiq_middleware.rb b/spec/support/sidekiq_middleware.rb
index f4d90ff5151..cbd6163d46b 100644
--- a/spec/support/sidekiq_middleware.rb
+++ b/spec/support/sidekiq_middleware.rb
@@ -6,15 +6,6 @@ require 'sidekiq/testing'
module SidekiqMiddleware
def with_sidekiq_server_middleware(&block)
Sidekiq::Testing.server_middleware.clear
-
- if Gem::Version.new(Sidekiq::VERSION) != Gem::Version.new('6.5.12')
- raise 'New version of sidekiq detected, please remove this line'
- end
-
- # This line is a workaround for a Sidekiq bug that is already fixed in v7.0.0
- # https://github.com/mperham/sidekiq/commit/1b83a152786ed382f07fff12d2608534f1e3c922
- Sidekiq::Testing.server_middleware.instance_variable_set(:@config, Sidekiq)
-
Sidekiq::Testing.server_middleware(&block)
ensure
Sidekiq::Testing.server_middleware.clear
diff --git a/spec/tasks/gitlab/cleanup_rake_spec.rb b/spec/tasks/gitlab/cleanup_rake_spec.rb
index 2e41c4f1219..56a9dcae4c9 100644
--- a/spec/tasks/gitlab/cleanup_rake_spec.rb
+++ b/spec/tasks/gitlab/cleanup_rake_spec.rb
@@ -383,4 +383,88 @@ RSpec.describe 'gitlab:cleanup rake tasks', :silence_stdout do
end
end
end
+
+ describe 'cleanup:orphan_job_artifact_final_objects' do
+ subject(:rake_task) { run_rake_task('gitlab:cleanup:orphan_job_artifact_final_objects', provider) }
+
+ before do
+ stub_artifacts_object_storage
+ end
+
+ shared_examples_for 'running the cleaner' do
+ it 'runs the task without errors' do
+ expect(Gitlab::Cleanup::OrphanJobArtifactFinalObjectsCleaner)
+ .to receive(:new)
+ .with(
+ dry_run: true,
+ force_restart: false,
+ provider: provider,
+ logger: anything
+ )
+ .and_call_original
+
+ expect { rake_task }.not_to raise_error
+ end
+
+ context 'with FORCE_RESTART defined' do
+ before do
+ stub_env('FORCE_RESTART', '1')
+ end
+
+ it 'passes force_restart correctly' do
+ expect(Gitlab::Cleanup::OrphanJobArtifactFinalObjectsCleaner)
+ .to receive(:new)
+ .with(
+ dry_run: true,
+ force_restart: true,
+ provider: provider,
+ logger: anything
+ )
+ .and_call_original
+
+ expect { rake_task }.not_to raise_error
+ end
+ end
+
+ context 'with DRY_RUN set to false' do
+ before do
+ stub_env('DRY_RUN', 'false')
+ end
+
+ it 'passes dry_run correctly' do
+ expect(Gitlab::Cleanup::OrphanJobArtifactFinalObjectsCleaner)
+ .to receive(:new)
+ .with(
+ dry_run: false,
+ force_restart: false,
+ provider: provider,
+ logger: anything
+ )
+ .and_call_original
+
+ expect { rake_task }.not_to raise_error
+ end
+ end
+ end
+
+ context 'when provider is not specified' do
+ let(:provider) { nil }
+
+ it_behaves_like 'running the cleaner'
+ end
+
+ context 'when provider is specified' do
+ let(:provider) { 'aws' }
+
+ it_behaves_like 'running the cleaner'
+ end
+
+ context 'when unsupported provider is given' do
+ let(:provider) { 'somethingelse' }
+
+ it 'exits with error' do
+ expect { rake_task }.to raise_error(SystemExit)
+ end
+ end
+ end
end
diff --git a/spec/tooling/danger/bulk_database_actions_spec.rb b/spec/tooling/danger/bulk_database_actions_spec.rb
index eba3eacb212..18a46f663c0 100644
--- a/spec/tooling/danger/bulk_database_actions_spec.rb
+++ b/spec/tooling/danger/bulk_database_actions_spec.rb
@@ -8,115 +8,87 @@ require_relative '../../../tooling/danger/bulk_database_actions'
require_relative '../../../tooling/danger/project_helper'
RSpec.describe Tooling::Danger::BulkDatabaseActions, feature_category: :tooling do
- include_context "with dangerfile"
+ include_context 'with dangerfile'
- let(:fake_danger) { DangerSpecHelper.fake_danger.include(described_class) }
+ let(:fake_danger) { DangerSpecHelper.fake_danger }
let(:fake_project_helper) { instance_double(Tooling::Danger::ProjectHelper) }
-
- let(:mr_url) { 'https://gitlab.com/gitlab-org/gitlab/-/merge_requests/1' }
- let(:doc_link) { described_class::DOCUMENTATION_LINK }
-
- let(:comment_text) { "\n#{described_class::COMMENT_TEXT}" }
-
- let(:file_lines) do
- file_diff.map { |line| line.delete_prefix('+') }
- end
+ let(:comment_text) { "\n#{described_class::SUGGESTION}" }
+ let(:file_lines) { file_diff.map { |line| line.delete_prefix('+') } }
before do
allow(bulk_database_actions).to receive(:project_helper).and_return(fake_project_helper)
allow(bulk_database_actions.project_helper).to receive(:file_lines).and_return(file_lines)
allow(bulk_database_actions.helper).to receive(:added_files).and_return([filename])
allow(bulk_database_actions.helper).to receive(:changed_lines).with(filename).and_return(file_diff)
- allow(bulk_database_actions.helper).to receive(:mr_web_url).and_return(mr_url)
+
+ bulk_database_actions.define_singleton_method(:add_suggestions_for) do |filename|
+ Tooling::Danger::BulkDatabaseActions.new(filename, context: self).suggest
+ end
end
subject(:bulk_database_actions) { fake_danger.new(helper: fake_helper) }
- shared_examples 'no Danger comment' do
- it 'does not comment on the bulk update action usage' do
- expect(bulk_database_actions).not_to receive(:markdown)
-
- bulk_database_actions.add_comment_for_bulk_database_action_method_usage
+ context 'for single line method call' do
+ let(:file_diff) do
+ <<~DIFF.split("\n")
+ + def execute
+ + #{code}
+ +
+ + ServiceResponse.success
+ + end
+ DIFF
end
- end
- describe '#add_comment_for_bulk_database_action_method_usage' do
- context 'for single line method call' do
- let(:file_diff) do
- [
- "+ def execute",
- "+ pat_family.active.#{method_call}",
- "+",
- "+ ServiceResponse.success",
- "+ end"
- ]
- end
+ context 'when file is a non-spec Ruby file' do
+ let(:filename) { 'app/services/personal_access_tokens/revoke_token_family_service.rb' }
- context 'when file is a non-spec Ruby file' do
- let(:filename) { 'app/services/personal_access_tokens/revoke_token_family_service.rb' }
-
- using RSpec::Parameterized::TableSyntax
-
- where(:method_call, :expect_comment?) do
- 'update_all(revoked: true)' | true
- 'destroy_all' | true
- 'delete_all' | true
- 'update(revoked: true)' | true
- 'delete' | true
- 'update_two_factor' | false
- 'delete_keys(key)' | false
- 'destroy_hook(hook)' | false
- 'destroy_all_merged' | false
- 'update_all_mirrors' | false
+ using RSpec::Parameterized::TableSyntax
+
+ context 'when comment is expected' do
+ where(:code) do
+ [
+ 'update_all(revoked: true)',
+ 'destroy_all',
+ 'delete_all',
+ 'update(revoked: true)',
+ 'delete',
+ 'upsert',
+ 'upsert_all',
+ 'User.upsert',
+ 'User.last.destroy',
+ ' .destroy'
+ ]
end
with_them do
- it "correctly handles potential bulk database action" do
- if expect_comment?
- expect(bulk_database_actions).to receive(:markdown).with(comment_text, file: filename, line: 2)
- else
- expect(bulk_database_actions).not_to receive(:markdown)
- end
-
- bulk_database_actions.add_comment_for_bulk_database_action_method_usage
+ specify do
+ expect(bulk_database_actions).to receive(:markdown).with(comment_text.chomp, file: filename, line: 2)
+
+ bulk_database_actions.add_suggestions_for(filename)
end
end
end
- context 'for spec directories' do
- let(:method_call) { 'update_all(revoked: true)' }
-
- context 'for FOSS spec file' do
- let(:filename) { 'spec/services/personal_access_tokens/revoke_token_family_service_spec.rb' }
-
- it_behaves_like 'no Danger comment'
- end
-
- context 'for EE spec file' do
- let(:filename) { 'ee/spec/services/personal_access_tokens/revoke_token_family_service_spec.rb' }
-
- it_behaves_like 'no Danger comment'
+ context 'when no comment is expected' do
+ where(:code) do
+ [
+ 'we update bob',
+ 'update_two_factor',
+ 'delete_keys(key)',
+ 'destroy_hook(hook)',
+ 'destroy_all_merged',
+ 'update_all_mirrors'
+ ]
end
- context 'for JiHu spec file' do
- let(:filename) { 'jh/spec/services/personal_access_tokens/revoke_token_family_service_spec.rb' }
+ with_them do
+ specify do
+ expect(bulk_database_actions).not_to receive(:markdown)
- it_behaves_like 'no Danger comment'
+ bulk_database_actions.add_suggestions_for(filename)
+ end
end
end
end
-
- context 'for strings' do
- let(:filename) { 'app/services/personal_access_tokens/revoke_token_family_service.rb' }
- let(:file_diff) do
- [
- '+ expect { subject }.to output(',
- '+ "ERROR: Could not update tag"',
- '+ ).to_stderr'
- ]
- end
-
- it_behaves_like 'no Danger comment'
- end
end
end
diff --git a/spec/tooling/danger/project_helper_spec.rb b/spec/tooling/danger/project_helper_spec.rb
index a41aba17f56..90409ff5559 100644
--- a/spec/tooling/danger/project_helper_spec.rb
+++ b/spec/tooling/danger/project_helper_spec.rb
@@ -251,6 +251,13 @@ RSpec.describe Tooling::Danger::ProjectHelper, feature_category: :tooling do
[:backend, :analytics_instrumentation] | '+ count(User.active)' | ['lib/gitlab/usage_data/topology.rb']
[:backend, :analytics_instrumentation] | '+ foo_count(User.active)' | ['lib/gitlab/usage_data.rb']
[:backend] | '+ count(User.active)' | ['user.rb']
+ [:database, :backend] | '+ User.upsert({ name: "blah" })' | ['app/foo/bar.rb']
+ [:database, :backend] | '+ User.upsert(' | ['app/foo/bar.rb']
+ [:database, :backend] | '+ Organizations::OrganizationUser.upsert({' | ['app/foo/bar.rb']
+ [:database, :backend] | '+ upsert({ name: "blah" })' | ['app/foo/bar.rb']
+ [:database, :backend] | '+ .upsert({ name: "blah" })' | ['app/foo/bar.rb']
+ [:database, :backend] | '+ .delete_all' | ['app/foo/bar.rb']
+ [:database, :backend] | '+ .destroy_all' | ['app/foo/bar.rb']
[:import_integrate_be, :database] | '+ add_column :integrations, :foo, :text' | ['db/migrate/foo.rb']
[:import_integrate_be, :database] | '+ create_table :zentao_tracker_data do |t|' | ['ee/db/post_migrate/foo.rb']
[:import_integrate_be, :backend] | '+ Integrations::Foo' | ['app/foo/bar.rb']
diff --git a/spec/tooling/lib/tooling/find_changes_spec.rb b/spec/tooling/lib/tooling/find_changes_spec.rb
index 85e3eadac6f..be28b228edd 100644
--- a/spec/tooling/lib/tooling/find_changes_spec.rb
+++ b/spec/tooling/lib/tooling/find_changes_spec.rb
@@ -16,7 +16,8 @@ RSpec.describe Tooling::FindChanges, feature_category: :tooling do
predictive_tests_pathname: predictive_tests_pathname,
frontend_fixtures_mapping_pathname: frontend_fixtures_mapping_pathname,
from: from,
- file_filter: file_filter)
+ file_filter: file_filter,
+ only_new_paths: only_new_paths)
end
let(:changed_files_pathname) { changed_files_file.path }
@@ -25,6 +26,7 @@ RSpec.describe Tooling::FindChanges, feature_category: :tooling do
let(:from) { :api }
let(:gitlab_client) { double('GitLab') } # rubocop:disable RSpec/VerifiedDoubles
let(:file_filter) { ->(_) { true } }
+ let(:only_new_paths) { false }
around do |example|
self.changed_files_file = Tempfile.new('changed_files_file')
@@ -122,6 +124,37 @@ RSpec.describe Tooling::FindChanges, feature_category: :tooling do
expect(File.read(changed_files_file)).to eq('doc/index.md')
end
end
+
+ context 'when used with only_new_paths' do
+ let(:only_new_paths) { true }
+
+ let(:mr_changes_array) do
+ [
+ {
+ "new_path" => "scripts/test.js",
+ "old_path" => "scripts/test.js"
+ },
+ {
+ "new_path" => "doc/renamed_index.md",
+ "old_path" => "doc/index.md"
+ }
+ ]
+ end
+
+ before do
+ # rubocop:disable RSpec/VerifiedDoubles -- The class from the GitLab gem isn't public, so we cannot use verified doubles for it.
+ allow(gitlab_client).to receive(:merge_request_changes)
+ .with('dummy-project', '1234')
+ .and_return(double(changes: mr_changes_array))
+ # rubocop:enable RSpec/VerifiedDoubles
+ end
+
+ it 'only writes new file paths to output' do
+ subject
+
+ expect(File.read(changed_files_file)).to eq('doc/renamed_index.md scripts/test.js')
+ end
+ end
end
context 'when fetching changes from changed files' do
diff --git a/spec/uploaders/object_storage/cdn/google_cdn_spec.rb b/spec/uploaders/object_storage/cdn/google_cdn_spec.rb
index 96413f622e8..04c6cf4bde9 100644
--- a/spec/uploaders/object_storage/cdn/google_cdn_spec.rb
+++ b/spec/uploaders/object_storage/cdn/google_cdn_spec.rb
@@ -19,7 +19,7 @@ RSpec.describe ObjectStorage::CDN::GoogleCDN,
subject { described_class.new(options) }
before do
- WebMock.stub_request(:get, GoogleCloud::FetchGoogleIpListService::GOOGLE_IP_RANGES_URL)
+ WebMock.stub_request(:get, CloudSeed::GoogleCloud::FetchGoogleIpListService::GOOGLE_IP_RANGES_URL)
.to_return(status: 200, body: google_cloud_ips, headers: headers)
end
diff --git a/spec/validators/ip_cidr_array_validator_spec.rb b/spec/validators/ip_cidr_array_validator_spec.rb
index 6adb0bc70db..f18005054b5 100644
--- a/spec/validators/ip_cidr_array_validator_spec.rb
+++ b/spec/validators/ip_cidr_array_validator_spec.rb
@@ -17,7 +17,6 @@ RSpec.describe IpCidrArrayValidator, feature_category: :shared do
using RSpec::Parameterized::TableSyntax
- # noinspection RubyMismatchedArgumentType - https://handbook.gitlab.com/handbook/tools-and-tips/editors-and-ides/jetbrains-ides/tracked-jetbrains-issues/#ruby-32041
where(:cidr_array, :validity, :errors) do
# rubocop:disable Layout/LineLength -- The RSpec table syntax often requires long lines for errors
nil | false | { cidr_array: ["must be an array of CIDR values"] }
diff --git a/spec/views/admin/application_settings/network.html.haml_spec.rb b/spec/views/admin/application_settings/network.html.haml_spec.rb
index 989977bac3e..193ee8a32d5 100644
--- a/spec/views/admin/application_settings/network.html.haml_spec.rb
+++ b/spec/views/admin/application_settings/network.html.haml_spec.rb
@@ -18,4 +18,12 @@ RSpec.describe 'admin/application_settings/network.html.haml', feature_category:
expect(rendered).to have_field('application_setting_projects_api_rate_limit_unauthenticated')
end
end
+
+ context 'for Members API rate limit' do
+ it 'renders the `members_delete_limit` field' do
+ render
+
+ expect(rendered).to have_field('application_setting_members_delete_limit')
+ end
+ end
end
diff --git a/spec/views/admin/sessions/new.html.haml_spec.rb b/spec/views/admin/sessions/new.html.haml_spec.rb
index 81275fa8750..73d6298c27e 100644
--- a/spec/views/admin/sessions/new.html.haml_spec.rb
+++ b/spec/views/admin/sessions/new.html.haml_spec.rb
@@ -45,7 +45,7 @@ RSpec.describe 'admin/sessions/new.html.haml' do
expect(rendered).not_to have_content _('No authentication methods configured.')
expect(rendered).to have_css('.omniauth-divider')
expect(rendered).to have_content(_('or sign in with'))
- expect(rendered).to have_css('.omniauth-container')
+ expect(rendered).to have_css('.js-oauth-login')
end
end
diff --git a/spec/views/devise/sessions/new.html.haml_spec.rb b/spec/views/devise/sessions/new.html.haml_spec.rb
index 5f611ae1d8f..61a3581852e 100644
--- a/spec/views/devise/sessions/new.html.haml_spec.rb
+++ b/spec/views/devise/sessions/new.html.haml_spec.rb
@@ -2,71 +2,36 @@
require 'spec_helper'
-RSpec.describe 'devise/sessions/new' do
- describe 'marketing text', :saas do
- subject { render(template: 'devise/sessions/new', layout: 'layouts/devise') }
+RSpec.describe 'devise/sessions/new', feature_category: :system_access do
+ describe 'ldap' do
+ include LdapHelpers
+
+ let(:server) { { provider_name: 'ldapmain', label: 'LDAP' }.with_indifferent_access }
before do
+ enable_ldap
stub_devise
disable_captcha
- stub_feature_flags(restyle_login_page: false)
+ disable_sign_up
+ disable_other_signin_methods
end
- it 'when flash is anything it renders marketing text' do
- flash[:notice] = "You can't do that"
-
- subject
+ it 'is shown when enabled' do
+ render
- expect(rendered).to have_content('A complete DevOps platform')
+ expect(rendered).to have_selector('.new-session-tabs')
+ expect(rendered).to have_selector('[data-testid="ldap-tab"]')
+ expect(rendered).to have_field(_('Username'))
end
- it 'when flash notice is devise confirmed message it hides marketing text' do
- flash[:notice] = t(:confirmed, scope: [:devise, :confirmations])
-
- subject
-
- expect(rendered).not_to have_content('A complete DevOps platform')
- end
- end
-
- flag_values = [true, false]
- flag_values.each do |val|
- context "with #{val}" do
- before do
- stub_feature_flags(restyle_login_page: val)
- end
-
- describe 'ldap' do
- include LdapHelpers
-
- let(:server) { { provider_name: 'ldapmain', label: 'LDAP' }.with_indifferent_access }
-
- before do
- enable_ldap
- stub_devise
- disable_captcha
- disable_sign_up
- disable_other_signin_methods
- end
-
- it 'is shown when enabled' do
- render
-
- expect(rendered).to have_selector('.new-session-tabs')
- expect(rendered).to have_selector('[data-testid="ldap-tab"]')
- expect(rendered).to have_field(_('Username'))
- end
-
- it 'is not shown when LDAP sign in is disabled' do
- disable_ldap_sign_in
+ it 'is not shown when LDAP sign in is disabled' do
+ disable_ldap_sign_in
- render
+ render
- expect(rendered).to have_content('No authentication methods configured')
- expect(rendered).not_to have_selector('[data-testid="ldap-tab"]')
- expect(rendered).not_to have_field(_('Username'))
- end
- end
+ expect(rendered).to have_content('No authentication methods configured')
+ expect(rendered).not_to have_selector('[data-testid="ldap-tab"]')
+ expect(rendered).not_to have_field(_('Username'))
end
end
diff --git a/spec/views/devise/shared/_footer.html.haml_spec.rb b/spec/views/devise/shared/_footer.html.haml_spec.rb
new file mode 100644
index 00000000000..58fe9c1073c
--- /dev/null
+++ b/spec/views/devise/shared/_footer.html.haml_spec.rb
@@ -0,0 +1,46 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe 'devise/shared/_footer', feature_category: :system_access do
+ subject { render && rendered }
+
+ context 'when public visibility is restricted' do
+ before do
+ allow(view).to receive(:public_visibility_restricted?).and_return(true)
+ end
+
+ it { is_expected.not_to have_link(_('Explore'), href: explore_root_path) }
+ it { is_expected.not_to have_link(_('Help'), href: help_path) }
+ end
+
+ context 'when public visibility is not restricted' do
+ before do
+ allow(view).to receive(:public_visibility_restricted?).and_return(false)
+ end
+
+ it { is_expected.to have_link(_('Explore'), href: explore_root_path) }
+ it { is_expected.to have_link(_('Help'), href: help_path) }
+ end
+
+ it { is_expected.to have_link(_('About GitLab'), href: "https://#{ApplicationHelper.promo_host}") }
+ it { is_expected.to have_link(_('Community forum'), href: ApplicationHelper.community_forum) }
+
+ context 'when one trust is enabled' do
+ before do
+ allow(view).to receive(:one_trust_enabled?).and_return(true)
+ end
+
+ it { is_expected.to have_button(_('Cookie Preferences'), class: 'ot-sdk-show-settings') }
+ end
+
+ context 'when one trust is disabled' do
+ before do
+ allow(view).to receive(:one_trust_enabled?).and_return(false)
+ end
+
+ it { is_expected.not_to have_button(_('Cookie Preferences'), class: 'ot-sdk-show-settings') }
+ end
+
+ it { is_expected.to have_css('.js-language-switcher') }
+end
diff --git a/spec/views/devise/shared/_signup_box.html.haml_spec.rb b/spec/views/devise/shared/_signup_box.html.haml_spec.rb
index eba036083be..13beb87c383 100644
--- a/spec/views/devise/shared/_signup_box.html.haml_spec.rb
+++ b/spec/views/devise/shared/_signup_box.html.haml_spec.rb
@@ -81,22 +81,6 @@ RSpec.describe 'devise/shared/_signup_box' do
end
end
- context 'using the borderless option' do
- let(:border_css_classes) { '.gl-border-gray-100.gl-border-1.gl-border-solid.gl-rounded-base' }
-
- it 'renders with a border by default' do
- render
-
- expect(rendered).to have_selector(border_css_classes)
- end
-
- it 'renders without a border when borderless is truthy' do
- render('devise/shared/signup_box', borderless: true)
-
- expect(rendered).not_to have_selector(border_css_classes)
- end
- end
-
def stub_devise
allow(view).to receive(:devise_mapping).and_return(Devise.mappings[:user])
allow(view).to receive(:resource).and_return(spy)
diff --git a/spec/views/devise/shared/_signup_omniauth_provider_list_spec.rb b/spec/views/devise/shared/_signup_omniauth_provider_list_spec.rb
index c3e4bd76b30..988f2e2df8a 100644
--- a/spec/views/devise/shared/_signup_omniauth_provider_list_spec.rb
+++ b/spec/views/devise/shared/_signup_omniauth_provider_list_spec.rb
@@ -13,6 +13,7 @@ RSpec.describe 'devise/shared/_signup_omniauth_provider_list', feature_category:
allow(view).to receive(:providers).and_return([provider_label])
allow(view).to receive(:tracking_label).and_return(tracking_label)
allow(view).to receive(:glm_tracking_params).and_return({})
+ render
end
shared_examples 'sso buttons have snowplow tracking' do
@@ -24,37 +25,11 @@ RSpec.describe 'devise/shared/_signup_omniauth_provider_list', feature_category:
end
end
- context 'when feature flag is true' do
- before do
- stub_feature_flags(restyle_login_page: true)
+ it { is_expected.to have_content(_("Register with:")) }
- render
- end
-
- it { is_expected.to have_content(_("Register with:")) }
-
- it_behaves_like 'sso buttons have snowplow tracking'
- end
-
- context 'when feature flag is false' do
- before do
- stub_feature_flags(restyle_login_page: false)
-
- render
- end
+ it_behaves_like 'sso buttons have snowplow tracking'
- it { is_expected.to have_content(_("Create an account using:")) }
-
- it_behaves_like 'sso buttons have snowplow tracking'
- end
-
- context 'when rendering button' do
- before do
- render
- end
-
- it 'renders button in form' do
- expect(rendered).to have_css('form[action="/users/auth/github"]')
- end
+ it 'renders button in form' do
+ expect(rendered).to have_css('form[action="/users/auth/github"]')
end
end
diff --git a/spec/views/profiles/preferences/show.html.haml_spec.rb b/spec/views/profiles/preferences/show.html.haml_spec.rb
index c20b4424129..c68c48a0828 100644
--- a/spec/views/profiles/preferences/show.html.haml_spec.rb
+++ b/spec/views/profiles/preferences/show.html.haml_spec.rb
@@ -20,14 +20,6 @@ RSpec.describe 'profiles/preferences/show' do
it 'has an id for anchoring' do
expect(rendered).to have_css('#navigation-theme')
end
-
- it 'has correct stylesheet tags' do
- Gitlab::Themes.each do |theme|
- next unless theme.css_filename
-
- expect(rendered).to have_selector("link[href*=\"themes/#{theme.css_filename}\"]", visible: false)
- end
- end
end
context 'syntax highlighting theme' do
diff --git a/spec/views/projects/commit/show.html.haml_spec.rb b/spec/views/projects/commit/show.html.haml_spec.rb
index 4cfff00d390..7f3cecae8cb 100644
--- a/spec/views/projects/commit/show.html.haml_spec.rb
+++ b/spec/views/projects/commit/show.html.haml_spec.rb
@@ -3,7 +3,8 @@
require 'spec_helper'
RSpec.describe 'projects/commit/show.html.haml', feature_category: :source_code_management do
- let(:project) { create(:project, :repository) }
+ let_it_be_with_refind(:project) { create(:project, :repository, :in_group) }
+
let(:commit) { project.commit }
before do
diff --git a/spec/views/projects/merge_requests/creations/new.html.haml_spec.rb b/spec/views/projects/merge_requests/creations/new.html.haml_spec.rb
new file mode 100644
index 00000000000..b55bf75b7b1
--- /dev/null
+++ b/spec/views/projects/merge_requests/creations/new.html.haml_spec.rb
@@ -0,0 +1,67 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe 'projects/merge_requests/creations/new.html.haml', feature_category: :code_review_workflow do
+ let_it_be(:target_project) { build_stubbed(:project, :repository) }
+
+ let(:merge_request) { build(:merge_request, source_project: source_project, target_project: target_project) }
+
+ before do
+ controller.prepend_view_path('app/views/projects')
+
+ assign(:project, source_project)
+ assign(:merge_request, merge_request)
+ end
+
+ shared_examples 'has conflicting merge request guard' do
+ context 'when there is conflicting merge request' do
+ let(:conflicting_mr) do
+ build_stubbed(
+ :merge_request,
+ source_project: source_project,
+ target_project: target_project,
+ source_branch: merge_request.source_branch,
+ target_branch: merge_request.target_branch
+ )
+ end
+
+ before do
+ allow(merge_request).to receive(:existing_mrs_targeting_same_branch).and_return([conflicting_mr])
+ end
+
+ it 'shows conflicting merge request alert' do
+ render
+
+ expected_conflicting_mr_link = link_to(
+ conflicting_mr.to_reference,
+ project_merge_request_path(conflicting_mr.target_project, conflicting_mr)
+ )
+
+ expect(flash[:alert]).to include(
+ "These branches already have an open merge request: #{expected_conflicting_mr_link}"
+ )
+ end
+ end
+
+ context 'when there is no conflicting merge request' do
+ it 'does not show conflicting merge request alert' do
+ render
+
+ expect(flash[:alert]).to be_nil
+ end
+ end
+ end
+
+ context 'when merge request is created from other project' do
+ let_it_be(:source_project) { build_stubbed(:project, :repository) }
+
+ it_behaves_like 'has conflicting merge request guard'
+ end
+
+ context 'when merge request is created from the same project' do
+ let_it_be(:source_project) { target_project }
+
+ it_behaves_like 'has conflicting merge request guard'
+ end
+end
diff --git a/spec/workers/bulk_imports/export_request_worker_spec.rb b/spec/workers/bulk_imports/export_request_worker_spec.rb
index 2cc6348bb27..25adaef28fd 100644
--- a/spec/workers/bulk_imports/export_request_worker_spec.rb
+++ b/spec/workers/bulk_imports/export_request_worker_spec.rb
@@ -62,7 +62,7 @@ RSpec.describe BulkImports::ExportRequestWorker, feature_category: :importers do
context 'when something goes wrong during source id fetch' do
let(:entity_source_id) { 'invalid' }
- it 'logs the error & requests relations export using full path url' do
+ it 'logs the exception as a warning & requests relations export using full path url' do
allow(BulkImports::EntityWorker).to receive(:perform_async)
expect_next_instance_of(BulkImports::Clients::HTTP) do |client|
@@ -74,7 +74,7 @@ RSpec.describe BulkImports::ExportRequestWorker, feature_category: :importers do
expect_next_instance_of(BulkImports::Logger) do |logger|
expect(logger).to receive(:with_entity).with(entity).and_call_original
- expect(logger).to receive(:error).with(
+ expect(logger).to receive(:warn).with(
a_hash_including(
'exception.backtrace' => anything,
'exception.class' => 'NoMethodError',
@@ -123,20 +123,6 @@ RSpec.describe BulkImports::ExportRequestWorker, feature_category: :importers do
described_class.new.perform(entity.id)
end
-
- context 'when bulk_imports_batched_import_export feature flag is disabled' do
- it 'requests relation export without batched param' do
- stub_feature_flags(bulk_imports_batched_import_export: false)
-
- expected_url = "/projects/#{entity.source_xid}/export_relations"
-
- expect_next_instance_of(BulkImports::Clients::HTTP) do |client|
- expect(client).to receive(:post).with(expected_url)
- end
-
- described_class.new.perform(entity.id)
- end
- end
end
end
diff --git a/spec/workers/ci/unlock_pipelines_in_queue_worker_spec.rb b/spec/workers/ci/unlock_pipelines_in_queue_worker_spec.rb
index ca1d234eb5b..d48eed4ae90 100644
--- a/spec/workers/ci/unlock_pipelines_in_queue_worker_spec.rb
+++ b/spec/workers/ci/unlock_pipelines_in_queue_worker_spec.rb
@@ -101,6 +101,7 @@ RSpec.describe Ci::UnlockPipelinesInQueueWorker, :unlock_pipelines, :clean_gitla
before do
stub_feature_flags(
+ ci_unlock_pipelines_extra_low: false,
ci_unlock_pipelines: false,
ci_unlock_pipelines_medium: false,
ci_unlock_pipelines_high: false
@@ -109,6 +110,14 @@ RSpec.describe Ci::UnlockPipelinesInQueueWorker, :unlock_pipelines, :clean_gitla
it { is_expected.to eq(0) }
+ context 'when ci_unlock_pipelines_extra_low flag is enabled' do
+ before do
+ stub_feature_flags(ci_unlock_pipelines_extra_low: true)
+ end
+
+ it { is_expected.to eq(described_class::MAX_RUNNING_EXTRA_LOW) }
+ end
+
context 'when ci_unlock_pipelines flag is enabled' do
before do
stub_feature_flags(ci_unlock_pipelines: true)
diff --git a/spec/workers/click_house/event_authors_consistency_cron_worker_spec.rb b/spec/workers/click_house/event_authors_consistency_cron_worker_spec.rb
new file mode 100644
index 00000000000..d4fa35b9b82
--- /dev/null
+++ b/spec/workers/click_house/event_authors_consistency_cron_worker_spec.rb
@@ -0,0 +1,104 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe ClickHouse::EventAuthorsConsistencyCronWorker, feature_category: :value_stream_management do
+ let(:worker) { described_class.new }
+
+ context 'when ClickHouse is disabled' do
+ it 'does nothing' do
+ allow(ClickHouse::Client).to receive(:database_configured?).and_return(false)
+
+ expect(worker).not_to receive(:log_extra_metadata_on_done)
+
+ worker.perform
+ end
+ end
+
+ context 'when the event_sync_worker_for_click_house feature flag is off' do
+ it 'does nothing' do
+ allow(ClickHouse::Client).to receive(:database_configured?).and_return(true)
+ stub_feature_flags(event_sync_worker_for_click_house: false)
+
+ expect(worker).not_to receive(:log_extra_metadata_on_done)
+
+ worker.perform
+ end
+ end
+
+ context 'when ClickHouse is available', :click_house do
+ let_it_be(:connection) { ClickHouse::Connection.new(:main) }
+ let_it_be_with_reload(:user1) { create(:user) }
+ let_it_be_with_reload(:user2) { create(:user) }
+
+ let(:leftover_author_ids) { connection.select('SELECT DISTINCT author_id FROM events FINAL').pluck('author_id') }
+ let(:deleted_user_id1) { user2.id + 1 }
+ let(:deleted_user_id2) { user2.id + 2 }
+
+ before do
+ insert_query = <<~SQL
+ INSERT INTO events (id, author_id) VALUES
+ (1, #{user1.id}),
+ (2, #{user2.id}),
+ (3, #{deleted_user_id1}),
+ (4, #{deleted_user_id1}),
+ (5, #{deleted_user_id2})
+ SQL
+
+ connection.execute(insert_query)
+ end
+
+ it 'cleans up all inconsistent records in ClickHouse' do
+ worker.perform
+
+ expect(leftover_author_ids).to contain_exactly(user1.id, user2.id)
+
+ # the next job starts from the beginning of the table
+ expect(ClickHouse::SyncCursor.cursor_for(:event_authors_consistency_check)).to eq(0)
+ end
+
+ context 'when the previous job was not finished' do
+ it 'continues the processing from the cursor' do
+ ClickHouse::SyncCursor.update_cursor_for(:event_authors_consistency_check, deleted_user_id1)
+
+ worker.perform
+
+ # the previous records should remain
+ expect(leftover_author_ids).to contain_exactly(user1.id, user2.id)
+ end
+ end
+
+ context 'when processing stops due to the record clean up limit' do
+ it 'stores the last processed id value' do
+ User.where(id: [user1.id, user2.id]).delete_all
+
+ stub_const("#{described_class}::MAX_AUTHOR_DELETIONS", 2)
+ stub_const("#{described_class}::POSTGRESQL_BATCH_SIZE", 1)
+
+ expect(worker).to receive(:log_extra_metadata_on_done).with(:result,
+ { status: :deletion_limit_reached, deletions: 2 })
+
+ worker.perform
+
+ expect(leftover_author_ids).to contain_exactly(deleted_user_id1, deleted_user_id2)
+ expect(ClickHouse::SyncCursor.cursor_for(:event_authors_consistency_check)).to eq(user2.id)
+ end
+ end
+
+ context 'when time limit is reached' do
+ it 'stops the processing earlier' do
+ stub_const("#{described_class}::POSTGRESQL_BATCH_SIZE", 1)
+
+ # stop at the third author_id
+ allow_next_instance_of(Analytics::CycleAnalytics::RuntimeLimiter) do |runtime_limiter|
+ allow(runtime_limiter).to receive(:over_time?).and_return(false, false, true)
+ end
+ expect(worker).to receive(:log_extra_metadata_on_done).with(:result, { status: :over_time, deletions: 1 })
+
+ worker.perform
+
+ expect(leftover_author_ids).to contain_exactly(user1.id, user2.id, deleted_user_id2)
+ end
+ end
+ end
+end
diff --git a/spec/workers/click_house/events_sync_worker_spec.rb b/spec/workers/click_house/events_sync_worker_spec.rb
index 9662f26115a..dc3dea24e37 100644
--- a/spec/workers/click_house/events_sync_worker_spec.rb
+++ b/spec/workers/click_house/events_sync_worker_spec.rb
@@ -11,176 +11,20 @@ RSpec.describe ClickHouse::EventsSyncWorker, feature_category: :value_stream_man
)
end
- it_behaves_like 'an idempotent worker' do
- context 'when the event_sync_worker_for_click_house feature flag is on', :click_house do
- before do
- stub_feature_flags(event_sync_worker_for_click_house: true)
+ context 'when worker is enqueued' do
+ it 'calls ::ClickHouse::SyncStrategies::EventSyncStrategy with correct args' do
+ expect_next_instance_of(::ClickHouse::SyncStrategies::EventSyncStrategy) do |instance|
+ expect(instance).to receive(:execute)
end
- context 'when there is nothing to sync' do
- it 'adds metadata for the worker' do
- expect(worker).to receive(:log_extra_metadata_on_done).with(:result,
- { status: :processed, records_inserted: 0, reached_end_of_table: true })
-
- worker.perform
-
- events = ClickHouse::Client.select('SELECT * FROM events', :main)
- expect(events).to be_empty
- end
- end
-
- context 'when syncing records' do
- let_it_be(:group) { create(:group) }
- let_it_be(:project) { create(:project, group: group) }
- let_it_be(:issue) { create(:issue, project: project) }
- let_it_be(:project_event2) { create(:event, :closed, project: project, target: issue) }
- let_it_be(:event_without_parent) { create(:event, :joined, project: nil, group: nil) }
- let_it_be(:group_event) { create(:event, :created, group: group, project: nil) }
- let_it_be(:project_event1) { create(:event, :created, project: project, target: issue) }
- # looks invalid but we have some records like this on PRD
-
- it 'inserts all records' do
- expect(worker).to receive(:log_extra_metadata_on_done).with(:result,
- { status: :processed, records_inserted: 4, reached_end_of_table: true })
-
- worker.perform
-
- expected_records = [
- hash_including('id' => project_event2.id, 'path' => "#{group.id}/#{project.project_namespace.id}/",
- 'target_type' => 'Issue'),
- hash_including('id' => event_without_parent.id, 'path' => '', 'target_type' => ''),
- hash_including('id' => group_event.id, 'path' => "#{group.id}/", 'target_type' => ''),
- hash_including('id' => project_event1.id, 'path' => "#{group.id}/#{project.project_namespace.id}/",
- 'target_type' => 'Issue')
- ]
-
- events = ClickHouse::Client.select('SELECT * FROM events ORDER BY id', :main)
-
- expect(events).to match(expected_records)
-
- last_processed_id = ClickHouse::SyncCursor.cursor_for(:events)
- expect(last_processed_id).to eq(project_event1.id)
- end
-
- context 'when multiple batches are needed' do
- before do
- stub_const("#{described_class}::BATCH_SIZE", 1)
- stub_const("#{described_class}::INSERT_BATCH_SIZE", 1)
- end
-
- it 'inserts all records' do
- expect(worker).to receive(:log_extra_metadata_on_done).with(:result,
- { status: :processed, records_inserted: 4, reached_end_of_table: true })
-
- worker.perform
-
- events = ClickHouse::Client.select('SELECT * FROM events', :main)
- expect(events.size).to eq(4)
- end
-
- context 'when new records are inserted while processing' do
- it 'does not process new records created during the iteration' do
- expect(worker).to receive(:log_extra_metadata_on_done).with(:result,
- { status: :processed, records_inserted: 4,
- reached_end_of_table: true })
-
- # Simulating the case when there is an insert during the iteration
- call_count = 0
- allow(worker).to receive(:next_batch).and_wrap_original do |method|
- call_count += 1
- create(:event) if call_count == 3
- method.call
- end
-
- worker.perform
- end
- end
- end
-
- context 'when time limit is reached' do
- before do
- stub_const("#{described_class}::BATCH_SIZE", 1)
- end
-
- it 'stops the processing' do
- allow_next_instance_of(Analytics::CycleAnalytics::RuntimeLimiter) do |runtime_limiter|
- allow(runtime_limiter).to receive(:over_time?).and_return(false, true)
- end
-
- expect(worker).to receive(:log_extra_metadata_on_done).with(:result,
- { status: :processed, records_inserted: 2, reached_end_of_table: false })
-
- worker.perform
-
- last_processed_id = ClickHouse::SyncCursor.cursor_for(:events)
- expect(last_processed_id).to eq(event_without_parent.id)
- end
- end
-
- context 'when syncing from a certain point' do
- before do
- ClickHouse::SyncCursor.update_cursor_for(:events, project_event2.id)
- end
-
- it 'syncs records after the cursor' do
- expect(worker).to receive(:log_extra_metadata_on_done).with(:result,
- { status: :processed, records_inserted: 3, reached_end_of_table: true })
-
- worker.perform
-
- events = ClickHouse::Client.select('SELECT id FROM events ORDER BY id', :main)
- expect(events).to eq([{ 'id' => event_without_parent.id }, { 'id' => group_event.id },
- { 'id' => project_event1.id }])
- end
-
- context 'when there is nothing to sync' do
- it 'does nothing' do
- expect(worker).to receive(:log_extra_metadata_on_done).with(:result,
- { status: :processed, records_inserted: 0, reached_end_of_table: true })
-
- ClickHouse::SyncCursor.update_cursor_for(:events, project_event1.id)
- worker.perform
-
- events = ClickHouse::Client.select('SELECT id FROM events ORDER BY id', :main)
- expect(events).to be_empty
- end
- end
- end
- end
- end
-
- context 'when clickhouse is not configured' do
- before do
- allow(ClickHouse::Client).to receive(:database_configured?).and_return(false)
- end
-
- it 'skips execution' do
- expect(worker).to receive(:log_extra_metadata_on_done).with(:result, { status: :disabled })
-
- worker.perform
- end
- end
- end
-
- context 'when exclusive lease error happens' do
- it 'skips execution' do
- stub_feature_flags(event_sync_worker_for_click_house: true)
- allow(ClickHouse::Client).to receive(:database_configured?).with(:main).and_return(true)
-
- expect(worker).to receive(:in_lock).and_raise(Gitlab::ExclusiveLeaseHelpers::FailedToObtainLockError)
- expect(worker).to receive(:log_extra_metadata_on_done).with(:result, { status: :skipped })
-
worker.perform
end
- end
- context 'when the event_sync_worker_for_click_house feature flag is off' do
- before do
- stub_feature_flags(event_sync_worker_for_click_house: false)
- end
-
- it 'skips execution' do
- expect(worker).to receive(:log_extra_metadata_on_done).with(:result, { status: :disabled })
+ it 'correctly logs the metadata on done' do
+ expect_next_instance_of(::ClickHouse::SyncStrategies::EventSyncStrategy) do |instance|
+ expect(instance).to receive(:execute).and_return({ status: :ok })
+ end
+ expect(worker).to receive(:log_extra_metadata_on_done).with(:result, { status: :ok })
worker.perform
end
diff --git a/spec/workers/concerns/gitlab/github_import/object_importer_spec.rb b/spec/workers/concerns/gitlab/github_import/object_importer_spec.rb
index bba855f5095..f62c08cb7da 100644
--- a/spec/workers/concerns/gitlab/github_import/object_importer_spec.rb
+++ b/spec/workers/concerns/gitlab/github_import/object_importer_spec.rb
@@ -269,6 +269,33 @@ RSpec.describe Gitlab::GithubImport::ObjectImporter, :aggregate_failures, featur
expect(import_failures.first.external_identifiers).to eq(github_identifiers.with_indifferent_access)
end
end
+
+ context 'when FailedToObtainLockError is raised' do
+ let(:exception) { Gitlab::ExclusiveLeaseHelpers::FailedToObtainLockError.new }
+
+ before do
+ expect(importer_class).to receive(:new)
+ .with(instance_of(MockRepresentation), project, client)
+ .and_return(importer_instance)
+
+ expect(importer_instance).to receive(:execute).and_raise(exception)
+ end
+
+ it 'logs the error and raises an exception' do
+ expect(Gitlab::GithubImport::Logger).to receive(:warn).with(
+ {
+ external_identifiers: github_identifiers,
+ message: 'Failed to obtaing lock for user finder. Retrying later.',
+ project_id: project.id,
+ importer: 'klass_name'
+ }
+ )
+
+ expect do
+ worker.import(project, client, { 'number' => 10, 'github_id' => 1 })
+ end.to raise_error(Gitlab::ExclusiveLeaseHelpers::FailedToObtainLockError)
+ end
+ end
end
describe '#increment_object_counter?' do
diff --git a/spec/workers/concerns/gitlab/github_import/rescheduling_methods_spec.rb b/spec/workers/concerns/gitlab/github_import/rescheduling_methods_spec.rb
index c76ce6b555f..7b8c4fab0c6 100644
--- a/spec/workers/concerns/gitlab/github_import/rescheduling_methods_spec.rb
+++ b/spec/workers/concerns/gitlab/github_import/rescheduling_methods_spec.rb
@@ -76,7 +76,7 @@ RSpec.describe Gitlab::GithubImport::ReschedulingMethods, feature_category: :imp
expect(worker.class)
.to receive(:perform_in)
- .with(15, project.id, { 'number' => 2 }, '123')
+ .with(15.012, project.id, { 'number' => 2 }, '123')
worker.perform(project.id, { 'number' => 2 }, '123')
end
@@ -100,6 +100,15 @@ RSpec.describe Gitlab::GithubImport::ReschedulingMethods, feature_category: :imp
expect(worker.try_import(10, 20)).to eq(false)
end
+
+ it 'returns false when the import fails due to the FailedToObtainLockError' do
+ expect(worker)
+ .to receive(:import)
+ .with(10, 20)
+ .and_raise(Gitlab::ExclusiveLeaseHelpers::FailedToObtainLockError)
+
+ expect(worker.try_import(10, 20)).to eq(false)
+ end
end
describe '#notify_waiter' do
diff --git a/spec/workers/concerns/gitlab/github_import/stage_methods_spec.rb b/spec/workers/concerns/gitlab/github_import/stage_methods_spec.rb
index 37e686f9f92..a88d819843a 100644
--- a/spec/workers/concerns/gitlab/github_import/stage_methods_spec.rb
+++ b/spec/workers/concerns/gitlab/github_import/stage_methods_spec.rb
@@ -2,9 +2,9 @@
require 'spec_helper'
+# Most tests of StageMethods should not go here but in the shared examples instead:
+# spec/support/shared_examples/workers/gitlab/github_import/stage_methods_shared_examples.rb
RSpec.describe Gitlab::GithubImport::StageMethods, feature_category: :importers do
- let_it_be(:project) { create(:project, :import_started, import_url: 'https://t0ken@github.com/repo/repo.git') }
-
let(:worker) do
Class.new do
def self.name
@@ -15,200 +15,17 @@ RSpec.describe Gitlab::GithubImport::StageMethods, feature_category: :importers
end.new
end
- it 'has a Sidekiq retry of 6' do
- expect(worker.class.sidekiq_options['retry']).to eq(6)
- end
-
- describe '#perform' do
- it 'returns if no project could be found' do
- expect(worker).not_to receive(:try_import)
-
- worker.perform(-1)
- end
-
- it 'returns if the import state is no longer in progress' do
- allow(project.import_state).to receive(:status).and_return('failed')
-
- allow(worker)
- .to receive(:find_project)
- .with(project.id)
- .and_return(project)
-
- expect(worker).not_to receive(:try_import)
-
- expect(Gitlab::GithubImport::Logger)
- .to receive(:info)
- .with(
- {
- message: 'starting stage',
- project_id: project.id,
- import_stage: 'DummyStage'
- }
- )
-
- expect(Gitlab::GithubImport::Logger)
- .to receive(:info)
- .with(
- {
- message: 'Project import is no longer running. Stopping worker.',
- project_id: project.id,
- import_stage: 'DummyStage',
- import_status: 'failed'
- }
- )
-
- worker.perform(project.id)
- end
-
- it 'imports the data when the project exists' do
- allow(worker)
- .to receive(:find_project)
- .with(project.id)
- .and_return(project)
-
- expect(worker)
- .to receive(:try_import)
- .with(
- an_instance_of(Gitlab::GithubImport::Client),
- an_instance_of(Project)
- )
-
- expect(Gitlab::GithubImport::Logger)
- .to receive(:info)
- .with(
- {
- message: 'starting stage',
- project_id: project.id,
- import_stage: 'DummyStage'
- }
- )
-
- expect(Gitlab::GithubImport::Logger)
- .to receive(:info)
- .with(
- {
- message: 'stage finished',
- project_id: project.id,
- import_stage: 'DummyStage'
- }
- )
-
- worker.perform(project.id)
- end
-
- it 'logs error when import fails' do
- exception = StandardError.new('some error')
-
- allow(worker)
- .to receive(:find_project)
- .with(project.id)
- .and_return(project)
-
- expect(worker)
- .to receive(:try_import)
- .and_raise(exception)
-
- expect(Gitlab::GithubImport::Logger)
- .to receive(:info)
- .with(
- {
- message: 'starting stage',
- project_id: project.id,
- import_stage: 'DummyStage'
- }
- )
-
- expect(Gitlab::Import::ImportFailureService)
- .to receive(:track)
- .with(
- {
- project_id: project.id,
- exception: exception,
- error_source: 'DummyStage',
- fail_import: false,
- metrics: true
- }
- ).and_call_original
-
- expect { worker.perform(project.id) }
- .to raise_error(exception)
-
- expect(project.import_state.reload.status).to eq('started')
-
- expect(project.import_failures).not_to be_empty
- expect(project.import_failures.last.exception_class).to eq('StandardError')
- expect(project.import_failures.last.exception_message).to eq('some error')
- end
- end
-
- describe '#try_import' do
- before do
- allow(worker).to receive(:jid).and_return('jid')
- end
-
- it 'imports the project' do
- client = double(:client)
-
- expect(worker)
- .to receive(:import)
- .with(client, project)
-
- expect(Gitlab::GithubImport::RefreshImportJidWorker).to receive(:perform_in_the_future).with(project.id, 'jid')
-
- worker.try_import(client, project)
- end
-
- it 'reschedules the worker if RateLimitError was raised' do
- client = double(:client, rate_limit_resets_in: 10)
-
- expect(Gitlab::GithubImport::RefreshImportJidWorker).to receive(:perform_in_the_future).with(project.id, 'jid')
-
- expect(worker)
- .to receive(:import)
- .with(client, project)
- .and_raise(Gitlab::GithubImport::RateLimitError)
-
- expect(worker.class)
- .to receive(:perform_in)
- .with(10, project.id)
-
- worker.try_import(client, project)
- end
- end
-
- describe '#find_project' do
- it 'returns a Project for an existing ID' do
- project.import_state.update_column(:status, 'started')
-
- expect(worker.find_project(project.id)).to eq(project)
- end
-
- it 'returns nil for a project that failed importing' do
- project.import_state.update_column(:status, 'failed')
-
- expect(worker.find_project(project.id)).to be_nil
- end
-
- it 'returns nil for a non-existing project ID' do
- expect(worker.find_project(-1)).to be_nil
- end
- end
-
- describe '.sidekiq_options!' do
- subject(:sidekiq_options) { worker.class.sidekiq_options }
+ describe '.max_retries_after_interruption!' do
+ subject(:max_retries_after_interruption) { worker.class.sidekiq_options['max_retries_after_interruption'] }
it 'does not set the `max_retries_after_interruption` if not called' do
- is_expected.not_to have_key('max_retries_after_interruption')
+ is_expected.to be_nil
end
it 'sets the `max_retries_after_interruption`' do
worker.class.resumes_work_when_interrupted!
- is_expected.to include('max_retries_after_interruption' => 20)
- end
-
- it 'sets the status_expiration' do
- is_expected.to include('status_expiration' => Gitlab::Import::StuckImportJob::IMPORT_JOBS_EXPIRATION)
+ is_expected.to eq(20)
end
end
end
diff --git a/spec/workers/emails_on_push_worker_spec.rb b/spec/workers/emails_on_push_worker_spec.rb
index 9e8fad19c20..7f400ce791d 100644
--- a/spec/workers/emails_on_push_worker_spec.rb
+++ b/spec/workers/emails_on_push_worker_spec.rb
@@ -18,7 +18,7 @@ RSpec.describe EmailsOnPushWorker, :mailer, feature_category: :source_code_manag
describe "#perform" do
context "when push is a new branch" do
before do
- data_new_branch = data.stringify_keys.merge("before" => Gitlab::Git::BLANK_SHA)
+ data_new_branch = data.stringify_keys.merge("before" => Gitlab::Git::SHA1_BLANK_SHA)
subject.perform(project.id, recipients, data_new_branch)
end
@@ -34,7 +34,7 @@ RSpec.describe EmailsOnPushWorker, :mailer, feature_category: :source_code_manag
context "when push is a deleted branch" do
before do
- data_deleted_branch = data.stringify_keys.merge("after" => Gitlab::Git::BLANK_SHA)
+ data_deleted_branch = data.stringify_keys.merge("after" => Gitlab::Git::SHA1_BLANK_SHA)
subject.perform(project.id, recipients, data_deleted_branch)
end
diff --git a/spec/workers/every_sidekiq_worker_spec.rb b/spec/workers/every_sidekiq_worker_spec.rb
index c60e8d37c2e..4067bce8d8d 100644
--- a/spec/workers/every_sidekiq_worker_spec.rb
+++ b/spec/workers/every_sidekiq_worker_spec.rb
@@ -163,7 +163,6 @@ RSpec.describe 'Every Sidekiq worker', feature_category: :shared do
'Ci::InitialPipelineProcessWorker' => 3,
'Ci::MergeRequests::AddTodoWhenBuildFailsWorker' => 3,
'Ci::Minutes::UpdateProjectAndNamespaceUsageWorker' => 3,
- 'Ci::Llm::GenerateConfigWorker' => 3,
'Ci::PipelineArtifacts::CoverageReportWorker' => 3,
'Ci::PipelineArtifacts::CreateQualityReportWorker' => 3,
'Ci::PipelineCleanupRefWorker' => 3,
@@ -280,6 +279,7 @@ RSpec.describe 'Every Sidekiq worker', feature_category: :shared do
'Gitlab::GithubImport::PullRequests::ImportMergedByWorker' => 5,
'Gitlab::GithubImport::ImportPullRequestWorker' => 5,
'Gitlab::GithubImport::RefreshImportJidWorker' => 5,
+ 'Gitlab::GithubImport::ReplayEventsWorker' => 5,
'Gitlab::GithubImport::Stage::FinishImportWorker' => 6,
'Gitlab::GithubImport::Stage::ImportBaseDataWorker' => 6,
'Gitlab::GithubImport::Stage::ImportIssuesAndDiffNotesWorker' => 6,
diff --git a/spec/workers/gitlab/bitbucket_server_import/stage/import_users_worker_spec.rb b/spec/workers/gitlab/bitbucket_server_import/stage/import_users_worker_spec.rb
index d4cd1b82349..1141d08729d 100644
--- a/spec/workers/gitlab/bitbucket_server_import/stage/import_users_worker_spec.rb
+++ b/spec/workers/gitlab/bitbucket_server_import/stage/import_users_worker_spec.rb
@@ -3,7 +3,14 @@
require 'spec_helper'
RSpec.describe Gitlab::BitbucketServerImport::Stage::ImportUsersWorker, feature_category: :importers do
- let_it_be(:project) { create(:project, :import_started) }
+ let_it_be(:project) do
+ create(:project, :import_started,
+ import_data_attributes: {
+ data: { 'project_key' => 'key', 'repo_slug' => 'slug' },
+ credentials: { 'base_uri' => 'http://bitbucket.org/', 'user' => 'bitbucket', 'password' => 'password' }
+ }
+ )
+ end
let(:worker) { described_class.new }
@@ -15,6 +22,12 @@ RSpec.describe Gitlab::BitbucketServerImport::Stage::ImportUsersWorker, feature_
allow_next_instance_of(Gitlab::BitbucketServerImport::Importers::UsersImporter) do |importer|
allow(importer).to receive(:execute)
end
+
+ allow(Gitlab::BitbucketServerImport::Stage::ImportPullRequestsWorker).to receive(:perform_async).and_return(nil)
+ end
+
+ it_behaves_like 'an idempotent worker' do
+ let(:job_args) { [project.id] }
end
it 'schedules the next stage' do
diff --git a/spec/workers/gitlab/github_import/import_issue_event_worker_spec.rb b/spec/workers/gitlab/github_import/import_issue_event_worker_spec.rb
index aa8243154ef..cbe27934bd5 100644
--- a/spec/workers/gitlab/github_import/import_issue_event_worker_spec.rb
+++ b/spec/workers/gitlab/github_import/import_issue_event_worker_spec.rb
@@ -5,16 +5,13 @@ require 'spec_helper'
RSpec.describe Gitlab::GithubImport::ImportIssueEventWorker, feature_category: :importers do
subject(:worker) { described_class.new }
- describe '#import' do
- let(:import_state) { create(:import_state, :started) }
-
- let(:project) do
- instance_double('Project', full_path: 'foo/bar', id: 1, import_state: import_state)
+ describe '#execute' do
+ let_it_be(:project) do
+ create(:project, import_url: 'https://github.com/foo/bar.git', import_state: create(:import_state, :started))
end
- let(:client) { instance_double('Gitlab::GithubImport::Client') }
- let(:importer) { instance_double('Gitlab::GithubImport::Importer::IssueEventImporter') }
-
+ let(:client) { instance_double(Gitlab::GithubImport::Client) }
+ let(:extended_events) { true }
let(:event_hash) do
{
'id' => 6501124486,
@@ -29,23 +26,55 @@ RSpec.describe Gitlab::GithubImport::ImportIssueEventWorker, feature_category: :
}
end
- it 'imports an issue event' do
- expect(Gitlab::GithubImport::Importer::IssueEventImporter)
- .to receive(:new)
- .with(
- an_instance_of(Gitlab::GithubImport::Representation::IssueEvent),
- project,
- client
- )
- .and_return(importer)
+ before do
+ allow_next_instance_of(Gitlab::GithubImport::Settings) do |setting|
+ allow(setting).to receive(:extended_events?).and_return(extended_events)
+ end
+ end
- expect(importer).to receive(:execute)
+ it 'imports an issue event and increases the importer counter' do
+ expect_next_instance_of(Gitlab::GithubImport::Importer::IssueEventImporter,
+ an_instance_of(Gitlab::GithubImport::Representation::IssueEvent),
+ project,
+ client
+ ) do |importer|
+ expect(importer).to receive(:execute)
+ end
expect(Gitlab::GithubImport::ObjectCounter)
.to receive(:increment)
+ .with(project, :issue_event, :imported)
.and_call_original
worker.import(project, client, event_hash)
end
+
+ context 'when event should increment a mapped importer counter' do
+ before do
+ stub_const('Gitlab::GithubImport::Importer::IssueEventImporter::EVENT_COUNTER_MAP', {
+ 'closed' => 'custom_type'
+ })
+
+ allow_next_instance_of(Gitlab::GithubImport::Importer::IssueEventImporter) do |importer|
+ allow(importer).to receive(:execute)
+ end
+ end
+
+ it 'increments the mapped importer counter' do
+ expect(Gitlab::GithubImport::ObjectCounter).to receive(:increment).with(project, 'custom_type', :imported)
+
+ worker.import(project, client, event_hash)
+ end
+
+ context 'when extended_events is disabled' do
+ let(:extended_events) { false }
+
+ it 'increments the issue_event importer counter' do
+ expect(Gitlab::GithubImport::ObjectCounter).to receive(:increment).with(project, :issue_event, :imported)
+
+ worker.import(project, client, event_hash)
+ end
+ end
+ end
end
end
diff --git a/spec/workers/gitlab/github_import/replay_events_worker_spec.rb b/spec/workers/gitlab/github_import/replay_events_worker_spec.rb
new file mode 100644
index 00000000000..99c9e838bbf
--- /dev/null
+++ b/spec/workers/gitlab/github_import/replay_events_worker_spec.rb
@@ -0,0 +1,32 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::GithubImport::ReplayEventsWorker, feature_category: :importers do
+ let_it_be(:project) { create(:project, import_state: create(:import_state, :started)) }
+ let(:client) { instance_double(Gitlab::GithubImport::Client) }
+
+ let(:worker) { described_class.new }
+
+ describe '#import' do
+ it 'calls the replay events importer' do
+ hash = {
+ 'issuable_iid' => 1,
+ 'issuable_type' => 'Issue'
+ }
+
+ expect_next_instance_of(Gitlab::GithubImport::Importer::ReplayEventsImporter,
+ an_instance_of(Gitlab::GithubImport::Representation::ReplayEvent), project, client) do |importer|
+ expect(importer).to receive(:execute)
+ end
+
+ expect(Gitlab::GithubImport::ObjectCounter).not_to receive(:increment)
+
+ worker.import(project, client, hash)
+ end
+ end
+
+ describe '#object_type' do
+ it { expect(worker.object_type).to eq(:replay_event) }
+ end
+end
diff --git a/spec/workers/gitlab/github_import/stage/finish_import_worker_spec.rb b/spec/workers/gitlab/github_import/stage/finish_import_worker_spec.rb
index 6d8fa29bd27..ba8fcdb2406 100644
--- a/spec/workers/gitlab/github_import/stage/finish_import_worker_spec.rb
+++ b/spec/workers/gitlab/github_import/stage/finish_import_worker_spec.rb
@@ -3,8 +3,9 @@
require 'spec_helper'
RSpec.describe Gitlab::GithubImport::Stage::FinishImportWorker, feature_category: :importers do
- let(:project) { create(:project) }
- let(:worker) { described_class.new }
+ let_it_be(:project) { create(:project) }
+
+ subject(:worker) { described_class.new }
it_behaves_like Gitlab::GithubImport::StageMethods
diff --git a/spec/workers/gitlab/github_import/stage/import_attachments_worker_spec.rb b/spec/workers/gitlab/github_import/stage/import_attachments_worker_spec.rb
index c8b528593b9..75c9ab96751 100644
--- a/spec/workers/gitlab/github_import/stage/import_attachments_worker_spec.rb
+++ b/spec/workers/gitlab/github_import/stage/import_attachments_worker_spec.rb
@@ -3,12 +3,13 @@
require 'spec_helper'
RSpec.describe Gitlab::GithubImport::Stage::ImportAttachmentsWorker, feature_category: :importers do
- subject(:worker) { described_class.new }
-
let_it_be(:project) { create(:project) }
- let(:settings) { ::Gitlab::GithubImport::Settings.new(project) }
+
+ let(:settings) { ::Gitlab::GithubImport::Settings.new(project.reload) }
let(:stage_enabled) { true }
+ subject(:worker) { described_class.new }
+
before do
settings.write({ optional_stages: { attachments_import: stage_enabled } })
end
diff --git a/spec/workers/gitlab/github_import/stage/import_base_data_worker_spec.rb b/spec/workers/gitlab/github_import/stage/import_base_data_worker_spec.rb
index b8f2db8e2d9..49dc905f430 100644
--- a/spec/workers/gitlab/github_import/stage/import_base_data_worker_spec.rb
+++ b/spec/workers/gitlab/github_import/stage/import_base_data_worker_spec.rb
@@ -4,12 +4,12 @@ require 'spec_helper'
RSpec.describe Gitlab::GithubImport::Stage::ImportBaseDataWorker, feature_category: :importers do
let_it_be(:project) { create(:project) }
- let_it_be(:import_state) { create(:import_state, project: project) }
- let(:worker) { described_class.new }
let(:importer) { double(:importer) }
let(:client) { double(:client) }
+ subject(:worker) { described_class.new }
+
it_behaves_like Gitlab::GithubImport::StageMethods
describe '#import' do
diff --git a/spec/workers/gitlab/github_import/stage/import_collaborators_worker_spec.rb b/spec/workers/gitlab/github_import/stage/import_collaborators_worker_spec.rb
index 6a55f575da8..7a085227b36 100644
--- a/spec/workers/gitlab/github_import/stage/import_collaborators_worker_spec.rb
+++ b/spec/workers/gitlab/github_import/stage/import_collaborators_worker_spec.rb
@@ -4,14 +4,14 @@ require 'spec_helper'
RSpec.describe Gitlab::GithubImport::Stage::ImportCollaboratorsWorker, feature_category: :importers do
let_it_be(:project) { create(:project) }
- let_it_be(:import_state) { create(:import_state, project: project) }
+
let(:settings) { Gitlab::GithubImport::Settings.new(project) }
let(:stage_enabled) { true }
-
- let(:worker) { described_class.new }
let(:importer) { instance_double(Gitlab::GithubImport::Importer::CollaboratorsImporter) }
let(:client) { instance_double(Gitlab::GithubImport::Client) }
+ subject(:worker) { described_class.new }
+
it_behaves_like Gitlab::GithubImport::StageMethods
describe '#import' do
diff --git a/spec/workers/gitlab/github_import/stage/import_issue_events_worker_spec.rb b/spec/workers/gitlab/github_import/stage/import_issue_events_worker_spec.rb
index bad3a5beb0e..6b01f2825e4 100644
--- a/spec/workers/gitlab/github_import/stage/import_issue_events_worker_spec.rb
+++ b/spec/workers/gitlab/github_import/stage/import_issue_events_worker_spec.rb
@@ -3,15 +3,19 @@
require 'spec_helper'
RSpec.describe Gitlab::GithubImport::Stage::ImportIssueEventsWorker, feature_category: :importers do
- subject(:worker) { described_class.new }
+ let_it_be(:project) { create(:project) }
- let(:project) { create(:project) }
let!(:group) { create(:group, projects: [project]) }
- let(:settings) { ::Gitlab::GithubImport::Settings.new(project) }
+ let(:settings) { ::Gitlab::GithubImport::Settings.new(project.reload) }
let(:stage_enabled) { true }
+ let(:extended_events) { false }
+
+ subject(:worker) { described_class.new }
before do
- settings.write({ optional_stages: { single_endpoint_issue_events_import: stage_enabled } })
+ settings.write({
+ optional_stages: { single_endpoint_issue_events_import: stage_enabled }, extended_events: extended_events
+ })
end
it_behaves_like Gitlab::GithubImport::StageMethods
@@ -48,6 +52,18 @@ RSpec.describe Gitlab::GithubImport::Stage::ImportIssueEventsWorker, feature_cat
worker.import(client, project)
end
+
+ context 'when extended_events is enabled' do
+ let(:extended_events) { true }
+
+ it 'does not skip the stage' do
+ expect_next_instance_of(Gitlab::GithubImport::Importer::SingleEndpointIssueEventsImporter) do |importer|
+ expect(importer).to receive(:execute).and_return(Gitlab::JobWaiter.new)
+ end
+
+ worker.import(client, project)
+ end
+ end
end
end
end
diff --git a/spec/workers/gitlab/github_import/stage/import_issues_and_diff_notes_worker_spec.rb b/spec/workers/gitlab/github_import/stage/import_issues_and_diff_notes_worker_spec.rb
index 10f6ebfbab9..bc829e6124c 100644
--- a/spec/workers/gitlab/github_import/stage/import_issues_and_diff_notes_worker_spec.rb
+++ b/spec/workers/gitlab/github_import/stage/import_issues_and_diff_notes_worker_spec.rb
@@ -3,12 +3,13 @@
require 'spec_helper'
RSpec.describe Gitlab::GithubImport::Stage::ImportIssuesAndDiffNotesWorker, feature_category: :importers do
- let(:project) { create(:project) }
- let(:worker) { described_class.new }
+ let_it_be(:project) { create(:project) }
- let(:settings) { ::Gitlab::GithubImport::Settings.new(project) }
+ let(:settings) { ::Gitlab::GithubImport::Settings.new(project.reload) }
let(:single_endpoint_optional_stage) { true }
+ subject(:worker) { described_class.new }
+
before do
settings.write({ optional_stages: { single_endpoint_notes_import: single_endpoint_optional_stage } })
end
diff --git a/spec/workers/gitlab/github_import/stage/import_lfs_objects_worker_spec.rb b/spec/workers/gitlab/github_import/stage/import_lfs_objects_worker_spec.rb
index 40194a91b3a..a0b2f82f378 100644
--- a/spec/workers/gitlab/github_import/stage/import_lfs_objects_worker_spec.rb
+++ b/spec/workers/gitlab/github_import/stage/import_lfs_objects_worker_spec.rb
@@ -3,14 +3,16 @@
require 'spec_helper'
RSpec.describe Gitlab::GithubImport::Stage::ImportLfsObjectsWorker, feature_category: :importers do
- let(:project) { create(:project) }
- let(:worker) { described_class.new }
+ let_it_be(:project) { create(:project) }
+
+ subject(:worker) { described_class.new }
it_behaves_like Gitlab::GithubImport::StageMethods
describe '#import' do
it 'imports all the lfs objects' do
- importer = double(:importer)
+ importer = instance_double(Gitlab::GithubImport::Importer::LfsObjectsImporter)
+ client = instance_double(Gitlab::GithubImport::Client)
waiter = Gitlab::JobWaiter.new(2, '123')
expect(Gitlab::GithubImport::Importer::LfsObjectsImporter)
@@ -26,7 +28,7 @@ RSpec.describe Gitlab::GithubImport::Stage::ImportLfsObjectsWorker, feature_cate
.to receive(:perform_async)
.with(project.id, { '123' => 2 }, 'finish')
- worker.import(project)
+ worker.import(client, project)
end
end
end
diff --git a/spec/workers/gitlab/github_import/stage/import_notes_worker_spec.rb b/spec/workers/gitlab/github_import/stage/import_notes_worker_spec.rb
index 69078a666a5..a8b40ff43d2 100644
--- a/spec/workers/gitlab/github_import/stage/import_notes_worker_spec.rb
+++ b/spec/workers/gitlab/github_import/stage/import_notes_worker_spec.rb
@@ -3,12 +3,13 @@
require 'spec_helper'
RSpec.describe Gitlab::GithubImport::Stage::ImportNotesWorker, feature_category: :importers do
- let(:project) { create(:project) }
- let(:worker) { described_class.new }
+ let_it_be(:project) { create(:project) }
- let(:settings) { ::Gitlab::GithubImport::Settings.new(project) }
+ let(:settings) { ::Gitlab::GithubImport::Settings.new(project.reload) }
let(:single_endpoint_optional_stage) { true }
+ subject(:worker) { described_class.new }
+
before do
settings.write({ optional_stages: { single_endpoint_notes_import: single_endpoint_optional_stage } })
end
diff --git a/spec/workers/gitlab/github_import/stage/import_protected_branches_worker_spec.rb b/spec/workers/gitlab/github_import/stage/import_protected_branches_worker_spec.rb
index b73f8c6524d..dcc6b3d2311 100644
--- a/spec/workers/gitlab/github_import/stage/import_protected_branches_worker_spec.rb
+++ b/spec/workers/gitlab/github_import/stage/import_protected_branches_worker_spec.rb
@@ -4,12 +4,12 @@ require 'spec_helper'
RSpec.describe Gitlab::GithubImport::Stage::ImportProtectedBranchesWorker, feature_category: :importers do
let_it_be(:project) { create(:project) }
- let_it_be(:import_state) { create(:import_state, project: project) }
- let(:worker) { described_class.new }
let(:importer) { instance_double('Gitlab::GithubImport::Importer::ProtectedBranchImporter') }
let(:client) { instance_double('Gitlab::GithubImport::Client') }
+ subject(:worker) { described_class.new }
+
it_behaves_like Gitlab::GithubImport::StageMethods
describe '#import' do
diff --git a/spec/workers/gitlab/github_import/stage/import_pull_requests_merged_by_worker_spec.rb b/spec/workers/gitlab/github_import/stage/import_pull_requests_merged_by_worker_spec.rb
index b214f6a97d4..b3cb73c5fa0 100644
--- a/spec/workers/gitlab/github_import/stage/import_pull_requests_merged_by_worker_spec.rb
+++ b/spec/workers/gitlab/github_import/stage/import_pull_requests_merged_by_worker_spec.rb
@@ -3,9 +3,9 @@
require 'spec_helper'
RSpec.describe Gitlab::GithubImport::Stage::ImportPullRequestsMergedByWorker, feature_category: :importers do
- let(:project) { create(:project) }
- let(:import_state) { create(:import_state, project: project) }
- let(:worker) { described_class.new }
+ let_it_be(:project) { create(:project) }
+
+ subject(:worker) { described_class.new }
it_behaves_like Gitlab::GithubImport::StageMethods
diff --git a/spec/workers/gitlab/github_import/stage/import_pull_requests_review_requests_worker_spec.rb b/spec/workers/gitlab/github_import/stage/import_pull_requests_review_requests_worker_spec.rb
index 4468de7e691..c7b73357e76 100644
--- a/spec/workers/gitlab/github_import/stage/import_pull_requests_review_requests_worker_spec.rb
+++ b/spec/workers/gitlab/github_import/stage/import_pull_requests_review_requests_worker_spec.rb
@@ -3,14 +3,14 @@
require 'spec_helper'
RSpec.describe Gitlab::GithubImport::Stage::ImportPullRequestsReviewRequestsWorker, feature_category: :importers do
- subject(:worker) { described_class.new }
+ let_it_be(:project) { create(:project) }
- let(:project) { instance_double(Project, id: 1, import_state: import_state) }
- let(:import_state) { instance_double(ProjectImportState) }
let(:client) { instance_double(Gitlab::GithubImport::Client) }
let(:importer) { instance_double(Gitlab::GithubImport::Importer::PullRequests::ReviewRequestsImporter) }
let(:waiter) { Gitlab::JobWaiter.new(2, '123') }
+ subject(:worker) { described_class.new }
+
it_behaves_like Gitlab::GithubImport::StageMethods
describe '#import' do
diff --git a/spec/workers/gitlab/github_import/stage/import_pull_requests_reviews_worker_spec.rb b/spec/workers/gitlab/github_import/stage/import_pull_requests_reviews_worker_spec.rb
index 48b41435adb..ab3f0b43304 100644
--- a/spec/workers/gitlab/github_import/stage/import_pull_requests_reviews_worker_spec.rb
+++ b/spec/workers/gitlab/github_import/stage/import_pull_requests_reviews_worker_spec.rb
@@ -3,11 +3,12 @@
require 'spec_helper'
RSpec.describe Gitlab::GithubImport::Stage::ImportPullRequestsReviewsWorker, feature_category: :importers do
- let(:project) { create(:project) }
- let(:import_state) { create(:import_state, project: project) }
- let(:worker) { described_class.new }
+ let_it_be(:project) { create(:project) }
+
let(:client) { double(:client) }
+ subject(:worker) { described_class.new }
+
it_behaves_like Gitlab::GithubImport::StageMethods
describe '#import' do
diff --git a/spec/workers/gitlab/github_import/stage/import_pull_requests_worker_spec.rb b/spec/workers/gitlab/github_import/stage/import_pull_requests_worker_spec.rb
index 2ea66d8cdf3..2c1beb29fa1 100644
--- a/spec/workers/gitlab/github_import/stage/import_pull_requests_worker_spec.rb
+++ b/spec/workers/gitlab/github_import/stage/import_pull_requests_worker_spec.rb
@@ -4,13 +4,13 @@ require 'spec_helper'
RSpec.describe Gitlab::GithubImport::Stage::ImportPullRequestsWorker, feature_category: :importers do
let_it_be(:project) { create(:project) }
- let_it_be(:import_state) { create(:import_state, project: project) }
- let(:options) { { state: 'all', sort: 'number', direction: 'desc', per_page: '1' } }
- let(:worker) { described_class.new }
+ let(:options) { { state: 'all', sort: 'number', direction: 'desc', per_page: '1' } }
let(:importer) { double(:importer) }
let(:client) { double(:client) }
+ subject(:worker) { described_class.new }
+
it_behaves_like Gitlab::GithubImport::StageMethods
describe '#import' do
diff --git a/spec/workers/gitlab/github_import/stage/import_repository_worker_spec.rb b/spec/workers/gitlab/github_import/stage/import_repository_worker_spec.rb
index 020f7539bf4..e61b46124b3 100644
--- a/spec/workers/gitlab/github_import/stage/import_repository_worker_spec.rb
+++ b/spec/workers/gitlab/github_import/stage/import_repository_worker_spec.rb
@@ -3,9 +3,9 @@
require 'spec_helper'
RSpec.describe Gitlab::GithubImport::Stage::ImportRepositoryWorker, feature_category: :importers do
- let_it_be(:project) { create(:project, :import_started) }
+ let_it_be(:project) { create(:project) }
- let(:worker) { described_class.new }
+ subject(:worker) { described_class.new }
it_behaves_like Gitlab::GithubImport::StageMethods
diff --git a/spec/workers/google_cloud/create_cloudsql_instance_worker_spec.rb b/spec/workers/google_cloud/create_cloudsql_instance_worker_spec.rb
index 7aea40807e8..e86d6771386 100644
--- a/spec/workers/google_cloud/create_cloudsql_instance_worker_spec.rb
+++ b/spec/workers/google_cloud/create_cloudsql_instance_worker_spec.rb
@@ -23,15 +23,15 @@ RSpec.describe GoogleCloud::CreateCloudsqlInstanceWorker, feature_category: :sha
described_class.new.perform(user_id, project_id, worker_options)
end
- it 'calls GoogleCloud::SetupCloudsqlInstanceService' do
- allow_next_instance_of(GoogleCloud::SetupCloudsqlInstanceService) do |service|
+ it 'calls CloudSeed::GoogleCloud::SetupCloudsqlInstanceService' do
+ allow_next_instance_of(CloudSeed::GoogleCloud::SetupCloudsqlInstanceService) do |service|
expect(service).to receive(:execute).and_return({ status: :success })
end
subject
end
- context 'when GoogleCloud::SetupCloudsqlInstanceService fails' do
+ context 'when CloudSeed::GoogleCloud::SetupCloudsqlInstanceService fails' do
subject do
user_id = random_user.id
project_id = project.id
@@ -39,7 +39,7 @@ RSpec.describe GoogleCloud::CreateCloudsqlInstanceWorker, feature_category: :sha
end
it 'raises error' do
- allow_next_instance_of(GoogleCloud::SetupCloudsqlInstanceService) do |service|
+ allow_next_instance_of(CloudSeed::GoogleCloud::SetupCloudsqlInstanceService) do |service|
expect(service).to receive(:execute).and_return({ status: :error })
end
diff --git a/spec/workers/google_cloud/fetch_google_ip_list_worker_spec.rb b/spec/workers/google_cloud/fetch_google_ip_list_worker_spec.rb
index bdafc076465..2a7d52d987f 100644
--- a/spec/workers/google_cloud/fetch_google_ip_list_worker_spec.rb
+++ b/spec/workers/google_cloud/fetch_google_ip_list_worker_spec.rb
@@ -5,7 +5,7 @@ require 'spec_helper'
RSpec.describe GoogleCloud::FetchGoogleIpListWorker, feature_category: :build_artifacts do
describe '#perform' do
it 'returns success' do
- allow_next_instance_of(GoogleCloud::FetchGoogleIpListService) do |service|
+ allow_next_instance_of(CloudSeed::GoogleCloud::FetchGoogleIpListService) do |service|
expect(service).to receive(:execute).and_return({ status: :success })
end
diff --git a/spec/workers/jira_connect/sync_project_worker_spec.rb b/spec/workers/jira_connect/sync_project_worker_spec.rb
index b617508bb3a..83bce97cd51 100644
--- a/spec/workers/jira_connect/sync_project_worker_spec.rb
+++ b/spec/workers/jira_connect/sync_project_worker_spec.rb
@@ -51,11 +51,11 @@ RSpec.describe JiraConnect::SyncProjectWorker, factory_default: :keep, feature_c
end
it 'avoids N+1 database queries' do
- control_count = ActiveRecord::QueryRecorder.new { perform(project.id, update_sequence_id) }.count
+ control = ActiveRecord::QueryRecorder.new { perform(project.id, update_sequence_id) }
create(:merge_request, :unique_branches, title: 'TEST-123')
- expect { perform(project.id, update_sequence_id) }.not_to exceed_query_limit(control_count)
+ expect { perform(project.id, update_sequence_id) }.not_to exceed_query_limit(control)
end
context 'with branches to sync' do
diff --git a/spec/workers/new_issue_worker_spec.rb b/spec/workers/new_issue_worker_spec.rb
index 540296374ef..b9cbf974a69 100644
--- a/spec/workers/new_issue_worker_spec.rb
+++ b/spec/workers/new_issue_worker_spec.rb
@@ -99,6 +99,14 @@ RSpec.describe NewIssueWorker, feature_category: :team_planning do
expect(Event.last).to have_attributes(target_id: issue.id, target_type: 'WorkItem')
end
end
+
+ context 'when skip_notifications is true' do
+ it 'does not call NotificationService' do
+ expect(NotificationService).not_to receive(:new)
+
+ worker.perform(issue.id, user.id, issue.class.name, true)
+ end
+ end
end
end
end
diff --git a/spec/workers/releases/publish_event_worker_spec.rb b/spec/workers/releases/publish_event_worker_spec.rb
new file mode 100644
index 00000000000..86dd09a756f
--- /dev/null
+++ b/spec/workers/releases/publish_event_worker_spec.rb
@@ -0,0 +1,45 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Releases::PublishEventWorker, feature_category: :release_evidence do
+ let_it_be(:project) { create(:project, :repository) }
+ let_it_be_with_reload(:release) { create(:release, project: project, released_at: Time.current) }
+
+ before do
+ allow(Gitlab::EventStore).to receive(:publish).and_return(true)
+ end
+
+ describe 'when the releases feature is enabled' do
+ before do
+ project.update!(releases_access_level: 'enabled')
+ described_class.new.perform
+ end
+
+ it 'broadcasts the published event' do
+ expect(Gitlab::EventStore).to have_received(:publish).with(Projects::ReleasePublishedEvent)
+ end
+
+ it 'sets the release as published' do
+ expect(release.release_published_at).not_to be_nil
+ end
+ end
+
+ describe 'when the releases feature is disabled' do
+ before do
+ project.update!(releases_access_level: 'disabled')
+ described_class.new.perform
+ end
+
+ it 'does not broadcast the published event' do
+ expect(Gitlab::EventStore).not_to have_received(:publish).with(Projects::ReleasePublishedEvent)
+ end
+
+ # Having a release created with the releases feature disabled is a bogus state anyway.
+ # Setting it as published prevents having such releases piling up forever in the
+ # `unpublished` scope.
+ it 'sets the release as published' do
+ expect(release.release_published_at).not_to be_nil
+ end
+ end
+end